Mirror of https://github.com/openai/codex.git
Synced 2026-02-02 15:03:38 +00:00

Compare commits (tokencount): 467 commits
Commit SHA1s in this comparison (467 total):

47bf6b9667 897d4d5f17 8a281cd1f4 e8863b233b 8fed0b53c4 00debb6399 0a0a10d8b3 13035561cd 9be704a934 f7b4e29609
d6c5df9a0a 8662162f45 57584d6f34 268a10f917 5346cc422d 26f7c46856 90af046c5c 961ed31901 85e7357973 f98fa85b44
ddcaf3dccd 56296cad82 95b41dd7f1 bf82353f45 0308febc23 7b4a4c2219 3ddd4d47d0 ca6a0358de 0026b12615 4300236681
ec238a2c39 b6165aee0c f4bc03d7c0 3c5e12e2a4 c89229db97 d3820f4782 e896db1180 96acb8a74e 687a13bbe5 fe8122e514
876d4f450a f52320be86 a43ae86b6c 496cb801e1 abd517091f b8b04514bc 0e5d72cc57 60f9e85c16 b016a3e7d8 a0d56541cf
226215f36d 338c2c873c 4b0f5eb6a8 75176dae70 12fd2b4160 f2555422b9 27f169bb91 b16c985ed2 35a770e871 b09f62a1c3
5833508a17 d73055c5b1 7e3a272b29 661663c98a 721003c552 36f1cca1b1 d3e1beb26c c264ae6021 8cd882c4bd 90fe5e4a7e
a90a58f7a1 b2d81a7cac 77a8b7fdeb 7fa5e95c1f 191d620707 53504a38d2 5c42419b02 aecbe0f333 a30a902db5 f3b4a26f32
dc3c6bf62a 3203862167 06853d94f0 cc2f4aafd7 356ea6ea34 4764fc1ee7 90ef94d3b3 6c2969d22d 0ad1b0782b d7acd146fb
c5465aed60 a95605a867 848058f05b a4f1c9d67e 665341c9b1 fae0e6c52c 1b4a79f03c 640192ac3d 205c36e393 d13ee79c41
bde468ff8d e292d1ed21 de8d77274a a5b7675e42 9823de3cc6 c32e9cfe86 1d17ca1fa3 bfe3328129 e0b38bd7a2 153338c20f
3495a7dc37 042d4d55d9 5af08e0719 33d3ecbccc 69cb72f842 69ac5153d4 16b6951648 231c36f8d3 1e4541b982 7be3b484ad
9617b69c8a 1d94b9111c 2d6cd6951a 310e3c32e5 37786593a0 819a5782b6 c0a84473a4 591a8ecc16 c405d8c06c 138be0fd73
25a2e15ec5 62cc8a4b8d f895d4cbb3 ed5d656fa8 c43a561916 b93cc0f431 4c566d484a 06e34d4607 45936f8fbd ec98445abf
b07aafa5f5 b727d3f98a 2f6fb37d72 35c76ad47d c07fb71186 e899ae7d8a 6f97ec4990 07c1db351a 31102af54b 5d78c1edd3
170c685882 609f75acec eabe18714f ceaba36c7f d94e8bad8b 8a367ef6bf 400a5a90bf 2f370e946d 751b3b50ac d78d0764aa
699c121606 dde615f482 325fad1d92 f815157dd9 b8195a17e5 349ef7edc6 5881c0d6d4 8dd771d217 32853ecbc5 7fc3edf8a7
01e6503672 9c259737d3 b8e1fe60c5 ddfb7eb548 6910be3224 a534356fe1 c89b0e1235 f6a152848a 3592ecb23c 516acc030b
5b038135de d9dbf48828 2e95e5602d 87a654cf6b 27c6c5d7a7 c09e131653 ea82f86662 a8edc57740 52e591ce60 079303091f
4a80059b1b bf76258cdc c64da4ff71 98efd352ae 80ccec6530 c81baaabda 55b74c95e2 16057e76b0 adbc38a978 83a4d4d8ed
197f45a3be 04c1782e52 d15253415a c4120a265b 618a42adf5 a9d54b9e92 79e51dd607 ff6dbff0b6 99841332e2 7407469791
43615becf0 9ee6e6f342 d7286e9829 bcf2bc0aa5 68765214b3 5c67dc3af1 c0960c0f49 90c3a5650c a3254696c8 2719fdd12a
3a1be084f9 43b63ccae8 cc1b21e47f 55801700de 1fba99ed85 d3f6f6629b e555a36c6a ea095e30c1 c549481513 8797145678
a53720e278 41f5d61f24 02609184be 1fc3413a46 eb2b739d6a a10403d697 8e3a048fec 9f2ab97fbc 38c9d7dca1 67aab04c66
7355ca48c5 affb5fc1d0 4a5f05c136 acc2b63dfb 344d4a1d68 a0c37f5d07 103adcdf2d d61dea6fe6 e363dac249 250b244ab4
d1ed3a4cef e85742635f 87b299aa3f 0e58870634 42847baaf7 6032d784ee 7bff8df10e addc946d13 bffdbec2c5 353a5c2046
00c7f7a16c 82e65975b2 639a6fd2f3 db4aa6f916 cb96f4f596 5b910f1f05 af6304c641 b90eeabd74 f7d2f3e54d 3fe3b6328b
8144ddb3da 9336f2b84b af37785bca 594248f415 8227a5ba1b fdb8dadcae 0f9a796617 c6e8671b2a b84a920067 6cd5309d91
664ee07540 51c465bddc e0fbc112c7 76ecbb3d8e 2451b19d13 5c7d9e27b1 c93e77b68b c415827ac2 4e0550b995 f54a49157b
dd56750612 8bc73a2bfd be366a31ab c75920a071 8daba53808 d2940bd4c3 76a9b11678 fa80bbb587 434eb4fd49 19f46439ae
e258ca61b4 e5fe50d3ce 14a115d488 5996ee0e5f a4ebd069e5 04504d8218 42d335deb8 ad0c2b4db3 ff389dc52f 9b18875a42
881c7978f1 a7fda70053 de64f5f007 8595237505 62258df92f b34e906396 71038381aa 277fc6254e 992b531180 84a0ba9bf5
4a5d6f7c71 1b3c8b8e94 d4aba772cb 4c97eeb32a c9505488a1 530382db05 208089e58e e5fdb5b0fd 5332f6e215 5d87f5d24a
791d7b125f 72733e34c4 b8d2b1a576 7fe4021f95 11285655c4 244687303b 5e2c4f7e35 a8026d3846 45bccd36b0 404c126fc3
88027552dd ca8bd09d56 39ed8a7d26 2df7f7efe5 0560079c41 0de154194d 5c583fe89b cf63cbf153 b1c291e2bb 934d728946
f037b2fd56 d60cbed691 6aafe37752 d555b68469 9baa5c33da fdf4a68646 adc9e1526b b9af1d2b16 2d52e3b40a 6039f8a126
50262a44ce 839b2ae7cf 6a8e743d57 a797051921 d7d9d96d6c 26f1246a89 6581da9b57 900bb01486 2ad6a37192 e5dd7f0934
b6673838e8 1823906215 5185d69f13 4dffa496ac ce984b2c71 c47febf221 76c37c5493 2aa84b8891 9177bdae5e a30e5e40ee
99e1d33bd1 b2f6fc3b9a 51f88fd04a 916fdc2a37 863d9c237e 7e1543f5d8 d701eb32d7 9baae77533 e932722292 bbea6bbf7e
4891ee29c5 bac8a427f3 14ab1063a7 a77364bbaa 19b4ed3c96 3d4acbaea0 414b8be8b6 90a0fd342f 8d56d2f655 8408f3e8ed
b8ccfe9b65 e3c6903199 5f6e95b592 a2e9cc5530 ea225df22e d4848e558b 1a6a95fb2a c6fd056aa6 abdcb40f4c 4ae6b9787a
bba567cee9 ba6af23cb6 f805d17930 90965fbc84 c172e8e997 9bbeb75361 6ccd32c601 3b5a5412bb 44bb53df1e 8453915e02
44587c2443 8f7b22b652 027944c64e bec51f6c05 66967500bb 167b4f0e25 167154178b 674e3d3c90 114ce9ff4d e13b35ecb0
377af75730 86e0f31a7e 8f837f1093 162e1235a8 c09ed74a16 65f3528cad 44262d8fd8 95a9938d3a f69f07b028 8d766088e6
87654ec0b7 51d9e05de7 8068cc75f8 acb28bf914 97338de578 5200b7a95d 64e6c4afbb 39db113cc9 45bd5ca4b9 c13c3dadbf
8636bff46d 43809a454e 5c48600bb3 de6559f2ab 5bcc9d8b77 5eab4c7ab4 f656e192bf ee5ecae7c0 58bb2048ac ac8a3155d6
ace14e8d36 2a76a08a9e 16309d6b68 62bd0e3d9d a9c68ea270 ac58749bd3 79cbd2ab1b

@@ -1,6 +1,6 @@
[codespell]
# Ref: https://github.com/codespell-project/codespell#using-a-config-file
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
check-hidden = true
ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
ignore-words-list = ratatui,ser
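
The widened skip list can be exercised locally; a minimal sketch, assuming codespell is installed (it reads `.codespellrc` from the working directory, per the Ref link above):

```bash
pip install codespell
codespell .  # picks up skip/ignore settings from .codespellrc
```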

.github/ISSUE_TEMPLATE/2-bug-report.yml | 22 (vendored)
@@ -20,6 +20,14 @@ body:
attributes:
label: What version of Codex is running?
description: Copy the output of `codex --version`
validations:
required: true
- type: input
id: plan
attributes:
label: What subscription do you have?
validations:
required: true
- type: input
id: model
attributes:
@@ -32,11 +40,18 @@ body:
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
id: actual
attributes:
label: What issue are you seeing?
description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
id: steps
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
description: Explain the bug and provide a code snippet that can reproduce it. Please include session id, token limit usage, context window usage if applicable.
validations:
required: true
- type: textarea
@@ -44,11 +59,6 @@ body:
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
id: actual
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
id: notes
attributes:

.github/ISSUE_TEMPLATE/4-feature-request.yml | 6 (vendored)
@@ -2,7 +2,6 @@ name: 🎁 Feature Request
description: Propose a new feature for Codex
labels:
- enhancement
- needs triage
body:
- type: markdown
attributes:
@@ -19,11 +18,6 @@ body:
label: What feature would you like to see?
validations:
required: true
- type: textarea
id: author
attributes:
label: Are you interested in implementing this feature?
description: Please wait for acknowledgement before implementing or opening a PR.
- type: textarea
id: notes
attributes:

.github/ISSUE_TEMPLATE/5-vs-code-extension.yml | 24 (vendored)
@@ -14,11 +14,21 @@ body:
id: version
attributes:
label: What version of the VS Code extension are you using?
validations:
required: true
- type: input
id: plan
attributes:
label: What subscription do you have?
validations:
required: true
- type: input
id: ide
attributes:
label: Which IDE are you using?
description: Like `VS Code`, `Cursor`, `Windsurf`, etc.
validations:
required: true
- type: input
id: platform
attributes:
@@ -26,11 +36,18 @@ body:
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
id: actual
attributes:
label: What issue are you seeing?
description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
id: steps
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
description: Explain the bug and provide a code snippet that can reproduce it. Please include session id, token limit usage, context window usage if applicable.
validations:
required: true
- type: textarea
@@ -38,11 +55,6 @@ body:
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
id: actual
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
id: notes
attributes:

.github/dotslash-config.json | 28 (vendored)
@@ -27,6 +27,34 @@
"path": "codex.exe"
}
}
},
"codex-responses-api-proxy": {
"platforms": {
"macos-aarch64": {
"regex": "^codex-responses-api-proxy-aarch64-apple-darwin\\.zst$",
"path": "codex-responses-api-proxy"
},
"macos-x86_64": {
"regex": "^codex-responses-api-proxy-x86_64-apple-darwin\\.zst$",
"path": "codex-responses-api-proxy"
},
"linux-x86_64": {
"regex": "^codex-responses-api-proxy-x86_64-unknown-linux-musl\\.zst$",
"path": "codex-responses-api-proxy"
},
"linux-aarch64": {
"regex": "^codex-responses-api-proxy-aarch64-unknown-linux-musl\\.zst$",
"path": "codex-responses-api-proxy"
},
"windows-x86_64": {
"regex": "^codex-responses-api-proxy-x86_64-pc-windows-msvc\\.exe\\.zst$",
"path": "codex-responses-api-proxy.exe"
},
"windows-aarch64": {
"regex": "^codex-responses-api-proxy-aarch64-pc-windows-msvc\\.exe\\.zst$",
"path": "codex-responses-api-proxy.exe"
}
}
}
}
}
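
These entries map DotSlash platform keys to the zstd-compressed release assets that the release workflow further down in this diff produces. A hypothetical manual fetch of one asset, assuming the `gh` CLI and a release tag of the `rust-v*` form that workflow derives its version from:

```bash
# Tag is illustrative; the asset name is what the macos-aarch64 regex above selects.
gh release download rust-v0.40.0 --repo openai/codex \
  --pattern 'codex-responses-api-proxy-aarch64-apple-darwin.zst'
zstd -d codex-responses-api-proxy-aarch64-apple-darwin.zst -o codex-responses-api-proxy
chmod +x codex-responses-api-proxy
```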

.github/prompts/issue-deduplicator.txt | 18 (vendored, new file)
@@ -0,0 +1,18 @@
You are an assistant that triages new GitHub issues by identifying potential duplicates.

You will receive the following JSON files located in the current working directory:
- `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body).
- `codex-existing-issues.json`: JSON array of recent issues (each element includes number, title, body, createdAt).

Instructions:
- Load both files as JSON and review their contents carefully. The codex-existing-issues.json file is large, ensure you explore all of it.
- Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request.
- Only consider an issue a potential duplicate if there is a clear overlap in symptoms, feature requests, reproduction steps, or error messages.
- Prioritize newer issues when similarity is comparable.
- Ignore pull requests and issues whose similarity is tenuous.
- When unsure, prefer returning fewer matches.

Output requirements:
- Respond with a JSON array of issue numbers (integers), ordered from most likely duplicate to least.
- Include at most five numbers.
- If you find no plausible duplicates, respond with `[]`.
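
The output contract above is strict enough to check mechanically; a sketch, with an illustrative reply:

```bash
reply='[123, 456]'  # hypothetical model reply; issue numbers are made up
echo "$reply" | jq -e 'type == "array" and length <= 5 and all(.[]; type == "number")' \
  && echo "reply matches the expected shape"
```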

.github/prompts/issue-labeler.txt | 26 (vendored, new file)
@@ -0,0 +1,26 @@
You are an assistant that reviews GitHub issues for the repository.

Your job is to choose the most appropriate existing labels for the issue described later in this prompt.
Follow these rules:
- Only pick labels out of the list below.
- Prefer a small set of precise labels over many broad ones.
- If none of the labels fit, respond with an empty JSON array: []
- Output must be a JSON array of label names (strings) with no additional commentary.

Labels to apply:
1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth).
2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks.
3. extension — VS Code (or other IDE) extension-specific issues.
4. windows-os — Bugs or friction specific to Windows environments (PowerShell behavior, path handling, copy/paste, OS-specific auth or tooling failures).
5. mcp — Topics involving Model Context Protocol servers/clients.
6. codex-web — Issues targeting the Codex web UI/Cloud experience.
8. azure — Problems or requests tied to Azure OpenAI deployments.
9. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).
10. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.

Issue information is available in environment variables:

ISSUE_NUMBER
ISSUE_TITLE
ISSUE_BODY
REPO_FULL_NAME
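
A reply can likewise be checked against the allowed label set; a sketch with an illustrative reply:

```bash
allowed='["bug","enhancement","extension","windows-os","mcp","codex-web","azure","documentation","model-behavior"]'
reply='["bug","windows-os"]'  # hypothetical model reply
echo "$reply" | jq -e --argjson allowed "$allowed" \
  'type == "array" and all(.[]; IN($allowed[]))' \
  && echo "all labels are from the allowed list"
```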

.github/workflows/ci.yml | 28 (vendored)
@@ -1,7 +1,7 @@
name: ci

on:
pull_request: { branches: [main] }
pull_request: {}
push: { branches: [main] }

jobs:
@@ -27,12 +27,29 @@ jobs:
- name: Install dependencies
run: pnpm install --frozen-lockfile

# Run all tasks using workspace filters
# stage_npm_packages.py requires DotSlash when staging releases.
- uses: facebook/install-dotslash@v2

- name: Ensure staging a release works.
- name: Stage npm package
id: stage_npm_package
env:
GH_TOKEN: ${{ github.token }}
run: ./codex-cli/scripts/stage_release.sh
run: |
set -euo pipefail
CODEX_VERSION=0.40.0
OUTPUT_DIR="${RUNNER_TEMP}"
python3 ./scripts/stage_npm_packages.py \
--release-version "$CODEX_VERSION" \
--package codex \
--output-dir "$OUTPUT_DIR"
PACK_OUTPUT="${OUTPUT_DIR}/codex-npm-${CODEX_VERSION}.tgz"
echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"

- name: Upload staged npm package artifact
uses: actions/upload-artifact@v4
with:
name: codex-npm-staging
path: ${{ steps.stage_npm_package.outputs.pack_output }}

- name: Ensure root README.md contains only ASCII and certain Unicode code points
run: ./scripts/asciicheck.py README.md
@@ -43,3 +60,6 @@ jobs:
run: ./scripts/asciicheck.py codex-cli/README.md
- name: Check codex-cli/README ToC
run: python3 scripts/readme_toc.py codex-cli/README.md

- name: Prettier (run `pnpm run format:fix` to fix)
run: pnpm run format
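
The new staging step is reproducible outside CI; a sketch, assuming Python 3, DotSlash, and a `GH_TOKEN` with read access are available, as in the workflow:

```bash
CODEX_VERSION=0.40.0
OUT="$(mktemp -d)"
python3 ./scripts/stage_npm_packages.py \
  --release-version "$CODEX_VERSION" \
  --package codex \
  --output-dir "$OUT"
ls -l "$OUT/codex-npm-${CODEX_VERSION}.tgz"  # the tarball CI uploads as an artifact
```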

.github/workflows/codespell.yml | 2 (vendored)
@@ -22,6 +22,6 @@ jobs:
- name: Annotate locations with typos
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
- name: Codespell
uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2
uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1
with:
ignore_words_file: .codespellignore

.github/workflows/issue-deduplicator.yml | 140 (vendored, new file)
@@ -0,0 +1,140 @@
name: Issue Deduplicator

on:
issues:
types:
- opened
- labeled

jobs:
gather-duplicates:
name: Identify potential duplicates
if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate') }}
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- uses: actions/checkout@v4

- name: Prepare Codex inputs
env:
GH_TOKEN: ${{ github.token }}
run: |
set -eo pipefail

CURRENT_ISSUE_FILE=codex-current-issue.json
EXISTING_ISSUES_FILE=codex-existing-issues.json

gh issue list --repo "${{ github.repository }}" \
--json number,title,body,createdAt \
--limit 1000 \
--state all \
--search "sort:created-desc" \
| jq '.' \
> "$EXISTING_ISSUES_FILE"

gh issue view "${{ github.event.issue.number }}" \
--repo "${{ github.repository }}" \
--json number,title,body \
| jq '.' \
> "$CURRENT_ISSUE_FILE"

- id: codex
uses: openai/codex-action@main
with:
openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
allow-users: "*"
model: gpt-5
prompt: |
You are an assistant that triages new GitHub issues by identifying potential duplicates.

You will receive the following JSON files located in the current working directory:
- `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body).
- `codex-existing-issues.json`: JSON array of recent issues (each element includes number, title, body, createdAt).

Instructions:
- Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request.
- Focus on the underlying intent and context of each issue—such as reported symptoms, feature requests, reproduction steps, or error messages—rather than relying solely on string similarity or synthetic metrics.
- After your analysis, validate your results in 1-2 lines explaining your decision to return the selected matches.
- When unsure, prefer returning fewer matches.
- Include at most five numbers.

output-schema: |
{
"type": "object",
"properties": {
"issues": {
"type": "array",
"items": {
"type": "string"
}
},
"reason": { "type": "string" }
},
"required": ["issues", "reason"],
"additionalProperties": false
}

comment-on-issue:
name: Comment with potential duplicates
needs: gather-duplicates
if: ${{ needs.gather-duplicates.result != 'skipped' }}
runs-on: ubuntu-latest
permissions:
contents: read
issues: write
steps:
- name: Comment on issue
uses: actions/github-script@v7
env:
CODEX_OUTPUT: ${{ needs.gather-duplicates.outputs.codex_output }}
with:
github-token: ${{ github.token }}
script: |
const raw = process.env.CODEX_OUTPUT ?? '';
let parsed;
try {
parsed = JSON.parse(raw);
} catch (error) {
core.info(`Codex output was not valid JSON. Raw output: ${raw}`);
core.info(`Parse error: ${error.message}`);
return;
}

const issues = Array.isArray(parsed?.issues) ? parsed.issues : [];
const currentIssueNumber = String(context.payload.issue.number);

console.log(`Current issue number: ${currentIssueNumber}`);
console.log(issues);

const filteredIssues = issues.filter((value) => String(value) !== currentIssueNumber);

if (filteredIssues.length === 0) {
core.info('Codex reported no potential duplicates.');
return;
}

const lines = [
'Potential duplicates detected. Please review them and close your issue if it is a duplicate.',
'',
...filteredIssues.map((value) => `- #${String(value)}`),
'',
'*Powered by [Codex Action](https://github.com/openai/codex-action)*'];

await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.payload.issue.number,
body: lines.join("\n"),
});

- name: Remove codex-deduplicate label
if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate' }}
env:
GH_TOKEN: ${{ github.token }}
GH_REPO: ${{ github.repository }}
run: |
gh issue edit "${{ github.event.issue.number }}" --remove-label codex-deduplicate || true
echo "Attempted to remove label: codex-deduplicate"
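
Besides running on every newly opened issue, the workflow can be re-triggered manually via the `codex-deduplicate` label, which the final step removes again. A sketch, with an illustrative issue number:

```bash
gh issue edit 1234 --repo openai/codex --add-label codex-deduplicate
```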

.github/workflows/issue-labeler.yml | 115 (vendored, new file)
@@ -0,0 +1,115 @@
name: Issue Labeler

on:
issues:
types:
- opened
- labeled

jobs:
gather-labels:
name: Generate label suggestions
if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label') }}
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- uses: actions/checkout@v4

- id: codex
uses: openai/codex-action@main
with:
openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
allow-users: "*"
prompt: |
You are an assistant that reviews GitHub issues for the repository.

Your job is to choose the most appropriate existing labels for the issue described later in this prompt.
Follow these rules:
- Only pick labels out of the list below.
- Prefer a small set of precise labels over many broad ones.

Labels to apply:
1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth).
2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks.
3. extension — VS Code (or other IDE) extension-specific issues.
4. windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures).
5. mcp — Topics involving Model Context Protocol servers/clients.
6. codex-web — Issues targeting the Codex web UI/Cloud experience.
8. azure — Problems or requests tied to Azure OpenAI deployments.
9. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).
10. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.

Issue number: ${{ github.event.issue.number }}

Issue title:
${{ github.event.issue.title }}

Issue body:
${{ github.event.issue.body }}

Repository full name:
${{ github.repository }}

output-schema: |
{
"type": "object",
"properties": {
"labels": {
"type": "array",
"items": {
"type": "string"
}
}
},
"required": ["labels"],
"additionalProperties": false
}

apply-labels:
name: Apply labels from Codex output
needs: gather-labels
if: ${{ needs.gather-labels.result != 'skipped' }}
runs-on: ubuntu-latest
permissions:
contents: read
issues: write
env:
GH_TOKEN: ${{ github.token }}
GH_REPO: ${{ github.repository }}
ISSUE_NUMBER: ${{ github.event.issue.number }}
CODEX_OUTPUT: ${{ needs.gather-labels.outputs.codex_output }}
steps:
- name: Apply labels
run: |
json=${CODEX_OUTPUT//$'\r'/}
if [ -z "$json" ]; then
echo "Codex produced no output. Skipping label application."
exit 0
fi

if ! printf '%s' "$json" | jq -e 'type == "object" and (.labels | type == "array")' >/dev/null 2>&1; then
echo "Codex output did not include a labels array. Raw output: $json"
exit 0
fi

labels=$(printf '%s' "$json" | jq -r '.labels[] | tostring')
if [ -z "$labels" ]; then
echo "Codex returned an empty array. Nothing to do."
exit 0
fi

cmd=(gh issue edit "$ISSUE_NUMBER")
while IFS= read -r label; do
cmd+=(--add-label "$label")
done <<< "$labels"

"${cmd[@]}" || true

- name: Remove codex-label trigger
if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-label' }}
run: |
gh issue edit "$ISSUE_NUMBER" --remove-label codex-label || true
echo "Attempted to remove label: codex-label"
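
The guard in the apply-labels step can be exercised locally; a sketch with a hypothetical payload, using the same jq filters as above:

```bash
CODEX_OUTPUT='{"labels":["bug","windows-os"]}'  # hypothetical Codex output
printf '%s' "$CODEX_OUTPUT" | jq -e 'type == "object" and (.labels | type == "array")' >/dev/null \
  && printf '%s' "$CODEX_OUTPUT" | jq -r '.labels[] | tostring'
```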

.github/workflows/rust-ci.yml | 81 (vendored)
@@ -57,11 +57,31 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.90
with:
components: rustfmt
- name: cargo fmt
run: cargo fmt -- --config imports_granularity=Item --check
- name: Verify codegen for mcp-types
run: ./mcp-types/check_lib_rs.py

cargo_shear:
name: cargo shear
runs-on: ubuntu-24.04
needs: changed
if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
defaults:
run:
working-directory: codex-rs
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.90
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
with:
tool: cargo-shear
version: 1.5.1
- name: cargo shear
run: cargo shear

# --- CI to validate on different os/targets --------------------------------
lint_build_test:
@@ -123,20 +143,31 @@ jobs:

steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}
components: clippy

- uses: actions/cache@v4
# Explicit cache restore: split cargo home vs target, so we can
# avoid caching the large target dir on the gnu-dev job.
- name: Restore cargo home cache
id: cache_cargo_home_restore
uses: actions/cache/restore@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
${{ github.workspace }}/codex-rs/target/
key: cargo-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}

- name: Restore target cache (except gnu-dev)
id: cache_target_restore
if: ${{ !(matrix.target == 'x86_64-unknown-linux-gnu' && matrix.profile != 'release') }}
uses: actions/cache/restore@v4
with:
path: ${{ github.workspace }}/codex-rs/target/
key: cargo-target-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install musl build tools
@@ -160,15 +191,45 @@ jobs:
find . -name Cargo.toml -mindepth 2 -maxdepth 2 -print0 \
| xargs -0 -n1 -I{} bash -c 'cd "$(dirname "{}")" && cargo check --profile ${{ matrix.profile }}'

- name: cargo test
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
with:
tool: nextest
version: 0.9.103

- name: tests
id: test
# `cargo test` takes too long for release builds to run them on every PR
# Tests take too long for release builds to run them on every PR.
if: ${{ matrix.profile != 'release' }}
continue-on-error: true
run: cargo test --all-features --target ${{ matrix.target }} --profile ${{ matrix.profile }}
run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }}
env:
RUST_BACKTRACE: 1

# Save caches explicitly; make non-fatal so cache packaging
# never fails the overall job. Only save when key wasn't hit.
- name: Save cargo home cache
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}

- name: Save target cache (except gnu-dev)
if: >-
always() && !cancelled() &&
(steps.cache_target_restore.outputs.cache-hit != 'true') &&
!(matrix.target == 'x86_64-unknown-linux-gnu' && matrix.profile != 'release')
continue-on-error: true
uses: actions/cache/save@v4
with:
path: ${{ github.workspace }}/codex-rs/target/
key: cargo-target-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}

# Fail the job if any of the previous steps failed.
- name: verify all steps passed
if: |
@@ -182,7 +243,7 @@ jobs:
# --- Gatherer job that you mark as the ONLY required status -----------------
results:
name: CI results (required)
needs: [changed, general, lint_build_test]
needs: [changed, general, cargo_shear, lint_build_test]
if: always()
runs-on: ubuntu-24.04
steps:
@@ -190,6 +251,7 @@ jobs:
shell: bash
run: |
echo "general: ${{ needs.general.result }}"
echo "shear : ${{ needs.cargo_shear.result }}"
echo "matrix : ${{ needs.lint_build_test.result }}"

# If nothing relevant changed (PR touching only root README, etc.),
@@ -201,4 +263,5 @@ jobs:

# Otherwise require the jobs to have succeeded
[[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; }
[[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; }
[[ '${{ needs.lint_build_test.result }}' == 'success' ]] || { echo 'matrix failed'; exit 1; }
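
The switch from `cargo test` to nextest is easy to mirror locally; a sketch, assuming a matching Rust toolchain (CI pins nextest 0.9.103 via install-action):

```bash
cargo install cargo-nextest --locked
cd codex-rs
cargo nextest run --all-features --no-fail-fast
```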

.github/workflows/rust-release.yml | 285 (vendored)
@@ -47,7 +47,7 @@ jobs:

build:
needs: tag-check
name: ${{ matrix.runner }} - ${{ matrix.target }}
name: Build - ${{ matrix.runner }} - ${{ matrix.target }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 30
defaults:
@@ -77,7 +77,7 @@ jobs:

steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}

@@ -94,10 +94,117 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install musl build tools
run: |
sudo apt install -y musl-tools pkg-config
sudo apt-get update
sudo apt-get install -y musl-tools pkg-config

- name: Cargo build
run: cargo build --target ${{ matrix.target }} --release --bin codex
run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy

- if: ${{ matrix.runner == 'macos-14' }}
name: Configure Apple code signing
shell: bash
env:
KEYCHAIN_PASSWORD: actions
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE_P12 }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
run: |
set -euo pipefail

if [[ -z "${APPLE_CERTIFICATE:-}" ]]; then
echo "APPLE_CERTIFICATE is required for macOS signing"
exit 1
fi

if [[ -z "${APPLE_CERTIFICATE_PASSWORD:-}" ]]; then
echo "APPLE_CERTIFICATE_PASSWORD is required for macOS signing"
exit 1
fi

cert_path="${RUNNER_TEMP}/apple_signing_certificate.p12"
echo "$APPLE_CERTIFICATE" | base64 -d > "$cert_path"

keychain_path="${RUNNER_TEMP}/codex-signing.keychain-db"
security create-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
security set-keychain-settings -lut 21600 "$keychain_path"
security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"

keychain_args=()
cleanup_keychain() {
if ((${#keychain_args[@]} > 0)); then
security list-keychains -s "${keychain_args[@]}" || true
security default-keychain -s "${keychain_args[0]}" || true
else
security list-keychains -s || true
fi
if [[ -f "$keychain_path" ]]; then
security delete-keychain "$keychain_path" || true
fi
}

while IFS= read -r keychain; do
[[ -n "$keychain" ]] && keychain_args+=("$keychain")
done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')

if ((${#keychain_args[@]} > 0)); then
security list-keychains -s "$keychain_path" "${keychain_args[@]}"
else
security list-keychains -s "$keychain_path"
fi

security default-keychain -s "$keychain_path"
security import "$cert_path" -k "$keychain_path" -P "$APPLE_CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" "$keychain_path" > /dev/null

codesign_hashes=()
while IFS= read -r hash; do
[[ -n "$hash" ]] && codesign_hashes+=("$hash")
done < <(security find-identity -v -p codesigning "$keychain_path" \
| sed -n 's/.*\([0-9A-F]\{40\}\).*/\1/p' \
| sort -u)

if ((${#codesign_hashes[@]} == 0)); then
echo "No signing identities found in $keychain_path"
cleanup_keychain
rm -f "$cert_path"
exit 1
fi

if ((${#codesign_hashes[@]} > 1)); then
echo "Multiple signing identities found in $keychain_path:"
printf ' %s\n' "${codesign_hashes[@]}"
cleanup_keychain
rm -f "$cert_path"
exit 1
fi

APPLE_CODESIGN_IDENTITY="${codesign_hashes[0]}"

rm -f "$cert_path"

echo "APPLE_CODESIGN_IDENTITY=$APPLE_CODESIGN_IDENTITY" >> "$GITHUB_ENV"
echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
echo "::add-mask::$APPLE_CODESIGN_IDENTITY"

- if: ${{ matrix.runner == 'macos-14' }}
name: Sign macOS binaries
shell: bash
run: |
set -euo pipefail

if [[ -z "${APPLE_CODESIGN_IDENTITY:-}" ]]; then
echo "APPLE_CODESIGN_IDENTITY is required for macOS signing"
exit 1
fi

keychain_args=()
if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
fi

for binary in codex codex-responses-api-proxy; do
path="target/${{ matrix.target }}/release/${binary}"
codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
done

- name: Stage artifacts
shell: bash
@@ -107,8 +214,10 @@ jobs:

if [[ "${{ matrix.runner }}" == windows* ]]; then
cp target/${{ matrix.target }}/release/codex.exe "$dest/codex-${{ matrix.target }}.exe"
cp target/${{ matrix.target }}/release/codex-responses-api-proxy.exe "$dest/codex-responses-api-proxy-${{ matrix.target }}.exe"
else
cp target/${{ matrix.target }}/release/codex "$dest/codex-${{ matrix.target }}"
cp target/${{ matrix.target }}/release/codex-responses-api-proxy "$dest/codex-responses-api-proxy-${{ matrix.target }}"
fi

- if: ${{ matrix.runner == 'windows-11-arm' }}
@@ -155,6 +264,29 @@ jobs:
zstd -T0 -19 --rm "$dest/$base"
done

- name: Remove signing keychain
if: ${{ always() && matrix.runner == 'macos-14' }}
shell: bash
env:
APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
run: |
set -euo pipefail
if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" ]]; then
keychain_args=()
while IFS= read -r keychain; do
[[ "$keychain" == "$APPLE_CODESIGN_KEYCHAIN" ]] && continue
[[ -n "$keychain" ]] && keychain_args+=("$keychain")
done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
if ((${#keychain_args[@]} > 0)); then
security list-keychains -s "${keychain_args[@]}"
security default-keychain -s "${keychain_args[0]}"
fi

if [[ -f "$APPLE_CODESIGN_KEYCHAIN" ]]; then
security delete-keychain "$APPLE_CODESIGN_KEYCHAIN"
fi
fi

- uses: actions/upload-artifact@v4
with:
name: ${{ matrix.target }}
@@ -167,6 +299,14 @@ jobs:
needs: build
name: release
runs-on: ubuntu-latest
permissions:
contents: write
actions: read
outputs:
version: ${{ steps.release_name.outputs.name }}
tag: ${{ github.ref_name }}
should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }}
npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }}

steps:
- name: Checkout repository
@@ -187,21 +327,49 @@ jobs:
version="${GITHUB_REF_NAME#rust-v}"
echo "name=${version}" >> $GITHUB_OUTPUT

- name: Stage npm package
- name: Determine npm publish settings
id: npm_publish_settings
env:
VERSION: ${{ steps.release_name.outputs.name }}
run: |
set -euo pipefail
version="${VERSION}"

if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo "should_publish=true" >> "$GITHUB_OUTPUT"
echo "npm_tag=" >> "$GITHUB_OUTPUT"
elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
echo "should_publish=true" >> "$GITHUB_OUTPUT"
echo "npm_tag=alpha" >> "$GITHUB_OUTPUT"
else
echo "should_publish=false" >> "$GITHUB_OUTPUT"
echo "npm_tag=" >> "$GITHUB_OUTPUT"
fi

- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
run_install: false

- name: Setup Node.js for npm packaging
uses: actions/setup-node@v5
with:
node-version: 22

- name: Install dependencies
run: pnpm install --frozen-lockfile

# stage_npm_packages.py requires DotSlash when staging releases.
- uses: facebook/install-dotslash@v2
- name: Stage npm packages
env:
GH_TOKEN: ${{ github.token }}
run: |
set -euo pipefail
TMP_DIR="${RUNNER_TEMP}/npm-stage"
python3 codex-cli/scripts/stage_rust_release.py \
./scripts/stage_npm_packages.py \
--release-version "${{ steps.release_name.outputs.name }}" \
--tmp "${TMP_DIR}"
mkdir -p dist/npm
# Produce an npm-ready tarball using `npm pack` and store it in dist/npm.
# We then rename it to a stable name used by our publishing script.
(cd "$TMP_DIR" && npm pack --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
mv "${GITHUB_WORKSPACE}"/dist/npm/*.tgz \
"${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
--package codex \
--package codex-responses-api-proxy \
--package codex-sdk

- name: Create GitHub Release
uses: softprops/action-gh-release@v2
@@ -219,3 +387,90 @@ jobs:
with:
tag: ${{ github.ref_name }}
config: .github/dotslash-config.json

# Publish to npm using OIDC authentication.
# July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/
# npm docs: https://docs.npmjs.com/trusted-publishers
publish-npm:
# Publish to npm for stable releases and alpha pre-releases with numeric suffixes.
if: ${{ needs.release.outputs.should_publish_npm == 'true' }}
name: publish-npm
needs: release
runs-on: ubuntu-latest
permissions:
id-token: write # Required for OIDC
contents: read

steps:
- name: Setup Node.js
uses: actions/setup-node@v5
with:
node-version: 22
registry-url: "https://registry.npmjs.org"
scope: "@openai"

# Trusted publishing requires npm CLI version 11.5.1 or later.
- name: Update npm
run: npm install -g npm@latest

- name: Download npm tarballs from release
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euo pipefail
version="${{ needs.release.outputs.version }}"
tag="${{ needs.release.outputs.tag }}"
mkdir -p dist/npm
gh release download "$tag" \
--repo "${GITHUB_REPOSITORY}" \
--pattern "codex-npm-${version}.tgz" \
--dir dist/npm
gh release download "$tag" \
--repo "${GITHUB_REPOSITORY}" \
--pattern "codex-responses-api-proxy-npm-${version}.tgz" \
--dir dist/npm
gh release download "$tag" \
--repo "${GITHUB_REPOSITORY}" \
--pattern "codex-sdk-npm-${version}.tgz" \
--dir dist/npm

# No NODE_AUTH_TOKEN needed because we use OIDC.
- name: Publish to npm
env:
VERSION: ${{ needs.release.outputs.version }}
NPM_TAG: ${{ needs.release.outputs.npm_tag }}
run: |
set -euo pipefail
tag_args=()
if [[ -n "${NPM_TAG}" ]]; then
tag_args+=(--tag "${NPM_TAG}")
fi

tarballs=(
"codex-npm-${VERSION}.tgz"
"codex-responses-api-proxy-npm-${VERSION}.tgz"
"codex-sdk-npm-${VERSION}.tgz"
)

for tarball in "${tarballs[@]}"; do
npm publish "${GITHUB_WORKSPACE}/dist/npm/${tarball}" "${tag_args[@]}"
done

update-branch:
name: Update latest-alpha-cli branch
permissions:
contents: write
needs: release
runs-on: ubuntu-latest

steps:
- name: Update latest-alpha-cli branch
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euo pipefail
gh api \
repos/${GITHUB_REPOSITORY}/git/refs/heads/latest-alpha-cli \
-X PATCH \
-f sha="${GITHUB_SHA}" \
-F force=true
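
The publish gate hinges on the two version regexes in `npm_publish_settings`; a sketch of the same logic for quick local checks:

```bash
should_publish() {
  local v="$1"
  if [[ "$v" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
    echo "publish, default tag"    # stable release
  elif [[ "$v" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
    echo "publish, --tag alpha"    # alpha pre-release with numeric suffix
  else
    echo "skip"                    # anything else is not published
  fi
}
should_publish 0.40.0          # publish, default tag
should_publish 0.41.0-alpha.3  # publish, --tag alpha
should_publish 0.41.0-beta.1   # skip
```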

.github/workflows/sdk.yml | 43 (vendored, new file)
@@ -0,0 +1,43 @@
name: sdk

on:
push:
branches: [main]
pull_request: {}

jobs:
sdks:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Checkout repository
uses: actions/checkout@v5

- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
run_install: false

- name: Setup Node.js
uses: actions/setup-node@v5
with:
node-version: 22
cache: pnpm

- uses: dtolnay/rust-toolchain@1.90

- name: build codex
run: cargo build --bin codex
working-directory: codex-rs

- name: Install dependencies
run: pnpm install --frozen-lockfile

- name: Build SDK packages
run: pnpm -r --filter ./sdk/typescript run build

- name: Lint SDK packages
run: pnpm -r --filter ./sdk/typescript run lint

- name: Test SDK packages
run: pnpm -r --filter ./sdk/typescript run test
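
A local equivalent of the job, assuming pnpm and a Rust toolchain (the codex debug binary is built first, presumably because the SDK tests invoke it):

```bash
(cd codex-rs && cargo build --bin codex)
pnpm install --frozen-lockfile
pnpm -r --filter ./sdk/typescript run build
pnpm -r --filter ./sdk/typescript run lint
pnpm -r --filter ./sdk/typescript run test
```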

.gitignore | 1 (vendored)
@@ -30,6 +30,7 @@ result
# cli tools
CLAUDE.md
.claude/
AGENTS.override.md

# caches
.cache/

AGENTS.md | 36
@@ -4,14 +4,20 @@ In the codex-rs folder where the rust code lives:

- Crate names are prefixed with `codex-`. For example, the `core` folder's crate is named `codex-core`
- When using format! and you can inline variables into {}, always do that.
- Install any commands the repo relies on (for example `just`, `rg`, or `cargo-insta`) if they aren't already available before running instructions here.
- Never add or modify any code related to `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` or `CODEX_SANDBOX_ENV_VAR`.
- You operate in a sandbox where `CODEX_SANDBOX_NETWORK_DISABLED=1` will be set whenever you use the `shell` tool. Any existing code that uses `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` was authored with this fact in mind. It is often used to early exit out of tests that the author knew you would not be able to run given your sandbox limitations.
- Similarly, when you spawn a process using Seatbelt (`/usr/bin/sandbox-exec`), `CODEX_SANDBOX=seatbelt` will be set on the child process. Integration tests that want to run Seatbelt themselves cannot be run under Seatbelt, so checks for `CODEX_SANDBOX=seatbelt` are also often used to early exit out of tests, as appropriate.
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
- When writing tests, prefer comparing the equality of entire objects over fields one by one.

Run `just fmt` (in `codex-rs` directory) automatically after making Rust code changes; do not ask for approval to run it. Before finalizing a change to `codex-rs`, run `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates. Additionally, run the tests:

1. Run the test for the specific project that was changed. For example, if changes were made in `codex-rs/tui`, run `cargo test -p codex-tui`.
2. Once those pass, if any changes were made in common, core, or protocol, run the complete test suite with `cargo test --all-features`.
When running interactively, ask the user before running `just fix` to finalize. `just fmt` does not require approval. project-specific or individual tests can be run without asking the user, but do ask the user before running the complete test suite.
When running interactively, ask the user before running `just fix` to finalize. `just fmt` does not require approval. project-specific or individual tests can be run without asking the user, but do ask the user before running the complete test suite.

## TUI style conventions

@@ -27,6 +33,7 @@ See `codex-rs/tui/styles.md`.
- Desired: vec![" └ ".into(), "M".red(), " ".dim(), "tui/src/app.rs".dim()]

### TUI Styling (ratatui)

- Prefer Stylize helpers: use "text".dim(), .bold(), .cyan(), .italic(), .underlined() instead of manual Style where possible.
- Prefer simple conversions: use "text".into() for spans and vec![…].into() for lines; when inference is ambiguous (e.g., Paragraph::new/Cell::from), use Line::from(spans) or Span::from(text).
- Computed styles: if the Style is computed at runtime, using `Span::styled` is OK (`Span::from(text).set_style(style)` is also acceptable).
@@ -38,6 +45,7 @@ See `codex-rs/tui/styles.md`.
- Compactness: prefer the form that stays on one line after rustfmt; if only one of Line::from(vec![…]) or vec![…].into() avoids wrapping, choose that. If both wrap, pick the one with fewer wrapped lines.

### Text wrapping

- Always use textwrap::wrap to wrap plain strings.
- If you have a ratatui Line and you want to wrap it, use the helpers in tui/src/wrapping.rs, e.g. word_wrap_lines / word_wrap_line.
- If you need to indent wrapped lines, use the initial_indent / subsequent_indent options from RtOptions if you can, rather than writing custom logic.
@@ -59,8 +67,34 @@ This repo uses snapshot tests (via `insta`), especially in `codex-rs/tui`, to va
- `cargo insta accept -p codex-tui`

If you don’t have the tool:

- `cargo install cargo-insta`

### Test assertions

- Tests should use pretty_assertions::assert_eq for clearer diffs. Import this at the top of the test module if it isn't already.

### Integration tests (core)

- Prefer the utilities in `core_test_support::responses` when writing end-to-end Codex tests.

- All `mount_sse*` helpers return a `ResponseMock`; hold onto it so you can assert against outbound `/responses` POST bodies.
- Use `ResponseMock::single_request()` when a test should only issue one POST, or `ResponseMock::requests()` to inspect every captured `ResponsesRequest`.
- `ResponsesRequest` exposes helpers (`body_json`, `input`, `function_call_output`, `custom_tool_call_output`, `call_output`, `header`, `path`, `query_param`) so assertions can target structured payloads instead of manual JSON digging.
- Build SSE payloads with the provided `ev_*` constructors and the `sse(...)`.

- Typical pattern:

```rust
let mock = responses::mount_sse_once(&server, responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
responses::ev_completed("resp-1"),
])).await;

codex.submit(Op::UserTurn { ... }).await?;

// Assert request body if needed.
let request = mock.single_request();
// assert using request.function_call_output(call_id) or request.json_body() or other helpers.
```
16
README.md
16
README.md
@@ -1,8 +1,9 @@
|
||||
<h1 align="center">OpenAI Codex CLI</h1>
|
||||
|
||||
<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install codex</code></p>
|
||||
|
||||
<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, see <a href="https://chatgpt.com/codex">chatgpt.com/codex</a>.</p>
|
||||
<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.
|
||||
</br>
|
||||
</br>If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE</a>
|
||||
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a></p>
|
||||
|
||||
<p align="center">
|
||||
<img src="./.github/codex-cli-splash.png" alt="Codex CLI splash" width="80%" />
|
||||
@@ -60,8 +61,7 @@ You can also use Codex with an API key, but this requires [additional setup](./d
|
||||
|
||||
### Model Context Protocol (MCP)
|
||||
|
||||
Codex CLI supports [MCP servers](./docs/advanced.md#model-context-protocol-mcp). Enable by adding an `mcp_servers` section to your `~/.codex/config.toml`.
|
||||
|
||||
Codex can access MCP servers. To configure them, refer to the [config docs](./docs/config.md#mcp_servers).
|
||||
|
||||
### Configuration
|
||||
|
||||
@@ -81,8 +81,11 @@ Codex CLI supports a rich set of configuration options, with preferences stored
|
||||
- [**Authentication**](./docs/authentication.md)
|
||||
- [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
|
||||
- [Login on a "Headless" machine](./docs/authentication.md#connecting-on-a-headless-machine)
|
||||
- **Automating Codex**
|
||||
- [GitHub Action](https://github.com/openai/codex-action)
|
||||
- [TypeScript SDK](./sdk/typescript/README.md)
|
||||
- [Non-interactive mode (`codex exec`)](./docs/exec.md)
|
||||
- [**Advanced**](./docs/advanced.md)
|
||||
- [Non-interactive / CI mode](./docs/advanced.md#non-interactive--ci-mode)
|
||||
- [Tracing / verbose logging](./docs/advanced.md#tracing--verbose-logging)
|
||||
- [Model Context Protocol (MCP)](./docs/advanced.md#model-context-protocol-mcp)
|
||||
- [**Zero data retention (ZDR)**](./docs/zdr.md)
|
||||
@@ -99,4 +102,3 @@ Codex CLI supports a rich set of configuration options, with preferences stored
|
||||
## License
|
||||
|
||||
This repository is licensed under the [Apache-2.0 License](LICENSE).
|
||||
|
||||
|
||||
8
codex-cli/.gitignore
vendored
8
codex-cli/.gitignore
vendored
@@ -1,7 +1 @@
|
||||
# Added by ./scripts/install_native_deps.sh
|
||||
/bin/codex-aarch64-apple-darwin
|
||||
/bin/codex-aarch64-unknown-linux-musl
|
||||
/bin/codex-linux-sandbox-arm64
|
||||
/bin/codex-linux-sandbox-x64
|
||||
/bin/codex-x86_64-apple-darwin
|
||||
/bin/codex-x86_64-unknown-linux-musl
|
||||
/vendor/
|
||||
|
||||
@@ -208,7 +208,7 @@ The hardening mechanism Codex uses depends on your OS:
|
||||
| Requirement | Details |
|
||||
| --------------------------- | --------------------------------------------------------------- |
|
||||
| Operating systems | macOS 12+, Ubuntu 20.04+/Debian 10+, or Windows 11 **via WSL2** |
|
||||
| Node.js | **22 or newer** (LTS recommended) |
|
||||
| Node.js | **16 or newer** (Node 20 LTS recommended) |
|
||||
| Git (optional, recommended) | 2.23+ for built-in PR helpers |
|
||||
| RAM | 4-GB minimum (8-GB recommended) |
|
||||
|
||||
@@ -513,7 +513,7 @@ Codex runs model-generated commands in a sandbox. If a proposed command or file
|
||||
<details>
|
||||
<summary>Does it work on Windows?</summary>
|
||||
|
||||
Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex has been tested on macOS and Linux with Node 22.
|
||||
Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex is regularly tested on macOS and Linux with Node 20+, and also supports Node 16.
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
70
codex-cli/bin/codex.js
Executable file → Normal file
70
codex-cli/bin/codex.js
Executable file → Normal file
@@ -1,6 +1,8 @@
|
||||
#!/usr/bin/env node
|
||||
// Unified entry point for the Codex CLI.
|
||||
|
||||
import { spawn } from "node:child_process";
|
||||
import { existsSync } from "fs";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
@@ -40,10 +42,10 @@ switch (platform) {
|
||||
case "win32":
|
||||
switch (arch) {
|
||||
case "x64":
|
||||
targetTriple = "x86_64-pc-windows-msvc.exe";
|
||||
targetTriple = "x86_64-pc-windows-msvc";
|
||||
break;
|
||||
case "arm64":
|
||||
targetTriple = "aarch64-pc-windows-msvc.exe";
|
||||
targetTriple = "aarch64-pc-windows-msvc";
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
@@ -57,31 +59,16 @@ if (!targetTriple) {
|
||||
throw new Error(`Unsupported platform: ${platform} (${arch})`);
|
||||
}
|
||||
|
||||
const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`);
|
||||
const vendorRoot = path.join(__dirname, "..", "vendor");
|
||||
const archRoot = path.join(vendorRoot, targetTriple);
|
||||
const codexBinaryName = process.platform === "win32" ? "codex.exe" : "codex";
|
||||
const binaryPath = path.join(archRoot, "codex", codexBinaryName);
|
||||
|
||||
// Use an asynchronous spawn instead of spawnSync so that Node is able to
|
||||
// respond to signals (e.g. Ctrl-C / SIGINT) while the native binary is
|
||||
// executing. This allows us to forward those signals to the child process
|
||||
// and guarantees that when either the child terminates or the parent
|
||||
// receives a fatal signal, both processes exit in a predictable manner.
|
||||
const { spawn } = await import("child_process");
|
||||
|
||||
async function tryImport(moduleName) {
|
||||
try {
|
||||
// eslint-disable-next-line node/no-unsupported-features/es-syntax
|
||||
return await import(moduleName);
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveRgDir() {
|
||||
const ripgrep = await tryImport("@vscode/ripgrep");
|
||||
if (!ripgrep?.rgPath) {
|
||||
return null;
|
||||
}
|
||||
return path.dirname(ripgrep.rgPath);
|
||||
}
|
||||
|
||||
function getUpdatedPath(newDirs) {
|
||||
const pathSep = process.platform === "win32" ? ";" : ":";
|
||||
@@ -93,16 +80,49 @@ function getUpdatedPath(newDirs) {
|
||||
return updatedPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use heuristics to detect the package manager that was used to install Codex
|
||||
* in order to give the user a hint about how to update it.
|
||||
*/
|
||||
function detectPackageManager() {
|
||||
const userAgent = process.env.npm_config_user_agent || "";
|
||||
if (/\bbun\//.test(userAgent)) {
|
||||
return "bun";
|
||||
}
|
||||
|
||||
const execPath = process.env.npm_execpath || "";
|
||||
if (execPath.includes("bun")) {
|
||||
return "bun";
|
||||
}
|
||||
|
||||
if (
|
||||
process.env.BUN_INSTALL ||
|
||||
process.env.BUN_INSTALL_GLOBAL_DIR ||
|
||||
process.env.BUN_INSTALL_BIN_DIR
|
||||
) {
|
||||
return "bun";
|
||||
}
|
||||
|
||||
return userAgent ? "npm" : null;
|
||||
}
|
||||
|
||||
const additionalDirs = [];
|
||||
const rgDir = await resolveRgDir();
|
||||
if (rgDir) {
|
||||
additionalDirs.push(rgDir);
|
||||
const pathDir = path.join(archRoot, "path");
|
||||
if (existsSync(pathDir)) {
|
||||
additionalDirs.push(pathDir);
|
||||
}
|
||||
const updatedPath = getUpdatedPath(additionalDirs);
|
||||
|
||||
const env = { ...process.env, PATH: updatedPath };
|
||||
const packageManagerEnvVar =
|
||||
detectPackageManager() === "bun"
|
||||
? "CODEX_MANAGED_BY_BUN"
|
||||
: "CODEX_MANAGED_BY_NPM";
|
||||
env[packageManagerEnvVar] = "1";
|
||||
|
||||
const child = spawn(binaryPath, process.argv.slice(2), {
|
||||
stdio: "inherit",
|
||||
env: { ...process.env, PATH: updatedPath, CODEX_MANAGED_BY_NPM: "1" },
|
||||
env,
|
||||
});
|
||||
|
||||
child.on("error", (err) => {
|
||||
|
||||
79
codex-cli/bin/rg
Executable file
79
codex-cli/bin/rg
Executable file
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env dotslash
|
||||
|
||||
{
|
||||
"name": "rg",
|
||||
"platforms": {
|
||||
"macos-aarch64": {
|
||||
"size": 1787248,
|
||||
"hash": "blake3",
|
||||
"digest": "8d9942032585ea8ee805937634238d9aee7b210069f4703c88fbe568e26fb78a",
|
||||
"format": "tar.gz",
|
||||
"path": "ripgrep-14.1.1-aarch64-apple-darwin/rg",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-apple-darwin.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
"linux-aarch64": {
|
||||
"size": 2047405,
|
||||
"hash": "blake3",
|
||||
"digest": "0b670b8fa0a3df2762af2fc82cc4932f684ca4c02dbd1260d4f3133fd4b2a515",
|
||||
"format": "tar.gz",
|
||||
"path": "ripgrep-14.1.1-aarch64-unknown-linux-gnu/rg",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-unknown-linux-gnu.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
"macos-x86_64": {
|
||||
"size": 2082672,
|
||||
"hash": "blake3",
|
||||
"digest": "e9b862fc8da3127f92791f0ff6a799504154ca9d36c98bf3e60a81c6b1f7289e",
|
||||
"format": "tar.gz",
|
||||
"path": "ripgrep-14.1.1-x86_64-apple-darwin/rg",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-apple-darwin.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
"linux-x86_64": {
|
||||
"size": 2566310,
|
||||
"hash": "blake3",
|
||||
"digest": "f73cca4e54d78c31f832c7f6e2c0b4db8b04fa3eaa747915727d570893dbee76",
|
||||
"format": "tar.gz",
|
||||
"path": "ripgrep-14.1.1-x86_64-unknown-linux-musl/rg",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-unknown-linux-musl.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
"windows-x86_64": {
|
||||
"size": 2058893,
|
||||
"hash": "blake3",
|
||||
"digest": "a8ce1a6fed4f8093ee997e57f33254e94b2cd18e26358b09db599c89882eadbd",
|
||||
"format": "zip",
|
||||
"path": "ripgrep-14.1.1-x86_64-pc-windows-msvc/rg.exe",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-pc-windows-msvc.zip"
|
||||
}
|
||||
]
|
||||
},
|
||||
"windows-aarch64": {
|
||||
"size": 1667740,
|
||||
"hash": "blake3",
|
||||
"digest": "47b971a8c4fca1d23a4e7c19bd4d88465ebc395598458133139406d3bf85f3fa",
|
||||
"format": "zip",
|
||||
"path": "rg.exe",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/microsoft/ripgrep-prebuilt/releases/download/v13.0.0-13/ripgrep-v13.0.0-13-aarch64-pc-windows-msvc.zip"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
103
codex-cli/package-lock.json
generated
103
codex-cli/package-lock.json
generated
@@ -2,117 +2,16 @@
|
||||
"name": "@openai/codex",
|
||||
"version": "0.0.0-dev",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@openai/codex",
|
||||
"version": "0.0.0-dev",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@vscode/ripgrep": "^1.15.14"
|
||||
},
|
||||
"bin": {
|
||||
"codex": "bin/codex.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
}
|
||||
},
|
||||
"node_modules/@vscode/ripgrep": {
|
||||
"version": "1.15.14",
|
||||
"resolved": "https://registry.npmjs.org/@vscode/ripgrep/-/ripgrep-1.15.14.tgz",
|
||||
"integrity": "sha512-/G1UJPYlm+trBWQ6cMO3sv6b8D1+G16WaJH1/DSqw32JOVlzgZbLkDxRyzIpTpv30AcYGMkCf5tUqGlW6HbDWw==",
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"https-proxy-agent": "^7.0.2",
|
||||
"proxy-from-env": "^1.1.0",
|
||||
"yauzl": "^2.9.2"
|
||||
}
|
||||
},
|
||||
"node_modules/agent-base": {
|
||||
"version": "7.1.4",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
|
||||
"integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/buffer-crc32": {
|
||||
"version": "0.2.13",
|
||||
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
|
||||
"integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.4.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
|
||||
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/fd-slicer": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
|
||||
"integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"pend": "~1.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/https-proxy-agent": {
|
||||
"version": "7.0.6",
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
|
||||
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"agent-base": "^7.1.2",
|
||||
"debug": "4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pend": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
|
||||
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/proxy-from-env": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/yauzl": {
|
||||
"version": "2.10.0",
|
||||
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
|
||||
"integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"buffer-crc32": "~0.2.3",
|
||||
"fd-slicer": "~1.1.0"
|
||||
"node": ">=16"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,20 +7,15 @@
|
||||
},
|
||||
"type": "module",
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
"node": ">=16"
|
||||
},
|
||||
"files": [
|
||||
"bin",
|
||||
"dist"
|
||||
"vendor"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/openai/codex.git"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vscode/ripgrep": "^1.15.14"
|
||||
},
|
||||
"devDependencies": {
|
||||
"prettier": "^3.3.3"
|
||||
"url": "git+https://github.com/openai/codex.git",
|
||||
"directory": "codex-cli"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,19 @@
|
||||
# npm releases
|
||||
|
||||
Run the following:
|
||||
|
||||
To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, build it as follows:
|
||||
Use the staging helper in the repo root to generate npm tarballs for a release. For
|
||||
example, to stage the CLI, responses proxy, and SDK packages for version `0.6.0`:
|
||||
|
||||
```bash
|
||||
./codex-cli/scripts/stage_rust_release.py --release-version 0.6.0
|
||||
./scripts/stage_npm_packages.py \
|
||||
--release-version 0.6.0 \
|
||||
--package codex \
|
||||
--package codex-responses-api-proxy \
|
||||
--package codex-sdk
|
||||
```
|
||||
|
||||
This downloads the native artifacts once, hydrates `vendor/` for each package, and writes
|
||||
tarballs to `dist/npm/`.
|
||||
|
||||
If you need to invoke `build_npm_package.py` directly, run
|
||||
`codex-cli/scripts/install_native_deps.py` first and pass `--vendor-src` pointing to the
|
||||
directory that contains the populated `vendor/` tree.
|
||||
|
||||
308
codex-cli/scripts/build_npm_package.py
Executable file
308
codex-cli/scripts/build_npm_package.py
Executable file
@@ -0,0 +1,308 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Stage and optionally package the @openai/codex npm module."""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
SCRIPT_DIR = Path(__file__).resolve().parent
|
||||
CODEX_CLI_ROOT = SCRIPT_DIR.parent
|
||||
REPO_ROOT = CODEX_CLI_ROOT.parent
|
||||
RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm"
|
||||
CODEX_SDK_ROOT = REPO_ROOT / "sdk" / "typescript"
|
||||
|
||||
PACKAGE_NATIVE_COMPONENTS: dict[str, list[str]] = {
|
||||
"codex": ["codex", "rg"],
|
||||
"codex-responses-api-proxy": ["codex-responses-api-proxy"],
|
||||
"codex-sdk": ["codex"],
|
||||
}
|
||||
COMPONENT_DEST_DIR: dict[str, str] = {
|
||||
"codex": "codex",
|
||||
"codex-responses-api-proxy": "codex-responses-api-proxy",
|
||||
"rg": "path",
|
||||
}
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
|
||||
parser.add_argument(
|
||||
"--package",
|
||||
choices=("codex", "codex-responses-api-proxy", "codex-sdk"),
|
||||
default="codex",
|
||||
help="Which npm package to stage (default: codex).",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
help="Version number to write to package.json inside the staged package.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--release-version",
|
||||
help=(
|
||||
"Version to stage for npm release."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--staging-dir",
|
||||
type=Path,
|
||||
help=(
|
||||
"Directory to stage the package contents. Defaults to a new temporary directory "
|
||||
"if omitted. The directory must be empty when provided."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tmp",
|
||||
dest="staging_dir",
|
||||
type=Path,
|
||||
help=argparse.SUPPRESS,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--pack-output",
|
||||
type=Path,
|
||||
help="Path where the generated npm tarball should be written.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--vendor-src",
|
||||
type=Path,
|
||||
help="Directory containing pre-installed native binaries to bundle (vendor root).",
|
||||
)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> int:
|
||||
args = parse_args()
|
||||
|
||||
package = args.package
|
||||
version = args.version
|
||||
release_version = args.release_version
|
||||
if release_version:
|
||||
if version and version != release_version:
|
||||
raise RuntimeError("--version and --release-version must match when both are provided.")
|
||||
version = release_version
|
||||
|
||||
if not version:
|
||||
raise RuntimeError("Must specify --version or --release-version.")
|
||||
|
||||
staging_dir, created_temp = prepare_staging_dir(args.staging_dir)
|
||||
|
||||
try:
|
||||
stage_sources(staging_dir, version, package)
|
||||
|
||||
vendor_src = args.vendor_src.resolve() if args.vendor_src else None
|
||||
native_components = PACKAGE_NATIVE_COMPONENTS.get(package, [])
|
||||
|
||||
if native_components:
|
||||
if vendor_src is None:
|
||||
components_str = ", ".join(native_components)
|
||||
raise RuntimeError(
|
||||
"Native components "
|
||||
f"({components_str}) required for package '{package}'. Provide --vendor-src "
|
||||
"pointing to a directory containing pre-installed binaries."
|
||||
)
|
||||
|
||||
copy_native_binaries(vendor_src, staging_dir, native_components)
|
||||
|
||||
if release_version:
|
||||
staging_dir_str = str(staging_dir)
|
||||
if package == "codex":
|
||||
print(
|
||||
f"Staged version {version} for release in {staging_dir_str}\n\n"
|
||||
"Verify the CLI:\n"
|
||||
f" node {staging_dir_str}/bin/codex.js --version\n"
|
||||
f" node {staging_dir_str}/bin/codex.js --help\n\n"
|
||||
)
|
||||
elif package == "codex-responses-api-proxy":
|
||||
print(
|
||||
f"Staged version {version} for release in {staging_dir_str}\n\n"
|
||||
"Verify the responses API proxy:\n"
|
||||
f" node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n"
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"Staged version {version} for release in {staging_dir_str}\n\n"
|
||||
"Verify the SDK contents:\n"
|
||||
f" ls {staging_dir_str}/dist\n"
|
||||
f" ls {staging_dir_str}/vendor\n"
|
||||
" node -e \"import('./dist/index.js').then(() => console.log('ok'))\"\n\n"
|
||||
)
|
||||
else:
|
||||
print(f"Staged package in {staging_dir}")
|
||||
|
||||
if args.pack_output is not None:
|
||||
output_path = run_npm_pack(staging_dir, args.pack_output)
|
||||
print(f"npm pack output written to {output_path}")
|
||||
finally:
|
||||
if created_temp:
|
||||
# Preserve the staging directory for further inspection.
|
||||
pass
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
|
||||
if staging_dir is not None:
|
||||
staging_dir = staging_dir.resolve()
|
||||
staging_dir.mkdir(parents=True, exist_ok=True)
|
||||
if any(staging_dir.iterdir()):
|
||||
raise RuntimeError(f"Staging directory {staging_dir} is not empty.")
|
||||
return staging_dir, False
|
||||
|
||||
temp_dir = Path(tempfile.mkdtemp(prefix="codex-npm-stage-"))
|
||||
return temp_dir, True
|
||||
|
||||
|
||||
def stage_sources(staging_dir: Path, version: str, package: str) -> None:
|
||||
if package == "codex":
|
||||
bin_dir = staging_dir / "bin"
|
||||
bin_dir.mkdir(parents=True, exist_ok=True)
|
||||
shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
|
||||
rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
|
||||
if rg_manifest.exists():
|
||||
shutil.copy2(rg_manifest, bin_dir / "rg")
|
||||
|
||||
readme_src = REPO_ROOT / "README.md"
|
||||
if readme_src.exists():
|
||||
shutil.copy2(readme_src, staging_dir / "README.md")
|
||||
|
||||
package_json_path = CODEX_CLI_ROOT / "package.json"
|
||||
elif package == "codex-responses-api-proxy":
|
||||
bin_dir = staging_dir / "bin"
|
||||
bin_dir.mkdir(parents=True, exist_ok=True)
|
||||
launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js"
|
||||
shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js")
|
||||
|
||||
readme_src = RESPONSES_API_PROXY_NPM_ROOT / "README.md"
|
||||
if readme_src.exists():
|
||||
shutil.copy2(readme_src, staging_dir / "README.md")
|
||||
|
||||
package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json"
|
||||
elif package == "codex-sdk":
|
||||
package_json_path = CODEX_SDK_ROOT / "package.json"
|
||||
stage_codex_sdk_sources(staging_dir)
|
||||
else:
|
||||
raise RuntimeError(f"Unknown package '{package}'.")
|
||||
|
||||
with open(package_json_path, "r", encoding="utf-8") as fh:
|
||||
package_json = json.load(fh)
|
||||
package_json["version"] = version
|
||||
|
||||
if package == "codex-sdk":
|
||||
scripts = package_json.get("scripts")
|
||||
if isinstance(scripts, dict):
|
||||
scripts.pop("prepare", None)
|
||||
|
||||
files = package_json.get("files")
|
||||
if isinstance(files, list):
|
||||
if "vendor" not in files:
|
||||
files.append("vendor")
|
||||
else:
|
||||
package_json["files"] = ["dist", "vendor"]
|
||||
|
||||
with open(staging_dir / "package.json", "w", encoding="utf-8") as out:
|
||||
json.dump(package_json, out, indent=2)
|
||||
out.write("\n")
|
||||
|
||||
|
||||
def run_command(cmd: list[str], cwd: Path | None = None) -> None:
|
||||
print("+", " ".join(cmd))
|
||||
subprocess.run(cmd, cwd=cwd, check=True)
|
||||
|
||||
|
||||
def stage_codex_sdk_sources(staging_dir: Path) -> None:
|
||||
package_root = CODEX_SDK_ROOT
|
||||
|
||||
run_command(["pnpm", "install", "--frozen-lockfile"], cwd=package_root)
|
||||
run_command(["pnpm", "run", "build"], cwd=package_root)
|
||||
|
||||
dist_src = package_root / "dist"
|
||||
if not dist_src.exists():
|
||||
raise RuntimeError("codex-sdk build did not produce a dist directory.")
|
||||
|
||||
shutil.copytree(dist_src, staging_dir / "dist")
|
||||
|
||||
readme_src = package_root / "README.md"
|
||||
if readme_src.exists():
|
||||
shutil.copy2(readme_src, staging_dir / "README.md")
|
||||
|
||||
license_src = REPO_ROOT / "LICENSE"
|
||||
if license_src.exists():
|
||||
shutil.copy2(license_src, staging_dir / "LICENSE")
|
||||
|
||||
|
||||
def copy_native_binaries(vendor_src: Path, staging_dir: Path, components: list[str]) -> None:
|
||||
vendor_src = vendor_src.resolve()
|
||||
if not vendor_src.exists():
|
||||
raise RuntimeError(f"Vendor source directory not found: {vendor_src}")
|
||||
|
||||
components_set = {component for component in components if component in COMPONENT_DEST_DIR}
|
||||
if not components_set:
|
||||
return
|
||||
|
||||
vendor_dest = staging_dir / "vendor"
|
||||
if vendor_dest.exists():
|
||||
shutil.rmtree(vendor_dest)
|
||||
vendor_dest.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for target_dir in vendor_src.iterdir():
|
||||
if not target_dir.is_dir():
|
||||
continue
|
||||
|
||||
dest_target_dir = vendor_dest / target_dir.name
|
||||
dest_target_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for component in components_set:
|
||||
dest_dir_name = COMPONENT_DEST_DIR.get(component)
|
||||
if dest_dir_name is None:
|
||||
continue
|
||||
|
||||
src_component_dir = target_dir / dest_dir_name
|
||||
if not src_component_dir.exists():
|
||||
raise RuntimeError(
|
||||
f"Missing native component '{component}' in vendor source: {src_component_dir}"
|
||||
)
|
||||
|
||||
dest_component_dir = dest_target_dir / dest_dir_name
|
||||
if dest_component_dir.exists():
|
||||
shutil.rmtree(dest_component_dir)
|
||||
shutil.copytree(src_component_dir, dest_component_dir)
|
||||
|
||||
|
||||
def run_npm_pack(staging_dir: Path, output_path: Path) -> Path:
|
||||
output_path = output_path.resolve()
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="codex-npm-pack-") as pack_dir_str:
|
||||
pack_dir = Path(pack_dir_str)
|
||||
stdout = subprocess.check_output(
|
||||
["npm", "pack", "--json", "--pack-destination", str(pack_dir)],
|
||||
cwd=staging_dir,
|
||||
text=True,
|
||||
)
|
||||
try:
|
||||
pack_output = json.loads(stdout)
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RuntimeError("Failed to parse npm pack output.") from exc
|
||||
|
||||
if not pack_output:
|
||||
raise RuntimeError("npm pack did not produce an output tarball.")
|
||||
|
||||
tarball_name = pack_output[0].get("filename") or pack_output[0].get("name")
|
||||
if not tarball_name:
|
||||
raise RuntimeError("Unable to determine npm pack output filename.")
|
||||
|
||||
tarball_path = pack_dir / tarball_name
|
||||
if not tarball_path.exists():
|
||||
raise RuntimeError(f"Expected npm pack output not found: {tarball_path}")
|
||||
|
||||
shutil.move(str(tarball_path), output_path)
|
||||
|
||||
return output_path
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
sys.exit(main())
|
||||
383
codex-cli/scripts/install_native_deps.py
Executable file
383
codex-cli/scripts/install_native_deps.py
Executable file
@@ -0,0 +1,383 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tarfile
|
||||
import tempfile
|
||||
import zipfile
|
||||
from dataclasses import dataclass
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from pathlib import Path
|
||||
from typing import Iterable, Sequence
|
||||
from urllib.parse import urlparse
|
||||
from urllib.request import urlopen
|
||||
|
||||
SCRIPT_DIR = Path(__file__).resolve().parent
|
||||
CODEX_CLI_ROOT = SCRIPT_DIR.parent
|
||||
DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351" # rust-v0.40.0
|
||||
VENDOR_DIR_NAME = "vendor"
|
||||
RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
|
||||
BINARY_TARGETS = (
|
||||
"x86_64-unknown-linux-musl",
|
||||
"aarch64-unknown-linux-musl",
|
||||
"x86_64-apple-darwin",
|
||||
"aarch64-apple-darwin",
|
||||
"x86_64-pc-windows-msvc",
|
||||
"aarch64-pc-windows-msvc",
|
||||
)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class BinaryComponent:
|
||||
artifact_prefix: str # matches the artifact filename prefix (e.g. codex-<target>.zst)
|
||||
dest_dir: str # directory under vendor/<target>/ where the binary is installed
|
||||
binary_basename: str # executable name inside dest_dir (before optional .exe)
|
||||
|
||||
|
||||
BINARY_COMPONENTS = {
|
||||
"codex": BinaryComponent(
|
||||
artifact_prefix="codex",
|
||||
dest_dir="codex",
|
||||
binary_basename="codex",
|
||||
),
|
||||
"codex-responses-api-proxy": BinaryComponent(
|
||||
artifact_prefix="codex-responses-api-proxy",
|
||||
dest_dir="codex-responses-api-proxy",
|
||||
binary_basename="codex-responses-api-proxy",
|
||||
),
|
||||
}
|
||||
|
||||
RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
|
||||
("x86_64-unknown-linux-musl", "linux-x86_64"),
|
||||
("aarch64-unknown-linux-musl", "linux-aarch64"),
|
||||
("x86_64-apple-darwin", "macos-x86_64"),
|
||||
("aarch64-apple-darwin", "macos-aarch64"),
|
||||
("x86_64-pc-windows-msvc", "windows-x86_64"),
|
||||
("aarch64-pc-windows-msvc", "windows-aarch64"),
|
||||
]
|
||||
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
|
||||
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description="Install native Codex binaries.")
|
||||
parser.add_argument(
|
||||
"--workflow-url",
|
||||
help=(
|
||||
"GitHub Actions workflow URL that produced the artifacts. Defaults to a "
|
||||
"known good run when omitted."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--component",
|
||||
dest="components",
|
||||
action="append",
|
||||
choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
|
||||
help=(
|
||||
"Limit installation to the specified components."
|
||||
" May be repeated. Defaults to 'codex' and 'rg'."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"root",
|
||||
nargs="?",
|
||||
type=Path,
|
||||
help=(
|
||||
"Directory containing package.json for the staged package. If omitted, the "
|
||||
"repository checkout is used."
|
||||
),
|
||||
)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> int:
|
||||
args = parse_args()
|
||||
|
||||
codex_cli_root = (args.root or CODEX_CLI_ROOT).resolve()
|
||||
vendor_dir = codex_cli_root / VENDOR_DIR_NAME
|
||||
vendor_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
components = args.components or ["codex", "rg"]
|
||||
|
||||
workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
|
||||
if not workflow_url:
|
||||
workflow_url = DEFAULT_WORKFLOW_URL
|
||||
|
||||
workflow_id = workflow_url.rstrip("/").split("/")[-1]
|
||||
print(f"Downloading native artifacts from workflow {workflow_id}...")
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
|
||||
artifacts_dir = Path(artifacts_dir_str)
|
||||
_download_artifacts(workflow_id, artifacts_dir)
|
||||
install_binary_components(
|
||||
artifacts_dir,
|
||||
vendor_dir,
|
||||
BINARY_TARGETS,
|
||||
[name for name in components if name in BINARY_COMPONENTS],
|
||||
)
|
||||
|
||||
if "rg" in components:
|
||||
print("Fetching ripgrep binaries...")
|
||||
fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
|
||||
|
||||
print(f"Installed native dependencies into {vendor_dir}")
|
||||
return 0
|
||||
|
||||
|
||||
def fetch_rg(
|
||||
vendor_dir: Path,
|
||||
targets: Sequence[str] | None = None,
|
||||
*,
|
||||
manifest_path: Path,
|
||||
) -> list[Path]:
|
||||
"""Download ripgrep binaries described by the DotSlash manifest."""
|
||||
|
||||
if targets is None:
|
||||
targets = DEFAULT_RG_TARGETS
|
||||
|
||||
if not manifest_path.exists():
|
||||
raise FileNotFoundError(f"DotSlash manifest not found: {manifest_path}")
|
||||
|
||||
manifest = _load_manifest(manifest_path)
|
||||
platforms = manifest.get("platforms", {})
|
||||
|
||||
vendor_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
targets = list(targets)
|
||||
if not targets:
|
||||
return []
|
||||
|
||||
task_configs: list[tuple[str, str, dict]] = []
|
||||
for target in targets:
|
||||
platform_key = RG_TARGET_TO_PLATFORM.get(target)
|
||||
if platform_key is None:
|
||||
raise ValueError(f"Unsupported ripgrep target '{target}'.")
|
||||
|
||||
platform_info = platforms.get(platform_key)
|
||||
if platform_info is None:
|
||||
raise RuntimeError(f"Platform '{platform_key}' not found in manifest {manifest_path}.")
|
||||
|
||||
task_configs.append((target, platform_key, platform_info))
|
||||
|
||||
results: dict[str, Path] = {}
|
||||
max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1)))
|
||||
|
||||
print("Installing ripgrep binaries for targets: " + ", ".join(targets))
|
||||
|
||||
with ThreadPoolExecutor(max_workers=max_workers) as executor:
|
||||
future_map = {
|
||||
executor.submit(
|
||||
_fetch_single_rg,
|
||||
vendor_dir,
|
||||
target,
|
||||
platform_key,
|
||||
platform_info,
|
||||
manifest_path,
|
||||
): target
|
||||
for target, platform_key, platform_info in task_configs
|
||||
}
|
||||
|
||||
for future in as_completed(future_map):
|
||||
target = future_map[future]
|
||||
results[target] = future.result()
|
||||
print(f" installed ripgrep for {target}")
|
||||
|
||||
return [results[target] for target in targets]
|
||||
|
||||
|
||||
def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
|
||||
cmd = [
|
||||
"gh",
|
||||
"run",
|
||||
"download",
|
||||
"--dir",
|
||||
str(dest_dir),
|
||||
"--repo",
|
||||
"openai/codex",
|
||||
workflow_id,
|
||||
]
|
||||
subprocess.check_call(cmd)
|
||||
|
||||
|
||||
def install_binary_components(
|
||||
artifacts_dir: Path,
|
||||
vendor_dir: Path,
|
||||
targets: Iterable[str],
|
||||
component_names: Sequence[str],
|
||||
) -> None:
|
||||
selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS]
|
||||
if not selected_components:
|
||||
return
|
||||
|
||||
targets = list(targets)
|
||||
if not targets:
|
||||
return
|
||||
|
||||
for component in selected_components:
|
||||
print(
|
||||
f"Installing {component.binary_basename} binaries for targets: "
|
||||
+ ", ".join(targets)
|
||||
)
|
||||
max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
|
||||
with ThreadPoolExecutor(max_workers=max_workers) as executor:
|
||||
futures = {
|
||||
executor.submit(
|
||||
_install_single_binary,
|
||||
artifacts_dir,
|
||||
vendor_dir,
|
||||
target,
|
||||
component,
|
||||
): target
|
||||
for target in targets
|
||||
}
|
||||
for future in as_completed(futures):
|
||||
installed_path = future.result()
|
||||
print(f" installed {installed_path}")
|
||||
|
||||
|
||||
def _install_single_binary(
|
||||
artifacts_dir: Path,
|
||||
vendor_dir: Path,
|
||||
target: str,
|
||||
component: BinaryComponent,
|
||||
) -> Path:
|
||||
artifact_subdir = artifacts_dir / target
|
||||
archive_name = _archive_name_for_target(component.artifact_prefix, target)
|
||||
archive_path = artifact_subdir / archive_name
|
||||
if not archive_path.exists():
|
||||
raise FileNotFoundError(f"Expected artifact not found: {archive_path}")
|
||||
|
||||
dest_dir = vendor_dir / target / component.dest_dir
|
||||
dest_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
binary_name = (
|
||||
f"{component.binary_basename}.exe" if "windows" in target else component.binary_basename
|
||||
)
|
||||
dest = dest_dir / binary_name
|
||||
dest.unlink(missing_ok=True)
|
||||
extract_archive(archive_path, "zst", None, dest)
|
||||
if "windows" not in target:
|
||||
dest.chmod(0o755)
|
||||
return dest
|
||||
|
||||
|
||||
def _archive_name_for_target(artifact_prefix: str, target: str) -> str:
|
||||
if "windows" in target:
|
||||
return f"{artifact_prefix}-{target}.exe.zst"
|
||||
return f"{artifact_prefix}-{target}.zst"
|
||||
|
||||
|
||||
def _fetch_single_rg(
|
||||
vendor_dir: Path,
|
||||
target: str,
|
||||
platform_key: str,
|
||||
platform_info: dict,
|
||||
manifest_path: Path,
|
||||
) -> Path:
|
||||
providers = platform_info.get("providers", [])
|
||||
if not providers:
|
||||
raise RuntimeError(f"No providers listed for platform '{platform_key}' in {manifest_path}.")
|
||||
|
||||
url = providers[0]["url"]
|
||||
archive_format = platform_info.get("format", "zst")
|
||||
archive_member = platform_info.get("path")
|
||||
|
||||
dest_dir = vendor_dir / target / "path"
|
||||
dest_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
is_windows = platform_key.startswith("win")
|
||||
binary_name = "rg.exe" if is_windows else "rg"
|
||||
dest = dest_dir / binary_name
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmp_dir_str:
|
||||
tmp_dir = Path(tmp_dir_str)
|
||||
archive_filename = os.path.basename(urlparse(url).path)
|
||||
download_path = tmp_dir / archive_filename
|
||||
_download_file(url, download_path)
|
||||
|
||||
dest.unlink(missing_ok=True)
|
||||
extract_archive(download_path, archive_format, archive_member, dest)
|
||||
|
||||
if not is_windows:
|
||||
dest.chmod(0o755)
|
||||
|
||||
return dest
|
||||
|
||||
|
||||
def _download_file(url: str, dest: Path) -> None:
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
with urlopen(url) as response, open(dest, "wb") as out:
|
||||
shutil.copyfileobj(response, out)
|
||||
|
||||
|
||||
def extract_archive(
|
||||
archive_path: Path,
|
||||
archive_format: str,
|
||||
archive_member: str | None,
|
||||
dest: Path,
|
||||
) -> None:
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if archive_format == "zst":
|
||||
output_path = archive_path.parent / dest.name
|
||||
subprocess.check_call(
|
||||
["zstd", "-f", "-d", str(archive_path), "-o", str(output_path)]
|
||||
)
|
||||
shutil.move(str(output_path), dest)
|
||||
return
|
||||
|
||||
if archive_format == "tar.gz":
|
||||
if not archive_member:
|
||||
raise RuntimeError("Missing 'path' for tar.gz archive in DotSlash manifest.")
|
||||
with tarfile.open(archive_path, "r:gz") as tar:
|
||||
try:
|
||||
member = tar.getmember(archive_member)
|
||||
except KeyError as exc:
|
||||
raise RuntimeError(
|
||||
f"Entry '{archive_member}' not found in archive {archive_path}."
|
||||
) from exc
|
||||
tar.extract(member, path=archive_path.parent, filter="data")
|
||||
extracted = archive_path.parent / archive_member
|
||||
shutil.move(str(extracted), dest)
|
||||
return
|
||||
|
||||
if archive_format == "zip":
|
||||
if not archive_member:
|
||||
raise RuntimeError("Missing 'path' for zip archive in DotSlash manifest.")
|
||||
with zipfile.ZipFile(archive_path) as archive:
|
||||
try:
|
||||
with archive.open(archive_member) as src, open(dest, "wb") as out:
|
||||
shutil.copyfileobj(src, out)
|
||||
except KeyError as exc:
|
||||
raise RuntimeError(
|
||||
f"Entry '{archive_member}' not found in archive {archive_path}."
|
||||
) from exc
|
||||
return
|
||||
|
||||
raise RuntimeError(f"Unsupported archive format '{archive_format}'.")
|
||||
|
||||
|
||||
def _load_manifest(manifest_path: Path) -> dict:
|
||||
cmd = ["dotslash", "--", "parse", str(manifest_path)]
|
||||
stdout = subprocess.check_output(cmd, text=True)
|
||||
try:
|
||||
manifest = json.loads(stdout)
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RuntimeError(f"Invalid DotSlash manifest output from {manifest_path}.") from exc
|
||||
|
||||
if not isinstance(manifest, dict):
|
||||
raise RuntimeError(
|
||||
f"Unexpected DotSlash manifest structure for {manifest_path}: {type(manifest)!r}"
|
||||
)
|
||||
|
||||
return manifest
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
sys.exit(main())
|
||||
@@ -1,94 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Install native runtime dependencies for codex-cli.
|
||||
#
|
||||
# Usage
|
||||
# install_native_deps.sh [--workflow-url URL] [CODEX_CLI_ROOT]
|
||||
#
|
||||
# The optional RELEASE_ROOT is the path that contains package.json. Omitting
|
||||
# it installs the binaries into the repository's own bin/ folder to support
|
||||
# local development.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# ------------------
|
||||
# Parse arguments
|
||||
# ------------------
|
||||
|
||||
CODEX_CLI_ROOT=""
|
||||
|
||||
# Until we start publishing stable GitHub releases, we have to grab the binaries
|
||||
# from the GitHub Action that created them. Update the URL below to point to the
|
||||
# appropriate workflow run:
|
||||
WORKFLOW_URL="https://github.com/openai/codex/actions/runs/17417194663" # rust-v0.28.0
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--workflow-url)
|
||||
shift || { echo "--workflow-url requires an argument"; exit 1; }
|
||||
if [ -n "$1" ]; then
|
||||
WORKFLOW_URL="$1"
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
if [[ -z "$CODEX_CLI_ROOT" ]]; then
|
||||
CODEX_CLI_ROOT="$1"
|
||||
else
|
||||
echo "Unexpected argument: $1" >&2
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Determine where the binaries should be installed.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -n "$CODEX_CLI_ROOT" ]; then
|
||||
# The caller supplied a release root directory.
|
||||
BIN_DIR="$CODEX_CLI_ROOT/bin"
|
||||
else
|
||||
# No argument; fall back to the repo’s own bin directory.
|
||||
# Resolve the path of this script, then walk up to the repo root.
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
BIN_DIR="$CODEX_CLI_ROOT/bin"
|
||||
fi
|
||||
|
||||
# Make sure the destination directory exists.
|
||||
mkdir -p "$BIN_DIR"
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Download and decompress the artifacts from the GitHub Actions workflow.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
WORKFLOW_ID="${WORKFLOW_URL##*/}"
|
||||
|
||||
ARTIFACTS_DIR="$(mktemp -d)"
|
||||
trap 'rm -rf "$ARTIFACTS_DIR"' EXIT
|
||||
|
||||
# NB: The GitHub CLI `gh` must be installed and authenticated.
|
||||
gh run download --dir "$ARTIFACTS_DIR" --repo openai/codex "$WORKFLOW_ID"
|
||||
|
||||
# x64 Linux
|
||||
zstd -d "$ARTIFACTS_DIR/x86_64-unknown-linux-musl/codex-x86_64-unknown-linux-musl.zst" \
|
||||
-o "$BIN_DIR/codex-x86_64-unknown-linux-musl"
|
||||
# ARM64 Linux
|
||||
zstd -d "$ARTIFACTS_DIR/aarch64-unknown-linux-musl/codex-aarch64-unknown-linux-musl.zst" \
|
||||
-o "$BIN_DIR/codex-aarch64-unknown-linux-musl"
|
||||
# x64 macOS
|
||||
zstd -d "$ARTIFACTS_DIR/x86_64-apple-darwin/codex-x86_64-apple-darwin.zst" \
|
||||
-o "$BIN_DIR/codex-x86_64-apple-darwin"
|
||||
# ARM64 macOS
|
||||
zstd -d "$ARTIFACTS_DIR/aarch64-apple-darwin/codex-aarch64-apple-darwin.zst" \
|
||||
-o "$BIN_DIR/codex-aarch64-apple-darwin"
|
||||
# x64 Windows
|
||||
zstd -d "$ARTIFACTS_DIR/x86_64-pc-windows-msvc/codex-x86_64-pc-windows-msvc.exe.zst" \
|
||||
-o "$BIN_DIR/codex-x86_64-pc-windows-msvc.exe"
|
||||
# ARM64 Windows
|
||||
zstd -d "$ARTIFACTS_DIR/aarch64-pc-windows-msvc/codex-aarch64-pc-windows-msvc.exe.zst" \
|
||||
-o "$BIN_DIR/codex-aarch64-pc-windows-msvc.exe"
|
||||
|
||||
echo "Installed native dependencies into $BIN_DIR"
|
||||
@@ -1,120 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# -----------------------------------------------------------------------------
|
||||
# stage_release.sh
|
||||
# -----------------------------------------------------------------------------
|
||||
# Stages an npm release for @openai/codex.
|
||||
#
|
||||
# Usage:
|
||||
#
|
||||
# --tmp <dir> : Use <dir> instead of a freshly created temp directory.
|
||||
# -h|--help : Print usage.
|
||||
#
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Helper - usage / flag parsing
|
||||
|
||||
usage() {
|
||||
cat <<EOF
|
||||
Usage: $(basename "$0") [--tmp DIR] [--version VERSION]
|
||||
|
||||
Options
|
||||
--tmp DIR Use DIR to stage the release (defaults to a fresh mktemp dir)
|
||||
--version Specify the version to release (defaults to a timestamp-based version)
|
||||
-h, --help Show this help
|
||||
|
||||
Legacy positional argument: the first non-flag argument is still interpreted
|
||||
as the temporary directory (for backwards compatibility) but is deprecated.
|
||||
EOF
|
||||
exit "${1:-0}"
|
||||
}
|
||||
|
||||
TMPDIR=""
|
||||
# Default to a timestamp-based version (keep same scheme as before)
|
||||
VERSION="$(printf '0.1.%d' "$(date +%y%m%d%H%M)")"
|
||||
WORKFLOW_URL=""
|
||||
|
||||
# Manual flag parser - Bash getopts does not handle GNU long options well.
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--tmp)
|
||||
shift || { echo "--tmp requires an argument"; usage 1; }
|
||||
TMPDIR="$1"
|
||||
;;
|
||||
--tmp=*)
|
||||
TMPDIR="${1#*=}"
|
||||
;;
|
||||
--version)
|
||||
shift || { echo "--version requires an argument"; usage 1; }
|
||||
VERSION="$1"
|
||||
;;
|
||||
--workflow-url)
|
||||
shift || { echo "--workflow-url requires an argument"; exit 1; }
|
||||
WORKFLOW_URL="$1"
|
||||
;;
|
||||
-h|--help)
|
||||
usage 0
|
||||
;;
|
||||
--*)
|
||||
echo "Unknown option: $1" >&2
|
||||
usage 1
|
||||
;;
|
||||
*)
|
||||
echo "Unexpected extra argument: $1" >&2
|
||||
usage 1
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# Fallback when the caller did not specify a directory.
|
||||
# If no directory was specified create a fresh temporary one.
|
||||
if [[ -z "$TMPDIR" ]]; then
|
||||
TMPDIR="$(mktemp -d)"
|
||||
fi
|
||||
|
||||
# Ensure the directory exists, then resolve to an absolute path.
|
||||
mkdir -p "$TMPDIR"
|
||||
TMPDIR="$(cd "$TMPDIR" && pwd)"
|
||||
|
||||
# Main build logic
|
||||
|
||||
echo "Staging release in $TMPDIR"
|
||||
|
||||
# The script lives in codex-cli/scripts/ - change into codex-cli root so that
|
||||
# relative paths keep working.
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
|
||||
pushd "$CODEX_CLI_ROOT" >/dev/null
|
||||
|
||||
# 1. Build the JS artifacts ---------------------------------------------------
|
||||
|
||||
# Paths inside the staged package
|
||||
mkdir -p "$TMPDIR/bin"
|
||||
|
||||
cp -r bin/codex.js "$TMPDIR/bin/codex.js"
|
||||
cp ../README.md "$TMPDIR" || true # README is one level up - ignore if missing
|
||||
|
||||
# Modify package.json - bump version and optionally add the native directory to
|
||||
# the files array so that the binaries are published to npm.
|
||||
|
||||
jq --arg version "$VERSION" \
|
||||
'.version = $version' \
|
||||
package.json > "$TMPDIR/package.json"
|
||||
|
||||
# 2. Native runtime deps (sandbox plus optional Rust binaries)
|
||||
|
||||
./scripts/install_native_deps.sh --workflow-url "$WORKFLOW_URL" "$TMPDIR"
|
||||
|
||||
popd >/dev/null
|
||||
|
||||
echo "Staged version $VERSION for release in $TMPDIR"
|
||||
|
||||
echo "Verify the CLI:"
|
||||
echo " node ${TMPDIR}/bin/codex.js --version"
|
||||
echo " node ${TMPDIR}/bin/codex.js --help"
|
||||
|
||||
# Print final hint for convenience
|
||||
echo "Next: cd \"$TMPDIR\" && npm publish"
|
||||
@@ -1,70 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="""Stage a release for the npm module.
|
||||
|
||||
Run this after the GitHub Release has been created and use
|
||||
`--release-version` to specify the version to release.
|
||||
|
||||
Optionally pass `--tmp` to control the temporary staging directory that will be
|
||||
forwarded to stage_release.sh.
|
||||
"""
|
||||
)
|
||||
parser.add_argument(
|
||||
"--release-version", required=True, help="Version to release, e.g., 0.3.0"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tmp",
|
||||
help="Optional path to stage the npm package; forwarded to stage_release.sh",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
version = args.release_version
|
||||
|
||||
gh_run = subprocess.run(
|
||||
[
|
||||
"gh",
|
||||
"run",
|
||||
"list",
|
||||
"--branch",
|
||||
f"rust-v{version}",
|
||||
"--json",
|
||||
"workflowName,url,headSha",
|
||||
"--jq",
|
||||
'first(.[] | select(.workflowName == "rust-release"))',
|
||||
],
|
||||
stdout=subprocess.PIPE,
|
||||
check=True,
|
||||
)
|
||||
gh_run.check_returncode()
|
||||
workflow = json.loads(gh_run.stdout)
|
||||
sha = workflow["headSha"]
|
||||
|
||||
print(f"should `git checkout {sha}`")
|
||||
|
||||
current_dir = Path(__file__).parent.resolve()
|
||||
cmd = [
|
||||
str(current_dir / "stage_release.sh"),
|
||||
"--version",
|
||||
version,
|
||||
"--workflow-url",
|
||||
workflow["url"],
|
||||
]
|
||||
if args.tmp:
|
||||
cmd.extend(["--tmp", args.tmp])
|
||||
|
||||
stage_release = subprocess.run(cmd)
|
||||
stage_release.check_returncode()
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
2567
codex-rs/Cargo.lock
generated
2567
codex-rs/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,23 +1,38 @@
|
||||
[workspace]
|
||||
members = [
|
||||
"backend-client",
|
||||
"ansi-escape",
|
||||
"app-server",
|
||||
"app-server-protocol",
|
||||
"apply-patch",
|
||||
"arg0",
|
||||
"codex-backend-openapi-models",
|
||||
"cloud-tasks",
|
||||
"cloud-tasks-client",
|
||||
"cli",
|
||||
"common",
|
||||
"core",
|
||||
"exec",
|
||||
"execpolicy",
|
||||
"file-search",
|
||||
"git-tooling",
|
||||
"linux-sandbox",
|
||||
"login",
|
||||
"mcp-client",
|
||||
"mcp-server",
|
||||
"mcp-types",
|
||||
"ollama",
|
||||
"process-hardening",
|
||||
"protocol",
|
||||
"protocol-ts",
|
||||
"rmcp-client",
|
||||
"responses-api-proxy",
|
||||
"otel",
|
||||
"tui",
|
||||
"git-apply",
|
||||
"utils/json-to-toml",
|
||||
"utils/readiness",
|
||||
"utils/string",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
@@ -29,14 +44,198 @@ version = "0.0.0"
|
||||
# edition.
|
||||
edition = "2024"
|
||||
|
||||
[workspace.dependencies]
|
||||
# Internal
|
||||
app_test_support = { path = "app-server/tests/common" }
|
||||
codex-ansi-escape = { path = "ansi-escape" }
|
||||
codex-app-server = { path = "app-server" }
|
||||
codex-app-server-protocol = { path = "app-server-protocol" }
|
||||
codex-apply-patch = { path = "apply-patch" }
|
||||
codex-arg0 = { path = "arg0" }
|
||||
codex-chatgpt = { path = "chatgpt" }
|
||||
codex-common = { path = "common" }
|
||||
codex-core = { path = "core" }
|
||||
codex-exec = { path = "exec" }
|
||||
codex-file-search = { path = "file-search" }
|
||||
codex-git-tooling = { path = "git-tooling" }
|
||||
codex-linux-sandbox = { path = "linux-sandbox" }
|
||||
codex-login = { path = "login" }
|
||||
codex-mcp-client = { path = "mcp-client" }
|
||||
codex-mcp-server = { path = "mcp-server" }
|
||||
codex-ollama = { path = "ollama" }
|
||||
codex-otel = { path = "otel" }
|
||||
codex-process-hardening = { path = "process-hardening" }
|
||||
codex-protocol = { path = "protocol" }
|
||||
codex-protocol-ts = { path = "protocol-ts" }
|
||||
codex-responses-api-proxy = { path = "responses-api-proxy" }
|
||||
codex-rmcp-client = { path = "rmcp-client" }
|
||||
codex-tui = { path = "tui" }
|
||||
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
|
||||
codex-utils-readiness = { path = "utils/readiness" }
|
||||
codex-utils-string = { path = "utils/string" }
|
||||
core_test_support = { path = "core/tests/common" }
|
||||
mcp-types = { path = "mcp-types" }
|
||||
mcp_test_support = { path = "mcp-server/tests/common" }
|
||||
|
||||
# External
|
||||
allocative = "0.3.3"
ansi-to-tui = "7.0.0"
anyhow = "1"
arboard = "3"
askama = "0.12"
assert_matches = "1.5.0"
assert_cmd = "2"
async-channel = "2.3.1"
async-stream = "0.3.6"
async-trait = "0.1.89"
axum = { version = "0.8", default-features = false }
base64 = "0.22.1"
bytes = "1.10.1"
chrono = "0.4.42"
clap = "4"
clap_complete = "4"
color-eyre = "0.6.3"
crossterm = "0.28.1"
ctor = "0.5.0"
derive_more = "2"
diffy = "0.4.2"
dirs = "6"
dotenvy = "0.15.7"
dunce = "1.0.4"
env-flags = "0.1.1"
env_logger = "0.11.5"
escargot = "0.5"
eventsource-stream = "0.2.3"
futures = { version = "0.3", default-features = false }
icu_decimal = "2.0.0"
icu_locale_core = "2.0.0"
ignore = "0.4.23"
image = { version = "^0.25.8", default-features = false }
indexmap = "2.6.0"
insta = "1.43.2"
itertools = "0.14.0"
keyring = "3.6"
landlock = "0.4.1"
lazy_static = "1"
libc = "0.2.175"
log = "0.4"
maplit = "1.0.2"
mime_guess = "2.0.5"
multimap = "0.10.0"
notify = "8.2.0"
nucleo-matcher = "0.3.1"
openssl-sys = "*"
opentelemetry = "0.30.0"
opentelemetry-appender-tracing = "0.30.0"
opentelemetry-otlp = "0.30.0"
opentelemetry-semantic-conventions = "0.30.0"
opentelemetry_sdk = "0.30.0"
os_info = "3.12.0"
owo-colors = "4.2.0"
paste = "1.0.15"
path-absolutize = "3.1.1"
path-clean = "1.0.1"
pathdiff = "0.2"
portable-pty = "0.9.0"
predicates = "3"
pretty_assertions = "1.4.1"
pulldown-cmark = "0.10"
rand = "0.9"
ratatui = "0.29.0"
regex-lite = "0.1.7"
reqwest = "0.12"
rmcp = { version = "0.8.0", default-features = false }
schemars = "0.8.22"
seccompiler = "0.5.0"
serde = "1"
serde_json = "1"
serde_with = "3.14"
serial_test = "3.2.0"
sha1 = "0.10.6"
sha2 = "0.10"
shlex = "1.3.0"
similar = "2.7.0"
starlark = "0.13.0"
strum = "0.27.2"
strum_macros = "0.27.2"
supports-color = "3.0.2"
sys-locale = "0.3.2"
tempfile = "3.23.0"
textwrap = "0.16.2"
thiserror = "2.0.16"
time = "0.3"
tiny_http = "0.12"
tokio = "1"
tokio-stream = "0.1.17"
tokio-test = "0.4"
tokio-util = "0.7.16"
toml = "0.9.5"
toml_edit = "0.23.4"
tonic = "0.13.1"
tracing = "0.1.41"
tracing-appender = "0.2.3"
tracing-subscriber = "0.3.20"
tracing-test = "0.2.5"
tree-sitter = "0.25.10"
tree-sitter-bash = "0.25"
tree-sitter-highlight = "0.25.10"
ts-rs = "11"
unicode-segmentation = "1.12.0"
unicode-width = "0.2"
url = "2"
urlencoding = "2.1"
uuid = "1"
vt100 = "0.16.2"
walkdir = "2.5.0"
webbrowser = "1.0"
which = "6"
wildmatch = "2.5.0"
wiremock = "0.6"
zeroize = "1.8.1"

[workspace.lints]
rust = {}

[workspace.lints.clippy]
expect_used = "deny"
identity_op = "deny"
manual_clamp = "deny"
manual_filter = "deny"
manual_find = "deny"
manual_flatten = "deny"
manual_map = "deny"
manual_memcpy = "deny"
manual_non_exhaustive = "deny"
manual_ok_or = "deny"
manual_range_contains = "deny"
manual_retain = "deny"
manual_strip = "deny"
manual_try_fold = "deny"
manual_unwrap_or = "deny"
needless_borrow = "deny"
needless_borrowed_reference = "deny"
needless_collect = "deny"
needless_late_init = "deny"
needless_option_as_deref = "deny"
needless_question_mark = "deny"
needless_update = "deny"
redundant_clone = "deny"
redundant_closure = "deny"
redundant_closure_for_method_calls = "deny"
redundant_static_lifetimes = "deny"
trivially_copy_pass_by_ref = "deny"
uninlined_format_args = "deny"
unnecessary_filter_map = "deny"
unnecessary_lazy_evaluations = "deny"
unnecessary_sort_by = "deny"
unnecessary_to_owned = "deny"
unwrap_used = "deny"

# cargo-shear cannot see the platform-specific openssl-sys usage, so we
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
ignored = ["openssl-sys", "codex-utils-readiness"]

[profile.release]
lto = "fat"
# Because we bundle some of these executables with the TypeScript CLI, we
@@ -47,5 +246,9 @@ strip = "symbols"
codegen-units = 1

[patch.crates-io]
# Uncomment to debug local changes.
# ratatui = { path = "../../ratatui" }
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }

# Uncomment to debug local changes.
# rmcp = { path = "../../rust-sdk/crates/rmcp" }
@@ -4,18 +4,18 @@ We provide Codex CLI as a standalone, native executable to ensure a zero-depende

## Installing Codex

Today, the easiest way to install Codex is via `npm`, though we plan to publish Codex to other package managers soon.
Today, the easiest way to install Codex is via `npm`:

```shell
npm i -g @openai/codex@native
npm i -g @openai/codex
codex
```

You can also download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).
You can also install via Homebrew (`brew install codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).

## What's new in the Rust CLI

While we are [working to close the gap between the TypeScript and Rust implementations of Codex CLI](https://github.com/openai/codex/issues/1262), note that the Rust CLI has a number of features that the TypeScript CLI does not!
The Rust implementation is now the maintained Codex CLI and serves as the default experience. It includes a number of features that the legacy TypeScript CLI never supported.

### Config

@@ -23,19 +23,27 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses

### Model Context Protocol Support

Codex CLI functions as an MCP client that can connect to MCP servers on startup. See the [`mcp_servers`](../docs/config.md#mcp_servers) section in the configuration documentation for details.
#### MCP client

It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp`. Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:
Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#mcp_servers) for details.

#### MCP server (experimental)

Codex can be launched as an MCP _server_ by running `codex mcp-server`. This allows _other_ MCP clients to use Codex as a tool for another agent.

Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:

```shell
npx @modelcontextprotocol/inspector codex mcp
npx @modelcontextprotocol/inspector codex mcp-server
```

Use `codex mcp` to add/list/get/remove MCP server launchers defined in `config.toml`, and `codex mcp-server` to run the MCP server directly.

### Notifications

You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS.

### `codex exec` to run Codex programmatially/non-interactively
### `codex exec` to run Codex programmatically/non-interactively

To run Codex non-interactively, run `codex exec PROMPT` (you can also pass the prompt via `stdin`) and Codex will work on your task until it decides that it is done and exits. Output is printed to the terminal directly. You can set the `RUST_LOG` environment variable to see more about what's going on.

@@ -69,9 +77,13 @@ To test to see what happens when a command is run under the sandbox provided by

```
# macOS
codex debug seatbelt [--full-auto] [COMMAND]...
codex sandbox macos [--full-auto] [COMMAND]...

# Linux
codex sandbox linux [--full-auto] [COMMAND]...

# Legacy aliases
codex debug seatbelt [--full-auto] [COMMAND]...
codex debug landlock [--full-auto] [COMMAND]...
```

@@ -8,9 +8,9 @@ name = "codex_ansi_escape"
path = "src/lib.rs"

[dependencies]
ansi-to-tui = "7.0.0"
ratatui = { version = "0.29.0", features = [
ansi-to-tui = { workspace = true }
ratatui = { workspace = true, features = [
    "unstable-rendered-line-info",
    "unstable-widget-ref",
] }
tracing = { version = "0.1.41", features = ["log"] }
tracing = { workspace = true, features = ["log"] }

@@ -3,11 +3,30 @@ use ansi_to_tui::IntoText;
use ratatui::text::Line;
use ratatui::text::Text;

// Expand tabs in a best-effort way for transcript rendering.
// Tabs can interact poorly with left-gutter prefixes in our TUI and CLI
// transcript views (e.g., `nl` separates line numbers from content with a tab).
// Replacing tabs with spaces avoids odd visual artifacts without changing
// semantics for our use cases.
fn expand_tabs(s: &str) -> std::borrow::Cow<'_, str> {
    if s.contains('\t') {
        // Keep it simple: replace each tab with 4 spaces.
        // We do not try to align to tab stops since most usages (like `nl`)
        // look acceptable with a fixed substitution and this avoids stateful math
        // across spans.
        std::borrow::Cow::Owned(s.replace('\t', "    "))
    } else {
        std::borrow::Cow::Borrowed(s)
    }
}

/// This function should be used when the contents of `s` are expected to match
/// a single line. If multiple lines are found, a warning is logged and only the
/// first line is returned.
pub fn ansi_escape_line(s: &str) -> Line<'static> {
    let text = ansi_escape(s);
    // Normalize tabs to spaces to avoid odd gutter collisions in transcript mode.
    let s = expand_tabs(s);
    let text = ansi_escape(&s);
    match text.lines.as_slice() {
        [] => "".into(),
        [only] => only.clone(),

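For context, here is a minimal standalone sketch of the tab-expansion behavior introduced above (a reproduction for illustration, not the crate's public API; the `Cow` avoids allocating when no tab is present):

```rust
use std::borrow::Cow;

// Standalone reproduction of the expand_tabs logic above: fixed 4-space
// substitution, borrowing when the input contains no tabs.
fn expand_tabs(s: &str) -> Cow<'_, str> {
    if s.contains('\t') {
        Cow::Owned(s.replace('\t', "    "))
    } else {
        Cow::Borrowed(s)
    }
}

fn main() {
    // `nl`-style output: line number, tab, content.
    assert_eq!(expand_tabs("1\tfn main() {}"), "1    fn main() {}");
    // No tab: the borrowed branch is taken and nothing is allocated.
    assert!(matches!(expand_tabs("plain"), Cow::Borrowed(_)));
}
```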
codex-rs/app-server-protocol/Cargo.toml (new file, +24)
@@ -0,0 +1,24 @@
[package]
edition = "2024"
name = "codex-app-server-protocol"
version = { workspace = true }

[lib]
name = "codex_app_server_protocol"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
codex-protocol = { workspace = true }
paste = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
strum_macros = { workspace = true }
ts-rs = { workspace = true }
uuid = { workspace = true, features = ["serde", "v7"] }

[dev-dependencies]
anyhow = { workspace = true }
pretty_assertions = { workspace = true }
codex-rs/app-server-protocol/src/jsonrpc_lite.rs (new file, +67)
@@ -0,0 +1,67 @@
//! We do not do true JSON-RPC 2.0, as we neither send nor expect the
//! "jsonrpc": "2.0" field.

use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;

pub const JSONRPC_VERSION: &str = "2.0";

#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Hash, Eq, TS)]
#[serde(untagged)]
pub enum RequestId {
    String(String),
    #[ts(type = "number")]
    Integer(i64),
}

pub type Result = serde_json::Value;

/// Refers to any valid JSON-RPC object that can be decoded off the wire, or encoded to be sent.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
#[serde(untagged)]
pub enum JSONRPCMessage {
    Request(JSONRPCRequest),
    Notification(JSONRPCNotification),
    Response(JSONRPCResponse),
    Error(JSONRPCError),
}

/// A request that expects a response.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
pub struct JSONRPCRequest {
    pub id: RequestId,
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub params: Option<serde_json::Value>,
}

/// A notification which does not expect a response.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
pub struct JSONRPCNotification {
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub params: Option<serde_json::Value>,
}

/// A successful (non-error) response to a request.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
pub struct JSONRPCResponse {
    pub id: RequestId,
    pub result: Result,
}

/// A response to a request that indicates an error occurred.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
pub struct JSONRPCError {
    pub error: JSONRPCErrorError,
    pub id: RequestId,
}

#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)]
pub struct JSONRPCErrorError {
    pub code: i64,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub data: Option<serde_json::Value>,
    pub message: String,
}
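A quick illustration of the wire shape these types produce (a sketch assuming the types above are in scope alongside `serde_json`; not part of the file itself). Note that, per the module comment, no `"jsonrpc": "2.0"` field is emitted:

```rust
// Sketch: serialize a request and confirm the wire shape.
use serde_json::json;

fn demo() -> serde_json::Result<()> {
    let req = JSONRPCRequest {
        id: RequestId::Integer(1),
        method: "initialize".to_string(),
        params: Some(json!({ "clientInfo": { "name": "demo", "version": "0.1.0" } })),
    };
    // The untagged RequestId serializes as a bare number; params is inlined.
    assert_eq!(
        serde_json::to_value(&req)?,
        json!({
            "id": 1,
            "method": "initialize",
            "params": { "clientInfo": { "name": "demo", "version": "0.1.0" } }
        })
    );
    Ok(())
}
```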
codex-rs/app-server-protocol/src/lib.rs (new file, +5)
@@ -0,0 +1,5 @@
mod jsonrpc_lite;
mod protocol;

pub use jsonrpc_lite::*;
pub use protocol::*;
@@ -1,58 +1,27 @@
use std::collections::HashMap;
use std::fmt::Display;
use std::path::PathBuf;

use crate::config_types::ReasoningEffort;
use crate::config_types::ReasoningSummary;
use crate::config_types::SandboxMode;
use crate::config_types::Verbosity;
use crate::protocol::AskForApproval;
use crate::protocol::EventMsg;
use crate::protocol::FileChange;
use crate::protocol::ReviewDecision;
use crate::protocol::SandboxPolicy;
use crate::protocol::TurnAbortReason;
use mcp_types::RequestId;
use crate::JSONRPCNotification;
use crate::JSONRPCRequest;
use crate::RequestId;
use codex_protocol::ConversationId;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::TurnAbortReason;
use paste::paste;
use serde::Deserialize;
use serde::Serialize;
use strum_macros::Display;
use ts_rs::TS;
use uuid::Uuid;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, TS, Hash)]
#[ts(type = "string")]
pub struct ConversationId(pub Uuid);

impl ConversationId {
    pub fn new() -> Self {
        Self(Uuid::new_v4())
    }
}

impl Default for ConversationId {
    fn default() -> Self {
        Self::new()
    }
}

impl Display for ConversationId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl From<Uuid> for ConversationId {
    fn from(value: Uuid) -> Self {
        Self(value)
    }
}

impl From<ConversationId> for Uuid {
    fn from(value: ConversationId) -> Self {
        value.0
    }
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, TS)]
#[ts(type = "string")]
pub struct GitSha(pub String);
@@ -63,98 +32,168 @@ impl GitSha {
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, TS)]
#[serde(rename_all = "lowercase")]
pub enum AuthMode {
    ApiKey,
    ChatGPT,
}

/// Request from the client to the server.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(tag = "method", rename_all = "camelCase")]
pub enum ClientRequest {
/// Generates an `enum ClientRequest` where each variant is a request that the
/// client can send to the server. Each variant has associated `params` and
/// `response` types. Also generates a `export_client_responses()` function to
/// export all response types to TypeScript.
macro_rules! client_request_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident {
                params: $(#[$params_meta:meta])* $params:ty,
                response: $response:ty,
            }
        ),* $(,)?
    ) => {
        /// Request from the client to the server.
        #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
        #[serde(tag = "method", rename_all = "camelCase")]
        pub enum ClientRequest {
            $(
                $(#[$variant_meta])*
                $variant {
                    #[serde(rename = "id")]
                    request_id: RequestId,
                    $(#[$params_meta])*
                    params: $params,
                },
            )*
        }

        pub fn export_client_responses(
            out_dir: &::std::path::Path,
        ) -> ::std::result::Result<(), ::ts_rs::ExportError> {
            $(
                <$response as ::ts_rs::TS>::export_all_to(out_dir)?;
            )*
            Ok(())
        }
    };
}

client_request_definitions! {
    Initialize {
        params: InitializeParams,
        response: InitializeResponse,
    },
    NewConversation {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: NewConversationParams,
        response: NewConversationResponse,
    },
    /// List recorded Codex conversations (rollouts) with optional pagination and search.
    ListConversations {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: ListConversationsParams,
        response: ListConversationsResponse,
    },
    /// Resume a recorded Codex conversation from a rollout file.
    ResumeConversation {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: ResumeConversationParams,
        response: ResumeConversationResponse,
    },
    ArchiveConversation {
        params: ArchiveConversationParams,
        response: ArchiveConversationResponse,
    },
    SendUserMessage {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: SendUserMessageParams,
        response: SendUserMessageResponse,
    },
    SendUserTurn {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: SendUserTurnParams,
        response: SendUserTurnResponse,
    },
    InterruptConversation {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: InterruptConversationParams,
        response: InterruptConversationResponse,
    },
    AddConversationListener {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: AddConversationListenerParams,
        response: AddConversationSubscriptionResponse,
    },
    RemoveConversationListener {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: RemoveConversationListenerParams,
        response: RemoveConversationSubscriptionResponse,
    },
    GitDiffToRemote {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: GitDiffToRemoteParams,
        response: GitDiffToRemoteResponse,
    },
    LoginApiKey {
        params: LoginApiKeyParams,
        response: LoginApiKeyResponse,
    },
    LoginChatGpt {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: LoginChatGptResponse,
    },
    CancelLoginChatGpt {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: CancelLoginChatGptParams,
        response: CancelLoginChatGptResponse,
    },
    LogoutChatGpt {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: LogoutChatGptResponse,
    },
    GetAuthStatus {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: GetAuthStatusParams,
        response: GetAuthStatusResponse,
    },
    GetUserSavedConfig {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: GetUserSavedConfigResponse,
    },
    SetDefaultModel {
        params: SetDefaultModelParams,
        response: SetDefaultModelResponse,
    },
    GetUserAgent {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: GetUserAgentResponse,
    },
    UserInfo {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: UserInfoResponse,
    },
    FuzzyFileSearch {
        params: FuzzyFileSearchParams,
        response: FuzzyFileSearchResponse,
    },
    /// Execute a command (argv vector) under the server's sandbox.
    ExecOneOffCommand {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: ExecOneOffCommandParams,
        response: ExecOneOffCommandResponse,
    },
}
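To make the macro's output concrete, here is roughly what one variant expands to (a hand-written sketch for illustration, not the actual expansion; `ClientRequestExpanded` is a hypothetical name used only here):

```rust
// Approximate expansion for the NewConversation variant: the macro stamps
// out the shared `id` field plus the per-variant params type.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(tag = "method", rename_all = "camelCase")]
pub enum ClientRequestExpanded {
    NewConversation {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: NewConversationParams,
    },
    // ... one arm per request listed in client_request_definitions! ...
}
```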
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeParams {
    pub client_info: ClientInfo,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)]
#[serde(rename_all = "camelCase")]
pub struct ClientInfo {
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    pub version: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationParams {
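For reference, the `rename_all = "camelCase"` attributes mean the wire format uses camelCase keys even though the Rust fields are snake_case. A sketch of the resulting JSON for `InitializeParams` (assumes the types above are in scope):

```rust
// Sketch: snake_case Rust fields serialize as camelCase JSON keys.
fn demo() -> serde_json::Result<()> {
    let params = InitializeParams {
        client_info: ClientInfo {
            name: "demo-client".to_string(),
            title: None, // skipped entirely thanks to skip_serializing_if
            version: "0.1.0".to_string(),
        },
    };
    assert_eq!(
        serde_json::to_value(&params)?,
        serde_json::json!({
            "clientInfo": { "name": "demo-client", "version": "0.1.0" }
        })
    );
    Ok(())
}
```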
@@ -203,6 +242,10 @@ pub struct NewConversationParams {
pub struct NewConversationResponse {
    pub conversation_id: ConversationId,
    pub model: String,
    /// Note this could be ignored by the model.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_effort: Option<ReasoningEffort>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, TS)]
@@ -262,10 +305,32 @@ pub struct AddConversationSubscriptionResponse {
    pub subscription_id: Uuid,
}

/// The [`ConversationId`] must match the `rollout_path`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationParams {
    pub conversation_id: ConversationId,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationSubscriptionResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyParams {
    pub api_key: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginChatGptResponse {
@@ -334,7 +399,7 @@ pub struct ExecOneOffCommandParams {

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecArbitraryCommandResponse {
pub struct ExecOneOffCommandResponse {
    pub exit_code: i32,
    pub stdout: String,
    pub stderr: String,
@@ -345,9 +410,14 @@ pub struct ExecArbitraryCommandResponse {
pub struct GetAuthStatusResponse {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub auth_method: Option<AuthMode>,
    pub preferred_auth_method: AuthMode,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub auth_token: Option<String>,

    // Indicates that auth method must be valid to use the server.
    // This can be false if using a custom provider that is configured
    // with requires_openai_auth == false.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub requires_openai_auth: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
@@ -356,12 +426,38 @@ pub struct GetUserAgentResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserInfoResponse {
    /// Note: `alleged_user_email` is not currently verified. We read it from
    /// the local auth.json, which the user could theoretically modify. In the
    /// future, we may add logic to verify the email against the server before
    /// returning it.
    pub alleged_user_email: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserSavedConfigResponse {
    pub config: UserSavedConfig,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelParams {
    /// If set to None, this means `model` should be cleared in config.toml.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub model: Option<String>,
    /// If set to None, this means `model_reasoning_effort` should be cleared
    /// in config.toml.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_effort: Option<ReasoningEffort>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelResponse {}

/// UserSavedConfig contains a subset of the config. It is meant to expose mcp
/// client-configurable settings that can be specified in the NewConversation
/// and SendUserTurn requests.
@@ -451,7 +547,8 @@ pub struct SendUserTurnParams {
    pub approval_policy: AskForApproval,
    pub sandbox_policy: SandboxPolicy,
    pub model: String,
    pub effort: ReasoningEffort,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub effort: Option<ReasoningEffort>,
    pub summary: ReasoningSummary,
}
@@ -506,30 +603,74 @@ pub enum InputItem {
    },
}

// TODO(mbolin): Need test to ensure these constants match the enum variants.
/// Generates an `enum ServerRequest` where each variant is a request that the
/// server can send to the client along with the corresponding params and
/// response types. It also generates helper types used by the app/server
/// infrastructure (payload enum, request constructor, and export helpers).
macro_rules! server_request_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident
        ),* $(,)?
    ) => {
        paste! {
            /// Request initiated from the server and sent to the client.
            #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
            #[serde(tag = "method", rename_all = "camelCase")]
            pub enum ServerRequest {
                $(
                    $(#[$variant_meta])*
                    $variant {
                        #[serde(rename = "id")]
                        request_id: RequestId,
                        params: [<$variant Params>],
                    },
                )*
            }

pub const APPLY_PATCH_APPROVAL_METHOD: &str = "applyPatchApproval";
pub const EXEC_COMMAND_APPROVAL_METHOD: &str = "execCommandApproval";
            #[derive(Debug, Clone, PartialEq)]
            pub enum ServerRequestPayload {
                $( $variant([<$variant Params>]), )*
            }

/// Request initiated from the server and sent to the client.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(tag = "method", rename_all = "camelCase")]
pub enum ServerRequest {
            impl ServerRequestPayload {
                pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
                    match self {
                        $(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
                    }
                }
            }
        }

        pub fn export_server_responses(
            out_dir: &::std::path::Path,
        ) -> ::std::result::Result<(), ::ts_rs::ExportError> {
            paste! {
                $(<[<$variant Response>] as ::ts_rs::TS>::export_all_to(out_dir)?;)*
            }
            Ok(())
        }
    };
}

impl TryFrom<JSONRPCRequest> for ServerRequest {
    type Error = serde_json::Error;

    fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> {
        serde_json::from_value(serde_json::to_value(value)?)
    }
}

server_request_definitions! {
    /// Request to approve a patch.
    ApplyPatchApproval {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: ApplyPatchApprovalParams,
    },
    ApplyPatchApproval,
    /// Request to exec a command.
    ExecCommandApproval {
        #[serde(rename = "id")]
        request_id: RequestId,
        params: ExecCommandApprovalParams,
    },
    ExecCommandApproval,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
@@ -546,6 +687,7 @@ pub struct ApplyPatchApprovalParams {
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecCommandApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
@@ -567,6 +709,33 @@ pub struct ApplyPatchApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchParams {
    pub query: String,
    pub roots: Vec<String>,
    // if provided, will cancel any previous request that used the same value
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cancellation_token: Option<String>,
}

/// Superset of [`codex_file_search::FileMatch`]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
pub struct FuzzyFileSearchResult {
    pub root: String,
    pub path: String,
    pub file_name: String,
    pub score: u32,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub indices: Option<Vec<u32>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
pub struct FuzzyFileSearchResponse {
    pub files: Vec<FuzzyFileSearchResult>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginChatGptCompleteNotification {
@@ -576,6 +745,34 @@ pub struct LoginChatGptCompleteNotification {
    pub error: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, TS)]
#[serde(rename_all = "camelCase")]
pub struct SessionConfiguredNotification {
    /// Name left as session_id instead of conversation_id for backwards compatibility.
    pub session_id: ConversationId,

    /// Tell the client what model is being queried.
    pub model: String,

    /// The effort the model is putting into reasoning about the user's request.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning_effort: Option<ReasoningEffort>,

    /// Identifier of the history log file (inode on Unix, 0 otherwise).
    pub history_log_id: u64,

    /// Current number of entries in the history log.
    #[ts(type = "number")]
    pub history_entry_count: usize,

    /// Optional initial messages (as events) for resumed sessions.
    /// When present, UIs can use these to seed the history.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub initial_messages: Option<Vec<EventMsg>>,

    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
#[serde(rename_all = "camelCase")]
pub struct AuthStatusChangeNotification {
@@ -584,7 +781,8 @@ pub struct AuthStatusChangeNotification {
    pub auth_method: Option<AuthMode>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS, Display)]
/// Notification sent from the server to the client.
#[derive(Serialize, Deserialize, Debug, Clone, TS, Display)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ServerNotification {
@@ -593,6 +791,9 @@ pub enum ServerNotification {

    /// ChatGPT login flow completed
    LoginChatGptComplete(LoginChatGptCompleteNotification),

    /// The special session configured event for a new or resumed conversation.
    SessionConfigured(SessionConfiguredNotification),
}

impl ServerNotification {
@@ -600,22 +801,40 @@ impl ServerNotification {
        match self {
            ServerNotification::AuthStatusChange(params) => serde_json::to_value(params),
            ServerNotification::LoginChatGptComplete(params) => serde_json::to_value(params),
            ServerNotification::SessionConfigured(params) => serde_json::to_value(params),
        }
    }
}

impl TryFrom<JSONRPCNotification> for ServerNotification {
    type Error = serde_json::Error;

    fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
        serde_json::from_value(serde_json::to_value(value)?)
    }
}

/// Notification sent from the client to the server.
#[derive(Serialize, Deserialize, Debug, Clone, TS, Display)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ClientNotification {
    Initialized,
}
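A sketch of how the adjacently tagged representation (`tag = "method", content = "params"`) looks on the wire for these notification enums (assumes the protocol types above are in scope):

```rust
// Sketch: the variant name lands under "method" and its payload under
// "params", which is exactly the JSON-RPC-style framing the server emits.
fn demo() -> serde_json::Result<()> {
    let n = ServerNotification::AuthStatusChange(AuthStatusChangeNotification {
        auth_method: Some(AuthMode::ChatGPT),
    });
    assert_eq!(
        serde_json::to_value(&n)?,
        serde_json::json!({
            "method": "authStatusChange",
            "params": { "authMethod": "chatgpt" }
        })
    );
    Ok(())
}
```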
#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;
    use pretty_assertions::assert_eq;
    use serde_json::json;

    #[test]
    fn serialize_new_conversation() {
    fn serialize_new_conversation() -> Result<()> {
        let request = ClientRequest::NewConversation {
            request_id: RequestId::Integer(42),
            params: NewConversationParams {
                model: Some("gpt-5".to_string()),
                model: Some("gpt-5-codex".to_string()),
                profile: None,
                cwd: None,
                approval_policy: Some(AskForApproval::OnRequest),
@@ -631,17 +850,83 @@ mod tests {
                "method": "newConversation",
                "id": 42,
                "params": {
                    "model": "gpt-5",
                    "model": "gpt-5-codex",
                    "approvalPolicy": "on-request"
                }
            }),
            serde_json::to_value(&request).unwrap(),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn test_conversation_id_default_is_not_zeroes() {
        let id = ConversationId::default();
        assert_ne!(id.0, Uuid::nil());
    fn conversation_id_serializes_as_plain_string() -> Result<()> {
        let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;

        assert_eq!(
            json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
            serde_json::to_value(id)?
        );
        Ok(())
    }

    #[test]
    fn conversation_id_deserializes_from_plain_string() -> Result<()> {
        let id: ConversationId =
            serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;

        assert_eq!(
            ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
            id,
        );
        Ok(())
    }

    #[test]
    fn serialize_client_notification() -> Result<()> {
        let notification = ClientNotification::Initialized;
        // Note there is no "params" field for this notification.
        assert_eq!(
            json!({
                "method": "initialized",
            }),
            serde_json::to_value(&notification)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_server_request() -> Result<()> {
        let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
        let params = ExecCommandApprovalParams {
            conversation_id,
            call_id: "call-42".to_string(),
            command: vec!["echo".to_string(), "hello".to_string()],
            cwd: PathBuf::from("/tmp"),
            reason: Some("because tests".to_string()),
        };
        let request = ServerRequest::ExecCommandApproval {
            request_id: RequestId::Integer(7),
            params: params.clone(),
        };

        assert_eq!(
            json!({
                "method": "execCommandApproval",
                "id": 7,
                "params": {
                    "conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
                    "callId": "call-42",
                    "command": ["echo", "hello"],
                    "cwd": "/tmp",
                    "reason": "because tests",
                }
            }),
            serde_json::to_value(&request)?,
        );

        let payload = ServerRequestPayload::ExecCommandApproval(params);
        assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
        Ok(())
    }
}
codex-rs/app-server/Cargo.toml (new file, +49)
@@ -0,0 +1,49 @@
[package]
edition = "2024"
name = "codex-app-server"
version = { workspace = true }

[[bin]]
name = "codex-app-server"
path = "src/main.rs"

[lib]
name = "codex_app_server"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
codex-arg0 = { workspace = true }
codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-file-search = { workspace = true }
codex-login = { workspace = true }
codex-protocol = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-utils-json-to-toml = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
    "macros",
    "process",
    "rt-multi-thread",
    "signal",
] }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
uuid = { workspace = true, features = ["serde", "v7"] }

[dev-dependencies]
app_test_support = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
core_test_support = { workspace = true }
os_info = { workspace = true }
pretty_assertions = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }
wiremock = { workspace = true }
codex-rs/app-server/README.md (new file, +15)
@@ -0,0 +1,15 @@
# codex-app-server

`codex app-server` is the harness Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt). The message schema is currently unstable, but those who wish to build experimental UIs on top of Codex may find it valuable.

## Protocol

Similar to [MCP](https://modelcontextprotocol.io/), `codex app-server` supports bidirectional communication, streaming JSONL over stdio. The protocol follows JSON-RPC 2.0, though the `"jsonrpc": "2.0"` field is omitted.

## Message Schema

Currently, you can dump a TypeScript version of the schema using `codex generate-ts`. The generated schema is specific to the version of Codex that produced it, so the schema and the binary are guaranteed to be compatible.

```
codex generate-ts --out DIR
```
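As an illustration of the JSONL framing, here is a minimal sketch of a client (assuming `codex` is on `PATH` and that the message shapes match the protocol crate above; this is not an official client):

```rust
use std::io::{BufRead, BufReader, Write};
use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
    // Spawn the app server and speak line-delimited JSON over its stdio.
    let mut child = Command::new("codex")
        .arg("app-server")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;

    if let (Some(mut stdin), Some(stdout)) = (child.stdin.take(), child.stdout.take()) {
        // One request per line; note there is no "jsonrpc" field.
        writeln!(
            stdin,
            r#"{{"id":1,"method":"initialize","params":{{"clientInfo":{{"name":"demo","version":"0.1.0"}}}}}}"#
        )?;

        // Read the first response line.
        let mut line = String::new();
        BufReader::new(stdout).read_line(&mut line)?;
        println!("server replied: {line}");
    }

    child.kill()?;
    Ok(())
}
```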
@@ -1,20 +1,73 @@
use crate::error_code::INTERNAL_ERROR_CODE;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::json_to_toml::json_to_toml;
use crate::fuzzy_file_search::run_fuzzy_file_search;
use crate::outgoing_message::OutgoingMessageSender;
use crate::outgoing_message::OutgoingNotification;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AddConversationSubscriptionResponse;
use codex_app_server_protocol::ApplyPatchApprovalParams;
use codex_app_server_protocol::ApplyPatchApprovalResponse;
use codex_app_server_protocol::ArchiveConversationParams;
use codex_app_server_protocol::ArchiveConversationResponse;
use codex_app_server_protocol::AuthStatusChangeNotification;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::ConversationSummary;
use codex_app_server_protocol::ExecCommandApprovalParams;
use codex_app_server_protocol::ExecCommandApprovalResponse;
use codex_app_server_protocol::ExecOneOffCommandParams;
use codex_app_server_protocol::ExecOneOffCommandResponse;
use codex_app_server_protocol::FuzzyFileSearchParams;
use codex_app_server_protocol::FuzzyFileSearchResponse;
use codex_app_server_protocol::GetUserAgentResponse;
use codex_app_server_protocol::GetUserSavedConfigResponse;
use codex_app_server_protocol::GitDiffToRemoteResponse;
use codex_app_server_protocol::InputItem as WireInputItem;
use codex_app_server_protocol::InterruptConversationParams;
use codex_app_server_protocol::InterruptConversationResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::ListConversationsParams;
use codex_app_server_protocol::ListConversationsResponse;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::LoginApiKeyResponse;
use codex_app_server_protocol::LoginChatGptCompleteNotification;
use codex_app_server_protocol::LoginChatGptResponse;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RemoveConversationListenerParams;
use codex_app_server_protocol::RemoveConversationSubscriptionResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::Result as JsonRpcResult;
use codex_app_server_protocol::ResumeConversationParams;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use codex_app_server_protocol::SendUserTurnParams;
use codex_app_server_protocol::SendUserTurnResponse;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestPayload;
use codex_app_server_protocol::SessionConfiguredNotification;
use codex_app_server_protocol::SetDefaultModelParams;
use codex_app_server_protocol::SetDefaultModelResponse;
use codex_app_server_protocol::UserInfoResponse;
use codex_app_server_protocol::UserSavedConfig;
use codex_core::AuthManager;
use codex_core::CodexConversation;
use codex_core::ConversationManager;
use codex_core::Cursor as RolloutCursor;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_core::NewConversation;
use codex_core::RolloutRecorder;
use codex_core::SessionMeta;
use codex_core::auth::CLIENT_ID;
use codex_core::auth::get_auth_file;
use codex_core::auth::login_with_api_key;
use codex_core::auth::try_read_auth_json;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::load_config_as_toml;
use codex_core::config_edit::CONFIG_KEY_EFFORT;
use codex_core::config_edit::CONFIG_KEY_MODEL;
use codex_core::config_edit::persist_overrides_and_clear_if_none;
use codex_core::default_client::get_codex_user_agent;
use codex_core::exec::ExecParams;
use codex_core::exec_env::create_env;
@@ -30,55 +83,25 @@ use codex_core::protocol::ReviewDecision;
use codex_login::ServerOptions as LoginServerOptions;
use codex_login::ShutdownHandle;
use codex_login::run_login_server;
use codex_protocol::mcp_protocol::APPLY_PATCH_APPROVAL_METHOD;
use codex_protocol::mcp_protocol::AddConversationListenerParams;
use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse;
use codex_protocol::mcp_protocol::ApplyPatchApprovalParams;
use codex_protocol::mcp_protocol::ApplyPatchApprovalResponse;
use codex_protocol::mcp_protocol::AuthMode;
use codex_protocol::mcp_protocol::AuthStatusChangeNotification;
use codex_protocol::mcp_protocol::ClientRequest;
use codex_protocol::mcp_protocol::ConversationId;
use codex_protocol::mcp_protocol::ConversationSummary;
use codex_protocol::mcp_protocol::EXEC_COMMAND_APPROVAL_METHOD;
use codex_protocol::mcp_protocol::ExecArbitraryCommandResponse;
use codex_protocol::mcp_protocol::ExecCommandApprovalParams;
use codex_protocol::mcp_protocol::ExecCommandApprovalResponse;
use codex_protocol::mcp_protocol::ExecOneOffCommandParams;
use codex_protocol::mcp_protocol::GetUserAgentResponse;
use codex_protocol::mcp_protocol::GetUserSavedConfigResponse;
use codex_protocol::mcp_protocol::GitDiffToRemoteResponse;
use codex_protocol::mcp_protocol::InputItem as WireInputItem;
use codex_protocol::mcp_protocol::InterruptConversationParams;
use codex_protocol::mcp_protocol::InterruptConversationResponse;
use codex_protocol::mcp_protocol::ListConversationsParams;
use codex_protocol::mcp_protocol::ListConversationsResponse;
use codex_protocol::mcp_protocol::LoginChatGptCompleteNotification;
use codex_protocol::mcp_protocol::LoginChatGptResponse;
use codex_protocol::mcp_protocol::NewConversationParams;
use codex_protocol::mcp_protocol::NewConversationResponse;
use codex_protocol::mcp_protocol::RemoveConversationListenerParams;
use codex_protocol::mcp_protocol::RemoveConversationSubscriptionResponse;
use codex_protocol::mcp_protocol::ResumeConversationParams;
use codex_protocol::mcp_protocol::SendUserMessageParams;
use codex_protocol::mcp_protocol::SendUserMessageResponse;
use codex_protocol::mcp_protocol::SendUserTurnParams;
use codex_protocol::mcp_protocol::SendUserTurnResponse;
use codex_protocol::mcp_protocol::ServerNotification;
use codex_protocol::mcp_protocol::UserSavedConfig;
use codex_protocol::ConversationId;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::InputMessageKind;
use codex_protocol::protocol::USER_MESSAGE_BEGIN;
use mcp_types::JSONRPCErrorError;
use mcp_types::RequestId;
use codex_utils_json_to_toml::json_to_toml;
use std::collections::HashMap;
use std::ffi::OsStr;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::time::Duration;
use tokio::select;
use tokio::sync::Mutex;
use tokio::sync::oneshot;
use tracing::error;
use tracing::info;
use tracing::warn;
use uuid::Uuid;

// Duration before a ChatGPT login attempt is abandoned.
@@ -106,6 +129,7 @@ pub(crate) struct CodexMessageProcessor {
    active_login: Arc<Mutex<Option<ActiveLogin>>>,
    // Queue of pending interrupt requests per conversation. We reply when TurnAborted arrives.
    pending_interrupts: Arc<Mutex<HashMap<ConversationId, Vec<RequestId>>>>,
    pending_fuzzy_searches: Arc<Mutex<HashMap<String, Arc<AtomicBool>>>>,
}

impl CodexMessageProcessor {
@@ -125,11 +149,15 @@ impl CodexMessageProcessor {
            conversation_listeners: HashMap::new(),
            active_login: Arc::new(Mutex::new(None)),
            pending_interrupts: Arc::new(Mutex::new(HashMap::new())),
            pending_fuzzy_searches: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    pub async fn process_request(&mut self, request: ClientRequest) {
        match request {
            ClientRequest::Initialize { .. } => {
                panic!("Initialize should be handled in MessageProcessor");
            }
            ClientRequest::NewConversation { request_id, params } => {
                // Do not tokio::spawn() to process new_conversation()
                // asynchronously because we need to ensure the conversation is
@@ -142,6 +170,9 @@ impl CodexMessageProcessor {
            ClientRequest::ResumeConversation { request_id, params } => {
                self.handle_resume_conversation(request_id, params).await;
            }
            ClientRequest::ArchiveConversation { request_id, params } => {
                self.archive_conversation(request_id, params).await;
            }
            ClientRequest::SendUserMessage { request_id, params } => {
                self.send_user_message(request_id, params).await;
            }
@@ -160,40 +191,96 @@ impl CodexMessageProcessor {
            ClientRequest::GitDiffToRemote { request_id, params } => {
                self.git_diff_to_origin(request_id, params.cwd).await;
            }
            ClientRequest::LoginChatGpt { request_id } => {
            ClientRequest::LoginApiKey { request_id, params } => {
                self.login_api_key(request_id, params).await;
            }
            ClientRequest::LoginChatGpt {
                request_id,
                params: _,
            } => {
                self.login_chatgpt(request_id).await;
            }
            ClientRequest::CancelLoginChatGpt { request_id, params } => {
                self.cancel_login_chatgpt(request_id, params.login_id).await;
            }
            ClientRequest::LogoutChatGpt { request_id } => {
            ClientRequest::LogoutChatGpt {
                request_id,
                params: _,
            } => {
                self.logout_chatgpt(request_id).await;
            }
            ClientRequest::GetAuthStatus { request_id, params } => {
                self.get_auth_status(request_id, params).await;
            }
            ClientRequest::GetUserSavedConfig { request_id } => {
            ClientRequest::GetUserSavedConfig {
                request_id,
                params: _,
            } => {
                self.get_user_saved_config(request_id).await;
            }
            ClientRequest::GetUserAgent { request_id } => {
            ClientRequest::SetDefaultModel { request_id, params } => {
                self.set_default_model(request_id, params).await;
            }
            ClientRequest::GetUserAgent {
                request_id,
                params: _,
            } => {
                self.get_user_agent(request_id).await;
            }
            ClientRequest::UserInfo {
                request_id,
                params: _,
            } => {
                self.get_user_info(request_id).await;
            }
            ClientRequest::FuzzyFileSearch { request_id, params } => {
                self.fuzzy_file_search(request_id, params).await;
            }
            ClientRequest::ExecOneOffCommand { request_id, params } => {
                self.exec_one_off_command(request_id, params).await;
            }
        }
    }

    async fn login_api_key(&mut self, request_id: RequestId, params: LoginApiKeyParams) {
        {
            let mut guard = self.active_login.lock().await;
            if let Some(active) = guard.take() {
                active.drop();
            }
        }

        match login_with_api_key(&self.config.codex_home, &params.api_key) {
            Ok(()) => {
                self.auth_manager.reload();
                self.outgoing
                    .send_response(request_id, LoginApiKeyResponse {})
                    .await;

                let payload = AuthStatusChangeNotification {
                    auth_method: self.auth_manager.auth().map(|auth| auth.mode),
                };
                self.outgoing
                    .send_server_notification(ServerNotification::AuthStatusChange(payload))
                    .await;
            }
            Err(err) => {
                let error = JSONRPCErrorError {
                    code: INTERNAL_ERROR_CODE,
                    message: format!("failed to save api key: {err}"),
                    data: None,
                };
                self.outgoing.send_error(request_id, error).await;
            }
        }
    }

    async fn login_chatgpt(&mut self, request_id: RequestId) {
        let config = self.config.as_ref();

        let opts = LoginServerOptions {
            open_browser: false,
            ..LoginServerOptions::new(
                config.codex_home.clone(),
                CLIENT_ID.to_string(),
                config.responses_originator_header.clone(),
            )
            ..LoginServerOptions::new(config.codex_home.clone(), CLIENT_ID.to_string())
        };

        enum LoginChatGptReply {
@@ -300,7 +387,7 @@ impl CodexMessageProcessor {
            self.outgoing
                .send_response(
                    request_id,
                    codex_protocol::mcp_protocol::CancelLoginChatGptResponse {},
                    codex_app_server_protocol::CancelLoginChatGptResponse {},
                )
                .await;
        } else {
@@ -336,12 +423,12 @@ impl CodexMessageProcessor {
        self.outgoing
            .send_response(
                request_id,
                codex_protocol::mcp_protocol::LogoutChatGptResponse {},
                codex_app_server_protocol::LogoutChatGptResponse {},
            )
            .await;

        // Send auth status change notification reflecting the current auth mode
        // after logout (which may fall back to API key via env var).
        // after logout.
        let current_auth_method = self.auth_manager.auth().map(|auth| auth.mode);
        let payload = AuthStatusChangeNotification {
            auth_method: current_auth_method,
@@ -354,9 +441,8 @@ impl CodexMessageProcessor {
    async fn get_auth_status(
        &self,
        request_id: RequestId,
        params: codex_protocol::mcp_protocol::GetAuthStatusParams,
        params: codex_app_server_protocol::GetAuthStatusParams,
    ) {
        let preferred_auth_method: AuthMode = self.auth_manager.preferred_auth_method();
        let include_token = params.include_token.unwrap_or(false);
        let do_refresh = params.refresh_token.unwrap_or(false);

@@ -364,43 +450,57 @@ impl CodexMessageProcessor {
            tracing::warn!("failed to refresh token while getting auth status: {err}");
        }

        let response = match self.auth_manager.auth() {
            Some(auth) => {
                let (reported_auth_method, token_opt) = match auth.get_token().await {
                    Ok(token) if !token.is_empty() => {
                        let tok = if include_token { Some(token) } else { None };
                        (Some(auth.mode), tok)
                    }
                    Ok(_) => (None, None),
                    Err(err) => {
                        tracing::warn!("failed to get token for auth status: {err}");
                        (None, None)
                    }
                };
                codex_protocol::mcp_protocol::GetAuthStatusResponse {
                    auth_method: reported_auth_method,
                    preferred_auth_method,
                    auth_token: token_opt,
                }
            }
            None => codex_protocol::mcp_protocol::GetAuthStatusResponse {
        // Determine whether auth is required based on the active model provider.
        // If a custom provider is configured with `requires_openai_auth == false`,
        // then no auth step is required; otherwise, default to requiring auth.
        let requires_openai_auth = self.config.model_provider.requires_openai_auth;

        let response = if !requires_openai_auth {
            codex_app_server_protocol::GetAuthStatusResponse {
                auth_method: None,
                preferred_auth_method,
                auth_token: None,
                },
                requires_openai_auth: Some(false),
            }
        } else {
            match self.auth_manager.auth() {
                Some(auth) => {
                    let auth_mode = auth.mode;
                    let (reported_auth_method, token_opt) = match auth.get_token().await {
                        Ok(token) if !token.is_empty() => {
                            let tok = if include_token { Some(token) } else { None };
                            (Some(auth_mode), tok)
                        }
                        Ok(_) => (None, None),
                        Err(err) => {
                            tracing::warn!("failed to get token for auth status: {err}");
                            (None, None)
                        }
                    };
                    codex_app_server_protocol::GetAuthStatusResponse {
                        auth_method: reported_auth_method,
                        auth_token: token_opt,
                        requires_openai_auth: Some(true),
                    }
                }
                None => codex_app_server_protocol::GetAuthStatusResponse {
                    auth_method: None,
                    auth_token: None,
                    requires_openai_auth: Some(true),
                },
            }
        };

        self.outgoing.send_response(request_id, response).await;
    }

    async fn get_user_agent(&self, request_id: RequestId) {
        let user_agent = get_codex_user_agent(Some(&self.config.responses_originator_header));
        let user_agent = get_codex_user_agent();
        let response = GetUserAgentResponse { user_agent };
        self.outgoing.send_response(request_id, response).await;
    }

    async fn get_user_saved_config(&self, request_id: RequestId) {
        let toml_value = match load_config_as_toml(&self.config.codex_home) {
        let toml_value = match load_config_as_toml(&self.config.codex_home).await {
            Ok(val) => val,
            Err(err) => {
                let error = JSONRPCErrorError {
@@ -434,6 +534,52 @@ impl CodexMessageProcessor {
        self.outgoing.send_response(request_id, response).await;
    }

    async fn get_user_info(&self, request_id: RequestId) {
        // Read alleged user email from auth.json (best-effort; not verified).
        let auth_path = get_auth_file(&self.config.codex_home);
        let alleged_user_email = match try_read_auth_json(&auth_path) {
            Ok(auth) => auth.tokens.and_then(|t| t.id_token.email),
            Err(_) => None,
        };

        let response = UserInfoResponse { alleged_user_email };
        self.outgoing.send_response(request_id, response).await;
    }

    async fn set_default_model(&self, request_id: RequestId, params: SetDefaultModelParams) {
        let SetDefaultModelParams {
            model,
            reasoning_effort,
        } = params;
        let effort_str = reasoning_effort.map(|effort| effort.to_string());

        let overrides: [(&[&str], Option<&str>); 2] = [
            (&[CONFIG_KEY_MODEL], model.as_deref()),
            (&[CONFIG_KEY_EFFORT], effort_str.as_deref()),
        ];

        match persist_overrides_and_clear_if_none(
            &self.config.codex_home,
            self.config.active_profile.as_deref(),
            &overrides,
        )
        .await
        {
            Ok(()) => {
                let response = SetDefaultModelResponse {};
                self.outgoing.send_response(request_id, response).await;
            }
            Err(err) => {
                let error = JSONRPCErrorError {
                    code: INTERNAL_ERROR_CODE,
                    message: format!("failed to persist overrides: {err}"),
                    data: None,
                };
                self.outgoing.send_error(request_id, error).await;
            }
        }
    }

    async fn exec_one_off_command(&self, request_id: RequestId, params: ExecOneOffCommandParams) {
        tracing::debug!("ExecOneOffCommand params: {params:?}");

@@ -473,19 +619,21 @@ impl CodexMessageProcessor {
        let codex_linux_sandbox_exe = self.config.codex_linux_sandbox_exe.clone();
        let outgoing = self.outgoing.clone();
        let req_id = request_id;
        let sandbox_cwd = self.config.cwd.clone();

        tokio::spawn(async move {
            match codex_core::exec::process_exec_tool_call(
                exec_params,
                sandbox_type,
                &effective_policy,
                sandbox_cwd.as_path(),
                &codex_linux_sandbox_exe,
                None,
            )
            .await
            {
                Ok(output) => {
                    let response = ExecArbitraryCommandResponse {
                    let response = ExecOneOffCommandResponse {
                        exit_code: output.exit_code,
                        stdout: output.stdout.text,
                        stderr: output.stderr.text,
@@ -505,18 +653,19 @@ impl CodexMessageProcessor {
    }

    async fn process_new_conversation(&self, request_id: RequestId, params: NewConversationParams) {
        let config = match derive_config_from_params(params, self.codex_linux_sandbox_exe.clone()) {
            Ok(config) => config,
            Err(err) => {
                let error = JSONRPCErrorError {
                    code: INVALID_REQUEST_ERROR_CODE,
                    message: format!("error deriving config: {err}"),
                    data: None,
                };
                self.outgoing.send_error(request_id, error).await;
                return;
            }
        };
        let config =
            match derive_config_from_params(params, self.codex_linux_sandbox_exe.clone()).await {
                Ok(config) => config,
                Err(err) => {
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("error deriving config: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
match self.conversation_manager.new_conversation(config).await {
|
||||
Ok(conversation_id) => {
|
||||
@@ -528,6 +677,8 @@ impl CodexMessageProcessor {
|
||||
let response = NewConversationResponse {
|
||||
conversation_id,
|
||||
model: session_configured.model,
|
||||
reasoning_effort: session_configured.reasoning_effort,
|
||||
rollout_path: session_configured.rollout_path,
|
||||
};
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
}
|
||||
@@ -559,6 +710,7 @@ impl CodexMessageProcessor {
|
||||
&self.config.codex_home,
|
||||
page_size,
|
||||
cursor_ref,
|
||||
INTERACTIVE_SESSION_SOURCES,
|
||||
)
|
||||
.await
|
||||
{
|
||||
@@ -601,7 +753,7 @@ impl CodexMessageProcessor {
|
||||
// Derive a Config using the same logic as new conversation, honoring overrides if provided.
|
||||
let config = match params.overrides {
|
||||
Some(overrides) => {
|
||||
derive_config_from_params(overrides, self.codex_linux_sandbox_exe.clone())
|
||||
derive_config_from_params(overrides, self.codex_linux_sandbox_exe.clone()).await
|
||||
}
|
||||
None => Ok(self.config.as_ref().clone()),
|
||||
};
|
||||
@@ -632,11 +784,19 @@ impl CodexMessageProcessor {
|
||||
session_configured,
|
||||
..
|
||||
}) => {
|
||||
let event = Event {
|
||||
id: "".to_string(),
|
||||
msg: EventMsg::SessionConfigured(session_configured.clone()),
|
||||
};
|
||||
self.outgoing.send_event_as_notification(&event, None).await;
|
||||
self.outgoing
|
||||
.send_server_notification(ServerNotification::SessionConfigured(
|
||||
SessionConfiguredNotification {
|
||||
session_id: session_configured.session_id,
|
||||
model: session_configured.model.clone(),
|
||||
reasoning_effort: session_configured.reasoning_effort,
|
||||
history_log_id: session_configured.history_log_id,
|
||||
history_entry_count: session_configured.history_entry_count,
|
||||
initial_messages: session_configured.initial_messages.clone(),
|
||||
rollout_path: session_configured.rollout_path.clone(),
|
||||
},
|
||||
))
|
||||
.await;
|
||||
let initial_messages = session_configured.initial_messages.map(|msgs| {
|
||||
msgs.into_iter()
|
||||
.filter(|event| {
|
||||
@@ -651,7 +811,7 @@ impl CodexMessageProcessor {
|
||||
});
|
||||
|
||||
// Reply with conversation id + model and initial messages (when present)
|
||||
let response = codex_protocol::mcp_protocol::ResumeConversationResponse {
|
||||
let response = codex_app_server_protocol::ResumeConversationResponse {
|
||||
conversation_id,
|
||||
model: session_configured.model.clone(),
|
||||
initial_messages,
|
||||
@@ -669,6 +829,141 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
async fn archive_conversation(&self, request_id: RequestId, params: ArchiveConversationParams) {
|
||||
let ArchiveConversationParams {
|
||||
conversation_id,
|
||||
rollout_path,
|
||||
} = params;
|
||||
|
||||
// Verify that the rollout path is in the sessions directory or else
|
||||
// a malicious client could specify an arbitrary path.
|
||||
let rollout_folder = self.config.codex_home.join(codex_core::SESSIONS_SUBDIR);
|
||||
let canonical_rollout_path = tokio::fs::canonicalize(&rollout_path).await;
|
||||
let canonical_rollout_path = if let Ok(path) = canonical_rollout_path
|
||||
&& path.starts_with(&rollout_folder)
|
||||
{
|
||||
path
|
||||
} else {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!(
|
||||
"rollout path `{}` must be in sessions directory",
|
||||
rollout_path.display()
|
||||
),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
};
|
||||
|
||||
let required_suffix = format!("{conversation_id}.jsonl");
|
||||
let Some(file_name) = canonical_rollout_path.file_name().map(OsStr::to_owned) else {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!(
|
||||
"rollout path `{}` missing file name",
|
||||
rollout_path.display()
|
||||
),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
};
|
||||
|
||||
if !file_name
|
||||
.to_string_lossy()
|
||||
.ends_with(required_suffix.as_str())
|
||||
{
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!(
|
||||
"rollout path `{}` does not match conversation id {conversation_id}",
|
||||
rollout_path.display()
|
||||
),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
|
||||
let removed_conversation = self
|
||||
.conversation_manager
|
||||
.remove_conversation(&conversation_id)
|
||||
.await;
|
||||
if let Some(conversation) = removed_conversation {
|
||||
info!("conversation {conversation_id} was active; shutting down");
|
||||
let conversation_clone = conversation.clone();
|
||||
let notify = Arc::new(tokio::sync::Notify::new());
|
||||
let notify_clone = notify.clone();
|
||||
|
||||
// Establish the listener for ShutdownComplete before submitting
|
||||
// Shutdown so it is not missed.
|
||||
let is_shutdown = tokio::spawn(async move {
|
||||
loop {
|
||||
select! {
|
||||
_ = notify_clone.notified() => {
|
||||
break;
|
||||
}
|
||||
event = conversation_clone.next_event() => {
|
||||
if let Ok(event) = event && matches!(event.msg, EventMsg::ShutdownComplete) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Request shutdown.
|
||||
match conversation.submit(Op::Shutdown).await {
|
||||
Ok(_) => {
|
||||
// Successfully submitted Shutdown; wait before proceeding.
|
||||
select! {
|
||||
_ = is_shutdown => {
|
||||
// Normal shutdown: proceed with archive.
|
||||
}
|
||||
_ = tokio::time::sleep(Duration::from_secs(10)) => {
|
||||
warn!("conversation {conversation_id} shutdown timed out; proceeding with archive");
|
||||
notify.notify_one();
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
error!("failed to submit Shutdown to conversation {conversation_id}: {err}");
|
||||
notify.notify_one();
|
||||
// Perhaps we lost a shutdown race, so let's continue to
|
||||
// clean up the .jsonl file.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Move the .jsonl file to the archived sessions subdir.
|
||||
let result: std::io::Result<()> = async {
|
||||
let archive_folder = self
|
||||
.config
|
||||
.codex_home
|
||||
.join(codex_core::ARCHIVED_SESSIONS_SUBDIR);
|
||||
tokio::fs::create_dir_all(&archive_folder).await?;
|
||||
tokio::fs::rename(&canonical_rollout_path, &archive_folder.join(&file_name)).await?;
|
||||
Ok(())
|
||||
}
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Ok(()) => {
|
||||
let response = ArchiveConversationResponse {};
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
}
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: format!("failed to archive conversation: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn send_user_message(&self, request_id: RequestId, params: SendUserMessageParams) {
|
||||
let SendUserMessageParams {
|
||||
conversation_id,
|
||||
@@ -754,6 +1049,8 @@ impl CodexMessageProcessor {
|
||||
model,
|
||||
effort,
|
||||
summary,
|
||||
final_output_json_schema: None,
|
||||
disabled_tools: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -911,6 +1208,46 @@ impl CodexMessageProcessor {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn fuzzy_file_search(&mut self, request_id: RequestId, params: FuzzyFileSearchParams) {
|
||||
let FuzzyFileSearchParams {
|
||||
query,
|
||||
roots,
|
||||
cancellation_token,
|
||||
} = params;
|
||||
|
||||
let cancel_flag = match cancellation_token.clone() {
|
||||
Some(token) => {
|
||||
let mut pending_fuzzy_searches = self.pending_fuzzy_searches.lock().await;
|
||||
// if a cancellation_token is provided and a pending_request exists for
|
||||
// that token, cancel it
|
||||
if let Some(existing) = pending_fuzzy_searches.get(&token) {
|
||||
existing.store(true, Ordering::Relaxed);
|
||||
}
|
||||
let flag = Arc::new(AtomicBool::new(false));
|
||||
pending_fuzzy_searches.insert(token.clone(), flag.clone());
|
||||
flag
|
||||
}
|
||||
None => Arc::new(AtomicBool::new(false)),
|
||||
};
|
||||
|
||||
let results = match query.as_str() {
|
||||
"" => vec![],
|
||||
_ => run_fuzzy_file_search(query, roots, cancel_flag.clone()).await,
|
||||
};
|
||||
|
||||
if let Some(token) = cancellation_token {
|
||||
let mut pending_fuzzy_searches = self.pending_fuzzy_searches.lock().await;
|
||||
if let Some(current_flag) = pending_fuzzy_searches.get(&token)
|
||||
&& Arc::ptr_eq(current_flag, &cancel_flag)
|
||||
{
|
||||
pending_fuzzy_searches.remove(&token);
|
||||
}
|
||||
}
|
||||
|
||||
let response = FuzzyFileSearchResponse { files: results };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
}
|
||||
}
|
||||
|
||||
async fn apply_bespoke_event_handling(
|
||||
@@ -935,9 +1272,8 @@ async fn apply_bespoke_event_handling(
|
||||
reason,
|
||||
grant_root,
|
||||
};
|
||||
let value = serde_json::to_value(¶ms).unwrap_or_default();
|
||||
let rx = outgoing
|
||||
.send_request(APPLY_PATCH_APPROVAL_METHOD, Some(value))
|
||||
.send_request(ServerRequestPayload::ApplyPatchApproval(params))
|
||||
.await;
|
||||
// TODO(mbolin): Enforce a timeout so this task does not live indefinitely?
|
||||
tokio::spawn(async move {
|
||||
@@ -957,9 +1293,8 @@ async fn apply_bespoke_event_handling(
|
||||
cwd,
|
||||
reason,
|
||||
};
|
||||
let value = serde_json::to_value(¶ms).unwrap_or_default();
|
||||
let rx = outgoing
|
||||
.send_request(EXEC_COMMAND_APPROVAL_METHOD, Some(value))
|
||||
.send_request(ServerRequestPayload::ExecCommandApproval(params))
|
||||
.await;
|
||||
|
||||
// TODO(mbolin): Enforce a timeout so this task does not live indefinitely?
|
||||
@@ -987,7 +1322,7 @@ async fn apply_bespoke_event_handling(
|
||||
}
|
||||
}
|
||||
|
||||
fn derive_config_from_params(
|
||||
async fn derive_config_from_params(
|
||||
params: NewConversationParams,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
) -> std::io::Result<Config> {
|
||||
@@ -1004,6 +1339,7 @@ fn derive_config_from_params(
|
||||
} = params;
|
||||
let overrides = ConfigOverrides {
|
||||
model,
|
||||
review_model: None,
|
||||
config_profile: profile,
|
||||
cwd: cwd.map(PathBuf::from),
|
||||
approval_policy,
|
||||
@@ -1024,12 +1360,12 @@ fn derive_config_from_params(
|
||||
.map(|(k, v)| (k, json_to_toml(v)))
|
||||
.collect();
|
||||
|
||||
Config::load_with_cli_overrides(cli_overrides, overrides)
|
||||
Config::load_with_cli_overrides(cli_overrides, overrides).await
|
||||
}
|
||||
|
||||
async fn on_patch_approval_response(
|
||||
event_id: String,
|
||||
receiver: oneshot::Receiver<mcp_types::Result>,
|
||||
receiver: oneshot::Receiver<JsonRpcResult>,
|
||||
codex: Arc<CodexConversation>,
|
||||
) {
|
||||
let response = receiver.await;
|
||||
@@ -1071,7 +1407,7 @@ async fn on_patch_approval_response(
|
||||
|
||||
async fn on_exec_approval_response(
|
||||
event_id: String,
|
||||
receiver: oneshot::Receiver<mcp_types::Result>,
|
||||
receiver: oneshot::Receiver<JsonRpcResult>,
|
||||
conversation: Arc<CodexConversation>,
|
||||
) {
|
||||
let response = receiver.await;
|
||||
@@ -1110,10 +1446,7 @@ fn extract_conversation_summary(
|
||||
head: &[serde_json::Value],
|
||||
) -> Option<ConversationSummary> {
|
||||
let session_meta = match head.first() {
|
||||
Some(first_line) => match serde_json::from_value::<SessionMeta>(first_line.clone()) {
|
||||
Ok(session_meta) => session_meta,
|
||||
Err(..) => return None,
|
||||
},
|
||||
Some(first_line) => serde_json::from_value::<SessionMeta>(first_line.clone()).ok()?,
|
||||
None => return None,
|
||||
};
|
||||
|
||||
@@ -1157,20 +1490,24 @@ fn extract_conversation_summary(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn extract_conversation_summary_prefers_plain_user_messages() {
|
||||
let conversation_id =
|
||||
ConversationId(Uuid::parse_str("3f941c35-29b3-493b-b0a4-e25800d9aeb0").unwrap());
|
||||
fn extract_conversation_summary_prefers_plain_user_messages() -> Result<()> {
|
||||
let conversation_id = ConversationId::from_string("3f941c35-29b3-493b-b0a4-e25800d9aeb0")?;
|
||||
let timestamp = Some("2025-09-05T16:53:11.850Z".to_string());
|
||||
let path = PathBuf::from("rollout.jsonl");
|
||||
|
||||
let head = vec![
|
||||
json!({
|
||||
"id": conversation_id.0,
|
||||
"id": conversation_id.to_string(),
|
||||
"timestamp": timestamp,
|
||||
"cwd": "/",
|
||||
"originator": "codex",
|
||||
"cli_version": "0.0.0",
|
||||
"instructions": null
|
||||
}),
|
||||
json!({
|
||||
"type": "message",
|
||||
@@ -1199,5 +1536,6 @@ mod tests {
|
||||
);
|
||||
assert_eq!(summary.path, path);
|
||||
assert_eq!(summary.preview, "Count to 5");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
2
codex-rs/app-server/src/error_code.rs
Normal file
2
codex-rs/app-server/src/error_code.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
pub(crate) const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
|
||||
pub(crate) const INTERNAL_ERROR_CODE: i64 = -32603;
|
||||
92
codex-rs/app-server/src/fuzzy_file_search.rs
Normal file
92
codex-rs/app-server/src/fuzzy_file_search.rs
Normal file
@@ -0,0 +1,92 @@
|
||||
use std::num::NonZero;
|
||||
use std::num::NonZeroUsize;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
use codex_app_server_protocol::FuzzyFileSearchResult;
|
||||
use codex_file_search as file_search;
|
||||
use tokio::task::JoinSet;
|
||||
use tracing::warn;
|
||||
|
||||
const LIMIT_PER_ROOT: usize = 50;
|
||||
const MAX_THREADS: usize = 12;
|
||||
const COMPUTE_INDICES: bool = true;
|
||||
|
||||
pub(crate) async fn run_fuzzy_file_search(
|
||||
query: String,
|
||||
roots: Vec<String>,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
) -> Vec<FuzzyFileSearchResult> {
|
||||
#[expect(clippy::expect_used)]
|
||||
let limit_per_root =
|
||||
NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize");
|
||||
|
||||
let cores = std::thread::available_parallelism()
|
||||
.map(std::num::NonZero::get)
|
||||
.unwrap_or(1);
|
||||
let threads = cores.min(MAX_THREADS);
|
||||
let threads_per_root = (threads / roots.len()).max(1);
|
||||
let threads = NonZero::new(threads_per_root).unwrap_or(NonZeroUsize::MIN);
|
||||
|
||||
let mut files: Vec<FuzzyFileSearchResult> = Vec::new();
|
||||
let mut join_set = JoinSet::new();
|
||||
|
||||
for root in roots {
|
||||
let search_dir = PathBuf::from(&root);
|
||||
let query = query.clone();
|
||||
let cancel_flag = cancellation_flag.clone();
|
||||
join_set.spawn_blocking(move || {
|
||||
match file_search::run(
|
||||
query.as_str(),
|
||||
limit_per_root,
|
||||
&search_dir,
|
||||
Vec::new(),
|
||||
threads,
|
||||
cancel_flag,
|
||||
COMPUTE_INDICES,
|
||||
) {
|
||||
Ok(res) => Ok((root, res)),
|
||||
Err(err) => Err((root, err)),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
while let Some(res) = join_set.join_next().await {
|
||||
match res {
|
||||
Ok(Ok((root, res))) => {
|
||||
for m in res.matches {
|
||||
let path = m.path;
|
||||
//TODO(shijie): Move file name generation to file_search lib.
|
||||
let file_name = Path::new(&path)
|
||||
.file_name()
|
||||
.map(|name| name.to_string_lossy().into_owned())
|
||||
.unwrap_or_else(|| path.clone());
|
||||
let result = FuzzyFileSearchResult {
|
||||
root: root.clone(),
|
||||
path,
|
||||
file_name,
|
||||
score: m.score,
|
||||
indices: m.indices,
|
||||
};
|
||||
files.push(result);
|
||||
}
|
||||
}
|
||||
Ok(Err((root, err))) => {
|
||||
warn!("fuzzy-file-search in dir '{root}' failed: {err}");
|
||||
}
|
||||
Err(err) => {
|
||||
warn!("fuzzy-file-search join_next failed: {err}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
files.sort_by(file_search::cmp_by_score_desc_then_path_asc::<
|
||||
FuzzyFileSearchResult,
|
||||
_,
|
||||
_,
|
||||
>(|f| f.score, |f| f.path.as_str()));
|
||||
|
||||
files
|
||||
}
|
||||
140
codex-rs/app-server/src/lib.rs
Normal file
140
codex-rs/app-server/src/lib.rs
Normal file
@@ -0,0 +1,140 @@
|
||||
#![deny(clippy::print_stdout, clippy::print_stderr)]
|
||||
|
||||
use std::io::ErrorKind;
|
||||
use std::io::Result as IoResult;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::io::BufReader;
|
||||
use tokio::io::{self};
|
||||
use tokio::sync::mpsc;
|
||||
use tracing::debug;
|
||||
use tracing::error;
|
||||
use tracing::info;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
use crate::message_processor::MessageProcessor;
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
|
||||
mod codex_message_processor;
|
||||
mod error_code;
|
||||
mod fuzzy_file_search;
|
||||
mod message_processor;
|
||||
mod outgoing_message;
|
||||
|
||||
/// Size of the bounded channels used to communicate between tasks. The value
|
||||
/// is a balance between throughput and memory usage – 128 messages should be
|
||||
/// plenty for an interactive CLI.
|
||||
const CHANNEL_CAPACITY: usize = 128;
|
||||
|
||||
pub async fn run_main(
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
cli_config_overrides: CliConfigOverrides,
|
||||
) -> IoResult<()> {
|
||||
// Install a simple subscriber so `tracing` output is visible. Users can
|
||||
// control the log level with `RUST_LOG`.
|
||||
tracing_subscriber::fmt()
|
||||
.with_writer(std::io::stderr)
|
||||
.with_env_filter(EnvFilter::from_default_env())
|
||||
.init();
|
||||
|
||||
// Set up channels.
|
||||
let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
|
||||
let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded_channel::<OutgoingMessage>();
|
||||
|
||||
// Task: read from stdin, push to `incoming_tx`.
|
||||
let stdin_reader_handle = tokio::spawn({
|
||||
async move {
|
||||
let stdin = io::stdin();
|
||||
let reader = BufReader::new(stdin);
|
||||
let mut lines = reader.lines();
|
||||
|
||||
while let Some(line) = lines.next_line().await.unwrap_or_default() {
|
||||
match serde_json::from_str::<JSONRPCMessage>(&line) {
|
||||
Ok(msg) => {
|
||||
if incoming_tx.send(msg).await.is_err() {
|
||||
// Receiver gone – nothing left to do.
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(e) => error!("Failed to deserialize JSONRPCMessage: {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
debug!("stdin reader finished (EOF)");
|
||||
}
|
||||
});
|
||||
|
||||
// Parse CLI overrides once and derive the base Config eagerly so later
|
||||
// components do not need to work with raw TOML values.
|
||||
let cli_kv_overrides = cli_config_overrides.parse_overrides().map_err(|e| {
|
||||
std::io::Error::new(
|
||||
ErrorKind::InvalidInput,
|
||||
format!("error parsing -c overrides: {e}"),
|
||||
)
|
||||
})?;
|
||||
let config = Config::load_with_cli_overrides(cli_kv_overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.map_err(|e| {
|
||||
std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}"))
|
||||
})?;
|
||||
|
||||
// Task: process incoming messages.
|
||||
let processor_handle = tokio::spawn({
|
||||
let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
|
||||
let mut processor = MessageProcessor::new(
|
||||
outgoing_message_sender,
|
||||
codex_linux_sandbox_exe,
|
||||
std::sync::Arc::new(config),
|
||||
);
|
||||
async move {
|
||||
while let Some(msg) = incoming_rx.recv().await {
|
||||
match msg {
|
||||
JSONRPCMessage::Request(r) => processor.process_request(r).await,
|
||||
JSONRPCMessage::Response(r) => processor.process_response(r).await,
|
||||
JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
|
||||
JSONRPCMessage::Error(e) => processor.process_error(e),
|
||||
}
|
||||
}
|
||||
|
||||
info!("processor task exited (channel closed)");
|
||||
}
|
||||
});
|
||||
|
||||
// Task: write outgoing messages to stdout.
|
||||
let stdout_writer_handle = tokio::spawn(async move {
|
||||
let mut stdout = io::stdout();
|
||||
while let Some(outgoing_message) = outgoing_rx.recv().await {
|
||||
let Ok(value) = serde_json::to_value(outgoing_message) else {
|
||||
error!("Failed to convert OutgoingMessage to JSON value");
|
||||
continue;
|
||||
};
|
||||
match serde_json::to_string(&value) {
|
||||
Ok(mut json) => {
|
||||
json.push('\n');
|
||||
if let Err(e) = stdout.write_all(json.as_bytes()).await {
|
||||
error!("Failed to write to stdout: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(e) => error!("Failed to serialize JSONRPCMessage: {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
info!("stdout writer exited (channel closed)");
|
||||
});
|
||||
|
||||
// Wait for all tasks to finish. The typical exit path is the stdin reader
|
||||
// hitting EOF which, once it drops `incoming_tx`, propagates shutdown to
|
||||
// the processor and then to the stdout task.
|
||||
let _ = tokio::join!(stdin_reader_handle, processor_handle, stdout_writer_handle);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
10
codex-rs/app-server/src/main.rs
Normal file
10
codex-rs/app-server/src/main.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
use codex_app_server::run_main;
|
||||
use codex_arg0::arg0_dispatch_or_else;
|
||||
use codex_common::CliConfigOverrides;
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
|
||||
run_main(codex_linux_sandbox_exe, CliConfigOverrides::default()).await?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
137
codex-rs/app-server/src/message_processor.rs
Normal file
137
codex-rs/app-server/src/message_processor.rs
Normal file
@@ -0,0 +1,137 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::codex_message_processor::CodexMessageProcessor;
|
||||
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::InitializeResponse;
|
||||
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_core::AuthManager;
|
||||
use codex_core::ConversationManager;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::default_client::USER_AGENT_SUFFIX;
|
||||
use codex_core::default_client::get_codex_user_agent;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub(crate) struct MessageProcessor {
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
codex_message_processor: CodexMessageProcessor,
|
||||
initialized: bool,
|
||||
}
|
||||
|
||||
impl MessageProcessor {
|
||||
/// Create a new `MessageProcessor`, retaining a handle to the outgoing
|
||||
/// `Sender` so handlers can enqueue messages to be written to stdout.
|
||||
pub(crate) fn new(
|
||||
outgoing: OutgoingMessageSender,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
config: Arc<Config>,
|
||||
) -> Self {
|
||||
let outgoing = Arc::new(outgoing);
|
||||
let auth_manager = AuthManager::shared(config.codex_home.clone(), false);
|
||||
let conversation_manager = Arc::new(ConversationManager::new(
|
||||
auth_manager.clone(),
|
||||
SessionSource::VSCode,
|
||||
));
|
||||
let codex_message_processor = CodexMessageProcessor::new(
|
||||
auth_manager,
|
||||
conversation_manager,
|
||||
outgoing.clone(),
|
||||
codex_linux_sandbox_exe,
|
||||
config,
|
||||
);
|
||||
|
||||
Self {
|
||||
outgoing,
|
||||
codex_message_processor,
|
||||
initialized: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) {
|
||||
let request_id = request.id.clone();
|
||||
if let Ok(request_json) = serde_json::to_value(request)
|
||||
&& let Ok(codex_request) = serde_json::from_value::<ClientRequest>(request_json)
|
||||
{
|
||||
match codex_request {
|
||||
// Handle Initialize internally so CodexMessageProcessor does not have to concern
|
||||
// itself with the `initialized` bool.
|
||||
ClientRequest::Initialize { request_id, params } => {
|
||||
if self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Already initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
} else {
|
||||
let ClientInfo {
|
||||
name,
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
}
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
let response = InitializeResponse { user_agent };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
|
||||
self.initialized = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Not initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.codex_message_processor
|
||||
.process_request(codex_request)
|
||||
.await;
|
||||
} else {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Invalid request".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) {
|
||||
// Currently, we do not expect to receive any notifications from the
|
||||
// client, so we just log them.
|
||||
tracing::info!("<- notification: {:?}", notification);
|
||||
}
|
||||
|
||||
/// Handle a standalone JSON-RPC response originating from the peer.
|
||||
pub(crate) async fn process_response(&mut self, response: JSONRPCResponse) {
|
||||
tracing::info!("<- response: {:?}", response);
|
||||
let JSONRPCResponse { id, result, .. } = response;
|
||||
self.outgoing.notify_client_response(id, result).await
|
||||
}
|
||||
|
||||
/// Handle an error object received from the peer.
|
||||
pub(crate) fn process_error(&mut self, err: JSONRPCError) {
|
||||
tracing::error!("<- error: {:?}", err);
|
||||
}
|
||||
}
|
||||
174
codex-rs/app-server/src/outgoing_message.rs
Normal file
174
codex-rs/app-server/src/outgoing_message.rs
Normal file
@@ -0,0 +1,174 @@
|
||||
use std::collections::HashMap;
|
||||
use std::sync::atomic::AtomicI64;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::Result;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ServerRequestPayload;
|
||||
use serde::Serialize;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::oneshot;
|
||||
use tracing::warn;
|
||||
|
||||
use crate::error_code::INTERNAL_ERROR_CODE;
|
||||
|
||||
/// Sends messages to the client and manages request callbacks.
|
||||
pub(crate) struct OutgoingMessageSender {
|
||||
next_request_id: AtomicI64,
|
||||
sender: mpsc::UnboundedSender<OutgoingMessage>,
|
||||
request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<Result>>>,
|
||||
}
|
||||
|
||||
impl OutgoingMessageSender {
|
||||
pub(crate) fn new(sender: mpsc::UnboundedSender<OutgoingMessage>) -> Self {
|
||||
Self {
|
||||
next_request_id: AtomicI64::new(0),
|
||||
sender,
|
||||
request_id_to_callback: Mutex::new(HashMap::new()),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn send_request(
|
||||
&self,
|
||||
request: ServerRequestPayload,
|
||||
) -> oneshot::Receiver<Result> {
|
||||
let id = RequestId::Integer(self.next_request_id.fetch_add(1, Ordering::Relaxed));
|
||||
let outgoing_message_id = id.clone();
|
||||
let (tx_approve, rx_approve) = oneshot::channel();
|
||||
{
|
||||
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
|
||||
request_id_to_callback.insert(id, tx_approve);
|
||||
}
|
||||
|
||||
let outgoing_message =
|
||||
OutgoingMessage::Request(request.request_with_id(outgoing_message_id));
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
rx_approve
|
||||
}
|
||||
|
||||
pub(crate) async fn notify_client_response(&self, id: RequestId, result: Result) {
|
||||
let entry = {
|
||||
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
|
||||
request_id_to_callback.remove_entry(&id)
|
||||
};
|
||||
|
||||
match entry {
|
||||
Some((id, sender)) => {
|
||||
if let Err(err) = sender.send(result) {
|
||||
warn!("could not notify callback for {id:?} due to: {err:?}");
|
||||
}
|
||||
}
|
||||
None => {
|
||||
warn!("could not find callback for {id:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn send_response<T: Serialize>(&self, id: RequestId, response: T) {
|
||||
match serde_json::to_value(response) {
|
||||
Ok(result) => {
|
||||
let outgoing_message = OutgoingMessage::Response(OutgoingResponse { id, result });
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
}
|
||||
Err(err) => {
|
||||
self.send_error(
|
||||
id,
|
||||
JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: format!("failed to serialize response: {err}"),
|
||||
data: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn send_server_notification(&self, notification: ServerNotification) {
|
||||
let _ = self
|
||||
.sender
|
||||
.send(OutgoingMessage::AppServerNotification(notification));
|
||||
}
|
||||
|
||||
/// All notifications should be migrated to [`ServerNotification`] and
|
||||
/// [`OutgoingMessage::Notification`] should be removed.
|
||||
pub(crate) async fn send_notification(&self, notification: OutgoingNotification) {
|
||||
let outgoing_message = OutgoingMessage::Notification(notification);
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
}
|
||||
|
||||
pub(crate) async fn send_error(&self, id: RequestId, error: JSONRPCErrorError) {
|
||||
let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error });
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
}
|
||||
}
|
||||
|
||||
/// Outgoing message from the server to the client.
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
#[serde(untagged)]
|
||||
pub(crate) enum OutgoingMessage {
|
||||
Request(ServerRequest),
|
||||
Notification(OutgoingNotification),
|
||||
/// AppServerNotification is specific to the case where this is run as an
|
||||
/// "app server" as opposed to an MCP server.
|
||||
AppServerNotification(ServerNotification),
|
||||
Response(OutgoingResponse),
|
||||
Error(OutgoingError),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize)]
|
||||
pub(crate) struct OutgoingNotification {
|
||||
pub method: String,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub params: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize)]
|
||||
pub(crate) struct OutgoingResponse {
|
||||
pub id: RequestId,
|
||||
pub result: Result,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize)]
|
||||
pub(crate) struct OutgoingError {
|
||||
pub error: JSONRPCErrorError,
|
||||
pub id: RequestId,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use codex_app_server_protocol::LoginChatGptCompleteNotification;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn verify_server_notification_serialization() {
|
||||
let notification =
|
||||
ServerNotification::LoginChatGptComplete(LoginChatGptCompleteNotification {
|
||||
login_id: Uuid::nil(),
|
||||
success: true,
|
||||
error: None,
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "loginChatGptComplete",
|
||||
"params": {
|
||||
"loginId": Uuid::nil(),
|
||||
"success": true,
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the strum macros serialize the method field correctly"),
|
||||
"ensure the strum macros serialize the method field correctly"
|
||||
);
|
||||
}
|
||||
}
|
||||
3
codex-rs/app-server/tests/all.rs
Normal file
3
codex-rs/app-server/tests/all.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
// Single integration test binary that aggregates all test modules.
|
||||
// The submodules live in `tests/suite/`.
|
||||
mod suite;
|
||||
21
codex-rs/app-server/tests/common/Cargo.toml
Normal file
21
codex-rs/app-server/tests/common/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
edition = "2024"
|
||||
name = "app_test_support"
|
||||
version = { workspace = true }
|
||||
|
||||
[lib]
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
assert_cmd = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
"io-std",
|
||||
"macros",
|
||||
"process",
|
||||
"rt-multi-thread",
|
||||
] }
|
||||
wiremock = { workspace = true }
|
||||
17
codex-rs/app-server/tests/common/lib.rs
Normal file
17
codex-rs/app-server/tests/common/lib.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
mod mcp_process;
|
||||
mod mock_model_server;
|
||||
mod responses;
|
||||
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
pub use mcp_process::McpProcess;
|
||||
pub use mock_model_server::create_mock_chat_completions_server;
|
||||
pub use responses::create_apply_patch_sse_response;
|
||||
pub use responses::create_final_assistant_message_sse_response;
|
||||
pub use responses::create_shell_sse_response;
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {
|
||||
let value = serde_json::to_value(response.result)?;
|
||||
let codex_response = serde_json::from_value(value)?;
|
||||
Ok(codex_response)
|
||||
}
|
||||
502
codex-rs/app-server/tests/common/mcp_process.rs
Normal file
502
codex-rs/app-server/tests/common/mcp_process.rs
Normal file
@@ -0,0 +1,502 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::path::Path;
|
||||
use std::process::Stdio;
|
||||
use std::sync::atomic::AtomicI64;
|
||||
use std::sync::atomic::Ordering;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::io::BufReader;
|
||||
use tokio::process::Child;
|
||||
use tokio::process::ChildStdin;
|
||||
use tokio::process::ChildStdout;
|
||||
|
||||
use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::ArchiveConversationParams;
|
||||
use codex_app_server_protocol::CancelLoginChatGptParams;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientNotification;
|
||||
use codex_app_server_protocol::GetAuthStatusParams;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::InterruptConversationParams;
|
||||
use codex_app_server_protocol::ListConversationsParams;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::RemoveConversationListenerParams;
|
||||
use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::SetDefaultModelParams;
|
||||
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use std::process::Command as StdCommand;
|
||||
use tokio::process::Command;
|
||||
|
||||
pub struct McpProcess {
|
||||
next_request_id: AtomicI64,
|
||||
/// Retain this child process until the client is dropped. The Tokio runtime
|
||||
/// will make a "best effort" to reap the process after it exits, but it is
|
||||
/// not a guarantee. See the `kill_on_drop` documentation for details.
|
||||
#[allow(dead_code)]
|
||||
process: Child,
|
||||
stdin: ChildStdin,
|
||||
stdout: BufReader<ChildStdout>,
|
||||
pending_user_messages: VecDeque<JSONRPCNotification>,
|
||||
}
|
||||
|
||||
impl McpProcess {
|
||||
pub async fn new(codex_home: &Path) -> anyhow::Result<Self> {
|
||||
Self::new_with_env(codex_home, &[]).await
|
||||
}
|
||||
|
||||
/// Creates a new MCP process, allowing tests to override or remove
|
||||
/// specific environment variables for the child process only.
|
||||
///
|
||||
/// Pass a tuple of (key, Some(value)) to set/override, or (key, None) to
|
||||
/// remove a variable from the child's environment.
|
||||
pub async fn new_with_env(
|
||||
codex_home: &Path,
|
||||
env_overrides: &[(&str, Option<&str>)],
|
||||
) -> anyhow::Result<Self> {
|
||||
// Use assert_cmd to locate the binary path and then switch to tokio::process::Command
|
||||
let std_cmd = StdCommand::cargo_bin("codex-app-server")
|
||||
.context("should find binary for codex-mcp-server")?;
|
||||
|
||||
let program = std_cmd.get_program().to_owned();
|
||||
|
||||
let mut cmd = Command::new(program);
|
||||
|
||||
cmd.stdin(Stdio::piped());
|
||||
cmd.stdout(Stdio::piped());
|
||||
cmd.stderr(Stdio::piped());
|
||||
cmd.env("CODEX_HOME", codex_home);
|
||||
cmd.env("RUST_LOG", "debug");
|
||||
|
||||
for (k, v) in env_overrides {
|
||||
match v {
|
||||
Some(val) => {
|
||||
cmd.env(k, val);
|
||||
}
|
||||
None => {
|
||||
cmd.env_remove(k);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut process = cmd
|
||||
.kill_on_drop(true)
|
||||
.spawn()
|
||||
.context("codex-mcp-server proc should start")?;
|
||||
let stdin = process
|
||||
.stdin
|
||||
.take()
|
||||
.ok_or_else(|| anyhow::format_err!("mcp should have stdin fd"))?;
|
||||
let stdout = process
|
||||
.stdout
|
||||
.take()
|
||||
.ok_or_else(|| anyhow::format_err!("mcp should have stdout fd"))?;
|
||||
let stdout = BufReader::new(stdout);
|
||||
|
||||
// Forward child's stderr to our stderr so failures are visible even
|
||||
// when stdout/stderr are captured by the test harness.
|
||||
if let Some(stderr) = process.stderr.take() {
|
||||
let mut stderr_reader = BufReader::new(stderr).lines();
|
||||
tokio::spawn(async move {
|
||||
while let Ok(Some(line)) = stderr_reader.next_line().await {
|
||||
eprintln!("[mcp stderr] {line}");
|
||||
}
|
||||
});
|
||||
}
|
||||
Ok(Self {
|
||||
next_request_id: AtomicI64::new(0),
|
||||
process,
|
||||
stdin,
|
||||
stdout,
|
||||
pending_user_messages: VecDeque::new(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Performs the initialization handshake with the MCP server.
|
||||
pub async fn initialize(&mut self) -> anyhow::Result<()> {
|
||||
let params = Some(serde_json::to_value(InitializeParams {
|
||||
client_info: ClientInfo {
|
||||
name: "codex-app-server-tests".to_string(),
|
||||
title: None,
|
||||
version: "0.1.0".to_string(),
|
||||
},
|
||||
})?);
|
||||
let req_id = self.send_request("initialize", params).await?;
|
||||
let initialized = self.read_jsonrpc_message().await?;
|
||||
let JSONRPCMessage::Response(response) = initialized else {
|
||||
unreachable!("expected JSONRPCMessage::Response for initialize, got {initialized:?}");
|
||||
};
|
||||
if response.id != RequestId::Integer(req_id) {
|
||||
anyhow::bail!(
|
||||
"initialize response id mismatch: expected {}, got {:?}",
|
||||
req_id,
|
||||
response.id
|
||||
);
|
||||
}
|
||||
|
||||
// Send notifications/initialized to ack the response.
|
||||
self.send_notification(ClientNotification::Initialized)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Send a `newConversation` JSON-RPC request.
|
||||
pub async fn send_new_conversation_request(
|
||||
&mut self,
|
||||
params: NewConversationParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("newConversation", params).await
|
||||
}
|
||||
|
||||
/// Send an `archiveConversation` JSON-RPC request.
|
||||
pub async fn send_archive_conversation_request(
|
||||
&mut self,
|
||||
params: ArchiveConversationParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("archiveConversation", params).await
|
||||
}
|
||||
|
||||
/// Send an `addConversationListener` JSON-RPC request.
|
||||
pub async fn send_add_conversation_listener_request(
|
||||
&mut self,
|
||||
params: AddConversationListenerParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("addConversationListener", params).await
|
||||
}
|
||||
|
||||
/// Send a `sendUserMessage` JSON-RPC request with a single text item.
|
||||
pub async fn send_send_user_message_request(
|
||||
&mut self,
|
||||
params: SendUserMessageParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
// Wire format expects variants in camelCase; text item uses external tagging.
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("sendUserMessage", params).await
|
||||
}
|
||||
|
||||
/// Send a `removeConversationListener` JSON-RPC request.
|
||||
pub async fn send_remove_conversation_listener_request(
|
||||
&mut self,
|
||||
params: RemoveConversationListenerParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("removeConversationListener", params)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Send a `sendUserTurn` JSON-RPC request.
|
||||
pub async fn send_send_user_turn_request(
|
||||
&mut self,
|
||||
params: SendUserTurnParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("sendUserTurn", params).await
|
||||
}
|
||||
|
||||
/// Send a `interruptConversation` JSON-RPC request.
|
||||
pub async fn send_interrupt_conversation_request(
|
||||
&mut self,
|
||||
params: InterruptConversationParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("interruptConversation", params).await
|
||||
}
|
||||
|
||||
/// Send a `getAuthStatus` JSON-RPC request.
|
||||
pub async fn send_get_auth_status_request(
|
||||
&mut self,
|
||||
params: GetAuthStatusParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("getAuthStatus", params).await
|
||||
}
|
||||
|
||||
/// Send a `getUserSavedConfig` JSON-RPC request.
|
||||
pub async fn send_get_user_saved_config_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("getUserSavedConfig", None).await
|
||||
}
|
||||
|
||||
/// Send a `getUserAgent` JSON-RPC request.
|
||||
pub async fn send_get_user_agent_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("getUserAgent", None).await
|
||||
}
|
||||
|
||||
/// Send a `userInfo` JSON-RPC request.
|
||||
pub async fn send_user_info_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("userInfo", None).await
|
||||
}
|
||||
|
||||
/// Send a `setDefaultModel` JSON-RPC request.
|
||||
pub async fn send_set_default_model_request(
|
||||
&mut self,
|
||||
params: SetDefaultModelParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("setDefaultModel", params).await
|
||||
}
|
||||
|
||||
/// Send a `listConversations` JSON-RPC request.
|
||||
pub async fn send_list_conversations_request(
|
||||
&mut self,
|
||||
params: ListConversationsParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("listConversations", params).await
|
||||
}
|
||||
|
||||
/// Send a `resumeConversation` JSON-RPC request.
|
||||
pub async fn send_resume_conversation_request(
|
||||
&mut self,
|
||||
params: ResumeConversationParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("resumeConversation", params).await
|
||||
}
|
||||
|
||||
/// Send a `loginApiKey` JSON-RPC request.
|
||||
pub async fn send_login_api_key_request(
|
||||
&mut self,
|
||||
params: LoginApiKeyParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("loginApiKey", params).await
|
||||
}
|
||||
|
||||
/// Send a `loginChatGpt` JSON-RPC request.
|
||||
pub async fn send_login_chat_gpt_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("loginChatGpt", None).await
|
||||
}
|
||||
|
||||
/// Send a `cancelLoginChatGpt` JSON-RPC request.
|
||||
pub async fn send_cancel_login_chat_gpt_request(
|
||||
&mut self,
|
||||
params: CancelLoginChatGptParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("cancelLoginChatGpt", params).await
|
||||
}
|
||||
|
||||
/// Send a `logoutChatGpt` JSON-RPC request.
|
||||
pub async fn send_logout_chat_gpt_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("logoutChatGpt", None).await
|
||||
}
|
||||
|
||||
/// Send a `fuzzyFileSearch` JSON-RPC request.
|
||||
pub async fn send_fuzzy_file_search_request(
|
||||
&mut self,
|
||||
query: &str,
|
||||
roots: Vec<String>,
|
||||
cancellation_token: Option<String>,
|
||||
) -> anyhow::Result<i64> {
|
||||
let mut params = serde_json::json!({
|
||||
"query": query,
|
||||
"roots": roots,
|
||||
});
|
||||
if let Some(token) = cancellation_token {
|
||||
params["cancellationToken"] = serde_json::json!(token);
|
||||
}
|
||||
self.send_request("fuzzyFileSearch", Some(params)).await
|
||||
}
|
||||
|
||||
async fn send_request(
|
||||
&mut self,
|
||||
method: &str,
|
||||
params: Option<serde_json::Value>,
|
||||
) -> anyhow::Result<i64> {
|
||||
let request_id = self.next_request_id.fetch_add(1, Ordering::Relaxed);
|
||||
|
||||
let message = JSONRPCMessage::Request(JSONRPCRequest {
|
||||
id: RequestId::Integer(request_id),
|
||||
method: method.to_string(),
|
||||
params,
|
||||
});
|
||||
self.send_jsonrpc_message(message).await?;
|
||||
Ok(request_id)
|
||||
}
|
||||
|
||||
pub async fn send_response(
|
||||
&mut self,
|
||||
id: RequestId,
|
||||
result: serde_json::Value,
|
||||
) -> anyhow::Result<()> {
|
||||
self.send_jsonrpc_message(JSONRPCMessage::Response(JSONRPCResponse { id, result }))
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn send_notification(
|
||||
&mut self,
|
||||
notification: ClientNotification,
|
||||
) -> anyhow::Result<()> {
|
||||
let value = serde_json::to_value(notification)?;
|
||||
self.send_jsonrpc_message(JSONRPCMessage::Notification(JSONRPCNotification {
|
||||
method: value
|
||||
.get("method")
|
||||
.and_then(|m| m.as_str())
|
||||
.ok_or_else(|| anyhow::format_err!("notification missing method field"))?
|
||||
.to_string(),
|
||||
params: value.get("params").cloned(),
|
||||
}))
|
||||
.await
|
||||
}
|
||||
|
||||
async fn send_jsonrpc_message(&mut self, message: JSONRPCMessage) -> anyhow::Result<()> {
|
||||
eprintln!("writing message to stdin: {message:?}");
|
||||
let payload = serde_json::to_string(&message)?;
|
||||
self.stdin.write_all(payload.as_bytes()).await?;
|
||||
self.stdin.write_all(b"\n").await?;
|
||||
self.stdin.flush().await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn read_jsonrpc_message(&mut self) -> anyhow::Result<JSONRPCMessage> {
|
||||
let mut line = String::new();
|
||||
self.stdout.read_line(&mut line).await?;
|
||||
let message = serde_json::from_str::<JSONRPCMessage>(&line)?;
|
||||
eprintln!("read message from stdout: {message:?}");
|
||||
Ok(message)
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_request_message(&mut self) -> anyhow::Result<ServerRequest> {
|
||||
eprintln!("in read_stream_until_request_message()");
|
||||
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
eprintln!("notification: {notification:?}");
|
||||
self.enqueue_user_message(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(jsonrpc_request) => {
|
||||
return jsonrpc_request.try_into().with_context(
|
||||
|| "failed to deserialize ServerRequest from JSONRPCRequest",
|
||||
);
|
||||
}
|
||||
JSONRPCMessage::Error(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_response_message(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
) -> anyhow::Result<JSONRPCResponse> {
|
||||
eprintln!("in read_stream_until_response_message({request_id:?})");
|
||||
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
eprintln!("notification: {notification:?}");
|
||||
self.enqueue_user_message(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Error(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(jsonrpc_response) => {
|
||||
if jsonrpc_response.id == request_id {
|
||||
return Ok(jsonrpc_response);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_error_message(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
) -> anyhow::Result<JSONRPCError> {
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
eprintln!("notification: {notification:?}");
|
||||
self.enqueue_user_message(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(_) => {
|
||||
// Keep scanning; we're waiting for an error with matching id.
|
||||
}
|
||||
JSONRPCMessage::Error(err) => {
|
||||
if err.id == request_id {
|
||||
return Ok(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_notification_message(
|
||||
&mut self,
|
||||
method: &str,
|
||||
) -> anyhow::Result<JSONRPCNotification> {
|
||||
eprintln!("in read_stream_until_notification_message({method})");
|
||||
|
||||
if let Some(notification) = self.take_pending_notification_by_method(method) {
|
||||
return Ok(notification);
|
||||
}
|
||||
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
if notification.method == method {
|
||||
return Ok(notification);
|
||||
}
|
||||
self.enqueue_user_message(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Error(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn take_pending_notification_by_method(&mut self, method: &str) -> Option<JSONRPCNotification> {
|
||||
if let Some(pos) = self
|
||||
.pending_user_messages
|
||||
.iter()
|
||||
.position(|notification| notification.method == method)
|
||||
{
|
||||
return self.pending_user_messages.remove(pos);
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn enqueue_user_message(&mut self, notification: JSONRPCNotification) {
|
||||
if notification.method == "codex/event/user_message" {
|
||||
self.pending_user_messages.push_back(notification);
|
||||
}
|
||||
}
|
||||
}
|
||||
47
codex-rs/app-server/tests/common/mock_model_server.rs
Normal file
47
codex-rs/app-server/tests/common/mock_model_server.rs
Normal file
@@ -0,0 +1,47 @@
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::Respond;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
|
||||
/// Create a mock server that will provide the responses, in order, for
|
||||
/// requests to the `/v1/chat/completions` endpoint.
|
||||
pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> MockServer {
|
||||
let server = MockServer::start().await;
|
||||
|
||||
let num_calls = responses.len();
|
||||
let seq_responder = SeqResponder {
|
||||
num_calls: AtomicUsize::new(0),
|
||||
responses,
|
||||
};
|
||||
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/v1/chat/completions"))
|
||||
.respond_with(seq_responder)
|
||||
.expect(num_calls as u64)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
server
|
||||
}
|
||||
|
||||
struct SeqResponder {
|
||||
num_calls: AtomicUsize,
|
||||
responses: Vec<String>,
|
||||
}
|
||||
|
||||
impl Respond for SeqResponder {
|
||||
fn respond(&self, _: &wiremock::Request) -> ResponseTemplate {
|
||||
let call_num = self.num_calls.fetch_add(1, Ordering::SeqCst);
|
||||
match self.responses.get(call_num) {
|
||||
Some(response) => ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "text/event-stream")
|
||||
.set_body_raw(response.clone(), "text/event-stream"),
|
||||
None => panic!("no response for {call_num}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
95
codex-rs/app-server/tests/common/responses.rs
Normal file
95
codex-rs/app-server/tests/common/responses.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
use serde_json::json;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn create_shell_sse_response(
|
||||
command: Vec<String>,
|
||||
workdir: Option<&Path>,
|
||||
timeout_ms: Option<u64>,
|
||||
call_id: &str,
|
||||
) -> anyhow::Result<String> {
|
||||
// The `arguments`` for the `shell` tool is a serialized JSON object.
|
||||
let tool_call_arguments = serde_json::to_string(&json!({
|
||||
"command": command,
|
||||
"workdir": workdir.map(|w| w.to_string_lossy()),
|
||||
"timeout": timeout_ms
|
||||
}))?;
|
||||
let tool_call = json!({
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": call_id,
|
||||
"function": {
|
||||
"name": "shell",
|
||||
"arguments": tool_call_arguments
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": "tool_calls"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
let sse = format!(
|
||||
"data: {}\n\ndata: DONE\n\n",
|
||||
serde_json::to_string(&tool_call)?
|
||||
);
|
||||
Ok(sse)
|
||||
}
|
||||
|
||||
pub fn create_final_assistant_message_sse_response(message: &str) -> anyhow::Result<String> {
|
||||
let assistant_message = json!({
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": message
|
||||
},
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
let sse = format!(
|
||||
"data: {}\n\ndata: DONE\n\n",
|
||||
serde_json::to_string(&assistant_message)?
|
||||
);
|
||||
Ok(sse)
|
||||
}
|
||||
|
||||
pub fn create_apply_patch_sse_response(
|
||||
patch_content: &str,
|
||||
call_id: &str,
|
||||
) -> anyhow::Result<String> {
|
||||
// Use shell command to call apply_patch with heredoc format
|
||||
let shell_command = format!("apply_patch <<'EOF'\n{patch_content}\nEOF");
|
||||
let tool_call_arguments = serde_json::to_string(&json!({
|
||||
"command": ["bash", "-lc", shell_command]
|
||||
}))?;
|
||||
|
||||
let tool_call = json!({
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": call_id,
|
||||
"function": {
|
||||
"name": "shell",
|
||||
"arguments": tool_call_arguments
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": "tool_calls"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
let sse = format!(
|
||||
"data: {}\n\ndata: DONE\n\n",
|
||||
serde_json::to_string(&tool_call)?
|
||||
);
|
||||
Ok(sse)
|
||||
}
|
||||
105
codex-rs/app-server/tests/suite/archive_conversation.rs
Normal file
105
codex-rs/app-server/tests/suite/archive_conversation.rs
Normal file
@@ -0,0 +1,105 @@
|
||||
use std::path::Path;
|
||||
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::ArchiveConversationParams;
|
||||
use codex_app_server_protocol::ArchiveConversationResponse;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn archive_conversation_moves_rollout_into_archived_directory() {
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
create_config_toml(codex_home.path()).expect("write config.toml");
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("initialize timeout")
|
||||
.expect("initialize request");
|
||||
|
||||
let new_request_id = mcp
|
||||
.send_new_conversation_request(NewConversationParams {
|
||||
model: Some("mock-model".to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.expect("send newConversation");
|
||||
let new_response: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("newConversation timeout")
|
||||
.expect("newConversation response");
|
||||
|
||||
let NewConversationResponse {
|
||||
conversation_id,
|
||||
rollout_path,
|
||||
..
|
||||
} = to_response::<NewConversationResponse>(new_response)
|
||||
.expect("deserialize newConversation response");
|
||||
|
||||
assert!(
|
||||
rollout_path.exists(),
|
||||
"expected rollout path {} to exist",
|
||||
rollout_path.display()
|
||||
);
|
||||
|
||||
let archive_request_id = mcp
|
||||
.send_archive_conversation_request(ArchiveConversationParams {
|
||||
conversation_id,
|
||||
rollout_path: rollout_path.clone(),
|
||||
})
|
||||
.await
|
||||
.expect("send archiveConversation");
|
||||
let archive_response: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(archive_request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("archiveConversation timeout")
|
||||
.expect("archiveConversation response");
|
||||
|
||||
let _: ArchiveConversationResponse =
|
||||
to_response::<ArchiveConversationResponse>(archive_response)
|
||||
.expect("deserialize archiveConversation response");
|
||||
|
||||
let archived_directory = codex_home.path().join(ARCHIVED_SESSIONS_SUBDIR);
|
||||
let archived_rollout_path =
|
||||
archived_directory.join(rollout_path.file_name().unwrap_or_else(|| {
|
||||
panic!("rollout path {} missing file name", rollout_path.display())
|
||||
}));
|
||||
|
||||
assert!(
|
||||
!rollout_path.exists(),
|
||||
"expected rollout path {} to be moved",
|
||||
rollout_path.display()
|
||||
);
|
||||
assert!(
|
||||
archived_rollout_path.exists(),
|
||||
"expected archived rollout path {} to exist",
|
||||
archived_rollout_path.display()
|
||||
);
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(config_toml, config_contents())
|
||||
}
|
||||
|
||||
fn config_contents() -> &'static str {
|
||||
r#"model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "read-only"
|
||||
"#
|
||||
}
|
||||
@@ -1,24 +1,31 @@
|
||||
use std::path::Path;
|
||||
|
||||
use codex_core::auth::login_with_api_key;
|
||||
use codex_protocol::mcp_protocol::AuthMode;
|
||||
use codex_protocol::mcp_protocol::GetAuthStatusParams;
|
||||
use codex_protocol::mcp_protocol::GetAuthStatusResponse;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::to_response;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::GetAuthStatusParams;
|
||||
use codex_app_server_protocol::GetAuthStatusResponse;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::LoginApiKeyResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
// Helper to create a config.toml; mirrors create_conversation.rs
|
||||
fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
|
||||
fn create_config_toml_custom_provider(
|
||||
codex_home: &Path,
|
||||
requires_openai_auth: bool,
|
||||
) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
let requires_line = if requires_openai_auth {
|
||||
"requires_openai_auth = true\n"
|
||||
} else {
|
||||
""
|
||||
};
|
||||
let contents = format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
@@ -32,16 +39,49 @@ base_url = "http://127.0.0.1:0/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
{requires_line}
|
||||
"#
|
||||
);
|
||||
std::fs::write(config_toml, contents)
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "danger-full-access"
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
async fn login_with_api_key_via_request(mcp: &mut McpProcess, api_key: &str) {
|
||||
let request_id = mcp
|
||||
.send_login_api_key_request(LoginApiKeyParams {
|
||||
api_key: api_key.to_string(),
|
||||
})
|
||||
.await
|
||||
.unwrap_or_else(|e| panic!("send loginApiKey: {e}"));
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.unwrap_or_else(|e| panic!("loginApiKey timeout: {e}"))
|
||||
.unwrap_or_else(|e| panic!("loginApiKey response: {e}"));
|
||||
let _: LoginApiKeyResponse =
|
||||
to_response(resp).unwrap_or_else(|e| panic!("deserialize login response: {e}"));
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_auth_status_no_auth() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).expect("write config.toml");
|
||||
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
@@ -72,8 +112,7 @@ async fn get_auth_status_no_auth() {
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_auth_status_with_api_key() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).expect("write config.toml");
|
||||
login_with_api_key(codex_home.path(), "sk-test-key").expect("seed api key");
|
||||
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
@@ -83,6 +122,8 @@ async fn get_auth_status_with_api_key() {
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
|
||||
login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_auth_status_request(GetAuthStatusParams {
|
||||
include_token: Some(true),
|
||||
@@ -101,14 +142,13 @@ async fn get_auth_status_with_api_key() {
|
||||
let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
|
||||
assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
|
||||
assert_eq!(status.auth_token, Some("sk-test-key".to_string()));
|
||||
assert_eq!(status.preferred_auth_method, AuthMode::ChatGPT);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_auth_status_with_api_key_no_include_token() {
|
||||
async fn get_auth_status_with_api_key_when_auth_not_required() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).expect("write config.toml");
|
||||
login_with_api_key(codex_home.path(), "sk-test-key").expect("seed api key");
|
||||
create_config_toml_custom_provider(codex_home.path(), false)
|
||||
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
@@ -118,6 +158,48 @@ async fn get_auth_status_with_api_key_no_include_token() {
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
|
||||
login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
|
||||
|
||||
let request_id = mcp
|
||||
.send_get_auth_status_request(GetAuthStatusParams {
|
||||
include_token: Some(true),
|
||||
refresh_token: Some(false),
|
||||
})
|
||||
.await
|
||||
.expect("send getAuthStatus");
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("getAuthStatus timeout")
|
||||
.expect("getAuthStatus response");
|
||||
let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
|
||||
assert_eq!(status.auth_method, None, "expected no auth method");
|
||||
assert_eq!(status.auth_token, None, "expected no token");
|
||||
assert_eq!(
|
||||
status.requires_openai_auth,
|
||||
Some(false),
|
||||
"requires_openai_auth should be false",
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn get_auth_status_with_api_key_no_include_token() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
|
||||
login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
|
||||
|
||||
// Build params via struct so None field is omitted in wire JSON.
|
||||
let params = GetAuthStatusParams {
|
||||
include_token: None,
|
||||
@@ -138,5 +220,4 @@ async fn get_auth_status_with_api_key_no_include_token() {
|
||||
let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
|
||||
assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
|
||||
assert!(status.auth_token.is_none(), "token must be omitted");
|
||||
assert_eq!(status.preferred_auth_method, AuthMode::ChatGPT);
|
||||
}
|
||||
@@ -1,29 +1,35 @@
|
||||
use std::path::Path;
|
||||
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::create_shell_sse_response;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_app_server_protocol::ExecCommandApprovalParams;
|
||||
use codex_app_server_protocol::InputItem;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RemoveConversationListenerParams;
|
||||
use codex_app_server_protocol::RemoveConversationSubscriptionResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::SendUserTurnResponse;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
use codex_core::protocol::SandboxPolicy;
|
||||
use codex_core::protocol_config_types::ReasoningEffort;
|
||||
use codex_core::protocol_config_types::ReasoningSummary;
|
||||
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_protocol::mcp_protocol::AddConversationListenerParams;
|
||||
use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_protocol::mcp_protocol::EXEC_COMMAND_APPROVAL_METHOD;
|
||||
use codex_protocol::mcp_protocol::NewConversationParams;
|
||||
use codex_protocol::mcp_protocol::NewConversationResponse;
|
||||
use codex_protocol::mcp_protocol::RemoveConversationListenerParams;
|
||||
use codex_protocol::mcp_protocol::RemoveConversationSubscriptionResponse;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageParams;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageResponse;
|
||||
use codex_protocol::mcp_protocol::SendUserTurnParams;
|
||||
use codex_protocol::mcp_protocol::SendUserTurnResponse;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_final_assistant_message_sse_response;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_test_support::create_shell_sse_response;
|
||||
use mcp_test_support::to_response;
|
||||
use mcp_types::JSONRPCNotification;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::InputMessageKind;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
@@ -90,6 +96,8 @@ async fn test_codex_jsonrpc_conversation_flow() {
|
||||
let NewConversationResponse {
|
||||
conversation_id,
|
||||
model,
|
||||
reasoning_effort: _,
|
||||
rollout_path: _,
|
||||
} = new_conv_resp;
|
||||
assert_eq!(model, "mock-model");
|
||||
|
||||
@@ -113,7 +121,7 @@ async fn test_codex_jsonrpc_conversation_flow() {
|
||||
let send_user_id = mcp
|
||||
.send_send_user_message_request(SendUserMessageParams {
|
||||
conversation_id,
|
||||
items: vec![codex_protocol::mcp_protocol::InputItem::Text {
|
||||
items: vec![codex_app_server_protocol::InputItem::Text {
|
||||
text: "text".to_string(),
|
||||
}],
|
||||
})
|
||||
@@ -263,7 +271,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
let send_user_id = mcp
|
||||
.send_send_user_message_request(SendUserMessageParams {
|
||||
conversation_id,
|
||||
items: vec![codex_protocol::mcp_protocol::InputItem::Text {
|
||||
items: vec![codex_app_server_protocol::InputItem::Text {
|
||||
text: "run python".to_string(),
|
||||
}],
|
||||
})
|
||||
@@ -288,11 +296,28 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
.await
|
||||
.expect("waiting for exec approval request timeout")
|
||||
.expect("exec approval request");
|
||||
assert_eq!(request.method, EXEC_COMMAND_APPROVAL_METHOD);
|
||||
let ServerRequest::ExecCommandApproval { request_id, params } = request else {
|
||||
panic!("expected ExecCommandApproval request, got: {request:?}");
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
ExecCommandApprovalParams {
|
||||
conversation_id,
|
||||
call_id: "call1".to_string(),
|
||||
command: vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
cwd: working_directory.clone(),
|
||||
reason: None,
|
||||
},
|
||||
params
|
||||
);
|
||||
|
||||
// Approve so the first turn can complete
|
||||
mcp.send_response(
|
||||
request.id,
|
||||
request_id,
|
||||
serde_json::json!({ "decision": codex_core::protocol::ReviewDecision::Approved }),
|
||||
)
|
||||
.await
|
||||
@@ -311,14 +336,14 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
let send_turn_id = mcp
|
||||
.send_send_user_turn_request(SendUserTurnParams {
|
||||
conversation_id,
|
||||
items: vec![codex_protocol::mcp_protocol::InputItem::Text {
|
||||
items: vec![codex_app_server_protocol::InputItem::Text {
|
||||
text: "run python again".to_string(),
|
||||
}],
|
||||
cwd: working_directory.clone(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: SandboxPolicy::new_read_only_policy(),
|
||||
model: "mock-model".to_string(),
|
||||
effort: ReasoningEffort::Medium,
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
})
|
||||
.await
|
||||
@@ -347,6 +372,234 @@ async fn test_send_user_turn_changes_approval_policy_behavior() {
|
||||
}
|
||||
|
||||
// Helper: minimal config.toml pointing at mock provider.
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
|
||||
async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() {
|
||||
if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!(
|
||||
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let tmp = TempDir::new().expect("tmp dir");
|
||||
let codex_home = tmp.path().join("codex_home");
|
||||
std::fs::create_dir(&codex_home).expect("create codex home dir");
|
||||
let workspace_root = tmp.path().join("workspace");
|
||||
std::fs::create_dir(&workspace_root).expect("create workspace root");
|
||||
let first_cwd = workspace_root.join("turn1");
|
||||
let second_cwd = workspace_root.join("turn2");
|
||||
std::fs::create_dir(&first_cwd).expect("create first cwd");
|
||||
std::fs::create_dir(&second_cwd).expect("create second cwd");
|
||||
|
||||
let responses = vec![
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo first turn".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call-first",
|
||||
)
|
||||
.expect("create first shell response"),
|
||||
create_final_assistant_message_sse_response("done first")
|
||||
.expect("create first final assistant message"),
|
||||
create_shell_sse_response(
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo second turn".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call-second",
|
||||
)
|
||||
.expect("create second shell response"),
|
||||
create_final_assistant_message_sse_response("done second")
|
||||
.expect("create second final assistant message"),
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
create_config_toml(&codex_home, &server.uri()).expect("write config");
|
||||
|
||||
let mut mcp = McpProcess::new(&codex_home)
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
|
||||
let new_conv_id = mcp
|
||||
.send_new_conversation_request(NewConversationParams {
|
||||
cwd: Some(first_cwd.to_string_lossy().into_owned()),
|
||||
approval_policy: Some(AskForApproval::Never),
|
||||
sandbox: Some(SandboxMode::WorkspaceWrite),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.expect("send newConversation");
|
||||
let new_conv_resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(new_conv_id)),
|
||||
)
|
||||
.await
|
||||
.expect("newConversation timeout")
|
||||
.expect("newConversation resp");
|
||||
let NewConversationResponse {
|
||||
conversation_id,
|
||||
model,
|
||||
..
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)
|
||||
.expect("deserialize newConversation response");
|
||||
|
||||
let add_listener_id = mcp
|
||||
.send_add_conversation_listener_request(AddConversationListenerParams { conversation_id })
|
||||
.await
|
||||
.expect("send addConversationListener");
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(add_listener_id)),
|
||||
)
|
||||
.await
|
||||
.expect("addConversationListener timeout")
|
||||
.expect("addConversationListener resp");
|
||||
|
||||
let first_turn_id = mcp
|
||||
.send_send_user_turn_request(SendUserTurnParams {
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "first turn".to_string(),
|
||||
}],
|
||||
cwd: first_cwd.clone(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: SandboxPolicy::WorkspaceWrite {
|
||||
writable_roots: vec![first_cwd.clone()],
|
||||
network_access: false,
|
||||
exclude_tmpdir_env_var: false,
|
||||
exclude_slash_tmp: false,
|
||||
},
|
||||
model: model.clone(),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
})
|
||||
.await
|
||||
.expect("send first sendUserTurn");
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(first_turn_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserTurn 1 timeout")
|
||||
.expect("sendUserTurn 1 resp");
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await
|
||||
.expect("task_complete 1 timeout")
|
||||
.expect("task_complete 1 notification");
|
||||
|
||||
let second_turn_id = mcp
|
||||
.send_send_user_turn_request(SendUserTurnParams {
|
||||
conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: "second turn".to_string(),
|
||||
}],
|
||||
cwd: second_cwd.clone(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: SandboxPolicy::DangerFullAccess,
|
||||
model: model.clone(),
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
summary: ReasoningSummary::Auto,
|
||||
})
|
||||
.await
|
||||
.expect("send second sendUserTurn");
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(second_turn_id)),
|
||||
)
|
||||
.await
|
||||
.expect("sendUserTurn 2 timeout")
|
||||
.expect("sendUserTurn 2 resp");
|
||||
|
||||
let mut env_message: Option<String> = None;
|
||||
let second_cwd_str = second_cwd.to_string_lossy().into_owned();
|
||||
for _ in 0..10 {
|
||||
let notification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/user_message"),
|
||||
)
|
||||
.await
|
||||
.expect("user_message timeout")
|
||||
.expect("user_message notification");
|
||||
let params = notification
|
||||
.params
|
||||
.clone()
|
||||
.expect("user_message should include params");
|
||||
let event: Event = serde_json::from_value(params).expect("deserialize user_message event");
|
||||
if let EventMsg::UserMessage(user) = event.msg
|
||||
&& matches!(user.kind, Some(InputMessageKind::EnvironmentContext))
|
||||
&& user.message.contains(&second_cwd_str)
|
||||
{
|
||||
env_message = Some(user.message);
|
||||
break;
|
||||
}
|
||||
}
|
||||
let env_message = env_message.expect("expected environment context update");
|
||||
assert!(
|
||||
env_message.contains("<sandbox_mode>danger-full-access</sandbox_mode>"),
|
||||
"env context should reflect new sandbox mode: {env_message}"
|
||||
);
|
||||
assert!(
|
||||
env_message.contains("<network_access>enabled</network_access>"),
|
||||
"env context should enable network access for danger-full-access policy: {env_message}"
|
||||
);
|
||||
assert!(
|
||||
env_message.contains(&second_cwd_str),
|
||||
"env context should include updated cwd: {env_message}"
|
||||
);
|
||||
|
||||
let exec_begin_notification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/exec_command_begin"),
|
||||
)
|
||||
.await
|
||||
.expect("exec_command_begin timeout")
|
||||
.expect("exec_command_begin notification");
|
||||
let params = exec_begin_notification
|
||||
.params
|
||||
.clone()
|
||||
.expect("exec_command_begin params");
|
||||
let event: Event = serde_json::from_value(params).expect("deserialize exec begin event");
|
||||
let exec_begin = match event.msg {
|
||||
EventMsg::ExecCommandBegin(exec_begin) => exec_begin,
|
||||
other => panic!("expected ExecCommandBegin event, got {other:?}"),
|
||||
};
|
||||
assert_eq!(
|
||||
exec_begin.cwd, second_cwd,
|
||||
"exec turn should run from updated cwd"
|
||||
);
|
||||
assert_eq!(
|
||||
exec_begin.command,
|
||||
vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"echo second turn".to_string()
|
||||
],
|
||||
"exec turn should run expected command"
|
||||
);
|
||||
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event/task_complete"),
|
||||
)
|
||||
.await
|
||||
.expect("task_complete 2 timeout")
|
||||
.expect("task_complete 2 notification");
|
||||
}
|
||||
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
@@ -1,20 +1,20 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::GetUserSavedConfigResponse;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::Profile;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SandboxSettings;
|
||||
use codex_app_server_protocol::Tools;
|
||||
use codex_app_server_protocol::UserSavedConfig;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use codex_protocol::mcp_protocol::GetUserSavedConfigResponse;
|
||||
use codex_protocol::mcp_protocol::Profile;
|
||||
use codex_protocol::mcp_protocol::SandboxSettings;
|
||||
use codex_protocol::mcp_protocol::Tools;
|
||||
use codex_protocol::mcp_protocol::UserSavedConfig;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::to_response;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
@@ -26,7 +26,7 @@ fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
r#"
|
||||
model = "gpt-5"
|
||||
model = "gpt-5-codex"
|
||||
approval_policy = "on-request"
|
||||
sandbox_mode = "workspace-write"
|
||||
model_reasoning_summary = "detailed"
|
||||
@@ -92,7 +92,7 @@ async fn get_config_toml_parses_all_fields() {
|
||||
exclude_tmpdir_env_var: Some(true),
|
||||
exclude_slash_tmp: Some(true),
|
||||
}),
|
||||
model: Some("gpt-5".into()),
|
||||
model: Some("gpt-5-codex".into()),
|
||||
model_reasoning_effort: Some(ReasoningEffort::High),
|
||||
model_reasoning_summary: Some(ReasoningSummary::Detailed),
|
||||
model_verbosity: Some(Verbosity::Medium),
|
||||
@@ -1,18 +1,18 @@
|
||||
use std::path::Path;
|
||||
|
||||
use codex_protocol::mcp_protocol::AddConversationListenerParams;
|
||||
use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_protocol::mcp_protocol::InputItem;
|
||||
use codex_protocol::mcp_protocol::NewConversationParams;
|
||||
use codex_protocol::mcp_protocol::NewConversationResponse;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageParams;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageResponse;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_final_assistant_message_sse_response;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_test_support::to_response;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_app_server_protocol::InputItem;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
@@ -59,6 +59,8 @@ async fn test_conversation_create_and_send_message_ok() {
|
||||
let NewConversationResponse {
|
||||
conversation_id,
|
||||
model,
|
||||
reasoning_effort: _,
|
||||
rollout_path: _,
|
||||
} = to_response::<NewConversationResponse>(new_conv_resp)
|
||||
.expect("deserialize newConversation response");
|
||||
assert_eq!(model, "o3");
|
||||
146
codex-rs/app-server/tests/suite/fuzzy_file_search.rs
Normal file
146
codex-rs/app-server/tests/suite/fuzzy_file_search.rs
Normal file
@@ -0,0 +1,146 @@
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use app_test_support::McpProcess;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
|
||||
// Prepare a temporary Codex home and a separate root with test files.
|
||||
let codex_home = TempDir::new().context("create temp codex home")?;
|
||||
let root = TempDir::new().context("create temp search root")?;
|
||||
|
||||
// Create files designed to have deterministic ordering for query "abe".
|
||||
std::fs::write(root.path().join("abc"), "x").context("write file abc")?;
|
||||
std::fs::write(root.path().join("abcde"), "x").context("write file abcde")?;
|
||||
std::fs::write(root.path().join("abexy"), "x").context("write file abexy")?;
|
||||
std::fs::write(root.path().join("zzz.txt"), "x").context("write file zzz")?;
|
||||
let sub_dir = root.path().join("sub");
|
||||
std::fs::create_dir_all(&sub_dir).context("create sub dir")?;
|
||||
let sub_abce_path = sub_dir.join("abce");
|
||||
std::fs::write(&sub_abce_path, "x").context("write file sub/abce")?;
|
||||
let sub_abce_rel = sub_abce_path
|
||||
.strip_prefix(root.path())
|
||||
.context("strip root prefix from sub/abce")?
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
// Start MCP server and initialize.
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.context("spawn mcp")?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.context("init timeout")?
|
||||
.context("init failed")?;
|
||||
|
||||
let root_path = root.path().to_string_lossy().to_string();
|
||||
// Send fuzzyFileSearch request.
|
||||
let request_id = mcp
|
||||
.send_fuzzy_file_search_request("abe", vec![root_path.clone()], None)
|
||||
.await
|
||||
.context("send fuzzyFileSearch")?;
|
||||
|
||||
// Read response and verify shape and ordering.
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.context("fuzzyFileSearch timeout")?
|
||||
.context("fuzzyFileSearch resp")?;
|
||||
|
||||
let value = resp.result;
|
||||
// The path separator on Windows affects the score.
|
||||
let expected_score = if cfg!(windows) { 69 } else { 72 };
|
||||
|
||||
assert_eq!(
|
||||
value,
|
||||
json!({
|
||||
"files": [
|
||||
{
|
||||
"root": root_path.clone(),
|
||||
"path": "abexy",
|
||||
"file_name": "abexy",
|
||||
"score": 88,
|
||||
"indices": [0, 1, 2],
|
||||
},
|
||||
{
|
||||
"root": root_path.clone(),
|
||||
"path": "abcde",
|
||||
"file_name": "abcde",
|
||||
"score": 74,
|
||||
"indices": [0, 1, 4],
|
||||
},
|
||||
{
|
||||
"root": root_path.clone(),
|
||||
"path": sub_abce_rel,
|
||||
"file_name": "abce",
|
||||
"score": expected_score,
|
||||
"indices": [4, 5, 7],
|
||||
},
|
||||
]
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_fuzzy_file_search_accepts_cancellation_token() -> Result<()> {
|
||||
let codex_home = TempDir::new().context("create temp codex home")?;
|
||||
let root = TempDir::new().context("create temp search root")?;
|
||||
|
||||
std::fs::write(root.path().join("alpha.txt"), "contents").context("write alpha")?;
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.context("spawn mcp")?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.context("init timeout")?
|
||||
.context("init failed")?;
|
||||
|
||||
let root_path = root.path().to_string_lossy().to_string();
|
||||
let request_id = mcp
|
||||
.send_fuzzy_file_search_request("alp", vec![root_path.clone()], None)
|
||||
.await
|
||||
.context("send fuzzyFileSearch")?;
|
||||
|
||||
let request_id_2 = mcp
|
||||
.send_fuzzy_file_search_request(
|
||||
"alp",
|
||||
vec![root_path.clone()],
|
||||
Some(request_id.to_string()),
|
||||
)
|
||||
.await
|
||||
.context("send fuzzyFileSearch")?;
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id_2)),
|
||||
)
|
||||
.await
|
||||
.context("fuzzyFileSearch timeout")?
|
||||
.context("fuzzyFileSearch resp")?;
|
||||
|
||||
let files = resp
|
||||
.result
|
||||
.get("files")
|
||||
.context("files key missing")?
|
||||
.as_array()
|
||||
.context("files not array")?
|
||||
.clone();
|
||||
|
||||
assert_eq!(files.len(), 1);
|
||||
assert_eq!(files[0]["root"], root_path);
|
||||
assert_eq!(files[0]["path"], "alpha.txt");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,37 +1,32 @@
|
||||
#![cfg(unix)]
|
||||
// Support code lives in the `mcp_test_support` crate under tests/common.
|
||||
// Support code lives in the `app_test_support` crate under tests/common.
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::InterruptConversationParams;
|
||||
use codex_app_server_protocol::InterruptConversationResponse;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_core::protocol::TurnAbortReason;
|
||||
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_protocol::mcp_protocol::AddConversationListenerParams;
|
||||
use codex_protocol::mcp_protocol::InterruptConversationParams;
|
||||
use codex_protocol::mcp_protocol::InterruptConversationResponse;
|
||||
use codex_protocol::mcp_protocol::NewConversationParams;
|
||||
use codex_protocol::mcp_protocol::NewConversationResponse;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageParams;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageResponse;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_test_support::create_shell_sse_response;
|
||||
use mcp_test_support::to_response;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::create_shell_sse_response;
|
||||
use app_test_support::to_response;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_shell_command_interruption() {
|
||||
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!(
|
||||
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
|
||||
);
|
||||
return;
|
||||
}
|
||||
skip_if_no_network!();
|
||||
|
||||
if let Err(err) = shell_command_interruption().await {
|
||||
panic!("failure: {err}");
|
||||
@@ -105,7 +100,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
|
||||
let send_user_id = mcp
|
||||
.send_send_user_message_request(SendUserMessageParams {
|
||||
conversation_id,
|
||||
items: vec![codex_protocol::mcp_protocol::InputItem::Text {
|
||||
items: vec![codex_app_server_protocol::InputItem::Text {
|
||||
text: "run first sleep command".to_string(),
|
||||
}],
|
||||
})
|
||||
@@ -1,16 +1,18 @@
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use codex_protocol::mcp_protocol::ListConversationsParams;
|
||||
use codex_protocol::mcp_protocol::ListConversationsResponse;
|
||||
use codex_protocol::mcp_protocol::NewConversationParams; // reused for overrides shape
|
||||
use codex_protocol::mcp_protocol::ResumeConversationParams;
|
||||
use codex_protocol::mcp_protocol::ResumeConversationResponse;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::to_response;
|
||||
use mcp_types::JSONRPCNotification;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::ListConversationsParams;
|
||||
use codex_app_server_protocol::ListConversationsResponse;
|
||||
use codex_app_server_protocol::NewConversationParams; // reused for overrides shape
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::ResumeConversationResponse;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::SessionConfiguredNotification;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
@@ -111,23 +113,28 @@ async fn test_list_and_resume_conversations() {
|
||||
.await
|
||||
.expect("send resumeConversation");
|
||||
|
||||
// Expect a codex/event notification with msg.type == session_configured
|
||||
// Expect a codex/event notification with msg.type == sessionConfigured
|
||||
let notification: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_message("codex/event"),
|
||||
mcp.read_stream_until_notification_message("sessionConfigured"),
|
||||
)
|
||||
.await
|
||||
.expect("session_configured notification timeout")
|
||||
.expect("session_configured notification");
|
||||
// Basic shape assertion: ensure event type is session_configured
|
||||
let msg_type = notification
|
||||
.params
|
||||
.as_ref()
|
||||
.and_then(|p| p.get("msg"))
|
||||
.and_then(|m| m.get("type"))
|
||||
.and_then(|t| t.as_str())
|
||||
.unwrap_or("");
|
||||
assert_eq!(msg_type, "session_configured");
|
||||
.expect("sessionConfigured notification timeout")
|
||||
.expect("sessionConfigured notification");
|
||||
let session_configured: ServerNotification = notification
|
||||
.try_into()
|
||||
.expect("deserialize sessionConfigured notification");
|
||||
// Basic shape assertion: ensure event type is sessionConfigured
|
||||
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
|
||||
model,
|
||||
rollout_path,
|
||||
..
|
||||
}) = session_configured
|
||||
else {
|
||||
unreachable!("expected sessionConfigured notification");
|
||||
};
|
||||
assert_eq!(model, "o3");
|
||||
assert_eq!(items[0].path.clone(), rollout_path);
|
||||
|
||||
// Then the response for resumeConversation
|
||||
let resume_resp: JSONRPCResponse = timeout(
|
||||
@@ -142,7 +149,7 @@ async fn test_list_and_resume_conversations() {
|
||||
} = to_response::<ResumeConversationResponse>(resume_resp)
|
||||
.expect("deserialize resumeConversation response");
|
||||
// conversation id should be a valid UUID
|
||||
let _: uuid::Uuid = conversation_id.into();
|
||||
assert!(!conversation_id.to_string().is_empty());
|
||||
}
|
||||
|
||||
fn create_fake_rollout(codex_home: &Path, filename_ts: &str, meta_rfc3339: &str, preview: &str) {
|
||||
@@ -156,14 +163,45 @@ fn create_fake_rollout(codex_home: &Path, filename_ts: &str, meta_rfc3339: &str,
|
||||
|
||||
let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
|
||||
let mut lines = Vec::new();
|
||||
// Meta line with timestamp
|
||||
lines.push(json!({"timestamp": meta_rfc3339, "id": uuid}).to_string());
|
||||
// Minimal user message entry as a persisted response item
|
||||
// Meta line with timestamp (flattened meta in payload for new schema)
|
||||
lines.push(
|
||||
json!({
|
||||
"type":"message",
|
||||
"role":"user",
|
||||
"content":[{"type":"input_text","text": preview}]
|
||||
"timestamp": meta_rfc3339,
|
||||
"type": "session_meta",
|
||||
"payload": {
|
||||
"id": uuid,
|
||||
"timestamp": meta_rfc3339,
|
||||
"cwd": "/",
|
||||
"originator": "codex",
|
||||
"cli_version": "0.0.0",
|
||||
"instructions": null
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
// Minimal user message entry as a persisted response item (with envelope timestamp)
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"response_item",
|
||||
"payload": {
|
||||
"type":"message",
|
||||
"role":"user",
|
||||
"content":[{"type":"input_text","text": preview}]
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
// Add a matching user message event line to satisfy filters
|
||||
lines.push(
|
||||
json!({
|
||||
"timestamp": meta_rfc3339,
|
||||
"type":"event_msg",
|
||||
"payload": {
|
||||
"type":"user_message",
|
||||
"message": preview,
|
||||
"kind": "plain"
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
@@ -1,17 +1,17 @@
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
|
||||
use codex_core::auth::login_with_api_key;
|
||||
use codex_protocol::mcp_protocol::CancelLoginChatGptParams;
|
||||
use codex_protocol::mcp_protocol::CancelLoginChatGptResponse;
|
||||
use codex_protocol::mcp_protocol::GetAuthStatusParams;
|
||||
use codex_protocol::mcp_protocol::GetAuthStatusResponse;
|
||||
use codex_protocol::mcp_protocol::LoginChatGptResponse;
|
||||
use codex_protocol::mcp_protocol::LogoutChatGptResponse;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::to_response;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::CancelLoginChatGptParams;
|
||||
use codex_app_server_protocol::CancelLoginChatGptResponse;
|
||||
use codex_app_server_protocol::GetAuthStatusParams;
|
||||
use codex_app_server_protocol::GetAuthStatusResponse;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginChatGptResponse;
|
||||
use codex_app_server_protocol::LogoutChatGptResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_login::login_with_api_key;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
@@ -46,7 +46,7 @@ async fn logout_chatgpt_removes_auth() {
|
||||
login_with_api_key(codex_home.path(), "sk-test-key").expect("seed api key");
|
||||
assert!(codex_home.path().join("auth.json").exists());
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
@@ -95,7 +95,7 @@ async fn logout_chatgpt_removes_auth() {
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn login_and_cancel_chatgpt() {
|
||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||
create_config_toml(codex_home.path()).expect("write config.toml");
|
||||
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
13
codex-rs/app-server/tests/suite/mod.rs
Normal file
13
codex-rs/app-server/tests/suite/mod.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
mod archive_conversation;
|
||||
mod auth;
|
||||
mod codex_message_processor_flow;
|
||||
mod config;
|
||||
mod create_conversation;
|
||||
mod fuzzy_file_search;
|
||||
mod interrupt;
|
||||
mod list_resume;
|
||||
mod login;
|
||||
mod send_message;
|
||||
mod set_default_model;
|
||||
mod user_agent;
|
||||
mod user_info;
|
||||
@@ -1,20 +1,20 @@
|
||||
use std::path::Path;
|
||||
|
||||
use codex_protocol::mcp_protocol::AddConversationListenerParams;
|
||||
use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_protocol::mcp_protocol::ConversationId;
|
||||
use codex_protocol::mcp_protocol::InputItem;
|
||||
use codex_protocol::mcp_protocol::NewConversationParams;
|
||||
use codex_protocol::mcp_protocol::NewConversationResponse;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageParams;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageResponse;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_final_assistant_message_sse_response;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_test_support::to_response;
|
||||
use mcp_types::JSONRPCNotification;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::create_final_assistant_message_sse_response;
|
||||
use app_test_support::create_mock_chat_completions_server;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_app_server_protocol::InputItem;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_protocol::ConversationId;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
@@ -136,7 +136,7 @@ async fn test_send_message_session_not_found() {
|
||||
.expect("timeout")
|
||||
.expect("init");
|
||||
|
||||
let unknown = ConversationId(uuid::Uuid::new_v4());
|
||||
let unknown = ConversationId::new();
|
||||
let req_id = mcp
|
||||
.send_send_user_message_request(SendUserMessageParams {
|
||||
conversation_id: unknown,
|
||||
76
codex-rs/app-server/tests/suite/set_default_model.rs
Normal file
76
codex-rs/app-server/tests/suite/set_default_model.rs
Normal file
@@ -0,0 +1,76 @@
|
||||
use std::path::Path;
|
||||
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SetDefaultModelParams;
|
||||
use codex_app_server_protocol::SetDefaultModelResponse;
|
||||
use codex_core::config::ConfigToml;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn set_default_model_persists_overrides() {
|
||||
let codex_home = TempDir::new().expect("create tempdir");
|
||||
create_config_toml(codex_home.path()).expect("write config.toml");
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
|
||||
let params = SetDefaultModelParams {
|
||||
model: Some("gpt-4.1".to_string()),
|
||||
reasoning_effort: None,
|
||||
};
|
||||
|
||||
let request_id = mcp
|
||||
.send_set_default_model_request(params)
|
||||
.await
|
||||
.expect("send setDefaultModel");
|
||||
|
||||
let resp: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("setDefaultModel timeout")
|
||||
.expect("setDefaultModel response");
|
||||
|
||||
let _: SetDefaultModelResponse =
|
||||
to_response(resp).expect("deserialize setDefaultModel response");
|
||||
|
||||
let config_path = codex_home.path().join("config.toml");
|
||||
let config_contents = tokio::fs::read_to_string(&config_path)
|
||||
.await
|
||||
.expect("read config.toml");
|
||||
let config_toml: ConfigToml = toml::from_str(&config_contents).expect("parse config.toml");
|
||||
|
||||
assert_eq!(
|
||||
ConfigToml {
|
||||
model: Some("gpt-4.1".to_string()),
|
||||
model_reasoning_effort: None,
|
||||
..Default::default()
|
||||
},
|
||||
config_toml,
|
||||
);
|
||||
}
|
||||
|
||||
// Helper to create a config.toml; mirrors create_conversation.rs
|
||||
fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
r#"
|
||||
model = "gpt-5-codex"
|
||||
model_reasoning_effort = "medium"
|
||||
"#,
|
||||
)
|
||||
}
|
||||
@@ -1,10 +1,8 @@
|
||||
use codex_core::default_client::DEFAULT_ORIGINATOR;
|
||||
use codex_core::default_client::get_codex_user_agent;
|
||||
use codex_protocol::mcp_protocol::GetUserAgentResponse;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::to_response;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use codex_app_server_protocol::GetUserAgentResponse;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
@@ -35,11 +33,18 @@ async fn get_user_agent_returns_current_codex_user_agent() {
|
||||
.expect("getUserAgent timeout")
|
||||
.expect("getUserAgent response");
|
||||
|
||||
let os_info = os_info::get();
|
||||
let user_agent = format!(
|
||||
"codex_cli_rs/0.0.0 ({} {}; {}) {} (codex-app-server-tests; 0.1.0)",
|
||||
os_info.os_type(),
|
||||
os_info.version(),
|
||||
os_info.architecture().unwrap_or("unknown"),
|
||||
codex_core::terminal::user_agent()
|
||||
);
|
||||
|
||||
let received: GetUserAgentResponse =
|
||||
to_response(response).expect("deserialize getUserAgent response");
|
||||
let expected = GetUserAgentResponse {
|
||||
user_agent: get_codex_user_agent(Some(DEFAULT_ORIGINATOR)),
|
||||
};
|
||||
let expected = GetUserAgentResponse { user_agent };
|
||||
|
||||
assert_eq!(received, expected);
|
||||
}
|
||||
78
codex-rs/app-server/tests/suite/user_info.rs
Normal file
78
codex-rs/app-server/tests/suite/user_info.rs
Normal file
@@ -0,0 +1,78 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use app_test_support::McpProcess;
|
||||
use app_test_support::to_response;
|
||||
use base64::Engine;
|
||||
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::UserInfoResponse;
|
||||
use codex_core::auth::AuthDotJson;
|
||||
use codex_core::auth::get_auth_file;
|
||||
use codex_core::auth::write_auth_json;
|
||||
use codex_core::token_data::IdTokenInfo;
|
||||
use codex_core::token_data::TokenData;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn user_info_returns_email_from_auth_json() {
|
||||
let codex_home = TempDir::new().expect("create tempdir");
|
||||
|
||||
let auth_path = get_auth_file(codex_home.path());
|
||||
let mut id_token = IdTokenInfo::default();
|
||||
id_token.email = Some("user@example.com".to_string());
|
||||
id_token.raw_jwt = encode_id_token_with_email("user@example.com").expect("encode id token");
|
||||
|
||||
let auth = AuthDotJson {
|
||||
openai_api_key: None,
|
||||
tokens: Some(TokenData {
|
||||
id_token,
|
||||
access_token: "access".to_string(),
|
||||
refresh_token: "refresh".to_string(),
|
||||
account_id: None,
|
||||
}),
|
||||
last_refresh: None,
|
||||
};
|
||||
write_auth_json(&auth_path, &auth).expect("write auth.json");
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("initialize timeout")
|
||||
.expect("initialize request");
|
||||
|
||||
let request_id = mcp.send_user_info_request().await.expect("send userInfo");
|
||||
let response: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("userInfo timeout")
|
||||
.expect("userInfo response");
|
||||
|
||||
let received: UserInfoResponse = to_response(response).expect("deserialize userInfo response");
|
||||
let expected = UserInfoResponse {
|
||||
alleged_user_email: Some("user@example.com".to_string()),
|
||||
};
|
||||
|
||||
assert_eq!(received, expected);
|
||||
}
|
||||
|
||||
fn encode_id_token_with_email(email: &str) -> anyhow::Result<String> {
|
||||
let header_b64 = URL_SAFE_NO_PAD.encode(
|
||||
serde_json::to_vec(&json!({ "alg": "none", "typ": "JWT" }))
|
||||
.context("serialize jwt header")?,
|
||||
);
|
||||
let payload =
|
||||
serde_json::to_vec(&json!({ "email": email })).context("serialize jwt payload")?;
|
||||
let payload_b64 = URL_SAFE_NO_PAD.encode(payload);
|
||||
Ok(format!("{header_b64}.{payload_b64}.signature"))
|
||||
}
|
||||
@@ -15,14 +15,14 @@ path = "src/main.rs"
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
similar = "2.7.0"
|
||||
thiserror = "2.0.16"
|
||||
tree-sitter = "0.25.9"
|
||||
tree-sitter-bash = "0.25.0"
|
||||
once_cell = "1"
|
||||
anyhow = { workspace = true }
|
||||
similar = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
tree-sitter = { workspace = true }
|
||||
tree-sitter-bash = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2"
|
||||
pretty_assertions = "1.4.1"
|
||||
tempfile = "3.13.0"
|
||||
assert_cmd = { workspace = true }
|
||||
assert_matches = { workspace = true }
|
||||
pretty_assertions = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
@@ -6,10 +6,10 @@ use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::str::Utf8Error;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use once_cell::sync::Lazy;
|
||||
pub use parser::Hunk;
|
||||
pub use parser::ParseError;
|
||||
use parser::ParseError::*;
|
||||
@@ -40,6 +40,11 @@ pub enum ApplyPatchError {
|
||||
/// Error that occurs while computing replacements when applying patch chunks
|
||||
#[error("{0}")]
|
||||
ComputeReplacements(String),
|
||||
/// A raw patch body was provided without an explicit `apply_patch` invocation.
|
||||
#[error(
|
||||
"patch detected without explicit call to apply_patch. Rerun as [\"apply_patch\", \"<patch>\"]"
|
||||
)]
|
||||
ImplicitInvocation,
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for ApplyPatchError {
|
||||
@@ -93,10 +98,12 @@ pub struct ApplyPatchArgs {
|
||||
|
||||
pub fn maybe_parse_apply_patch(argv: &[String]) -> MaybeApplyPatch {
|
||||
match argv {
|
||||
// Direct invocation: apply_patch <patch>
|
||||
[cmd, body] if APPLY_PATCH_COMMANDS.contains(&cmd.as_str()) => match parse_patch(body) {
|
||||
Ok(source) => MaybeApplyPatch::Body(source),
|
||||
Err(e) => MaybeApplyPatch::PatchParseError(e),
|
||||
},
|
||||
// Bash heredoc form: (optional `cd <path> &&`) apply_patch <<'EOF' ...
|
||||
[bash, flag, script] if bash == "bash" && flag == "-lc" => {
|
||||
match extract_apply_patch_from_bash(script) {
|
||||
Ok((body, workdir)) => match parse_patch(&body) {
|
||||
@@ -207,6 +214,26 @@ impl ApplyPatchAction {
|
||||
/// cwd must be an absolute path so that we can resolve relative paths in the
|
||||
/// patch.
|
||||
pub fn maybe_parse_apply_patch_verified(argv: &[String], cwd: &Path) -> MaybeApplyPatchVerified {
|
||||
// Detect a raw patch body passed directly as the command or as the body of a bash -lc
|
||||
// script. In these cases, report an explicit error rather than applying the patch.
|
||||
match argv {
|
||||
[body] => {
|
||||
if parse_patch(body).is_ok() {
|
||||
return MaybeApplyPatchVerified::CorrectnessError(
|
||||
ApplyPatchError::ImplicitInvocation,
|
||||
);
|
||||
}
|
||||
}
|
||||
[bash, flag, script] if bash == "bash" && flag == "-lc" => {
|
||||
if parse_patch(script).is_ok() {
|
||||
return MaybeApplyPatchVerified::CorrectnessError(
|
||||
ApplyPatchError::ImplicitInvocation,
|
||||
);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match maybe_parse_apply_patch(argv) {
|
||||
MaybeApplyPatch::Body(ApplyPatchArgs {
|
||||
patch,
|
||||
@@ -324,7 +351,7 @@ fn extract_apply_patch_from_bash(
|
||||
// also run an arbitrary query against the AST. This is useful for understanding
|
||||
// how tree-sitter parses the script and whether the query syntax is correct. Be sure
|
||||
// to test both positive and negative cases.
|
||||
static APPLY_PATCH_QUERY: Lazy<Query> = Lazy::new(|| {
|
||||
static APPLY_PATCH_QUERY: LazyLock<Query> = LazyLock::new(|| {
|
||||
let language = BASH.into();
|
||||
#[expect(clippy::expect_used)]
|
||||
Query::new(
|
||||
@@ -621,21 +648,18 @@ fn derive_new_contents_from_chunks(
|
||||
}
|
||||
};
|
||||
|
||||
let mut original_lines: Vec<String> = original_contents
|
||||
.split('\n')
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
let mut original_lines: Vec<String> = original_contents.split('\n').map(String::from).collect();
|
||||
|
||||
// Drop the trailing empty element that results from the final newline so
|
||||
// that line counts match the behaviour of standard `diff`.
|
||||
if original_lines.last().is_some_and(|s| s.is_empty()) {
|
||||
if original_lines.last().is_some_and(String::is_empty) {
|
||||
original_lines.pop();
|
||||
}
|
||||
|
||||
let replacements = compute_replacements(&original_lines, path, chunks)?;
|
||||
let new_lines = apply_replacements(original_lines, &replacements);
|
||||
let mut new_lines = new_lines;
|
||||
if !new_lines.last().is_some_and(|s| s.is_empty()) {
|
||||
if !new_lines.last().is_some_and(String::is_empty) {
|
||||
new_lines.push(String::new());
|
||||
}
|
||||
let new_contents = new_lines.join("\n");
|
||||
@@ -679,7 +703,7 @@ fn compute_replacements(
|
||||
if chunk.old_lines.is_empty() {
|
||||
// Pure addition (no old lines). We'll add them at the end or just
|
||||
// before the final empty line if one exists.
|
||||
let insertion_idx = if original_lines.last().is_some_and(|s| s.is_empty()) {
|
||||
let insertion_idx = if original_lines.last().is_some_and(String::is_empty) {
|
||||
original_lines.len() - 1
|
||||
} else {
|
||||
original_lines.len()
|
||||
@@ -705,11 +729,11 @@ fn compute_replacements(
|
||||
|
||||
let mut new_slice: &[String] = &chunk.new_lines;
|
||||
|
||||
if found.is_none() && pattern.last().is_some_and(|s| s.is_empty()) {
|
||||
if found.is_none() && pattern.last().is_some_and(String::is_empty) {
|
||||
// Retry without the trailing empty line which represents the final
|
||||
// newline in the file.
|
||||
pattern = &pattern[..pattern.len() - 1];
|
||||
if new_slice.last().is_some_and(|s| s.is_empty()) {
|
||||
if new_slice.last().is_some_and(String::is_empty) {
|
||||
new_slice = &new_slice[..new_slice.len() - 1];
|
||||
}
|
||||
|
||||
@@ -726,13 +750,15 @@ fn compute_replacements(
|
||||
line_index = start_idx + pattern.len();
|
||||
} else {
|
||||
return Err(ApplyPatchError::ComputeReplacements(format!(
|
||||
"Failed to find expected lines {:?} in {}",
|
||||
chunk.old_lines,
|
||||
path.display()
|
||||
"Failed to find expected lines in {}:\n{}",
|
||||
path.display(),
|
||||
chunk.old_lines.join("\n"),
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
replacements.sort_by(|(lhs_idx, _, _), (rhs_idx, _, _)| lhs_idx.cmp(rhs_idx));
|
||||
|
||||
Ok(replacements)
|
||||
}
|
||||
|
||||
@@ -817,8 +843,10 @@ pub fn print_summary(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use assert_matches::assert_matches;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fs;
|
||||
use std::string::ToString;
|
||||
use tempfile::tempdir;
|
||||
|
||||
/// Helper to construct a patch with the given body.
|
||||
@@ -827,7 +855,7 @@ mod tests {
|
||||
}
|
||||
|
||||
fn strs_to_strings(strs: &[&str]) -> Vec<String> {
|
||||
strs.iter().map(|s| s.to_string()).collect()
|
||||
strs.iter().map(ToString::to_string).collect()
|
||||
}
|
||||
|
||||
// Test helpers to reduce repetition when building bash -lc heredoc scripts
|
||||
@@ -867,10 +895,32 @@ mod tests {
|
||||
|
||||
fn assert_not_match(script: &str) {
|
||||
let args = args_bash(script);
|
||||
assert!(matches!(
|
||||
assert_matches!(
|
||||
maybe_parse_apply_patch(&args),
|
||||
MaybeApplyPatch::NotApplyPatch
|
||||
));
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_implicit_patch_single_arg_is_error() {
|
||||
let patch = "*** Begin Patch\n*** Add File: foo\n+hi\n*** End Patch".to_string();
|
||||
let args = vec![patch];
|
||||
let dir = tempdir().unwrap();
|
||||
assert_matches!(
|
||||
maybe_parse_apply_patch_verified(&args, dir.path()),
|
||||
MaybeApplyPatchVerified::CorrectnessError(ApplyPatchError::ImplicitInvocation)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_implicit_patch_bash_script_is_error() {
|
||||
let script = "*** Begin Patch\n*** Add File: foo\n+hi\n*** End Patch";
|
||||
let args = args_bash(script);
|
||||
let dir = tempdir().unwrap();
|
||||
assert_matches!(
|
||||
maybe_parse_apply_patch_verified(&args, dir.path()),
|
||||
MaybeApplyPatchVerified::CorrectnessError(ApplyPatchError::ImplicitInvocation)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -1216,6 +1266,33 @@ PATCH"#,
|
||||
assert_eq!(contents, "a\nB\nc\nd\nE\nf\ng\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pure_addition_chunk_followed_by_removal() {
|
||||
let dir = tempdir().unwrap();
|
||||
let path = dir.path().join("panic.txt");
|
||||
fs::write(&path, "line1\nline2\nline3\n").unwrap();
|
||||
let patch = wrap_patch(&format!(
|
||||
r#"*** Update File: {}
|
||||
@@
|
||||
+after-context
|
||||
+second-line
|
||||
@@
|
||||
line1
|
||||
-line2
|
||||
-line3
|
||||
+line2-replacement"#,
|
||||
path.display()
|
||||
));
|
||||
let mut stdout = Vec::new();
|
||||
let mut stderr = Vec::new();
|
||||
apply_patch(&patch, &mut stdout, &mut stderr).unwrap();
|
||||
let contents = fs::read_to_string(path).unwrap();
|
||||
assert_eq!(
|
||||
contents,
|
||||
"line1\nline2-replacement\nafter-context\nsecond-line\n"
|
||||
);
|
||||
}
|
||||
|
||||
/// Ensure that patches authored with ASCII characters can update lines that
|
||||
/// contain typographic Unicode punctuation (e.g. EN DASH, NON-BREAKING
|
||||
/// HYPHEN). Historically `git apply` succeeds in such scenarios but our
|
||||
|
||||
@@ -617,7 +617,7 @@ fn test_parse_patch_lenient() {
|
||||
assert_eq!(
|
||||
parse_patch_text(&patch_text_in_double_quoted_heredoc, ParseMode::Lenient),
|
||||
Ok(ApplyPatchArgs {
|
||||
hunks: expected_patch.clone(),
|
||||
hunks: expected_patch,
|
||||
patch: patch_text.to_string(),
|
||||
workdir: None,
|
||||
})
|
||||
@@ -637,7 +637,7 @@ fn test_parse_patch_lenient() {
|
||||
"<<EOF\n*** Begin Patch\n*** Update File: file2.py\nEOF\n".to_string();
|
||||
assert_eq!(
|
||||
parse_patch_text(&patch_text_with_missing_closing_heredoc, ParseMode::Strict),
|
||||
Err(expected_error.clone())
|
||||
Err(expected_error)
|
||||
);
|
||||
assert_eq!(
|
||||
parse_patch_text(&patch_text_with_missing_closing_heredoc, ParseMode::Lenient),
|
||||
|
||||
@@ -112,9 +112,10 @@ pub(crate) fn seek_sequence(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::seek_sequence;
|
||||
use std::string::ToString;
|
||||
|
||||
fn to_vec(strings: &[&str]) -> Vec<String> {
|
||||
strings.iter().map(|s| s.to_string()).collect()
|
||||
strings.iter().map(ToString::to_string).collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -11,10 +11,10 @@ path = "src/lib.rs"
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
codex-apply-patch = { path = "../apply-patch" }
|
||||
codex-core = { path = "../core" }
|
||||
codex-linux-sandbox = { path = "../linux-sandbox" }
|
||||
dotenvy = "0.15.7"
|
||||
tempfile = "3"
|
||||
tokio = { version = "1", features = ["rt-multi-thread"] }
|
||||
anyhow = { workspace = true }
|
||||
codex-apply-patch = { workspace = true }
|
||||
codex-core = { workspace = true }
|
||||
codex-linux-sandbox = { workspace = true }
|
||||
dotenvy = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
tokio = { workspace = true, features = ["rt-multi-thread"] }
|
||||
|
||||
@@ -54,7 +54,7 @@ where
|
||||
|
||||
let argv1 = args.next().unwrap_or_default();
|
||||
if argv1 == CODEX_APPLY_PATCH_ARG1 {
|
||||
let patch_arg = args.next().and_then(|s| s.to_str().map(|s| s.to_owned()));
|
||||
let patch_arg = args.next().and_then(|s| s.to_str().map(str::to_owned));
|
||||
let exit_code = match patch_arg {
|
||||
Some(patch_arg) => {
|
||||
let mut stdout = std::io::stdout();
|
||||
|
||||
18
codex-rs/backend-client/Cargo.toml
Normal file
18
codex-rs/backend-client/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[package]
|
||||
name = "codex-backend-client"
|
||||
version = "0.0.0"
|
||||
edition = "2024"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
|
||||
codex-backend-openapi-models = { path = "../codex-backend-openapi-models" }
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "1"
|
||||
244
codex-rs/backend-client/src/client.rs
Normal file
244
codex-rs/backend-client/src/client.rs
Normal file
@@ -0,0 +1,244 @@
|
||||
use crate::types::CodeTaskDetailsResponse;
|
||||
use crate::types::PaginatedListTaskListItem;
|
||||
use crate::types::TurnAttemptsSiblingTurnsResponse;
|
||||
use anyhow::Result;
|
||||
use reqwest::header::AUTHORIZATION;
|
||||
use reqwest::header::CONTENT_TYPE;
|
||||
use reqwest::header::HeaderMap;
|
||||
use reqwest::header::HeaderName;
|
||||
use reqwest::header::HeaderValue;
|
||||
use reqwest::header::USER_AGENT;
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum PathStyle {
|
||||
/// /api/codex/…
|
||||
CodexApi,
|
||||
/// /wham/…
|
||||
ChatGptApi,
|
||||
}
|
||||
|
||||
impl PathStyle {
|
||||
pub fn from_base_url(base_url: &str) -> Self {
|
||||
if base_url.contains("/backend-api") {
|
||||
PathStyle::ChatGptApi
|
||||
} else {
|
||||
PathStyle::CodexApi
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Client {
|
||||
base_url: String,
|
||||
http: reqwest::Client,
|
||||
bearer_token: Option<String>,
|
||||
user_agent: Option<HeaderValue>,
|
||||
chatgpt_account_id: Option<String>,
|
||||
path_style: PathStyle,
|
||||
}
|
||||
|
||||
impl Client {
|
||||
pub fn new(base_url: impl Into<String>) -> Result<Self> {
|
||||
let mut base_url = base_url.into();
|
||||
// Normalize common ChatGPT hostnames to include /backend-api so we hit the WHAM paths.
|
||||
// Also trim trailing slashes for consistent URL building.
|
||||
while base_url.ends_with('/') {
|
||||
base_url.pop();
|
||||
}
|
||||
if (base_url.starts_with("https://chatgpt.com")
|
||||
|| base_url.starts_with("https://chat.openai.com"))
|
||||
&& !base_url.contains("/backend-api")
|
||||
{
|
||||
base_url = format!("{base_url}/backend-api");
|
||||
}
|
||||
let http = reqwest::Client::builder().build()?;
|
||||
let path_style = PathStyle::from_base_url(&base_url);
|
||||
Ok(Self {
|
||||
base_url,
|
||||
http,
|
||||
bearer_token: None,
|
||||
user_agent: None,
|
||||
chatgpt_account_id: None,
|
||||
path_style,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
|
||||
self.bearer_token = Some(token.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_user_agent(mut self, ua: impl Into<String>) -> Self {
|
||||
if let Ok(hv) = HeaderValue::from_str(&ua.into()) {
|
||||
self.user_agent = Some(hv);
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_chatgpt_account_id(mut self, account_id: impl Into<String>) -> Self {
|
||||
self.chatgpt_account_id = Some(account_id.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_path_style(mut self, style: PathStyle) -> Self {
|
||||
self.path_style = style;
|
||||
self
|
||||
}
|
||||
|
||||
fn headers(&self) -> HeaderMap {
|
||||
let mut h = HeaderMap::new();
|
||||
if let Some(ua) = &self.user_agent {
|
||||
h.insert(USER_AGENT, ua.clone());
|
||||
} else {
|
||||
h.insert(USER_AGENT, HeaderValue::from_static("codex-cli"));
|
||||
}
|
||||
if let Some(token) = &self.bearer_token {
|
||||
let value = format!("Bearer {token}");
|
||||
if let Ok(hv) = HeaderValue::from_str(&value) {
|
||||
h.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
}
|
||||
if let Some(acc) = &self.chatgpt_account_id
|
||||
&& let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
|
||||
&& let Ok(hv) = HeaderValue::from_str(acc)
|
||||
{
|
||||
h.insert(name, hv);
|
||||
}
|
||||
h
|
||||
}
|
||||
|
||||
async fn exec_request(
|
||||
&self,
|
||||
req: reqwest::RequestBuilder,
|
||||
method: &str,
|
||||
url: &str,
|
||||
) -> Result<(String, String)> {
|
||||
let res = req.send().await?;
|
||||
let status = res.status();
|
||||
let ct = res
|
||||
.headers()
|
||||
.get(CONTENT_TYPE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let body = res.text().await.unwrap_or_default();
|
||||
if !status.is_success() {
|
||||
anyhow::bail!("{method} {url} failed: {status}; content-type={ct}; body={body}");
|
||||
}
|
||||
Ok((body, ct))
|
||||
}
|
||||
|
||||
fn decode_json<T: DeserializeOwned>(&self, url: &str, ct: &str, body: &str) -> Result<T> {
|
||||
match serde_json::from_str::<T>(body) {
|
||||
Ok(v) => Ok(v),
|
||||
Err(e) => {
|
||||
anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn list_tasks(
|
||||
&self,
|
||||
limit: Option<i32>,
|
||||
task_filter: Option<&str>,
|
||||
environment_id: Option<&str>,
|
||||
) -> Result<PaginatedListTaskListItem> {
|
||||
let url = match self.path_style {
|
||||
PathStyle::CodexApi => format!("{}/api/codex/tasks/list", self.base_url),
|
||||
PathStyle::ChatGptApi => format!("{}/wham/tasks/list", self.base_url),
|
||||
};
|
||||
let req = self.http.get(&url).headers(self.headers());
|
||||
let req = if let Some(lim) = limit {
|
||||
req.query(&[("limit", lim)])
|
||||
} else {
|
||||
req
|
||||
};
|
||||
let req = if let Some(tf) = task_filter {
|
||||
req.query(&[("task_filter", tf)])
|
||||
} else {
|
||||
req
|
||||
};
|
||||
let req = if let Some(id) = environment_id {
|
||||
req.query(&[("environment_id", id)])
|
||||
} else {
|
||||
req
|
||||
};
|
||||
let (body, ct) = self.exec_request(req, "GET", &url).await?;
|
||||
self.decode_json::<PaginatedListTaskListItem>(&url, &ct, &body)
|
||||
}
|
||||
|
||||
pub async fn get_task_details(&self, task_id: &str) -> Result<CodeTaskDetailsResponse> {
|
||||
let (parsed, _body, _ct) = self.get_task_details_with_body(task_id).await?;
|
||||
Ok(parsed)
|
||||
}
|
||||
|
||||
pub async fn get_task_details_with_body(
|
||||
&self,
|
||||
task_id: &str,
|
||||
) -> Result<(CodeTaskDetailsResponse, String, String)> {
|
||||
let url = match self.path_style {
|
||||
PathStyle::CodexApi => format!("{}/api/codex/tasks/{}", self.base_url, task_id),
|
||||
PathStyle::ChatGptApi => format!("{}/wham/tasks/{}", self.base_url, task_id),
|
||||
};
|
||||
let req = self.http.get(&url).headers(self.headers());
|
||||
let (body, ct) = self.exec_request(req, "GET", &url).await?;
|
||||
let parsed: CodeTaskDetailsResponse = self.decode_json(&url, &ct, &body)?;
|
||||
Ok((parsed, body, ct))
|
||||
}
|
||||
|
||||
pub async fn list_sibling_turns(
|
||||
&self,
|
||||
task_id: &str,
|
||||
turn_id: &str,
|
||||
) -> Result<TurnAttemptsSiblingTurnsResponse> {
|
||||
let url = match self.path_style {
|
||||
PathStyle::CodexApi => format!(
|
||||
"{}/api/codex/tasks/{}/turns/{}/sibling_turns",
|
||||
self.base_url, task_id, turn_id
|
||||
),
|
||||
PathStyle::ChatGptApi => format!(
|
||||
"{}/wham/tasks/{}/turns/{}/sibling_turns",
|
||||
self.base_url, task_id, turn_id
|
||||
),
|
||||
};
|
||||
let req = self.http.get(&url).headers(self.headers());
|
||||
let (body, ct) = self.exec_request(req, "GET", &url).await?;
|
||||
self.decode_json::<TurnAttemptsSiblingTurnsResponse>(&url, &ct, &body)
|
||||
}
|
||||
|
||||
/// Create a new task (user turn) by POSTing to the appropriate backend path
|
||||
/// based on `path_style`. Returns the created task id.
|
||||
pub async fn create_task(&self, request_body: serde_json::Value) -> Result<String> {
|
||||
let url = match self.path_style {
|
||||
PathStyle::CodexApi => format!("{}/api/codex/tasks", self.base_url),
|
||||
PathStyle::ChatGptApi => format!("{}/wham/tasks", self.base_url),
|
||||
};
|
||||
let req = self
|
||||
.http
|
||||
.post(&url)
|
||||
.headers(self.headers())
|
||||
.header(CONTENT_TYPE, HeaderValue::from_static("application/json"))
|
||||
.json(&request_body);
|
||||
let (body, ct) = self.exec_request(req, "POST", &url).await?;
|
||||
// Extract id from JSON: prefer `task.id`; fallback to top-level `id` when present.
|
||||
match serde_json::from_str::<serde_json::Value>(&body) {
|
||||
Ok(v) => {
|
||||
if let Some(id) = v
|
||||
.get("task")
|
||||
.and_then(|t| t.get("id"))
|
||||
.and_then(|s| s.as_str())
|
||||
{
|
||||
Ok(id.to_string())
|
||||
} else if let Some(id) = v.get("id").and_then(|s| s.as_str()) {
|
||||
Ok(id.to_string())
|
||||
} else {
|
||||
anyhow::bail!(
|
||||
"POST {url} succeeded but no task id found; content-type={ct}; body={body}"
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
9
codex-rs/backend-client/src/lib.rs
Normal file
9
codex-rs/backend-client/src/lib.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
mod client;
|
||||
pub mod types;
|
||||
|
||||
pub use client::Client;
|
||||
pub use types::CodeTaskDetailsResponse;
|
||||
pub use types::CodeTaskDetailsResponseExt;
|
||||
pub use types::PaginatedListTaskListItem;
|
||||
pub use types::TaskListItem;
|
||||
pub use types::TurnAttemptsSiblingTurnsResponse;
|
||||
369
codex-rs/backend-client/src/types.rs
Normal file
369
codex-rs/backend-client/src/types.rs
Normal file
@@ -0,0 +1,369 @@
|
||||
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
|
||||
pub use codex_backend_openapi_models::models::TaskListItem;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::de::Deserializer;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Hand-rolled models for the Cloud Tasks task-details response.
|
||||
/// The generated OpenAPI models are pretty bad. This is a half-step
|
||||
/// towards hand-rolling them.
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct CodeTaskDetailsResponse {
|
||||
#[serde(default)]
|
||||
pub current_user_turn: Option<Turn>,
|
||||
#[serde(default)]
|
||||
pub current_assistant_turn: Option<Turn>,
|
||||
#[serde(default)]
|
||||
pub current_diff_task_turn: Option<Turn>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct Turn {
|
||||
#[serde(default)]
|
||||
pub id: Option<String>,
|
||||
#[serde(default)]
|
||||
pub attempt_placement: Option<i64>,
|
||||
#[serde(default, rename = "turn_status")]
|
||||
pub turn_status: Option<String>,
|
||||
#[serde(default, deserialize_with = "deserialize_vec")]
|
||||
pub sibling_turn_ids: Vec<String>,
|
||||
#[serde(default, deserialize_with = "deserialize_vec")]
|
||||
pub input_items: Vec<TurnItem>,
|
||||
#[serde(default, deserialize_with = "deserialize_vec")]
|
||||
pub output_items: Vec<TurnItem>,
|
||||
#[serde(default)]
|
||||
pub worklog: Option<Worklog>,
|
||||
#[serde(default)]
|
||||
pub error: Option<TurnError>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct TurnItem {
|
||||
#[serde(rename = "type", default)]
|
||||
pub kind: String,
|
||||
#[serde(default)]
|
||||
pub role: Option<String>,
|
||||
#[serde(default, deserialize_with = "deserialize_vec")]
|
||||
pub content: Vec<ContentFragment>,
|
||||
#[serde(default)]
|
||||
pub diff: Option<String>,
|
||||
#[serde(default)]
|
||||
pub output_diff: Option<DiffPayload>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum ContentFragment {
|
||||
Structured(StructuredContent),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct StructuredContent {
|
||||
#[serde(rename = "content_type", default)]
|
||||
pub content_type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub text: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct DiffPayload {
|
||||
#[serde(default)]
|
||||
pub diff: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct Worklog {
|
||||
#[serde(default, deserialize_with = "deserialize_vec")]
|
||||
pub messages: Vec<WorklogMessage>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct WorklogMessage {
|
||||
#[serde(default)]
|
||||
pub author: Option<Author>,
|
||||
#[serde(default)]
|
||||
pub content: Option<WorklogContent>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct Author {
|
||||
#[serde(default)]
|
||||
pub role: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct WorklogContent {
|
||||
#[serde(default)]
|
||||
pub parts: Vec<ContentFragment>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct TurnError {
|
||||
#[serde(default)]
|
||||
pub code: Option<String>,
|
||||
#[serde(default)]
|
||||
pub message: Option<String>,
|
||||
}
|
||||
|
||||
impl ContentFragment {
|
||||
fn text(&self) -> Option<&str> {
|
||||
match self {
|
||||
ContentFragment::Structured(inner) => {
|
||||
if inner
|
||||
.content_type
|
||||
.as_deref()
|
||||
.map(|ct| ct.eq_ignore_ascii_case("text"))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
inner.text.as_deref().filter(|s| !s.is_empty())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
ContentFragment::Text(raw) => {
|
||||
if raw.trim().is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(raw.as_str())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TurnItem {
|
||||
fn text_values(&self) -> Vec<String> {
|
||||
self.content
|
||||
.iter()
|
||||
.filter_map(|fragment| fragment.text().map(str::to_string))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn diff_text(&self) -> Option<String> {
|
||||
if self.kind == "output_diff" {
|
||||
if let Some(diff) = &self.diff
|
||||
&& !diff.is_empty()
|
||||
{
|
||||
return Some(diff.clone());
|
||||
}
|
||||
} else if self.kind == "pr"
|
||||
&& let Some(payload) = &self.output_diff
|
||||
&& let Some(diff) = &payload.diff
|
||||
&& !diff.is_empty()
|
||||
{
|
||||
return Some(diff.clone());
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl Turn {
|
||||
fn unified_diff(&self) -> Option<String> {
|
||||
self.output_items.iter().find_map(TurnItem::diff_text)
|
||||
}
|
||||
|
||||
fn message_texts(&self) -> Vec<String> {
|
||||
let mut out: Vec<String> = self
|
||||
.output_items
|
||||
.iter()
|
||||
.filter(|item| item.kind == "message")
|
||||
.flat_map(TurnItem::text_values)
|
||||
.collect();
|
||||
|
||||
if let Some(log) = &self.worklog {
|
||||
for message in &log.messages {
|
||||
if message.is_assistant() {
|
||||
out.extend(message.text_values());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
out
|
||||
}
|
||||
|
||||
fn user_prompt(&self) -> Option<String> {
|
||||
let parts: Vec<String> = self
|
||||
.input_items
|
||||
.iter()
|
||||
.filter(|item| item.kind == "message")
|
||||
.filter(|item| {
|
||||
item.role
|
||||
.as_deref()
|
||||
.map(|r| r.eq_ignore_ascii_case("user"))
|
||||
.unwrap_or(true)
|
||||
})
|
||||
.flat_map(TurnItem::text_values)
|
||||
.collect();
|
||||
|
||||
if parts.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(parts.join(
|
||||
"
|
||||
|
||||
",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn error_summary(&self) -> Option<String> {
|
||||
self.error.as_ref().and_then(TurnError::summary)
|
||||
}
|
||||
}
|
||||
|
||||
impl WorklogMessage {
|
||||
fn is_assistant(&self) -> bool {
|
||||
self.author
|
||||
.as_ref()
|
||||
.and_then(|a| a.role.as_deref())
|
||||
.map(|role| role.eq_ignore_ascii_case("assistant"))
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn text_values(&self) -> Vec<String> {
|
||||
self.content
|
||||
.as_ref()
|
||||
.map(|content| {
|
||||
content
|
||||
.parts
|
||||
.iter()
|
||||
.filter_map(|fragment| fragment.text().map(str::to_string))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
impl TurnError {
|
||||
fn summary(&self) -> Option<String> {
|
||||
let code = self.code.as_deref().unwrap_or("");
|
||||
let message = self.message.as_deref().unwrap_or("");
|
||||
match (code.is_empty(), message.is_empty()) {
|
||||
(true, true) => None,
|
||||
(false, true) => Some(code.to_string()),
|
||||
(true, false) => Some(message.to_string()),
|
||||
(false, false) => Some(format!("{code}: {message}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait CodeTaskDetailsResponseExt {
|
||||
/// Attempt to extract a unified diff string from the assistant or diff turn.
|
||||
fn unified_diff(&self) -> Option<String>;
|
||||
/// Extract assistant text output messages (no diff) from current turns.
|
||||
fn assistant_text_messages(&self) -> Vec<String>;
|
||||
/// Extract the user's prompt text from the current user turn, when present.
|
||||
fn user_text_prompt(&self) -> Option<String>;
|
||||
/// Extract an assistant error message (if the turn failed and provided one).
|
||||
fn assistant_error_message(&self) -> Option<String>;
|
||||
}
|
||||
|
||||
impl CodeTaskDetailsResponseExt for CodeTaskDetailsResponse {
|
||||
fn unified_diff(&self) -> Option<String> {
|
||||
[
|
||||
self.current_diff_task_turn.as_ref(),
|
||||
self.current_assistant_turn.as_ref(),
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.find_map(Turn::unified_diff)
|
||||
}
|
||||
|
||||
fn assistant_text_messages(&self) -> Vec<String> {
|
||||
let mut out = Vec::new();
|
||||
for turn in [
|
||||
self.current_diff_task_turn.as_ref(),
|
||||
self.current_assistant_turn.as_ref(),
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
{
|
||||
out.extend(turn.message_texts());
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
fn user_text_prompt(&self) -> Option<String> {
|
||||
self.current_user_turn.as_ref().and_then(Turn::user_prompt)
|
||||
}
|
||||
|
||||
fn assistant_error_message(&self) -> Option<String> {
|
||||
self.current_assistant_turn
|
||||
.as_ref()
|
||||
.and_then(Turn::error_summary)
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_vec<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
T: Deserialize<'de>,
|
||||
{
|
||||
Option::<Vec<T>>::deserialize(deserializer).map(|opt| opt.unwrap_or_default())
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct TurnAttemptsSiblingTurnsResponse {
|
||||
#[serde(default)]
|
||||
pub sibling_turns: Vec<HashMap<String, Value>>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
fn fixture(name: &str) -> CodeTaskDetailsResponse {
|
||||
let json = match name {
|
||||
"diff" => include_str!("../tests/fixtures/task_details_with_diff.json"),
|
||||
"error" => include_str!("../tests/fixtures/task_details_with_error.json"),
|
||||
other => panic!("unknown fixture {other}"),
|
||||
};
|
||||
serde_json::from_str(json).expect("fixture should deserialize")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unified_diff_prefers_current_diff_task_turn() {
|
||||
let details = fixture("diff");
|
||||
let diff = details.unified_diff().expect("diff present");
|
||||
assert!(diff.contains("diff --git"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unified_diff_falls_back_to_pr_output_diff() {
|
||||
let details = fixture("error");
|
||||
let diff = details.unified_diff().expect("diff from pr output");
|
||||
assert!(diff.contains("lib.rs"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn assistant_text_messages_extracts_text_content() {
|
||||
let details = fixture("diff");
|
||||
let messages = details.assistant_text_messages();
|
||||
assert_eq!(messages, vec!["Assistant response".to_string()]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn user_text_prompt_joins_parts_with_spacing() {
|
||||
let details = fixture("diff");
|
||||
let prompt = details.user_text_prompt().expect("prompt present");
|
||||
assert_eq!(
|
||||
prompt,
|
||||
"First line
|
||||
|
||||
Second line"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn assistant_error_message_combines_code_and_message() {
|
||||
let details = fixture("error");
|
||||
let msg = details
|
||||
.assistant_error_message()
|
||||
.expect("error should be present");
|
||||
assert_eq!(msg, "APPLY_FAILED: Patch could not be applied");
|
||||
}
|
||||
}
|
||||
38
codex-rs/backend-client/tests/fixtures/task_details_with_diff.json
vendored
Normal file
38
codex-rs/backend-client/tests/fixtures/task_details_with_diff.json
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"task": {
|
||||
"id": "task_123",
|
||||
"title": "Refactor cloud task client",
|
||||
"archived": false,
|
||||
"external_pull_requests": []
|
||||
},
|
||||
"current_user_turn": {
|
||||
"input_items": [
|
||||
{
|
||||
"type": "message",
|
||||
"role": "user",
|
||||
"content": [
|
||||
{ "content_type": "text", "text": "First line" },
|
||||
{ "content_type": "text", "text": "Second line" }
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"current_assistant_turn": {
|
||||
"output_items": [
|
||||
{
|
||||
"type": "message",
|
||||
"content": [
|
||||
{ "content_type": "text", "text": "Assistant response" }
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"current_diff_task_turn": {
|
||||
"output_items": [
|
||||
{
|
||||
"type": "output_diff",
|
||||
"diff": "diff --git a/src/main.rs b/src/main.rs\n+fn main() { println!(\"hi\"); }\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
22
codex-rs/backend-client/tests/fixtures/task_details_with_error.json
vendored
Normal file
22
codex-rs/backend-client/tests/fixtures/task_details_with_error.json
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"task": {
|
||||
"id": "task_456",
|
||||
"title": "Investigate failure",
|
||||
"archived": false,
|
||||
"external_pull_requests": []
|
||||
},
|
||||
"current_assistant_turn": {
|
||||
"output_items": [
|
||||
{
|
||||
"type": "pr",
|
||||
"output_diff": {
|
||||
"diff": "diff --git a/lib.rs b/lib.rs\n+pub fn hello() {}\n"
|
||||
}
|
||||
}
|
||||
],
|
||||
"error": {
|
||||
"code": "APPLY_FAILED",
|
||||
"message": "Patch could not be applied"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,15 +7,14 @@ version = { workspace = true }
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
codex-common = { path = "../common", features = ["cli"] }
|
||||
codex-core = { path = "../core" }
|
||||
codex-protocol = { path = "../protocol" }
|
||||
reqwest = { version = "0.12", features = ["json", "stream"] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
anyhow = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
codex-common = { workspace = true, features = ["cli"] }
|
||||
codex-core = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
codex-git-apply = { path = "../git-apply" }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3"
|
||||
tempfile = { workspace = true }
|
||||
|
||||
@@ -29,9 +29,10 @@ pub async fn run_apply_command(
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?,
|
||||
ConfigOverrides::default(),
|
||||
)?;
|
||||
)
|
||||
.await?;
|
||||
|
||||
init_chatgpt_token_from_auth(&config.codex_home, &config.responses_originator_header).await?;
|
||||
init_chatgpt_token_from_auth(&config.codex_home).await?;
|
||||
|
||||
let task_response = get_task(&config, apply_cli.task_id).await?;
|
||||
apply_diff_from_task(task_response, cwd).await
|
||||
@@ -56,46 +57,24 @@ pub async fn apply_diff_from_task(
|
||||
}
|
||||
|
||||
async fn apply_diff(diff: &str, cwd: Option<PathBuf>) -> anyhow::Result<()> {
|
||||
let mut cmd = tokio::process::Command::new("git");
|
||||
if let Some(cwd) = cwd {
|
||||
cmd.current_dir(cwd);
|
||||
}
|
||||
let toplevel_output = cmd
|
||||
.args(vec!["rev-parse", "--show-toplevel"])
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
if !toplevel_output.status.success() {
|
||||
anyhow::bail!("apply must be run from a git repository.");
|
||||
}
|
||||
|
||||
let repo_root = String::from_utf8(toplevel_output.stdout)?
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
let mut git_apply_cmd = tokio::process::Command::new("git")
|
||||
.args(vec!["apply", "--3way"])
|
||||
.current_dir(&repo_root)
|
||||
.stdin(std::process::Stdio::piped())
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
if let Some(mut stdin) = git_apply_cmd.stdin.take() {
|
||||
tokio::io::AsyncWriteExt::write_all(&mut stdin, diff.as_bytes()).await?;
|
||||
drop(stdin);
|
||||
}
|
||||
|
||||
let output = git_apply_cmd.wait_with_output().await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let cwd = cwd.unwrap_or(std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()));
|
||||
let req = codex_git_apply::ApplyGitRequest {
|
||||
cwd,
|
||||
diff: diff.to_string(),
|
||||
revert: false,
|
||||
preflight: false,
|
||||
};
|
||||
let res = codex_git_apply::apply_git_patch(&req)?;
|
||||
if res.exit_code != 0 {
|
||||
anyhow::bail!(
|
||||
"Git apply failed with status {}: {}",
|
||||
output.status,
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
"Git apply failed (applied={}, skipped={}, conflicts={})\nstdout:\n{}\nstderr:\n{}",
|
||||
res.applied_paths.len(),
|
||||
res.skipped_paths.len(),
|
||||
res.conflicted_paths.len(),
|
||||
res.stdout,
|
||||
res.stderr
|
||||
);
|
||||
}
|
||||
|
||||
println!("Successfully applied diff");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -13,10 +13,10 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
|
||||
path: String,
|
||||
) -> anyhow::Result<T> {
|
||||
let chatgpt_base_url = &config.chatgpt_base_url;
|
||||
init_chatgpt_token_from_auth(&config.codex_home, &config.responses_originator_header).await?;
|
||||
init_chatgpt_token_from_auth(&config.codex_home).await?;
|
||||
|
||||
// Make direct HTTP request to ChatGPT backend API with the token
|
||||
let client = create_client(&config.responses_originator_header);
|
||||
let client = create_client();
|
||||
let url = format!("{chatgpt_base_url}{path}");
|
||||
|
||||
let token =
|
||||
@@ -44,6 +44,6 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
|
||||
} else {
|
||||
let status = response.status();
|
||||
let body = response.text().await.unwrap_or_default();
|
||||
anyhow::bail!("Request failed with status {}: {}", status, body)
|
||||
anyhow::bail!("Request failed with status {status}: {body}")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use codex_core::CodexAuth;
|
||||
use codex_protocol::mcp_protocol::AuthMode;
|
||||
use std::path::Path;
|
||||
use std::sync::LazyLock;
|
||||
use std::sync::RwLock;
|
||||
@@ -19,11 +18,8 @@ pub fn set_chatgpt_token_data(value: TokenData) {
|
||||
}
|
||||
|
||||
/// Initialize the ChatGPT token from auth.json file
|
||||
pub async fn init_chatgpt_token_from_auth(
|
||||
codex_home: &Path,
|
||||
originator: &str,
|
||||
) -> std::io::Result<()> {
|
||||
let auth = CodexAuth::from_codex_home(codex_home, AuthMode::ChatGPT, originator)?;
|
||||
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
|
||||
let auth = CodexAuth::from_codex_home(codex_home)?;
|
||||
if let Some(auth) = auth {
|
||||
let token_data = auth.get_token_data().await?;
|
||||
set_chatgpt_token_data(token_data);
|
||||
|
||||
@@ -15,26 +15,40 @@ path = "src/lib.rs"
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
clap_complete = "4"
|
||||
codex-arg0 = { path = "../arg0" }
|
||||
codex-chatgpt = { path = "../chatgpt" }
|
||||
codex-common = { path = "../common", features = ["cli"] }
|
||||
codex-core = { path = "../core" }
|
||||
codex-exec = { path = "../exec" }
|
||||
codex-login = { path = "../login" }
|
||||
codex-mcp-server = { path = "../mcp-server" }
|
||||
codex-protocol = { path = "../protocol" }
|
||||
codex-tui = { path = "../tui" }
|
||||
serde_json = "1"
|
||||
tokio = { version = "1", features = [
|
||||
anyhow = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
clap_complete = { workspace = true }
|
||||
codex-app-server = { workspace = true }
|
||||
codex-arg0 = { workspace = true }
|
||||
codex-chatgpt = { workspace = true }
|
||||
codex-common = { workspace = true, features = ["cli"] }
|
||||
codex-core = { workspace = true }
|
||||
codex-exec = { workspace = true }
|
||||
codex-login = { workspace = true }
|
||||
codex-mcp-server = { workspace = true }
|
||||
codex-process-hardening = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-protocol-ts = { workspace = true }
|
||||
codex-responses-api-proxy = { workspace = true }
|
||||
codex-tui = { workspace = true }
|
||||
codex-rmcp-client = { workspace = true }
|
||||
codex-cloud-tasks = { path = "../cloud-tasks" }
|
||||
ctor = { workspace = true }
|
||||
owo-colors = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
supports-color = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
"io-std",
|
||||
"macros",
|
||||
"process",
|
||||
"rt-multi-thread",
|
||||
"signal",
|
||||
] }
|
||||
tracing = "0.1.41"
|
||||
tracing-subscriber = "0.3.19"
|
||||
codex-protocol-ts = { path = "../protocol-ts" }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_matches = { workspace = true }
|
||||
assert_cmd = { workspace = true }
|
||||
predicates = { workspace = true }
|
||||
pretty_assertions = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
@@ -64,7 +64,6 @@ async fn run_command_under_sandbox(
|
||||
sandbox_type: SandboxType,
|
||||
) -> anyhow::Result<()> {
|
||||
let sandbox_mode = create_sandbox_mode(full_auto);
|
||||
let cwd = std::env::current_dir()?;
|
||||
let config = Config::load_with_cli_overrides(
|
||||
config_overrides
|
||||
.parse_overrides()
|
||||
@@ -74,14 +73,31 @@ async fn run_command_under_sandbox(
|
||||
codex_linux_sandbox_exe,
|
||||
..Default::default()
|
||||
},
|
||||
)?;
|
||||
)
|
||||
.await?;
|
||||
|
||||
// In practice, this should be `std::env::current_dir()` because this CLI
|
||||
// does not support `--cwd`, but let's use the config value for consistency.
|
||||
let cwd = config.cwd.clone();
|
||||
// For now, we always use the same cwd for both the command and the
|
||||
// sandbox policy. In the future, we could add a CLI option to set them
|
||||
// separately.
|
||||
let sandbox_policy_cwd = cwd.clone();
|
||||
|
||||
let stdio_policy = StdioPolicy::Inherit;
|
||||
let env = create_env(&config.shell_environment_policy);
|
||||
|
||||
let mut child = match sandbox_type {
|
||||
SandboxType::Seatbelt => {
|
||||
spawn_command_under_seatbelt(command, &config.sandbox_policy, cwd, stdio_policy, env)
|
||||
.await?
|
||||
spawn_command_under_seatbelt(
|
||||
command,
|
||||
cwd,
|
||||
&config.sandbox_policy,
|
||||
sandbox_policy_cwd.as_path(),
|
||||
stdio_policy,
|
||||
env,
|
||||
)
|
||||
.await?
|
||||
}
|
||||
SandboxType::Landlock => {
|
||||
#[expect(clippy::expect_used)]
|
||||
@@ -91,8 +107,9 @@ async fn run_command_under_sandbox(
|
||||
spawn_command_under_linux_sandbox(
|
||||
codex_linux_sandbox_exe,
|
||||
command,
|
||||
&config.sandbox_policy,
|
||||
cwd,
|
||||
&config.sandbox_policy,
|
||||
sandbox_policy_cwd.as_path(),
|
||||
stdio_policy,
|
||||
env,
|
||||
)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
pub mod debug_sandbox;
|
||||
mod exit_status;
|
||||
pub mod login;
|
||||
pub mod proto;
|
||||
|
||||
use clap::Parser;
|
||||
use codex_common::CliConfigOverrides;
|
||||
|
||||
@@ -1,19 +1,20 @@
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::auth::CLIENT_ID;
|
||||
use codex_core::auth::OPENAI_API_KEY_ENV_VAR;
|
||||
use codex_core::auth::login_with_api_key;
|
||||
use codex_core::auth::logout;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_login::ServerOptions;
|
||||
use codex_login::run_device_code_login;
|
||||
use codex_login::run_login_server;
|
||||
use codex_protocol::mcp_protocol::AuthMode;
|
||||
use std::env;
|
||||
use std::io::IsTerminal;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub async fn login_with_chatgpt(codex_home: PathBuf, originator: String) -> std::io::Result<()> {
|
||||
let opts = ServerOptions::new(codex_home, CLIENT_ID.to_string(), originator);
|
||||
pub async fn login_with_chatgpt(codex_home: PathBuf) -> std::io::Result<()> {
|
||||
let opts = ServerOptions::new(codex_home, CLIENT_ID.to_string());
|
||||
let server = run_login_server(opts)?;
|
||||
|
||||
eprintln!(
|
||||
@@ -25,14 +26,9 @@ pub async fn login_with_chatgpt(codex_home: PathBuf, originator: String) -> std:
|
||||
}
|
||||
|
||||
pub async fn run_login_with_chatgpt(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
let config = load_config_or_exit(cli_config_overrides);
|
||||
let config = load_config_or_exit(cli_config_overrides).await;
|
||||
|
||||
match login_with_chatgpt(
|
||||
config.codex_home,
|
||||
config.responses_originator_header.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
match login_with_chatgpt(config.codex_home).await {
|
||||
Ok(_) => {
|
||||
eprintln!("Successfully logged in");
|
||||
std::process::exit(0);
|
||||
@@ -48,7 +44,7 @@ pub async fn run_login_with_api_key(
|
||||
cli_config_overrides: CliConfigOverrides,
|
||||
api_key: String,
|
||||
) -> ! {
|
||||
let config = load_config_or_exit(cli_config_overrides);
|
||||
let config = load_config_or_exit(cli_config_overrides).await;
|
||||
|
||||
match login_with_api_key(&config.codex_home, &api_key) {
|
||||
Ok(_) => {
|
||||
@@ -62,26 +58,67 @@ pub async fn run_login_with_api_key(
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
let config = load_config_or_exit(cli_config_overrides);
|
||||
pub fn read_api_key_from_stdin() -> String {
|
||||
let mut stdin = std::io::stdin();
|
||||
|
||||
match CodexAuth::from_codex_home(
|
||||
&config.codex_home,
|
||||
config.preferred_auth_method,
|
||||
&config.responses_originator_header,
|
||||
) {
|
||||
if stdin.is_terminal() {
|
||||
eprintln!(
|
||||
"--with-api-key expects the API key on stdin. Try piping it, e.g. `printenv OPENAI_API_KEY | codex login --with-api-key`."
|
||||
);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
eprintln!("Reading API key from stdin...");
|
||||
|
||||
let mut buffer = String::new();
|
||||
if let Err(err) = stdin.read_to_string(&mut buffer) {
|
||||
eprintln!("Failed to read API key from stdin: {err}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let api_key = buffer.trim().to_string();
|
||||
if api_key.is_empty() {
|
||||
eprintln!("No API key provided via stdin.");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
api_key
|
||||
}
|
||||
|
||||
/// Login using the OAuth device code flow.
|
||||
pub async fn run_login_with_device_code(
|
||||
cli_config_overrides: CliConfigOverrides,
|
||||
issuer_base_url: Option<String>,
|
||||
client_id: Option<String>,
|
||||
) -> ! {
|
||||
let config = load_config_or_exit(cli_config_overrides).await;
|
||||
let mut opts = ServerOptions::new(
|
||||
config.codex_home,
|
||||
client_id.unwrap_or(CLIENT_ID.to_string()),
|
||||
);
|
||||
if let Some(iss) = issuer_base_url {
|
||||
opts.issuer = iss;
|
||||
}
|
||||
match run_device_code_login(opts).await {
|
||||
Ok(()) => {
|
||||
eprintln!("Successfully logged in");
|
||||
std::process::exit(0);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Error logging in with device code: {e}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
let config = load_config_or_exit(cli_config_overrides).await;
|
||||
|
||||
match CodexAuth::from_codex_home(&config.codex_home) {
|
||||
Ok(Some(auth)) => match auth.mode {
|
||||
AuthMode::ApiKey => match auth.get_token().await {
|
||||
Ok(api_key) => {
|
||||
eprintln!("Logged in using an API key - {}", safe_format_key(&api_key));
|
||||
|
||||
if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR)
|
||||
&& env_api_key == api_key
|
||||
{
|
||||
eprintln!(
|
||||
" API loaded from OPENAI_API_KEY environment variable or .env file"
|
||||
);
|
||||
}
|
||||
std::process::exit(0);
|
||||
}
|
||||
Err(e) => {
|
||||
@@ -106,7 +143,7 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
}
|
||||
|
||||
pub async fn run_logout(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
let config = load_config_or_exit(cli_config_overrides);
|
||||
let config = load_config_or_exit(cli_config_overrides).await;
|
||||
|
||||
match logout(&config.codex_home) {
|
||||
Ok(true) => {
|
||||
@@ -124,7 +161,7 @@ pub async fn run_logout(cli_config_overrides: CliConfigOverrides) -> ! {
|
||||
}
|
||||
}
|
||||
|
||||
fn load_config_or_exit(cli_config_overrides: CliConfigOverrides) -> Config {
|
||||
async fn load_config_or_exit(cli_config_overrides: CliConfigOverrides) -> Config {
|
||||
let cli_overrides = match cli_config_overrides.parse_overrides() {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
@@ -134,7 +171,7 @@ fn load_config_or_exit(cli_config_overrides: CliConfigOverrides) -> Config {
|
||||
};
|
||||
|
||||
let config_overrides = ConfigOverrides::default();
|
||||
match Config::load_with_cli_overrides(cli_overrides, config_overrides) {
|
||||
match Config::load_with_cli_overrides(cli_overrides, config_overrides).await {
|
||||
Ok(config) => config,
|
||||
Err(e) => {
|
||||
eprintln!("Error loading configuration: {e}");
|
||||
|
||||
@@ -7,17 +7,27 @@ use codex_chatgpt::apply_command::ApplyCommand;
|
||||
use codex_chatgpt::apply_command::run_apply_command;
|
||||
use codex_cli::LandlockCommand;
|
||||
use codex_cli::SeatbeltCommand;
|
||||
use codex_cli::login::read_api_key_from_stdin;
|
||||
use codex_cli::login::run_login_status;
|
||||
use codex_cli::login::run_login_with_api_key;
|
||||
use codex_cli::login::run_login_with_chatgpt;
|
||||
use codex_cli::login::run_login_with_device_code;
|
||||
use codex_cli::login::run_logout;
|
||||
use codex_cli::proto;
|
||||
use codex_cloud_tasks::Cli as CloudTasksCli;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_exec::Cli as ExecCli;
|
||||
use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
|
||||
use codex_tui::AppExitInfo;
|
||||
use codex_tui::Cli as TuiCli;
|
||||
use owo_colors::OwoColorize;
|
||||
use std::path::PathBuf;
|
||||
use supports_color::Stream;
|
||||
|
||||
use crate::proto::ProtoCli;
|
||||
mod mcp_cmd;
|
||||
|
||||
use crate::mcp_cmd::McpCli;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
|
||||
/// Codex CLI
|
||||
///
|
||||
@@ -37,6 +47,9 @@ struct MultitoolCli {
|
||||
#[clap(flatten)]
|
||||
pub config_overrides: CliConfigOverrides,
|
||||
|
||||
#[clap(flatten)]
|
||||
pub feature_toggles: FeatureToggles,
|
||||
|
||||
#[clap(flatten)]
|
||||
interactive: TuiCli,
|
||||
|
||||
@@ -56,26 +69,42 @@ enum Subcommand {
|
||||
/// Remove stored authentication credentials.
|
||||
Logout(LogoutCommand),
|
||||
|
||||
/// Experimental: run Codex as an MCP server.
|
||||
Mcp,
|
||||
/// [experimental] Run Codex as an MCP server and manage MCP servers.
|
||||
Mcp(McpCli),
|
||||
|
||||
/// Run the Protocol stream via stdin/stdout
|
||||
#[clap(visible_alias = "p")]
|
||||
Proto(ProtoCli),
|
||||
/// [experimental] Run the Codex MCP server (stdio transport).
|
||||
McpServer,
|
||||
|
||||
/// [experimental] Run the app server.
|
||||
AppServer,
|
||||
|
||||
/// Generate shell completion scripts.
|
||||
Completion(CompletionCommand),
|
||||
|
||||
/// Internal debugging commands.
|
||||
Debug(DebugArgs),
|
||||
/// Run commands within a Codex-provided sandbox.
|
||||
#[clap(visible_alias = "debug")]
|
||||
Sandbox(SandboxArgs),
|
||||
|
||||
/// Apply the latest diff produced by Codex agent as a `git apply` to your local working tree.
|
||||
#[clap(visible_alias = "a")]
|
||||
Apply(ApplyCommand),
|
||||
|
||||
/// Resume a previous interactive session (picker by default; use --last to continue the most recent).
|
||||
Resume(ResumeCommand),
|
||||
|
||||
/// Internal: generate TypeScript protocol bindings.
|
||||
#[clap(hide = true)]
|
||||
GenerateTs(GenerateTsCommand),
|
||||
/// [EXPERIMENTAL] Browse tasks from Codex Cloud and apply changes locally.
|
||||
#[clap(name = "cloud", alias = "cloud-tasks")]
|
||||
Cloud(CloudTasksCli),
|
||||
|
||||
/// Internal: run the responses API proxy.
|
||||
#[clap(hide = true)]
|
||||
ResponsesApiProxy(ResponsesApiProxyArgs),
|
||||
|
||||
/// Inspect feature flags.
|
||||
Features(FeaturesCli),
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
@@ -86,18 +115,35 @@ struct CompletionCommand {
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct DebugArgs {
|
||||
struct ResumeCommand {
|
||||
/// Conversation/session id (UUID). When provided, resumes this session.
|
||||
/// If omitted, use --last to pick the most recent recorded session.
|
||||
#[arg(value_name = "SESSION_ID")]
|
||||
session_id: Option<String>,
|
||||
|
||||
/// Continue the most recent session without showing the picker.
|
||||
#[arg(long = "last", default_value_t = false, conflicts_with = "session_id")]
|
||||
last: bool,
|
||||
|
||||
#[clap(flatten)]
|
||||
config_overrides: TuiCli,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct SandboxArgs {
|
||||
#[command(subcommand)]
|
||||
cmd: DebugCommand,
|
||||
cmd: SandboxCommand,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
enum DebugCommand {
|
||||
enum SandboxCommand {
|
||||
/// Run a command under Seatbelt (macOS only).
|
||||
Seatbelt(SeatbeltCommand),
|
||||
#[clap(visible_alias = "seatbelt")]
|
||||
Macos(SeatbeltCommand),
|
||||
|
||||
/// Run a command under Landlock+seccomp (Linux only).
|
||||
Landlock(LandlockCommand),
|
||||
#[clap(visible_alias = "landlock")]
|
||||
Linux(LandlockCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
@@ -105,9 +151,32 @@ struct LoginCommand {
|
||||
#[clap(skip)]
|
||||
config_overrides: CliConfigOverrides,
|
||||
|
||||
#[arg(long = "api-key", value_name = "API_KEY")]
|
||||
#[arg(
|
||||
long = "with-api-key",
|
||||
help = "Read the API key from stdin (e.g. `printenv OPENAI_API_KEY | codex login --with-api-key`)"
|
||||
)]
|
||||
with_api_key: bool,
|
||||
|
||||
#[arg(
|
||||
long = "api-key",
|
||||
value_name = "API_KEY",
|
||||
help = "(deprecated) Previously accepted the API key directly; now exits with guidance to use --with-api-key",
|
||||
hide = true
|
||||
)]
|
||||
api_key: Option<String>,
|
||||
|
||||
#[arg(long = "device-auth")]
|
||||
use_device_code: bool,
|
||||
|
||||
/// EXPERIMENTAL: Use custom OAuth issuer base URL (advanced)
|
||||
/// Override the OAuth issuer base URL (advanced)
|
||||
#[arg(long = "experimental_issuer", value_name = "URL", hide = true)]
|
||||
issuer_base_url: Option<String>,
|
||||
|
||||
/// EXPERIMENTAL: Use custom OAuth client ID (advanced)
|
||||
#[arg(long = "experimental_client-id", value_name = "CLIENT_ID", hide = true)]
|
||||
client_id: Option<String>,
|
||||
|
||||
#[command(subcommand)]
|
||||
action: Option<LoginSubcommand>,
|
||||
}
|
||||
@@ -135,6 +204,96 @@ struct GenerateTsCommand {
|
||||
prettier: Option<PathBuf>,
|
||||
}
|
||||
|
||||
fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec<String> {
|
||||
let AppExitInfo {
|
||||
token_usage,
|
||||
conversation_id,
|
||||
} = exit_info;
|
||||
|
||||
if token_usage.is_zero() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let mut lines = vec![format!(
|
||||
"{}",
|
||||
codex_core::protocol::FinalOutput::from(token_usage)
|
||||
)];
|
||||
|
||||
if let Some(session_id) = conversation_id {
|
||||
let resume_cmd = format!("codex resume {session_id}");
|
||||
let command = if color_enabled {
|
||||
resume_cmd.cyan().to_string()
|
||||
} else {
|
||||
resume_cmd
|
||||
};
|
||||
lines.push(format!("To continue this session, run {command}"));
|
||||
}
|
||||
|
||||
lines
|
||||
}
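// For orientation, the lines this produces for a non-zero usage look like the
// following (values illustrative, mirroring the unit tests below):
//
//   Token usage: total=2 input=0 output=2
//   To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000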
|
||||
|
||||
fn print_exit_messages(exit_info: AppExitInfo) {
|
||||
let color_enabled = supports_color::on(Stream::Stdout).is_some();
|
||||
for line in format_exit_messages(exit_info, color_enabled) {
|
||||
println!("{line}");
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Parser, Clone)]
|
||||
struct FeatureToggles {
|
||||
/// Enable a feature (repeatable). Equivalent to `-c features.<name>=true`.
|
||||
#[arg(long = "enable", value_name = "FEATURE", action = clap::ArgAction::Append, global = true)]
|
||||
enable: Vec<String>,
|
||||
|
||||
/// Disable a feature (repeatable). Equivalent to `-c features.<name>=false`.
|
||||
#[arg(long = "disable", value_name = "FEATURE", action = clap::ArgAction::Append, global = true)]
|
||||
disable: Vec<String>,
|
||||
}
|
||||
|
||||
impl FeatureToggles {
|
||||
fn to_overrides(&self) -> Vec<String> {
|
||||
let mut v = Vec::new();
|
||||
for k in &self.enable {
|
||||
v.push(format!("features.{k}=true"));
|
||||
}
|
||||
for k in &self.disable {
|
||||
v.push(format!("features.{k}=false"));
|
||||
}
|
||||
v
|
||||
}
|
||||
}
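// Illustrative sketch (the feature names are assumptions):
// `codex --enable search --disable rmcp_client` flattens to the same raw
// overrides as `-c features.search=true -c features.rmcp_client=false`:
//
//     let toggles = FeatureToggles {
//         enable: vec!["search".to_string()],
//         disable: vec!["rmcp_client".to_string()],
//     };
//     assert_eq!(
//         toggles.to_overrides(),
//         vec![
//             "features.search=true".to_string(),
//             "features.rmcp_client=false".to_string(),
//         ]
//     );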
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct FeaturesCli {
|
||||
#[command(subcommand)]
|
||||
sub: FeaturesSubcommand,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
enum FeaturesSubcommand {
|
||||
/// List known features with their stage and effective state.
|
||||
List,
|
||||
}
|
||||
|
||||
fn stage_str(stage: codex_core::features::Stage) -> &'static str {
|
||||
use codex_core::features::Stage;
|
||||
match stage {
|
||||
Stage::Experimental => "experimental",
|
||||
Stage::Beta => "beta",
|
||||
Stage::Stable => "stable",
|
||||
Stage::Deprecated => "deprecated",
|
||||
Stage::Removed => "removed",
|
||||
}
|
||||
}
|
||||
|
||||
/// As early as possible in the process lifecycle, apply hardening measures. We
|
||||
/// skip this in debug builds to avoid interfering with debugging.
|
||||
#[ctor::ctor]
|
||||
#[cfg(not(debug_assertions))]
|
||||
fn pre_main_hardening() {
|
||||
codex_process_hardening::pre_main_hardening();
|
||||
}
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
|
||||
cli_main(codex_linux_sandbox_exe).await?;
|
||||
@@ -143,32 +302,84 @@ fn main() -> anyhow::Result<()> {
|
||||
}
|
||||
|
||||
async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
|
||||
let cli = MultitoolCli::parse();
|
||||
let MultitoolCli {
|
||||
config_overrides: mut root_config_overrides,
|
||||
feature_toggles,
|
||||
mut interactive,
|
||||
subcommand,
|
||||
} = MultitoolCli::parse();
|
||||
|
||||
match cli.subcommand {
|
||||
// Fold --enable/--disable into config overrides so they flow to all subcommands.
|
||||
root_config_overrides
|
||||
.raw_overrides
|
||||
.extend(feature_toggles.to_overrides());
|
||||
|
||||
match subcommand {
|
||||
None => {
|
||||
let mut tui_cli = cli.interactive;
|
||||
prepend_config_flags(&mut tui_cli.config_overrides, cli.config_overrides);
|
||||
let usage = codex_tui::run_main(tui_cli, codex_linux_sandbox_exe).await?;
|
||||
if !usage.is_zero() {
|
||||
println!("{}", codex_core::protocol::FinalOutput::from(usage));
|
||||
}
|
||||
prepend_config_flags(
|
||||
&mut interactive.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
let exit_info = codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
|
||||
print_exit_messages(exit_info);
|
||||
}
|
||||
Some(Subcommand::Exec(mut exec_cli)) => {
|
||||
prepend_config_flags(&mut exec_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut exec_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_exec::run_main(exec_cli, codex_linux_sandbox_exe).await?;
|
||||
}
|
||||
Some(Subcommand::Mcp) => {
|
||||
codex_mcp_server::run_main(codex_linux_sandbox_exe, cli.config_overrides).await?;
|
||||
Some(Subcommand::McpServer) => {
|
||||
codex_mcp_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(Subcommand::Mcp(mut mcp_cli)) => {
|
||||
// Propagate any root-level config overrides (e.g. `-c key=value`).
|
||||
prepend_config_flags(&mut mcp_cli.config_overrides, root_config_overrides.clone());
|
||||
mcp_cli.run().await?;
|
||||
}
|
||||
Some(Subcommand::AppServer) => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(Subcommand::Resume(ResumeCommand {
|
||||
session_id,
|
||||
last,
|
||||
config_overrides,
|
||||
})) => {
|
||||
interactive = finalize_resume_interactive(
|
||||
interactive,
|
||||
root_config_overrides.clone(),
|
||||
session_id,
|
||||
last,
|
||||
config_overrides,
|
||||
);
|
||||
let exit_info = codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
|
||||
print_exit_messages(exit_info);
|
||||
}
|
||||
Some(Subcommand::Login(mut login_cli)) => {
|
||||
prepend_config_flags(&mut login_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut login_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
match login_cli.action {
|
||||
Some(LoginSubcommand::Status) => {
|
||||
run_login_status(login_cli.config_overrides).await;
|
||||
}
|
||||
None => {
|
||||
if let Some(api_key) = login_cli.api_key {
|
||||
if login_cli.use_device_code {
|
||||
run_login_with_device_code(
|
||||
login_cli.config_overrides,
|
||||
login_cli.issuer_base_url,
|
||||
login_cli.client_id,
|
||||
)
|
||||
.await;
|
||||
} else if login_cli.api_key.is_some() {
|
||||
eprintln!(
|
||||
"The --api-key flag is no longer supported. Pipe the key instead, e.g. `printenv OPENAI_API_KEY | codex login --with-api-key`."
|
||||
);
|
||||
std::process::exit(1);
|
||||
} else if login_cli.with_api_key {
|
||||
let api_key = read_api_key_from_stdin();
|
||||
run_login_with_api_key(login_cli.config_overrides, api_key).await;
|
||||
} else {
|
||||
run_login_with_chatgpt(login_cli.config_overrides).await;
|
||||
@@ -177,27 +388,39 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
}
|
||||
}
|
||||
Some(Subcommand::Logout(mut logout_cli)) => {
|
||||
prepend_config_flags(&mut logout_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut logout_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
run_logout(logout_cli.config_overrides).await;
|
||||
}
|
||||
Some(Subcommand::Proto(mut proto_cli)) => {
|
||||
prepend_config_flags(&mut proto_cli.config_overrides, cli.config_overrides);
|
||||
proto::run_main(proto_cli).await?;
|
||||
}
|
||||
Some(Subcommand::Completion(completion_cli)) => {
|
||||
print_completion(completion_cli);
|
||||
}
|
||||
Some(Subcommand::Debug(debug_args)) => match debug_args.cmd {
|
||||
DebugCommand::Seatbelt(mut seatbelt_cli) => {
|
||||
prepend_config_flags(&mut seatbelt_cli.config_overrides, cli.config_overrides);
|
||||
Some(Subcommand::Cloud(mut cloud_cli)) => {
|
||||
prepend_config_flags(
|
||||
&mut cloud_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_cloud_tasks::run_main(cloud_cli, codex_linux_sandbox_exe).await?;
|
||||
}
|
||||
Some(Subcommand::Sandbox(sandbox_args)) => match sandbox_args.cmd {
|
||||
SandboxCommand::Macos(mut seatbelt_cli) => {
|
||||
prepend_config_flags(
|
||||
&mut seatbelt_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_cli::debug_sandbox::run_command_under_seatbelt(
|
||||
seatbelt_cli,
|
||||
codex_linux_sandbox_exe,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
DebugCommand::Landlock(mut landlock_cli) => {
|
||||
prepend_config_flags(&mut landlock_cli.config_overrides, cli.config_overrides);
|
||||
SandboxCommand::Linux(mut landlock_cli) => {
|
||||
prepend_config_flags(
|
||||
&mut landlock_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_cli::debug_sandbox::run_command_under_landlock(
|
||||
landlock_cli,
|
||||
codex_linux_sandbox_exe,
|
||||
@@ -206,12 +429,43 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
}
|
||||
},
|
||||
Some(Subcommand::Apply(mut apply_cli)) => {
|
||||
prepend_config_flags(&mut apply_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut apply_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
run_apply_command(apply_cli, None).await?;
|
||||
}
|
||||
Some(Subcommand::ResponsesApiProxy(args)) => {
|
||||
tokio::task::spawn_blocking(move || codex_responses_api_proxy::run_main(args))
|
||||
.await??;
|
||||
}
|
||||
Some(Subcommand::GenerateTs(gen_cli)) => {
|
||||
codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
|
||||
}
|
||||
Some(Subcommand::Features(FeaturesCli { sub })) => match sub {
|
||||
FeaturesSubcommand::List => {
|
||||
// Respect root-level `-c` overrides plus top-level flags like `--profile`.
|
||||
let cli_kv_overrides = root_config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(|e| anyhow::anyhow!(e))?;
|
||||
|
||||
// Thread through relevant top-level flags (at minimum, `--profile`).
|
||||
// Also honor `--search` since it maps to a feature toggle.
|
||||
let overrides = ConfigOverrides {
|
||||
config_profile: interactive.config_profile.clone(),
|
||||
tools_web_search_request: interactive.web_search.then_some(true),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let config = Config::load_with_cli_overrides(cli_kv_overrides, overrides).await?;
|
||||
for def in codex_core::features::FEATURES.iter() {
|
||||
let name = def.key;
|
||||
let stage = stage_str(def.stage);
|
||||
let enabled = config.features.enabled(def.id);
|
||||
println!("{name}\t{stage}\t{enabled}");
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -228,8 +482,258 @@ fn prepend_config_flags(
|
||||
.splice(0..0, cli_config_overrides.raw_overrides);
|
||||
}
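// Root-level `-c` flags are spliced in ahead of any subcommand-scoped flags,
// and later entries take precedence (the doc on `merge_resume_cli_flags`
// below relies on the same rule when it appends overrides "with highest
// precedence"). Sketch with assumed values:
//
//   root flags:       -c a=1
//   subcommand flags: -c a=2
//   merged list:      ["a=1", "a=2"]  // a=2 wins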
|
||||
|
||||
/// Build the final `TuiCli` for a `codex resume` invocation.
|
||||
fn finalize_resume_interactive(
|
||||
mut interactive: TuiCli,
|
||||
root_config_overrides: CliConfigOverrides,
|
||||
session_id: Option<String>,
|
||||
last: bool,
|
||||
resume_cli: TuiCli,
|
||||
) -> TuiCli {
|
||||
// Start with the parsed interactive CLI so resume shares the same
|
||||
// configuration surface area as `codex` without additional flags.
|
||||
let resume_session_id = session_id;
|
||||
interactive.resume_picker = resume_session_id.is_none() && !last;
|
||||
interactive.resume_last = last;
|
||||
interactive.resume_session_id = resume_session_id;
|
||||
|
||||
// Merge resume-scoped flags and overrides with highest precedence.
|
||||
merge_resume_cli_flags(&mut interactive, resume_cli);
|
||||
|
||||
// Propagate any root-level config overrides (e.g. `-c key=value`).
|
||||
prepend_config_flags(&mut interactive.config_overrides, root_config_overrides);
|
||||
|
||||
interactive
|
||||
}
|
||||
|
||||
/// Merge flags provided to `codex resume` so they take precedence over any
|
||||
/// root-level flags. Only overrides fields explicitly set on the resume-scoped
|
||||
/// CLI. Also appends `-c key=value` overrides with highest precedence.
|
||||
fn merge_resume_cli_flags(interactive: &mut TuiCli, resume_cli: TuiCli) {
|
||||
if let Some(model) = resume_cli.model {
|
||||
interactive.model = Some(model);
|
||||
}
|
||||
if resume_cli.oss {
|
||||
interactive.oss = true;
|
||||
}
|
||||
if let Some(profile) = resume_cli.config_profile {
|
||||
interactive.config_profile = Some(profile);
|
||||
}
|
||||
if let Some(sandbox) = resume_cli.sandbox_mode {
|
||||
interactive.sandbox_mode = Some(sandbox);
|
||||
}
|
||||
if let Some(approval) = resume_cli.approval_policy {
|
||||
interactive.approval_policy = Some(approval);
|
||||
}
|
||||
if resume_cli.full_auto {
|
||||
interactive.full_auto = true;
|
||||
}
|
||||
if resume_cli.dangerously_bypass_approvals_and_sandbox {
|
||||
interactive.dangerously_bypass_approvals_and_sandbox = true;
|
||||
}
|
||||
if let Some(cwd) = resume_cli.cwd {
|
||||
interactive.cwd = Some(cwd);
|
||||
}
|
||||
if resume_cli.web_search {
|
||||
interactive.web_search = true;
|
||||
}
|
||||
if !resume_cli.images.is_empty() {
|
||||
interactive.images = resume_cli.images;
|
||||
}
|
||||
if let Some(prompt) = resume_cli.prompt {
|
||||
interactive.prompt = Some(prompt);
|
||||
}
|
||||
|
||||
interactive
|
||||
.config_overrides
|
||||
.raw_overrides
|
||||
.extend(resume_cli.config_overrides.raw_overrides);
|
||||
}
|
||||
|
||||
fn print_completion(cmd: CompletionCommand) {
|
||||
let mut app = MultitoolCli::command();
|
||||
let name = "codex";
|
||||
generate(cmd.shell, &mut app, name, &mut std::io::stdout());
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use assert_matches::assert_matches;
|
||||
use codex_core::protocol::TokenUsage;
|
||||
use codex_protocol::ConversationId;
|
||||
|
||||
fn finalize_from_args(args: &[&str]) -> TuiCli {
|
||||
let cli = MultitoolCli::try_parse_from(args).expect("parse");
|
||||
let MultitoolCli {
|
||||
interactive,
|
||||
config_overrides: root_overrides,
|
||||
subcommand,
|
||||
feature_toggles: _,
|
||||
} = cli;
|
||||
|
||||
let Subcommand::Resume(ResumeCommand {
|
||||
session_id,
|
||||
last,
|
||||
config_overrides: resume_cli,
|
||||
}) = subcommand.expect("resume present")
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
finalize_resume_interactive(interactive, root_overrides, session_id, last, resume_cli)
|
||||
}
|
||||
|
||||
fn sample_exit_info(conversation: Option<&str>) -> AppExitInfo {
|
||||
let token_usage = TokenUsage {
|
||||
output_tokens: 2,
|
||||
total_tokens: 2,
|
||||
..Default::default()
|
||||
};
|
||||
AppExitInfo {
|
||||
token_usage,
|
||||
conversation_id: conversation
|
||||
.map(ConversationId::from_string)
|
||||
.map(Result::unwrap),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exit_messages_skips_zero_usage() {
|
||||
let exit_info = AppExitInfo {
|
||||
token_usage: TokenUsage::default(),
|
||||
conversation_id: None,
|
||||
};
|
||||
let lines = format_exit_messages(exit_info, false);
|
||||
assert!(lines.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exit_messages_includes_resume_hint_without_color() {
|
||||
let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"));
|
||||
let lines = format_exit_messages(exit_info, false);
|
||||
assert_eq!(
|
||||
lines,
|
||||
vec![
|
||||
"Token usage: total=2 input=0 output=2".to_string(),
|
||||
"To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000"
|
||||
.to_string(),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_exit_messages_applies_color_when_enabled() {
|
||||
let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"));
|
||||
let lines = format_exit_messages(exit_info, true);
|
||||
assert_eq!(lines.len(), 2);
|
||||
assert!(lines[1].contains("\u{1b}[36m"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resume_model_flag_applies_when_no_root_flags() {
|
||||
let interactive = finalize_from_args(["codex", "resume", "-m", "gpt-5-test"].as_ref());
|
||||
|
||||
assert_eq!(interactive.model.as_deref(), Some("gpt-5-test"));
|
||||
assert!(interactive.resume_picker);
|
||||
assert!(!interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resume_picker_logic_none_and_not_last() {
|
||||
let interactive = finalize_from_args(["codex", "resume"].as_ref());
|
||||
assert!(interactive.resume_picker);
|
||||
assert!(!interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resume_picker_logic_last() {
|
||||
let interactive = finalize_from_args(["codex", "resume", "--last"].as_ref());
|
||||
assert!(!interactive.resume_picker);
|
||||
assert!(interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resume_picker_logic_with_session_id() {
|
||||
let interactive = finalize_from_args(["codex", "resume", "1234"].as_ref());
|
||||
assert!(!interactive.resume_picker);
|
||||
assert!(!interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id.as_deref(), Some("1234"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resume_merges_option_flags_and_full_auto() {
|
||||
let interactive = finalize_from_args(
|
||||
[
|
||||
"codex",
|
||||
"resume",
|
||||
"sid",
|
||||
"--oss",
|
||||
"--full-auto",
|
||||
"--search",
|
||||
"--sandbox",
|
||||
"workspace-write",
|
||||
"--ask-for-approval",
|
||||
"on-request",
|
||||
"-m",
|
||||
"gpt-5-test",
|
||||
"-p",
|
||||
"my-profile",
|
||||
"-C",
|
||||
"/tmp",
|
||||
"-i",
|
||||
"/tmp/a.png,/tmp/b.png",
|
||||
]
|
||||
.as_ref(),
|
||||
);
|
||||
|
||||
assert_eq!(interactive.model.as_deref(), Some("gpt-5-test"));
|
||||
assert!(interactive.oss);
|
||||
assert_eq!(interactive.config_profile.as_deref(), Some("my-profile"));
|
||||
assert_matches!(
|
||||
interactive.sandbox_mode,
|
||||
Some(codex_common::SandboxModeCliArg::WorkspaceWrite)
|
||||
);
|
||||
assert_matches!(
|
||||
interactive.approval_policy,
|
||||
Some(codex_common::ApprovalModeCliArg::OnRequest)
|
||||
);
|
||||
assert!(interactive.full_auto);
|
||||
assert_eq!(
|
||||
interactive.cwd.as_deref(),
|
||||
Some(std::path::Path::new("/tmp"))
|
||||
);
|
||||
assert!(interactive.web_search);
|
||||
let has_a = interactive
|
||||
.images
|
||||
.iter()
|
||||
.any(|p| p == std::path::Path::new("/tmp/a.png"));
|
||||
let has_b = interactive
|
||||
.images
|
||||
.iter()
|
||||
.any(|p| p == std::path::Path::new("/tmp/b.png"));
|
||||
assert!(has_a && has_b);
|
||||
assert!(!interactive.resume_picker);
|
||||
assert!(!interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id.as_deref(), Some("sid"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resume_merges_dangerously_bypass_flag() {
|
||||
let interactive = finalize_from_args(
|
||||
[
|
||||
"codex",
|
||||
"resume",
|
||||
"--dangerously-bypass-approvals-and-sandbox",
|
||||
]
|
||||
.as_ref(),
|
||||
);
|
||||
assert!(interactive.dangerously_bypass_approvals_and_sandbox);
|
||||
assert!(interactive.resume_picker);
|
||||
assert!(!interactive.resume_last);
|
||||
assert_eq!(interactive.resume_session_id, None);
|
||||
}
|
||||
}
|
||||
|
||||
codex-rs/cli/src/mcp_cmd.rs (new file, 708 lines)
@@ -0,0 +1,708 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use anyhow::bail;
|
||||
use clap::ArgGroup;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::config::find_codex_home;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config::write_global_mcp_servers;
|
||||
use codex_core::config_types::McpServerConfig;
|
||||
use codex_core::config_types::McpServerTransportConfig;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::mcp::auth::compute_auth_statuses;
|
||||
use codex_core::protocol::McpAuthStatus;
|
||||
use codex_rmcp_client::delete_oauth_tokens;
|
||||
use codex_rmcp_client::perform_oauth_login;
|
||||
use codex_rmcp_client::supports_oauth_login;
|
||||
|
||||
/// [experimental] Launch Codex as an MCP server or manage configured MCP servers.
|
||||
///
|
||||
/// Subcommands:
|
||||
/// - `list` — list configured servers (with `--json`)
/// - `get` — show a single server (with `--json`)
/// - `add` — add a server launcher entry to `~/.codex/config.toml`
/// - `remove` — delete a server entry
/// - `login` — authenticate with a configured server via OAuth
/// - `logout` — remove stored OAuth credentials for a server
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct McpCli {
|
||||
#[clap(flatten)]
|
||||
pub config_overrides: CliConfigOverrides,
|
||||
|
||||
#[command(subcommand)]
|
||||
pub subcommand: McpSubcommand,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
pub enum McpSubcommand {
|
||||
/// [experimental] List configured MCP servers.
|
||||
List(ListArgs),
|
||||
|
||||
/// [experimental] Show details for a configured MCP server.
|
||||
Get(GetArgs),
|
||||
|
||||
/// [experimental] Add a global MCP server entry.
|
||||
Add(AddArgs),
|
||||
|
||||
/// [experimental] Remove a global MCP server entry.
|
||||
Remove(RemoveArgs),
|
||||
|
||||
/// [experimental] Authenticate with a configured MCP server via OAuth.
|
||||
/// Requires experimental_use_rmcp_client = true in config.toml.
|
||||
Login(LoginArgs),
|
||||
|
||||
/// [experimental] Remove stored OAuth credentials for a server.
|
||||
/// Requires experimental_use_rmcp_client = true in config.toml.
|
||||
Logout(LogoutArgs),
|
||||
}
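// Illustrative invocations (server names and URLs are placeholders, mirroring
// the integration tests in `cli/tests`):
//
//   codex mcp add docs -- docs-server --port 4000
//   codex mcp add github --url https://example.com/mcp
//   codex mcp list --json
//   codex mcp get docs
//   codex mcp remove docs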
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct ListArgs {
|
||||
/// Output the configured servers as JSON.
|
||||
#[arg(long)]
|
||||
pub json: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct GetArgs {
|
||||
/// Name of the MCP server to display.
|
||||
pub name: String,
|
||||
|
||||
/// Output the server configuration as JSON.
|
||||
#[arg(long)]
|
||||
pub json: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct AddArgs {
|
||||
/// Name for the MCP server configuration.
|
||||
pub name: String,
|
||||
|
||||
#[command(flatten)]
|
||||
pub transport_args: AddMcpTransportArgs,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Args)]
|
||||
#[command(
|
||||
group(
|
||||
ArgGroup::new("transport")
|
||||
.args(["command", "url"])
|
||||
.required(true)
|
||||
.multiple(false)
|
||||
)
|
||||
)]
|
||||
pub struct AddMcpTransportArgs {
|
||||
#[command(flatten)]
|
||||
pub stdio: Option<AddMcpStdioArgs>,
|
||||
|
||||
#[command(flatten)]
|
||||
pub streamable_http: Option<AddMcpStreamableHttpArgs>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Args)]
|
||||
pub struct AddMcpStdioArgs {
|
||||
/// Command to launch the MCP server.
|
||||
/// Use --url for a streamable HTTP server.
|
||||
#[arg(
|
||||
trailing_var_arg = true,
|
||||
num_args = 0..,
|
||||
)]
|
||||
pub command: Vec<String>,
|
||||
|
||||
/// Environment variables to set when launching the server.
|
||||
/// Only valid with stdio servers.
|
||||
#[arg(
|
||||
long,
|
||||
value_parser = parse_env_pair,
|
||||
value_name = "KEY=VALUE",
|
||||
)]
|
||||
pub env: Vec<(String, String)>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Args)]
|
||||
pub struct AddMcpStreamableHttpArgs {
|
||||
/// URL for a streamable HTTP MCP server.
|
||||
#[arg(long)]
|
||||
pub url: String,
|
||||
|
||||
/// Optional environment variable to read for a bearer token.
|
||||
/// Only valid with streamable HTTP servers.
|
||||
#[arg(
|
||||
long = "bearer-token-env-var",
|
||||
value_name = "ENV_VAR",
|
||||
requires = "url"
|
||||
)]
|
||||
pub bearer_token_env_var: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct RemoveArgs {
|
||||
/// Name of the MCP server configuration to remove.
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct LoginArgs {
|
||||
/// Name of the MCP server to authenticate with via OAuth.
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct LogoutArgs {
|
||||
/// Name of the MCP server to deauthenticate.
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
impl McpCli {
|
||||
pub async fn run(self) -> Result<()> {
|
||||
let McpCli {
|
||||
config_overrides,
|
||||
subcommand,
|
||||
} = self;
|
||||
|
||||
match subcommand {
|
||||
McpSubcommand::List(args) => {
|
||||
run_list(&config_overrides, args).await?;
|
||||
}
|
||||
McpSubcommand::Get(args) => {
|
||||
run_get(&config_overrides, args).await?;
|
||||
}
|
||||
McpSubcommand::Add(args) => {
|
||||
run_add(&config_overrides, args).await?;
|
||||
}
|
||||
McpSubcommand::Remove(args) => {
|
||||
run_remove(&config_overrides, args).await?;
|
||||
}
|
||||
McpSubcommand::Login(args) => {
|
||||
run_login(&config_overrides, args).await?;
|
||||
}
|
||||
McpSubcommand::Logout(args) => {
|
||||
run_logout(&config_overrides, args).await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<()> {
|
||||
// Parse and validate any provided overrides; the loaded config is only used for the OAuth settings below.
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
let AddArgs {
|
||||
name,
|
||||
transport_args,
|
||||
} = add_args;
|
||||
|
||||
validate_server_name(&name)?;
|
||||
|
||||
let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
|
||||
let mut servers = load_global_mcp_servers(&codex_home)
|
||||
.await
|
||||
.with_context(|| format!("failed to load MCP servers from {}", codex_home.display()))?;
|
||||
|
||||
let transport = match transport_args {
|
||||
AddMcpTransportArgs {
|
||||
stdio: Some(stdio), ..
|
||||
} => {
|
||||
let mut command_parts = stdio.command.into_iter();
|
||||
let command_bin = command_parts
|
||||
.next()
|
||||
.ok_or_else(|| anyhow!("command is required"))?;
|
||||
let command_args: Vec<String> = command_parts.collect();
|
||||
|
||||
let env_map = if stdio.env.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(stdio.env.into_iter().collect::<HashMap<_, _>>())
|
||||
};
|
||||
McpServerTransportConfig::Stdio {
|
||||
command: command_bin,
|
||||
args: command_args,
|
||||
env: env_map,
|
||||
}
|
||||
}
|
||||
AddMcpTransportArgs {
|
||||
streamable_http:
|
||||
Some(AddMcpStreamableHttpArgs {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
}),
|
||||
..
|
||||
} => McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
},
|
||||
AddMcpTransportArgs { .. } => bail!("exactly one of --command or --url must be provided"),
|
||||
};
|
||||
|
||||
let new_entry = McpServerConfig {
|
||||
transport: transport.clone(),
|
||||
enabled: true,
|
||||
startup_timeout_sec: None,
|
||||
tool_timeout_sec: None,
|
||||
};
|
||||
|
||||
servers.insert(name.clone(), new_entry);
|
||||
|
||||
write_global_mcp_servers(&codex_home, &servers)
|
||||
.with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;
|
||||
|
||||
println!("Added global MCP server '{name}'.");
|
||||
|
||||
if let McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var: None,
|
||||
} = transport
|
||||
&& matches!(supports_oauth_login(&url).await, Ok(true))
|
||||
{
|
||||
println!("Detected OAuth support. Starting OAuth flow…");
|
||||
perform_oauth_login(&name, &url, config.mcp_oauth_credentials_store_mode).await?;
|
||||
println!("Successfully logged in.");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveArgs) -> Result<()> {
|
||||
config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
|
||||
let RemoveArgs { name } = remove_args;
|
||||
|
||||
validate_server_name(&name)?;
|
||||
|
||||
let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
|
||||
let mut servers = load_global_mcp_servers(&codex_home)
|
||||
.await
|
||||
.with_context(|| format!("failed to load MCP servers from {}", codex_home.display()))?;
|
||||
|
||||
let removed = servers.remove(&name).is_some();
|
||||
|
||||
    if removed {
        write_global_mcp_servers(&codex_home, &servers)
            .with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;
        println!("Removed global MCP server '{name}'.");
    } else {
        println!("No MCP server named '{name}' found.");
    }
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
if !config.features.enabled(Feature::RmcpClient) {
|
||||
bail!(
|
||||
"OAuth login is only supported when experimental_use_rmcp_client is true in config.toml."
|
||||
);
|
||||
}
|
||||
|
||||
let LoginArgs { name } = login_args;
|
||||
|
||||
let Some(server) = config.mcp_servers.get(&name) else {
|
||||
bail!("No MCP server named '{name}' found.");
|
||||
};
|
||||
|
||||
let url = match &server.transport {
|
||||
McpServerTransportConfig::StreamableHttp { url, .. } => url.clone(),
|
||||
_ => bail!("OAuth login is only supported for streamable HTTP servers."),
|
||||
};
|
||||
|
||||
perform_oauth_login(&name, &url, config.mcp_oauth_credentials_store_mode).await?;
|
||||
println!("Successfully logged in to MCP server '{name}'.");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
let LogoutArgs { name } = logout_args;
|
||||
|
||||
let server = config
|
||||
.mcp_servers
|
||||
.get(&name)
|
||||
.ok_or_else(|| anyhow!("No MCP server named '{name}' found in configuration."))?;
|
||||
|
||||
let url = match &server.transport {
|
||||
McpServerTransportConfig::StreamableHttp { url, .. } => url.clone(),
|
||||
_ => bail!("OAuth logout is only supported for streamable_http transports."),
|
||||
};
|
||||
|
||||
match delete_oauth_tokens(&name, &url, config.mcp_oauth_credentials_store_mode) {
|
||||
Ok(true) => println!("Removed OAuth credentials for '{name}'."),
|
||||
Ok(false) => println!("No OAuth credentials stored for '{name}'."),
|
||||
Err(err) => return Err(anyhow!("failed to delete OAuth credentials: {err}")),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
let mut entries: Vec<_> = config.mcp_servers.iter().collect();
|
||||
entries.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
let auth_statuses = compute_auth_statuses(
|
||||
config.mcp_servers.iter(),
|
||||
config.mcp_oauth_credentials_store_mode,
|
||||
)
|
||||
.await;
|
||||
|
||||
if list_args.json {
|
||||
let json_entries: Vec<_> = entries
|
||||
.into_iter()
|
||||
.map(|(name, cfg)| {
|
||||
let auth_status = auth_statuses
|
||||
.get(name.as_str())
|
||||
.copied()
|
||||
.unwrap_or(McpAuthStatus::Unsupported);
|
||||
let transport = match &cfg.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
|
||||
"type": "stdio",
|
||||
"command": command,
|
||||
"args": args,
|
||||
"env": env,
|
||||
}),
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
} => {
|
||||
serde_json::json!({
|
||||
"type": "streamable_http",
|
||||
"url": url,
|
||||
"bearer_token_env_var": bearer_token_env_var,
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
serde_json::json!({
|
||||
"name": name,
|
||||
"enabled": cfg.enabled,
|
||||
"transport": transport,
|
||||
"startup_timeout_sec": cfg
|
||||
.startup_timeout_sec
|
||||
.map(|timeout| timeout.as_secs_f64()),
|
||||
"tool_timeout_sec": cfg
|
||||
.tool_timeout_sec
|
||||
.map(|timeout| timeout.as_secs_f64()),
|
||||
"auth_status": auth_status,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
let output = serde_json::to_string_pretty(&json_entries)?;
|
||||
println!("{output}");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if entries.is_empty() {
|
||||
println!("No MCP servers configured yet. Try `codex mcp add my-tool -- my-command`.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut stdio_rows: Vec<[String; 6]> = Vec::new();
|
||||
let mut http_rows: Vec<[String; 5]> = Vec::new();
|
||||
|
||||
for (name, cfg) in entries {
|
||||
match &cfg.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => {
|
||||
let args_display = if args.is_empty() {
|
||||
"-".to_string()
|
||||
} else {
|
||||
args.join(" ")
|
||||
};
|
||||
let env_display = match env.as_ref() {
|
||||
None => "-".to_string(),
|
||||
Some(map) if map.is_empty() => "-".to_string(),
|
||||
Some(map) => {
|
||||
let mut pairs: Vec<_> = map.iter().collect();
|
||||
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
pairs
|
||||
.into_iter()
|
||||
.map(|(k, v)| format!("{k}={v}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
}
|
||||
};
|
||||
let status = if cfg.enabled {
|
||||
"enabled".to_string()
|
||||
} else {
|
||||
"disabled".to_string()
|
||||
};
|
||||
let auth_status = auth_statuses
|
||||
.get(name.as_str())
|
||||
.copied()
|
||||
.unwrap_or(McpAuthStatus::Unsupported)
|
||||
.to_string();
|
||||
stdio_rows.push([
|
||||
name.clone(),
|
||||
command.clone(),
|
||||
args_display,
|
||||
env_display,
|
||||
status,
|
||||
auth_status,
|
||||
]);
|
||||
}
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
} => {
|
||||
let status = if cfg.enabled {
|
||||
"enabled".to_string()
|
||||
} else {
|
||||
"disabled".to_string()
|
||||
};
|
||||
let auth_status = auth_statuses
|
||||
.get(name.as_str())
|
||||
.copied()
|
||||
.unwrap_or(McpAuthStatus::Unsupported)
|
||||
.to_string();
|
||||
http_rows.push([
|
||||
name.clone(),
|
||||
url.clone(),
|
||||
bearer_token_env_var.clone().unwrap_or("-".to_string()),
|
||||
status,
|
||||
auth_status,
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !stdio_rows.is_empty() {
|
||||
let mut widths = [
|
||||
"Name".len(),
|
||||
"Command".len(),
|
||||
"Args".len(),
|
||||
"Env".len(),
|
||||
"Status".len(),
|
||||
"Auth".len(),
|
||||
];
|
||||
for row in &stdio_rows {
|
||||
for (i, cell) in row.iter().enumerate() {
|
||||
widths[i] = widths[i].max(cell.len());
|
||||
}
|
||||
}
|
||||
|
||||
println!(
|
||||
"{name:<name_w$} {command:<cmd_w$} {args:<args_w$} {env:<env_w$} {status:<status_w$} {auth:<auth_w$}",
|
||||
name = "Name",
|
||||
command = "Command",
|
||||
args = "Args",
|
||||
env = "Env",
|
||||
status = "Status",
|
||||
auth = "Auth",
|
||||
name_w = widths[0],
|
||||
cmd_w = widths[1],
|
||||
args_w = widths[2],
|
||||
env_w = widths[3],
|
||||
status_w = widths[4],
|
||||
auth_w = widths[5],
|
||||
);
|
||||
|
||||
for row in &stdio_rows {
|
||||
println!(
|
||||
"{name:<name_w$} {command:<cmd_w$} {args:<args_w$} {env:<env_w$} {status:<status_w$} {auth:<auth_w$}",
|
||||
name = row[0].as_str(),
|
||||
command = row[1].as_str(),
|
||||
args = row[2].as_str(),
|
||||
env = row[3].as_str(),
|
||||
status = row[4].as_str(),
|
||||
auth = row[5].as_str(),
|
||||
name_w = widths[0],
|
||||
cmd_w = widths[1],
|
||||
args_w = widths[2],
|
||||
env_w = widths[3],
|
||||
status_w = widths[4],
|
||||
auth_w = widths[5],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if !stdio_rows.is_empty() && !http_rows.is_empty() {
|
||||
println!();
|
||||
}
|
||||
|
||||
if !http_rows.is_empty() {
|
||||
let mut widths = [
|
||||
"Name".len(),
|
||||
"Url".len(),
|
||||
"Bearer Token Env Var".len(),
|
||||
"Status".len(),
|
||||
"Auth".len(),
|
||||
];
|
||||
for row in &http_rows {
|
||||
for (i, cell) in row.iter().enumerate() {
|
||||
widths[i] = widths[i].max(cell.len());
|
||||
}
|
||||
}
|
||||
|
||||
println!(
|
||||
"{name:<name_w$} {url:<url_w$} {token:<token_w$} {status:<status_w$} {auth:<auth_w$}",
|
||||
name = "Name",
|
||||
url = "Url",
|
||||
token = "Bearer Token Env Var",
|
||||
status = "Status",
|
||||
auth = "Auth",
|
||||
name_w = widths[0],
|
||||
url_w = widths[1],
|
||||
token_w = widths[2],
|
||||
status_w = widths[3],
|
||||
auth_w = widths[4],
|
||||
);
|
||||
|
||||
for row in &http_rows {
|
||||
println!(
|
||||
"{name:<name_w$} {url:<url_w$} {token:<token_w$} {status:<status_w$} {auth:<auth_w$}",
|
||||
name = row[0].as_str(),
|
||||
url = row[1].as_str(),
|
||||
token = row[2].as_str(),
|
||||
status = row[3].as_str(),
|
||||
auth = row[4].as_str(),
|
||||
name_w = widths[0],
|
||||
url_w = widths[1],
|
||||
token_w = widths[2],
|
||||
status_w = widths[3],
|
||||
auth_w = widths[4],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.await
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
let Some(server) = config.mcp_servers.get(&get_args.name) else {
|
||||
bail!("No MCP server named '{name}' found.", name = get_args.name);
|
||||
};
|
||||
|
||||
if get_args.json {
|
||||
let transport = match &server.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
|
||||
"type": "stdio",
|
||||
"command": command,
|
||||
"args": args,
|
||||
"env": env,
|
||||
}),
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
} => serde_json::json!({
|
||||
"type": "streamable_http",
|
||||
"url": url,
|
||||
"bearer_token_env_var": bearer_token_env_var,
|
||||
}),
|
||||
};
|
||||
let output = serde_json::to_string_pretty(&serde_json::json!({
|
||||
"name": get_args.name,
|
||||
"enabled": server.enabled,
|
||||
"transport": transport,
|
||||
"startup_timeout_sec": server
|
||||
.startup_timeout_sec
|
||||
.map(|timeout| timeout.as_secs_f64()),
|
||||
"tool_timeout_sec": server
|
||||
.tool_timeout_sec
|
||||
.map(|timeout| timeout.as_secs_f64()),
|
||||
}))?;
|
||||
println!("{output}");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("{}", get_args.name);
|
||||
println!(" enabled: {}", server.enabled);
|
||||
match &server.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => {
|
||||
println!(" transport: stdio");
|
||||
println!(" command: {command}");
|
||||
let args_display = if args.is_empty() {
|
||||
"-".to_string()
|
||||
} else {
|
||||
args.join(" ")
|
||||
};
|
||||
println!(" args: {args_display}");
|
||||
let env_display = match env.as_ref() {
|
||||
None => "-".to_string(),
|
||||
Some(map) if map.is_empty() => "-".to_string(),
|
||||
Some(map) => {
|
||||
let mut pairs: Vec<_> = map.iter().collect();
|
||||
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
pairs
|
||||
.into_iter()
|
||||
.map(|(k, v)| format!("{k}={v}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
}
|
||||
};
|
||||
println!(" env: {env_display}");
|
||||
}
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
} => {
|
||||
println!(" transport: streamable_http");
|
||||
println!(" url: {url}");
|
||||
let env_var = bearer_token_env_var.as_deref().unwrap_or("-");
|
||||
println!(" bearer_token_env_var: {env_var}");
|
||||
}
|
||||
}
|
||||
if let Some(timeout) = server.startup_timeout_sec {
|
||||
println!(" startup_timeout_sec: {}", timeout.as_secs_f64());
|
||||
}
|
||||
if let Some(timeout) = server.tool_timeout_sec {
|
||||
println!(" tool_timeout_sec: {}", timeout.as_secs_f64());
|
||||
}
|
||||
println!(" remove: codex mcp remove {}", get_args.name);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn parse_env_pair(raw: &str) -> Result<(String, String), String> {
|
||||
let mut parts = raw.splitn(2, '=');
|
||||
let key = parts
|
||||
.next()
|
||||
.map(str::trim)
|
||||
.filter(|s| !s.is_empty())
|
||||
.ok_or_else(|| "environment entries must be in KEY=VALUE form".to_string())?;
|
||||
let value = parts
|
||||
.next()
|
||||
.map(str::to_string)
|
||||
.ok_or_else(|| "environment entries must be in KEY=VALUE form".to_string())?;
|
||||
|
||||
Ok((key.to_string(), value))
|
||||
}
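// Behavior sketch (illustrative inputs): the input splits at the first '=',
// the key is trimmed, and a missing '=' or an empty key is an error.
//
//   parse_env_pair("FOO=bar")  == Ok(("FOO".to_string(), "bar".to_string()))
//   parse_env_pair("FOO=a=b")  == Ok(("FOO".to_string(), "a=b".to_string()))
//   parse_env_pair("FOO").is_err()   // no '='
//   parse_env_pair("=bar").is_err()  // empty key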
|
||||
|
||||
fn validate_server_name(name: &str) -> Result<()> {
|
||||
let is_valid = !name.is_empty()
|
||||
&& name
|
||||
.chars()
|
||||
.all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_');
|
||||
|
||||
if is_valid {
|
||||
Ok(())
|
||||
} else {
|
||||
bail!("invalid server name '{name}' (use letters, numbers, '-', '_')");
|
||||
}
|
||||
}
|
||||
codex-rs/cli/src/proto.rs (deleted, 136 lines)
@@ -1,136 +0,0 @@
|
||||
use std::io::IsTerminal;
|
||||
|
||||
use clap::Parser;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::AuthManager;
|
||||
use codex_core::ConversationManager;
|
||||
use codex_core::NewConversation;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::protocol::Event;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Submission;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::BufReader;
|
||||
use tracing::error;
|
||||
use tracing::info;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
pub struct ProtoCli {
|
||||
#[clap(skip)]
|
||||
pub config_overrides: CliConfigOverrides,
|
||||
}
|
||||
|
||||
pub async fn run_main(opts: ProtoCli) -> anyhow::Result<()> {
|
||||
if std::io::stdin().is_terminal() {
|
||||
anyhow::bail!("Protocol mode expects stdin to be a pipe, not a terminal");
|
||||
}
|
||||
|
||||
tracing_subscriber::fmt()
|
||||
.with_writer(std::io::stderr)
|
||||
.init();
|
||||
|
||||
let ProtoCli { config_overrides } = opts;
|
||||
let overrides_vec = config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
|
||||
let config = Config::load_with_cli_overrides(overrides_vec, ConfigOverrides::default())?;
|
||||
// Use conversation_manager API to start a conversation
|
||||
let conversation_manager = ConversationManager::new(AuthManager::shared(
|
||||
config.codex_home.clone(),
|
||||
config.preferred_auth_method,
|
||||
config.responses_originator_header.clone(),
|
||||
));
|
||||
let NewConversation {
|
||||
conversation_id: _,
|
||||
conversation,
|
||||
session_configured,
|
||||
} = conversation_manager.new_conversation(config).await?;
|
||||
|
||||
// Simulate streaming the session_configured event.
|
||||
let synthetic_event = Event {
|
||||
// Fake id value.
|
||||
id: "".to_string(),
|
||||
msg: EventMsg::SessionConfigured(session_configured),
|
||||
};
|
||||
let session_configured_event = match serde_json::to_string(&synthetic_event) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
error!("Failed to serialize session_configured: {e}");
|
||||
return Err(anyhow::Error::from(e));
|
||||
}
|
||||
};
|
||||
println!("{session_configured_event}");
|
||||
|
||||
// Task that reads JSON lines from stdin and forwards to Submission Queue
|
||||
let sq_fut = {
|
||||
let conversation = conversation.clone();
|
||||
async move {
|
||||
let stdin = BufReader::new(tokio::io::stdin());
|
||||
let mut lines = stdin.lines();
|
||||
loop {
|
||||
let result = tokio::select! {
|
||||
_ = tokio::signal::ctrl_c() => {
|
||||
break
|
||||
},
|
||||
res = lines.next_line() => res,
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(Some(line)) => {
|
||||
let line = line.trim();
|
||||
if line.is_empty() {
|
||||
continue;
|
||||
}
|
||||
match serde_json::from_str::<Submission>(line) {
|
||||
Ok(sub) => {
|
||||
if let Err(e) = conversation.submit_with_id(sub).await {
|
||||
error!("{e:#}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("invalid submission: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
info!("Submission queue closed");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Task that reads events from the agent and prints them as JSON lines to stdout
|
||||
let eq_fut = async move {
|
||||
loop {
|
||||
let event = tokio::select! {
|
||||
_ = tokio::signal::ctrl_c() => break,
|
||||
event = conversation.next_event() => event,
|
||||
};
|
||||
match event {
|
||||
Ok(event) => {
|
||||
let event_str = match serde_json::to_string(&event) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
error!("Failed to serialize event: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
println!("{event_str}");
|
||||
}
|
||||
Err(e) => {
|
||||
error!("{e:#}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
info!("Event queue closed");
|
||||
};
|
||||
|
||||
tokio::join!(sq_fut, eq_fut);
|
||||
Ok(())
|
||||
}
|
||||
codex-rs/cli/tests/mcp_add_remove.rs (new file, 212 lines)
@@ -0,0 +1,212 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config_types::McpServerTransportConfig;
|
||||
use predicates::str::contains;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn codex_command(codex_home: &Path) -> Result<assert_cmd::Command> {
|
||||
let mut cmd = assert_cmd::Command::cargo_bin("codex")?;
|
||||
cmd.env("CODEX_HOME", codex_home);
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn add_and_remove_server_updates_global_config() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut add_cmd = codex_command(codex_home.path())?;
|
||||
add_cmd
|
||||
.args(["mcp", "add", "docs", "--", "echo", "hello"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("Added global MCP server 'docs'."));
|
||||
|
||||
let servers = load_global_mcp_servers(codex_home.path()).await?;
|
||||
assert_eq!(servers.len(), 1);
|
||||
let docs = servers.get("docs").expect("server should exist");
|
||||
match &docs.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => {
|
||||
assert_eq!(command, "echo");
|
||||
assert_eq!(args, &vec!["hello".to_string()]);
|
||||
assert!(env.is_none());
|
||||
}
|
||||
other => panic!("unexpected transport: {other:?}"),
|
||||
}
|
||||
assert!(docs.enabled);
|
||||
|
||||
let mut remove_cmd = codex_command(codex_home.path())?;
|
||||
remove_cmd
|
||||
.args(["mcp", "remove", "docs"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("Removed global MCP server 'docs'."));
|
||||
|
||||
let servers = load_global_mcp_servers(codex_home.path()).await?;
|
||||
assert!(servers.is_empty());
|
||||
|
||||
let mut remove_again_cmd = codex_command(codex_home.path())?;
|
||||
remove_again_cmd
|
||||
.args(["mcp", "remove", "docs"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("No MCP server named 'docs' found."));
|
||||
|
||||
let servers = load_global_mcp_servers(codex_home.path()).await?;
|
||||
assert!(servers.is_empty());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn add_with_env_preserves_key_order_and_values() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut add_cmd = codex_command(codex_home.path())?;
|
||||
add_cmd
|
||||
.args([
|
||||
"mcp",
|
||||
"add",
|
||||
"envy",
|
||||
"--env",
|
||||
"FOO=bar",
|
||||
"--env",
|
||||
"ALPHA=beta",
|
||||
"--",
|
||||
"python",
|
||||
"server.py",
|
||||
])
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
let servers = load_global_mcp_servers(codex_home.path()).await?;
|
||||
let envy = servers.get("envy").expect("server should exist");
|
||||
let env = match &envy.transport {
|
||||
McpServerTransportConfig::Stdio { env: Some(env), .. } => env,
|
||||
other => panic!("unexpected transport: {other:?}"),
|
||||
};
|
||||
|
||||
assert_eq!(env.len(), 2);
|
||||
assert_eq!(env.get("FOO"), Some(&"bar".to_string()));
|
||||
assert_eq!(env.get("ALPHA"), Some(&"beta".to_string()));
|
||||
assert!(envy.enabled);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn add_streamable_http_without_manual_token() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut add_cmd = codex_command(codex_home.path())?;
|
||||
add_cmd
|
||||
.args(["mcp", "add", "github", "--url", "https://example.com/mcp"])
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
let servers = load_global_mcp_servers(codex_home.path()).await?;
|
||||
let github = servers.get("github").expect("github server should exist");
|
||||
match &github.transport {
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
} => {
|
||||
assert_eq!(url, "https://example.com/mcp");
|
||||
assert!(bearer_token_env_var.is_none());
|
||||
}
|
||||
other => panic!("unexpected transport: {other:?}"),
|
||||
}
|
||||
assert!(github.enabled);
|
||||
|
||||
assert!(!codex_home.path().join(".credentials.json").exists());
|
||||
assert!(!codex_home.path().join(".env").exists());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn add_streamable_http_with_custom_env_var() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut add_cmd = codex_command(codex_home.path())?;
|
||||
add_cmd
|
||||
.args([
|
||||
"mcp",
|
||||
"add",
|
||||
"issues",
|
||||
"--url",
|
||||
"https://example.com/issues",
|
||||
"--bearer-token-env-var",
|
||||
"GITHUB_TOKEN",
|
||||
])
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
let servers = load_global_mcp_servers(codex_home.path()).await?;
|
||||
let issues = servers.get("issues").expect("issues server should exist");
|
||||
match &issues.transport {
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url,
|
||||
bearer_token_env_var,
|
||||
} => {
|
||||
assert_eq!(url, "https://example.com/issues");
|
||||
assert_eq!(bearer_token_env_var.as_deref(), Some("GITHUB_TOKEN"));
|
||||
}
|
||||
other => panic!("unexpected transport: {other:?}"),
|
||||
}
|
||||
assert!(issues.enabled);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn add_streamable_http_rejects_removed_flag() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut add_cmd = codex_command(codex_home.path())?;
|
||||
add_cmd
|
||||
.args([
|
||||
"mcp",
|
||||
"add",
|
||||
"github",
|
||||
"--url",
|
||||
"https://example.com/mcp",
|
||||
"--with-bearer-token",
|
||||
])
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr(contains("--with-bearer-token"));
|
||||
|
||||
let servers = load_global_mcp_servers(codex_home.path()).await?;
|
||||
assert!(servers.is_empty());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn add_cant_add_command_and_url() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut add_cmd = codex_command(codex_home.path())?;
|
||||
add_cmd
|
||||
.args([
|
||||
"mcp",
|
||||
"add",
|
||||
"github",
|
||||
"--url",
|
||||
"https://example.com/mcp",
|
||||
"--command",
|
||||
"--",
|
||||
"echo",
|
||||
"hello",
|
||||
])
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr(contains("unexpected argument '--command' found"));
|
||||
|
||||
let servers = load_global_mcp_servers(codex_home.path()).await?;
|
||||
assert!(servers.is_empty());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
codex-rs/cli/tests/mcp_list.rs (new file, 112 lines)
@@ -0,0 +1,112 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use predicates::prelude::PredicateBooleanExt;
|
||||
use predicates::str::contains;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::Value as JsonValue;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn codex_command(codex_home: &Path) -> Result<assert_cmd::Command> {
|
||||
let mut cmd = assert_cmd::Command::cargo_bin("codex")?;
|
||||
cmd.env("CODEX_HOME", codex_home);
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_shows_empty_state() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut cmd = codex_command(codex_home.path())?;
|
||||
let output = cmd.args(["mcp", "list"]).output()?;
|
||||
assert!(output.status.success());
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
assert!(stdout.contains("No MCP servers configured yet."));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_and_get_render_expected_output() -> Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut add = codex_command(codex_home.path())?;
|
||||
add.args([
|
||||
"mcp",
|
||||
"add",
|
||||
"docs",
|
||||
"--env",
|
||||
"TOKEN=secret",
|
||||
"--",
|
||||
"docs-server",
|
||||
"--port",
|
||||
"4000",
|
||||
])
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
let mut list_cmd = codex_command(codex_home.path())?;
|
||||
let list_output = list_cmd.args(["mcp", "list"]).output()?;
|
||||
assert!(list_output.status.success());
|
||||
let stdout = String::from_utf8(list_output.stdout)?;
|
||||
assert!(stdout.contains("Name"));
|
||||
assert!(stdout.contains("docs"));
|
||||
assert!(stdout.contains("docs-server"));
|
||||
assert!(stdout.contains("TOKEN=secret"));
|
||||
assert!(stdout.contains("Status"));
|
||||
assert!(stdout.contains("Auth"));
|
||||
assert!(stdout.contains("enabled"));
|
||||
assert!(stdout.contains("Unsupported"));
|
||||
|
||||
let mut list_json_cmd = codex_command(codex_home.path())?;
|
||||
let json_output = list_json_cmd.args(["mcp", "list", "--json"]).output()?;
|
||||
assert!(json_output.status.success());
|
||||
let stdout = String::from_utf8(json_output.stdout)?;
|
||||
let parsed: JsonValue = serde_json::from_str(&stdout)?;
|
||||
assert_eq!(
|
||||
parsed,
|
||||
json!([
|
||||
{
|
||||
"name": "docs",
|
||||
"enabled": true,
|
||||
"transport": {
|
||||
"type": "stdio",
|
||||
"command": "docs-server",
|
||||
"args": [
|
||||
"--port",
|
||||
"4000"
|
||||
],
|
||||
"env": {
|
||||
"TOKEN": "secret"
|
||||
}
|
||||
},
|
||||
"startup_timeout_sec": null,
|
||||
"tool_timeout_sec": null,
|
||||
"auth_status": "unsupported"
|
||||
}
|
||||
]
|
||||
)
|
||||
);
|
||||
|
||||
let mut get_cmd = codex_command(codex_home.path())?;
|
||||
let get_output = get_cmd.args(["mcp", "get", "docs"]).output()?;
|
||||
assert!(get_output.status.success());
|
||||
let stdout = String::from_utf8(get_output.stdout)?;
|
||||
assert!(stdout.contains("docs"));
|
||||
assert!(stdout.contains("transport: stdio"));
|
||||
assert!(stdout.contains("command: docs-server"));
|
||||
assert!(stdout.contains("args: --port 4000"));
|
||||
assert!(stdout.contains("env: TOKEN=secret"));
|
||||
assert!(stdout.contains("enabled: true"));
|
||||
assert!(stdout.contains("remove: codex mcp remove docs"));
|
||||
|
||||
let mut get_json_cmd = codex_command(codex_home.path())?;
|
||||
get_json_cmd
|
||||
.args(["mcp", "get", "docs", "--json"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("\"name\": \"docs\"").and(contains("\"enabled\": true")));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
27
codex-rs/cloud-tasks-client/Cargo.toml
Normal file
27
codex-rs/cloud-tasks-client/Cargo.toml
Normal file
@@ -0,0 +1,27 @@
|
||||
[package]
|
||||
name = "codex-cloud-tasks-client"
|
||||
version = { workspace = true }
|
||||
edition = "2024"
|
||||
|
||||
[lib]
|
||||
name = "codex_cloud_tasks_client"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[features]
|
||||
default = ["online"]
|
||||
online = ["dep:codex-backend-client"]
|
||||
mock = []
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
async-trait = "0.1"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
diffy = "0.4.2"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
thiserror = "2.0.12"
|
||||
codex-backend-client = { path = "../backend-client", optional = true }
|
||||
codex-git-apply = { path = "../git-apply" }
|
||||
158
codex-rs/cloud-tasks-client/src/api.rs
Normal file
158
codex-rs/cloud-tasks-client/src/api.rs
Normal file
@@ -0,0 +1,158 @@
|
||||
use chrono::DateTime;
|
||||
use chrono::Utc;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
pub type Result<T> = std::result::Result<T, CloudTaskError>;
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum CloudTaskError {
|
||||
#[error("unimplemented: {0}")]
|
||||
Unimplemented(&'static str),
|
||||
#[error("http error: {0}")]
|
||||
Http(String),
|
||||
#[error("io error: {0}")]
|
||||
Io(String),
|
||||
#[error("{0}")]
|
||||
Msg(String),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(transparent)]
|
||||
pub struct TaskId(pub String);
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum TaskStatus {
|
||||
Pending,
|
||||
Ready,
|
||||
Applied,
|
||||
Error,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct TaskSummary {
|
||||
pub id: TaskId,
|
||||
pub title: String,
|
||||
pub status: TaskStatus,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
/// Backend environment identifier (when available)
|
||||
pub environment_id: Option<String>,
|
||||
/// Human-friendly environment label (when available)
|
||||
pub environment_label: Option<String>,
|
||||
pub summary: DiffSummary,
|
||||
/// True when the backend reports this task as a code review.
|
||||
#[serde(default)]
|
||||
pub is_review: bool,
|
||||
/// Number of assistant attempts (best-of-N), when reported by the backend.
|
||||
#[serde(default)]
|
||||
pub attempt_total: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
|
||||
pub enum AttemptStatus {
|
||||
Pending,
|
||||
InProgress,
|
||||
Completed,
|
||||
Failed,
|
||||
Cancelled,
|
||||
#[default]
|
||||
Unknown,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct TurnAttempt {
|
||||
pub turn_id: String,
|
||||
pub attempt_placement: Option<i64>,
|
||||
pub created_at: Option<DateTime<Utc>>,
|
||||
pub status: AttemptStatus,
|
||||
pub diff: Option<String>,
|
||||
pub messages: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum ApplyStatus {
|
||||
Success,
|
||||
Partial,
|
||||
Error,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct ApplyOutcome {
|
||||
pub applied: bool,
|
||||
pub status: ApplyStatus,
|
||||
pub message: String,
|
||||
#[serde(default)]
|
||||
pub skipped_paths: Vec<String>,
|
||||
#[serde(default)]
|
||||
pub conflict_paths: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct CreatedTask {
|
||||
pub id: TaskId,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
|
||||
pub struct DiffSummary {
|
||||
pub files_changed: usize,
|
||||
pub lines_added: usize,
|
||||
pub lines_removed: usize,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct TaskText {
|
||||
pub prompt: Option<String>,
|
||||
pub messages: Vec<String>,
|
||||
pub turn_id: Option<String>,
|
||||
pub sibling_turn_ids: Vec<String>,
|
||||
pub attempt_placement: Option<i64>,
|
||||
pub attempt_status: AttemptStatus,
|
||||
}
|
||||
|
||||
impl Default for TaskText {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
prompt: None,
|
||||
messages: Vec::new(),
|
||||
turn_id: None,
|
||||
sibling_turn_ids: Vec::new(),
|
||||
attempt_placement: None,
|
||||
attempt_status: AttemptStatus::Unknown,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
pub trait CloudBackend: Send + Sync {
|
||||
async fn list_tasks(&self, env: Option<&str>) -> Result<Vec<TaskSummary>>;
|
||||
async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>>;
|
||||
/// Return assistant output messages (no diff) when available.
|
||||
async fn get_task_messages(&self, id: TaskId) -> Result<Vec<String>>;
|
||||
/// Return the creating prompt and assistant messages (when available).
|
||||
async fn get_task_text(&self, id: TaskId) -> Result<TaskText>;
|
||||
/// Return any sibling attempts (best-of-N) for the given assistant turn.
|
||||
async fn list_sibling_attempts(
|
||||
&self,
|
||||
task: TaskId,
|
||||
turn_id: String,
|
||||
) -> Result<Vec<TurnAttempt>>;
|
||||
/// Dry-run apply (preflight) that validates whether the patch would apply cleanly.
|
||||
/// Never modifies the working tree. When `diff_override` is supplied, the provided diff is
|
||||
/// used instead of re-fetching the task details so callers can apply alternate attempts.
|
||||
async fn apply_task_preflight(
|
||||
&self,
|
||||
id: TaskId,
|
||||
diff_override: Option<String>,
|
||||
) -> Result<ApplyOutcome>;
|
||||
async fn apply_task(&self, id: TaskId, diff_override: Option<String>) -> Result<ApplyOutcome>;
|
||||
async fn create_task(
|
||||
&self,
|
||||
env_id: &str,
|
||||
prompt: &str,
|
||||
git_ref: &str,
|
||||
qa_mode: bool,
|
||||
best_of_n: usize,
|
||||
) -> Result<CreatedTask>;
|
||||
}
|
||||
769
codex-rs/cloud-tasks-client/src/http.rs
Normal file
769
codex-rs/cloud-tasks-client/src/http.rs
Normal file
@@ -0,0 +1,769 @@
|
||||
use crate::ApplyOutcome;
|
||||
use crate::ApplyStatus;
|
||||
use crate::AttemptStatus;
|
||||
use crate::CloudBackend;
|
||||
use crate::CloudTaskError;
|
||||
use crate::DiffSummary;
|
||||
use crate::Result;
|
||||
use crate::TaskId;
|
||||
use crate::TaskStatus;
|
||||
use crate::TaskSummary;
|
||||
use crate::TurnAttempt;
|
||||
use crate::api::TaskText;
|
||||
use chrono::DateTime;
|
||||
use chrono::Utc;
|
||||
|
||||
use codex_backend_client as backend;
|
||||
use codex_backend_client::CodeTaskDetailsResponseExt;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct HttpClient {
|
||||
pub base_url: String,
|
||||
backend: backend::Client,
|
||||
}
|
||||
|
||||
impl HttpClient {
|
||||
pub fn new(base_url: impl Into<String>) -> anyhow::Result<Self> {
|
||||
let base_url = base_url.into();
|
||||
let backend = backend::Client::new(base_url.clone())?;
|
||||
Ok(Self { base_url, backend })
|
||||
}
|
||||
|
||||
pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
|
||||
self.backend = self.backend.clone().with_bearer_token(token);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_user_agent(mut self, ua: impl Into<String>) -> Self {
|
||||
self.backend = self.backend.clone().with_user_agent(ua);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_chatgpt_account_id(mut self, account_id: impl Into<String>) -> Self {
|
||||
self.backend = self.backend.clone().with_chatgpt_account_id(account_id);
|
||||
self
|
||||
}
|
||||
|
||||
fn tasks_api(&self) -> api::Tasks<'_> {
|
||||
api::Tasks::new(self)
|
||||
}
|
||||
|
||||
fn attempts_api(&self) -> api::Attempts<'_> {
|
||||
api::Attempts::new(self)
|
||||
}
|
||||
|
||||
fn apply_api(&self) -> api::Apply<'_> {
|
||||
api::Apply::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl CloudBackend for HttpClient {
|
||||
async fn list_tasks(&self, env: Option<&str>) -> Result<Vec<TaskSummary>> {
|
||||
self.tasks_api().list(env).await
|
||||
}
|
||||
|
||||
async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>> {
|
||||
self.tasks_api().diff(id).await
|
||||
}
|
||||
|
||||
async fn get_task_messages(&self, id: TaskId) -> Result<Vec<String>> {
|
||||
self.tasks_api().messages(id).await
|
||||
}
|
||||
|
||||
async fn get_task_text(&self, id: TaskId) -> Result<TaskText> {
|
||||
self.tasks_api().task_text(id).await
|
||||
}
|
||||
|
||||
async fn list_sibling_attempts(
|
||||
&self,
|
||||
task: TaskId,
|
||||
turn_id: String,
|
||||
) -> Result<Vec<TurnAttempt>> {
|
||||
self.attempts_api().list(task, turn_id).await
|
||||
}
|
||||
|
||||
async fn apply_task(&self, id: TaskId, diff_override: Option<String>) -> Result<ApplyOutcome> {
|
||||
self.apply_api().run(id, diff_override, false).await
|
||||
}
|
||||
|
||||
async fn apply_task_preflight(
|
||||
&self,
|
||||
id: TaskId,
|
||||
diff_override: Option<String>,
|
||||
) -> Result<ApplyOutcome> {
|
||||
self.apply_api().run(id, diff_override, true).await
|
||||
}
|
||||
|
||||
async fn create_task(
|
||||
&self,
|
||||
env_id: &str,
|
||||
prompt: &str,
|
||||
git_ref: &str,
|
||||
qa_mode: bool,
|
||||
best_of_n: usize,
|
||||
) -> Result<crate::CreatedTask> {
|
||||
self.tasks_api()
|
||||
.create(env_id, prompt, git_ref, qa_mode, best_of_n)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
mod api {
|
||||
use super::*;
|
||||
use serde_json::Value;
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub(crate) struct Tasks<'a> {
|
||||
base_url: &'a str,
|
||||
backend: &'a backend::Client,
|
||||
}
|
||||
|
||||
impl<'a> Tasks<'a> {
|
||||
pub(crate) fn new(client: &'a HttpClient) -> Self {
|
||||
Self {
|
||||
base_url: &client.base_url,
|
||||
backend: &client.backend,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn list(&self, env: Option<&str>) -> Result<Vec<TaskSummary>> {
|
||||
let resp = self
|
||||
.backend
|
||||
.list_tasks(Some(20), Some("current"), env)
|
||||
.await
|
||||
.map_err(|e| CloudTaskError::Http(format!("list_tasks failed: {e}")))?;
|
||||
|
||||
let tasks: Vec<TaskSummary> = resp
|
||||
.items
|
||||
.into_iter()
|
||||
.map(map_task_list_item_to_summary)
|
||||
.collect();
|
||||
|
||||
append_error_log(&format!(
|
||||
"http.list_tasks: env={} items={}",
|
||||
env.unwrap_or("<all>"),
|
||||
tasks.len()
|
||||
));
|
||||
Ok(tasks)
|
||||
}
|
||||
|
||||
pub(crate) async fn diff(&self, id: TaskId) -> Result<Option<String>> {
|
||||
let (details, body, ct) = self
|
||||
.details_with_body(&id.0)
|
||||
.await
|
||||
.map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
|
||||
if let Some(diff) = details.unified_diff() {
|
||||
return Ok(Some(diff));
|
||||
}
|
||||
let _ = (body, ct);
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
pub(crate) async fn messages(&self, id: TaskId) -> Result<Vec<String>> {
|
||||
let (details, body, ct) = self
|
||||
.details_with_body(&id.0)
|
||||
.await
|
||||
.map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
|
||||
|
||||
let mut msgs = details.assistant_text_messages();
|
||||
if msgs.is_empty() {
|
||||
msgs.extend(extract_assistant_messages_from_body(&body));
|
||||
}
|
||||
if !msgs.is_empty() {
|
||||
return Ok(msgs);
|
||||
}
|
||||
if let Some(err) = details.assistant_error_message() {
|
||||
return Ok(vec![format!("Task failed: {err}")]);
|
||||
}
|
||||
|
||||
let url = match details_path(self.base_url, &id.0) {
|
||||
Some(url) => url,
|
||||
None => format!("{}/api/codex/tasks/{}", self.base_url, id.0),
|
||||
};
|
||||
Err(CloudTaskError::Http(format!(
|
||||
"No assistant text messages in response. GET {url}; content-type={ct}; body={body}"
|
||||
)))
|
||||
}
|
||||
|
||||
pub(crate) async fn task_text(&self, id: TaskId) -> Result<TaskText> {
|
||||
let (details, body, _ct) = self
|
||||
.details_with_body(&id.0)
|
||||
.await
|
||||
.map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
|
||||
let prompt = details.user_text_prompt();
|
||||
let mut messages = details.assistant_text_messages();
|
||||
if messages.is_empty() {
|
||||
messages.extend(extract_assistant_messages_from_body(&body));
|
||||
}
|
||||
let assistant_turn = details.current_assistant_turn.as_ref();
|
||||
let turn_id = assistant_turn.and_then(|turn| turn.id.clone());
|
||||
let sibling_turn_ids = assistant_turn
|
||||
.map(|turn| turn.sibling_turn_ids.clone())
|
||||
.unwrap_or_default();
|
||||
let attempt_placement = assistant_turn.and_then(|turn| turn.attempt_placement);
|
||||
let attempt_status = attempt_status_from_str(
|
||||
assistant_turn.and_then(|turn| turn.turn_status.as_deref()),
|
||||
);
|
||||
Ok(TaskText {
|
||||
prompt,
|
||||
messages,
|
||||
turn_id,
|
||||
sibling_turn_ids,
|
||||
attempt_placement,
|
||||
attempt_status,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) async fn create(
|
||||
&self,
|
||||
env_id: &str,
|
||||
prompt: &str,
|
||||
git_ref: &str,
|
||||
qa_mode: bool,
|
||||
best_of_n: usize,
|
||||
) -> Result<crate::CreatedTask> {
|
||||
let mut input_items: Vec<serde_json::Value> = Vec::new();
|
||||
input_items.push(serde_json::json!({
|
||||
"type": "message",
|
||||
"role": "user",
|
||||
"content": [{ "content_type": "text", "text": prompt }]
|
||||
}));
|
||||
|
||||
if let Ok(diff) = std::env::var("CODEX_STARTING_DIFF")
|
||||
&& !diff.is_empty()
|
||||
{
|
||||
input_items.push(serde_json::json!({
|
||||
"type": "pre_apply_patch",
|
||||
"output_diff": { "diff": diff }
|
||||
}));
|
||||
}
|
||||
|
||||
let mut request_body = serde_json::json!({
|
||||
"new_task": {
|
||||
"environment_id": env_id,
|
||||
"branch": git_ref,
|
||||
"run_environment_in_qa_mode": qa_mode,
|
||||
},
|
||||
"input_items": input_items,
|
||||
});
|
||||
|
||||
if best_of_n > 1
|
||||
&& let Some(obj) = request_body.as_object_mut()
|
||||
{
|
||||
obj.insert(
|
||||
"metadata".to_string(),
|
||||
serde_json::json!({ "best_of_n": best_of_n }),
|
||||
);
|
||||
}
|
||||
|
||||
match self.backend.create_task(request_body).await {
|
||||
Ok(id) => {
|
||||
append_error_log(&format!(
|
||||
"new_task: created id={id} env={} prompt_chars={}",
|
||||
env_id,
|
||||
prompt.chars().count()
|
||||
));
|
||||
Ok(crate::CreatedTask { id: TaskId(id) })
|
||||
}
|
||||
Err(e) => {
|
||||
append_error_log(&format!(
|
||||
"new_task: create failed env={} prompt_chars={}: {}",
|
||||
env_id,
|
||||
prompt.chars().count(),
|
||||
e
|
||||
));
|
||||
Err(CloudTaskError::Http(format!("create_task failed: {e}")))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn details_with_body(
|
||||
&self,
|
||||
id: &str,
|
||||
) -> anyhow::Result<(backend::CodeTaskDetailsResponse, String, String)> {
|
||||
let (parsed, body, ct) = self.backend.get_task_details_with_body(id).await?;
|
||||
Ok((parsed, body, ct))
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Attempts<'a> {
|
||||
backend: &'a backend::Client,
|
||||
}
|
||||
|
||||
impl<'a> Attempts<'a> {
|
||||
pub(crate) fn new(client: &'a HttpClient) -> Self {
|
||||
Self {
|
||||
backend: &client.backend,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn list(&self, task: TaskId, turn_id: String) -> Result<Vec<TurnAttempt>> {
|
||||
let resp = self
|
||||
.backend
|
||||
.list_sibling_turns(&task.0, &turn_id)
|
||||
.await
|
||||
.map_err(|e| CloudTaskError::Http(format!("list_sibling_turns failed: {e}")))?;
|
||||
|
||||
let mut attempts: Vec<TurnAttempt> = resp
|
||||
.sibling_turns
|
||||
.iter()
|
||||
.filter_map(turn_attempt_from_map)
|
||||
.collect();
|
||||
attempts.sort_by(compare_attempts);
|
||||
Ok(attempts)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Apply<'a> {
|
||||
backend: &'a backend::Client,
|
||||
}
|
||||
|
||||
impl<'a> Apply<'a> {
|
||||
pub(crate) fn new(client: &'a HttpClient) -> Self {
|
||||
Self {
|
||||
backend: &client.backend,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn run(
|
||||
&self,
|
||||
task_id: TaskId,
|
||||
diff_override: Option<String>,
|
||||
preflight: bool,
|
||||
) -> Result<ApplyOutcome> {
|
||||
let id = task_id.0.clone();
|
||||
let diff = match diff_override {
|
||||
Some(diff) => diff,
|
||||
None => {
|
||||
let details = self.backend.get_task_details(&id).await.map_err(|e| {
|
||||
CloudTaskError::Http(format!("get_task_details failed: {e}"))
|
||||
})?;
|
||||
details.unified_diff().ok_or_else(|| {
|
||||
CloudTaskError::Msg(format!("No diff available for task {id}"))
|
||||
})?
|
||||
}
|
||||
};
|
||||
|
||||
if !is_unified_diff(&diff) {
|
||||
let summary = summarize_patch_for_logging(&diff);
|
||||
let mode = if preflight { "preflight" } else { "apply" };
|
||||
append_error_log(&format!(
|
||||
"apply_error: id={id} mode={mode} format=non-unified; {summary}"
|
||||
));
|
||||
return Ok(ApplyOutcome {
|
||||
applied: false,
|
||||
status: ApplyStatus::Error,
|
||||
message: "Expected unified git diff; backend returned an incompatible format."
|
||||
.to_string(),
|
||||
skipped_paths: Vec::new(),
|
||||
conflict_paths: Vec::new(),
|
||||
});
|
||||
}
|
||||
|
||||
let req = codex_git_apply::ApplyGitRequest {
|
||||
cwd: std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()),
|
||||
diff: diff.clone(),
|
||||
revert: false,
|
||||
preflight,
|
||||
};
|
||||
let r = codex_git_apply::apply_git_patch(&req)
|
||||
.map_err(|e| CloudTaskError::Io(format!("git apply failed to run: {e}")))?;
|
||||
|
||||
let status = if r.exit_code == 0 {
|
||||
ApplyStatus::Success
|
||||
} else if !r.applied_paths.is_empty() || !r.conflicted_paths.is_empty() {
|
||||
ApplyStatus::Partial
|
||||
} else {
|
||||
ApplyStatus::Error
|
||||
};
|
||||
let applied = matches!(status, ApplyStatus::Success) && !preflight;
|
||||
|
||||
let message = if preflight {
|
||||
match status {
|
||||
ApplyStatus::Success => {
|
||||
format!("Preflight passed for task {id} (applies cleanly)")
|
||||
}
|
||||
ApplyStatus::Partial => format!(
|
||||
"Preflight: patch does not fully apply for task {id} (applied={}, skipped={}, conflicts={})",
|
||||
r.applied_paths.len(),
|
||||
r.skipped_paths.len(),
|
||||
r.conflicted_paths.len()
|
||||
),
|
||||
ApplyStatus::Error => format!(
|
||||
"Preflight failed for task {id} (applied={}, skipped={}, conflicts={})",
|
||||
r.applied_paths.len(),
|
||||
r.skipped_paths.len(),
|
||||
r.conflicted_paths.len()
|
||||
),
|
||||
}
|
||||
} else {
|
||||
match status {
|
||||
ApplyStatus::Success => format!(
|
||||
"Applied task {id} locally ({} files)",
|
||||
r.applied_paths.len()
|
||||
),
|
||||
ApplyStatus::Partial => format!(
|
||||
"Apply partially succeeded for task {id} (applied={}, skipped={}, conflicts={})",
|
||||
r.applied_paths.len(),
|
||||
r.skipped_paths.len(),
|
||||
r.conflicted_paths.len()
|
||||
),
|
||||
ApplyStatus::Error => format!(
|
||||
"Apply failed for task {id} (applied={}, skipped={}, conflicts={})",
|
||||
r.applied_paths.len(),
|
||||
r.skipped_paths.len(),
|
||||
r.conflicted_paths.len()
|
||||
),
|
||||
}
|
||||
};
|
||||
|
||||
if matches!(status, ApplyStatus::Partial | ApplyStatus::Error)
|
||||
|| (preflight && !matches!(status, ApplyStatus::Success))
|
||||
{
|
||||
let mut log = String::new();
|
||||
let summary = summarize_patch_for_logging(&diff);
|
||||
let mode = if preflight { "preflight" } else { "apply" };
|
||||
use std::fmt::Write as _;
|
||||
let _ = writeln!(
|
||||
&mut log,
|
||||
"apply_result: mode={} id={} status={:?} applied={} skipped={} conflicts={} cmd={}",
|
||||
mode,
|
||||
id,
|
||||
status,
|
||||
r.applied_paths.len(),
|
||||
r.skipped_paths.len(),
|
||||
r.conflicted_paths.len(),
|
||||
r.cmd_for_log
|
||||
);
|
||||
let _ = writeln!(
|
||||
&mut log,
|
||||
"stdout_tail=\n{}\nstderr_tail=\n{}",
|
||||
tail(&r.stdout, 2000),
|
||||
tail(&r.stderr, 2000)
|
||||
);
|
||||
let _ = writeln!(&mut log, "{summary}");
|
||||
let _ = writeln!(
|
||||
&mut log,
|
||||
"----- PATCH BEGIN -----\n{diff}\n----- PATCH END -----"
|
||||
);
|
||||
append_error_log(&log);
|
||||
}
|
||||
|
||||
Ok(ApplyOutcome {
|
||||
applied,
|
||||
status,
|
||||
message,
|
||||
skipped_paths: r.skipped_paths,
|
||||
conflict_paths: r.conflicted_paths,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn details_path(base_url: &str, id: &str) -> Option<String> {
|
||||
if base_url.contains("/backend-api") {
|
||||
Some(format!("{base_url}/wham/tasks/{id}"))
|
||||
} else if base_url.contains("/api/codex") {
|
||||
Some(format!("{base_url}/tasks/{id}"))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_assistant_messages_from_body(body: &str) -> Vec<String> {
|
||||
let mut msgs = Vec::new();
|
||||
if let Ok(full) = serde_json::from_str::<serde_json::Value>(body)
|
||||
&& let Some(arr) = full
|
||||
.get("current_assistant_turn")
|
||||
.and_then(|v| v.get("worklog"))
|
||||
.and_then(|v| v.get("messages"))
|
||||
.and_then(|v| v.as_array())
|
||||
{
|
||||
for m in arr {
|
||||
let is_assistant = m
|
||||
.get("author")
|
||||
.and_then(|a| a.get("role"))
|
||||
.and_then(|r| r.as_str())
|
||||
== Some("assistant");
|
||||
if !is_assistant {
|
||||
continue;
|
||||
}
|
||||
if let Some(parts) = m
|
||||
.get("content")
|
||||
.and_then(|c| c.get("parts"))
|
||||
.and_then(|p| p.as_array())
|
||||
{
|
||||
for p in parts {
|
||||
if let Some(s) = p.as_str() {
|
||||
if !s.is_empty() {
|
||||
msgs.push(s.to_string());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if let Some(obj) = p.as_object()
|
||||
&& obj.get("content_type").and_then(|t| t.as_str()) == Some("text")
|
||||
&& let Some(txt) = obj.get("text").and_then(|t| t.as_str())
|
||||
{
|
||||
msgs.push(txt.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
msgs
|
||||
}
|
||||
|
||||
fn turn_attempt_from_map(turn: &HashMap<String, Value>) -> Option<TurnAttempt> {
|
||||
let turn_id = turn.get("id").and_then(Value::as_str)?.to_string();
|
||||
let attempt_placement = turn.get("attempt_placement").and_then(Value::as_i64);
|
||||
let created_at = parse_timestamp_value(turn.get("created_at"));
|
||||
let status = attempt_status_from_str(turn.get("turn_status").and_then(Value::as_str));
|
||||
let diff = extract_diff_from_turn(turn);
|
||||
let messages = extract_assistant_messages_from_turn(turn);
|
||||
Some(TurnAttempt {
|
||||
turn_id,
|
||||
attempt_placement,
|
||||
created_at,
|
||||
status,
|
||||
diff,
|
||||
messages,
|
||||
})
|
||||
}
|
||||
|
||||
fn compare_attempts(a: &TurnAttempt, b: &TurnAttempt) -> Ordering {
|
||||
match (a.attempt_placement, b.attempt_placement) {
|
||||
(Some(lhs), Some(rhs)) => lhs.cmp(&rhs),
|
||||
(Some(_), None) => Ordering::Less,
|
||||
(None, Some(_)) => Ordering::Greater,
|
||||
(None, None) => match (a.created_at, b.created_at) {
|
||||
(Some(lhs), Some(rhs)) => lhs.cmp(&rhs),
|
||||
(Some(_), None) => Ordering::Less,
|
||||
(None, Some(_)) => Ordering::Greater,
|
||||
(None, None) => a.turn_id.cmp(&b.turn_id),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_diff_from_turn(turn: &HashMap<String, Value>) -> Option<String> {
|
||||
let items = turn.get("output_items").and_then(Value::as_array)?;
|
||||
for item in items {
|
||||
match item.get("type").and_then(Value::as_str) {
|
||||
Some("output_diff") => {
|
||||
if let Some(diff) = item.get("diff").and_then(Value::as_str)
|
||||
&& !diff.is_empty()
|
||||
{
|
||||
return Some(diff.to_string());
|
||||
}
|
||||
}
|
||||
Some("pr") => {
|
||||
if let Some(diff) = item
|
||||
.get("output_diff")
|
||||
.and_then(Value::as_object)
|
||||
.and_then(|od| od.get("diff"))
|
||||
.and_then(Value::as_str)
|
||||
&& !diff.is_empty()
|
||||
{
|
||||
return Some(diff.to_string());
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn extract_assistant_messages_from_turn(turn: &HashMap<String, Value>) -> Vec<String> {
|
||||
let mut msgs = Vec::new();
|
||||
if let Some(items) = turn.get("output_items").and_then(Value::as_array) {
|
||||
for item in items {
|
||||
if item.get("type").and_then(Value::as_str) != Some("message") {
|
||||
continue;
|
||||
}
|
||||
if let Some(content) = item.get("content").and_then(Value::as_array) {
|
||||
for part in content {
|
||||
if part.get("content_type").and_then(Value::as_str) == Some("text")
|
||||
&& let Some(txt) = part.get("text").and_then(Value::as_str)
|
||||
&& !txt.is_empty()
|
||||
{
|
||||
msgs.push(txt.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
msgs
|
||||
}
|
||||
|
||||
fn attempt_status_from_str(raw: Option<&str>) -> AttemptStatus {
|
||||
match raw.unwrap_or_default() {
|
||||
"failed" => AttemptStatus::Failed,
|
||||
"completed" => AttemptStatus::Completed,
|
||||
"in_progress" => AttemptStatus::InProgress,
|
||||
"pending" => AttemptStatus::Pending,
|
||||
_ => AttemptStatus::Pending,
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_timestamp_value(v: Option<&Value>) -> Option<DateTime<Utc>> {
|
||||
let ts = v?.as_f64()?;
|
||||
let secs = ts as i64;
|
||||
let nanos = ((ts - secs as f64) * 1_000_000_000.0) as u32;
|
||||
Some(DateTime::<Utc>::from(
|
||||
std::time::UNIX_EPOCH + std::time::Duration::new(secs.max(0) as u64, nanos),
|
||||
))
|
||||
}
|
||||
|
||||
fn map_task_list_item_to_summary(src: backend::TaskListItem) -> TaskSummary {
|
||||
let status_display = src.task_status_display.as_ref();
|
||||
TaskSummary {
|
||||
id: TaskId(src.id),
|
||||
title: src.title,
|
||||
status: map_status(status_display),
|
||||
updated_at: parse_updated_at(src.updated_at.as_ref()),
|
||||
environment_id: None,
|
||||
environment_label: env_label_from_status_display(status_display),
|
||||
summary: diff_summary_from_status_display(status_display),
|
||||
is_review: src
|
||||
.pull_requests
|
||||
.as_ref()
|
||||
.is_some_and(|prs| !prs.is_empty()),
|
||||
attempt_total: attempt_total_from_status_display(status_display),
|
||||
}
|
||||
}
|
||||
|
||||
fn map_status(v: Option<&HashMap<String, Value>>) -> TaskStatus {
|
||||
if let Some(val) = v {
|
||||
if let Some(turn) = val
|
||||
.get("latest_turn_status_display")
|
||||
.and_then(Value::as_object)
|
||||
&& let Some(s) = turn.get("turn_status").and_then(Value::as_str)
|
||||
{
|
||||
return match s {
|
||||
"failed" => TaskStatus::Error,
|
||||
"completed" => TaskStatus::Ready,
|
||||
"in_progress" => TaskStatus::Pending,
|
||||
"pending" => TaskStatus::Pending,
|
||||
"cancelled" => TaskStatus::Error,
|
||||
_ => TaskStatus::Pending,
|
||||
};
|
||||
}
|
||||
if let Some(state) = val.get("state").and_then(Value::as_str) {
|
||||
return match state {
|
||||
"pending" => TaskStatus::Pending,
|
||||
"ready" => TaskStatus::Ready,
|
||||
"applied" => TaskStatus::Applied,
|
||||
"error" => TaskStatus::Error,
|
||||
_ => TaskStatus::Pending,
|
||||
};
|
||||
}
|
||||
}
|
||||
TaskStatus::Pending
|
||||
}
|
||||
|
||||
fn parse_updated_at(ts: Option<&f64>) -> DateTime<Utc> {
|
||||
if let Some(v) = ts {
|
||||
let secs = *v as i64;
|
||||
let nanos = ((*v - secs as f64) * 1_000_000_000.0) as u32;
|
||||
return DateTime::<Utc>::from(
|
||||
std::time::UNIX_EPOCH + std::time::Duration::new(secs.max(0) as u64, nanos),
|
||||
);
|
||||
}
|
||||
Utc::now()
|
||||
}
|
||||
|
||||
fn env_label_from_status_display(v: Option<&HashMap<String, Value>>) -> Option<String> {
|
||||
let map = v?;
|
||||
map.get("environment_label")
|
||||
.and_then(Value::as_str)
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
fn diff_summary_from_status_display(v: Option<&HashMap<String, Value>>) -> DiffSummary {
|
||||
let mut out = DiffSummary::default();
|
||||
let Some(map) = v else { return out };
|
||||
let latest = map
|
||||
.get("latest_turn_status_display")
|
||||
.and_then(Value::as_object);
|
||||
let Some(latest) = latest else { return out };
|
||||
if let Some(ds) = latest.get("diff_stats").and_then(Value::as_object) {
|
||||
if let Some(n) = ds.get("files_modified").and_then(Value::as_i64) {
|
||||
out.files_changed = n.max(0) as usize;
|
||||
}
|
||||
if let Some(n) = ds.get("lines_added").and_then(Value::as_i64) {
|
||||
out.lines_added = n.max(0) as usize;
|
||||
}
|
||||
if let Some(n) = ds.get("lines_removed").and_then(Value::as_i64) {
|
||||
out.lines_removed = n.max(0) as usize;
|
||||
}
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
fn attempt_total_from_status_display(v: Option<&HashMap<String, Value>>) -> Option<usize> {
|
||||
let map = v?;
|
||||
let latest = map
|
||||
.get("latest_turn_status_display")
|
||||
.and_then(Value::as_object)?;
|
||||
let siblings = latest.get("sibling_turn_ids").and_then(Value::as_array)?;
|
||||
Some(siblings.len().saturating_add(1))
|
||||
}
|
||||
|
||||
fn is_unified_diff(diff: &str) -> bool {
|
||||
let t = diff.trim_start();
|
||||
if t.starts_with("diff --git ") {
|
||||
return true;
|
||||
}
|
||||
let has_dash_headers = diff.contains("\n--- ") && diff.contains("\n+++ ");
|
||||
let has_hunk = diff.contains("\n@@ ") || diff.starts_with("@@ ");
|
||||
has_dash_headers && has_hunk
|
||||
}
|
||||
|
||||
fn tail(s: &str, max: usize) -> String {
|
||||
if s.len() <= max {
|
||||
s.to_string()
|
||||
} else {
|
||||
s[s.len() - max..].to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn summarize_patch_for_logging(patch: &str) -> String {
|
||||
let trimmed = patch.trim_start();
|
||||
let kind = if trimmed.starts_with("*** Begin Patch") {
|
||||
"codex-patch"
|
||||
} else if trimmed.starts_with("diff --git ") || trimmed.contains("\n*** End Patch\n") {
|
||||
"git-diff"
|
||||
} else if trimmed.starts_with("@@ ") || trimmed.contains("\n@@ ") {
|
||||
"unified-diff"
|
||||
} else {
|
||||
"unknown"
|
||||
};
|
||||
let lines = patch.lines().count();
|
||||
let chars = patch.len();
|
||||
let cwd = std::env::current_dir()
|
||||
.ok()
|
||||
.map(|p| p.display().to_string())
|
||||
.unwrap_or_else(|| "<unknown>".to_string());
|
||||
let head: String = patch.lines().take(20).collect::<Vec<&str>>().join("\n");
|
||||
let head_trunc = if head.len() > 800 {
|
||||
format!("{}…", &head[..800])
|
||||
} else {
|
||||
head
|
||||
};
|
||||
format!(
|
||||
"patch_summary: kind={kind} lines={lines} chars={chars} cwd={cwd} ; head=\n{head_trunc}"
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn append_error_log(message: &str) {
|
||||
let ts = Utc::now().to_rfc3339();
|
||||
if let Ok(mut f) = std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open("error.log")
|
||||
{
|
||||
use std::io::Write as _;
|
||||
let _ = writeln!(f, "[{ts}] {message}");
|
||||
}
|
||||
}
|
||||
29
codex-rs/cloud-tasks-client/src/lib.rs
Normal file
29
codex-rs/cloud-tasks-client/src/lib.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
mod api;
|
||||
|
||||
pub use api::ApplyOutcome;
|
||||
pub use api::ApplyStatus;
|
||||
pub use api::AttemptStatus;
|
||||
pub use api::CloudBackend;
|
||||
pub use api::CloudTaskError;
|
||||
pub use api::CreatedTask;
|
||||
pub use api::DiffSummary;
|
||||
pub use api::Result;
|
||||
pub use api::TaskId;
|
||||
pub use api::TaskStatus;
|
||||
pub use api::TaskSummary;
|
||||
pub use api::TaskText;
|
||||
pub use api::TurnAttempt;
|
||||
|
||||
#[cfg(feature = "mock")]
|
||||
mod mock;
|
||||
|
||||
#[cfg(feature = "online")]
|
||||
mod http;
|
||||
|
||||
#[cfg(feature = "mock")]
|
||||
pub use mock::MockClient;
|
||||
|
||||
#[cfg(feature = "online")]
|
||||
pub use http::HttpClient;
|
||||
|
||||
// Reusable apply engine now lives in the shared crate `codex-git-apply`.
|
||||
180
codex-rs/cloud-tasks-client/src/mock.rs
Normal file
180
codex-rs/cloud-tasks-client/src/mock.rs
Normal file
@@ -0,0 +1,180 @@
|
||||
use crate::ApplyOutcome;
|
||||
use crate::AttemptStatus;
|
||||
use crate::CloudBackend;
|
||||
use crate::DiffSummary;
|
||||
use crate::Result;
|
||||
use crate::TaskId;
|
||||
use crate::TaskStatus;
|
||||
use crate::TaskSummary;
|
||||
use crate::TurnAttempt;
|
||||
use crate::api::TaskText;
|
||||
use chrono::Utc;
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct MockClient;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl CloudBackend for MockClient {
|
||||
async fn list_tasks(&self, _env: Option<&str>) -> Result<Vec<TaskSummary>> {
|
||||
// Slightly vary content by env to aid tests that rely on the mock
|
||||
let rows = match _env {
|
||||
Some("env-A") => vec![("T-2000", "A: First", TaskStatus::Ready)],
|
||||
Some("env-B") => vec![
|
||||
("T-3000", "B: One", TaskStatus::Ready),
|
||||
("T-3001", "B: Two", TaskStatus::Pending),
|
||||
],
|
||||
_ => vec![
|
||||
("T-1000", "Update README formatting", TaskStatus::Ready),
|
||||
("T-1001", "Fix clippy warnings in core", TaskStatus::Pending),
|
||||
("T-1002", "Add contributing guide", TaskStatus::Ready),
|
||||
],
|
||||
};
|
||||
let environment_id = _env.map(str::to_string);
|
||||
let environment_label = match _env {
|
||||
Some("env-A") => Some("Env A".to_string()),
|
||||
Some("env-B") => Some("Env B".to_string()),
|
||||
Some(other) => Some(other.to_string()),
|
||||
None => Some("Global".to_string()),
|
||||
};
|
||||
let mut out = Vec::new();
|
||||
for (id_str, title, status) in rows {
|
||||
let id = TaskId(id_str.to_string());
|
||||
let diff = mock_diff_for(&id);
|
||||
let (a, d) = count_from_unified(&diff);
|
||||
out.push(TaskSummary {
|
||||
id,
|
||||
title: title.to_string(),
|
||||
status,
|
||||
updated_at: Utc::now(),
|
||||
environment_id: environment_id.clone(),
|
||||
environment_label: environment_label.clone(),
|
||||
summary: DiffSummary {
|
||||
files_changed: 1,
|
||||
lines_added: a,
|
||||
lines_removed: d,
|
||||
},
|
||||
is_review: false,
|
||||
attempt_total: Some(if id_str == "T-1000" { 2 } else { 1 }),
|
||||
});
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>> {
|
||||
Ok(Some(mock_diff_for(&id)))
|
||||
}
|
||||
|
||||
async fn get_task_messages(&self, _id: TaskId) -> Result<Vec<String>> {
|
||||
Ok(vec![
|
||||
"Mock assistant output: this task contains no diff.".to_string(),
|
||||
])
|
||||
}
|
||||
|
||||
async fn get_task_text(&self, _id: TaskId) -> Result<TaskText> {
|
||||
Ok(TaskText {
|
||||
prompt: Some("Why is there no diff?".to_string()),
|
||||
messages: vec!["Mock assistant output: this task contains no diff.".to_string()],
|
||||
turn_id: Some("mock-turn".to_string()),
|
||||
sibling_turn_ids: Vec::new(),
|
||||
attempt_placement: Some(0),
|
||||
attempt_status: AttemptStatus::Completed,
|
||||
})
|
||||
}
|
||||
|
||||
async fn apply_task(&self, id: TaskId, _diff_override: Option<String>) -> Result<ApplyOutcome> {
|
||||
Ok(ApplyOutcome {
|
||||
applied: true,
|
||||
status: crate::ApplyStatus::Success,
|
||||
message: format!("Applied task {} locally (mock)", id.0),
|
||||
skipped_paths: Vec::new(),
|
||||
conflict_paths: Vec::new(),
|
||||
})
|
||||
}
|
||||
|
||||
async fn apply_task_preflight(
|
||||
&self,
|
||||
id: TaskId,
|
||||
_diff_override: Option<String>,
|
||||
) -> Result<ApplyOutcome> {
|
||||
Ok(ApplyOutcome {
|
||||
applied: false,
|
||||
status: crate::ApplyStatus::Success,
|
||||
message: format!("Preflight passed for task {} (mock)", id.0),
|
||||
skipped_paths: Vec::new(),
|
||||
conflict_paths: Vec::new(),
|
||||
})
|
||||
}
|
||||
|
||||
async fn list_sibling_attempts(
|
||||
&self,
|
||||
task: TaskId,
|
||||
_turn_id: String,
|
||||
) -> Result<Vec<TurnAttempt>> {
|
||||
if task.0 == "T-1000" {
|
||||
return Ok(vec![TurnAttempt {
|
||||
turn_id: "T-1000-attempt-2".to_string(),
|
||||
attempt_placement: Some(1),
|
||||
created_at: Some(Utc::now()),
|
||||
status: AttemptStatus::Completed,
|
||||
diff: Some(mock_diff_for(&task)),
|
||||
messages: vec!["Mock alternate attempt".to_string()],
|
||||
}]);
|
||||
}
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn create_task(
|
||||
&self,
|
||||
env_id: &str,
|
||||
prompt: &str,
|
||||
git_ref: &str,
|
||||
qa_mode: bool,
|
||||
best_of_n: usize,
|
||||
) -> Result<crate::CreatedTask> {
|
||||
let _ = (env_id, prompt, git_ref, qa_mode, best_of_n);
|
||||
let id = format!("task_local_{}", chrono::Utc::now().timestamp_millis());
|
||||
Ok(crate::CreatedTask { id: TaskId(id) })
|
||||
}
|
||||
}
|
||||
|
||||
fn mock_diff_for(id: &TaskId) -> String {
|
||||
match id.0.as_str() {
|
||||
"T-1000" => {
|
||||
"diff --git a/README.md b/README.md\nindex 000000..111111 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,2 +1,3 @@\n Intro\n-Hello\n+Hello, world!\n+Task: T-1000\n".to_string()
|
||||
}
|
||||
"T-1001" => {
|
||||
"diff --git a/core/src/lib.rs b/core/src/lib.rs\nindex 000000..111111 100644\n--- a/core/src/lib.rs\n+++ b/core/src/lib.rs\n@@ -1,2 +1,1 @@\n-use foo;\n use bar;\n".to_string()
|
||||
}
|
||||
_ => {
|
||||
"diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md\nindex 000000..111111 100644\n--- /dev/null\n+++ b/CONTRIBUTING.md\n@@ -0,0 +1,3 @@\n+## Contributing\n+Please open PRs.\n+Thanks!\n".to_string()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn count_from_unified(diff: &str) -> (usize, usize) {
|
||||
if let Ok(patch) = diffy::Patch::from_str(diff) {
|
||||
patch
|
||||
.hunks()
|
||||
.iter()
|
||||
.flat_map(diffy::Hunk::lines)
|
||||
.fold((0, 0), |(a, d), l| match l {
|
||||
diffy::Line::Insert(_) => (a + 1, d),
|
||||
diffy::Line::Delete(_) => (a, d + 1),
|
||||
_ => (a, d),
|
||||
})
|
||||
} else {
|
||||
let mut a = 0;
|
||||
let mut d = 0;
|
||||
for l in diff.lines() {
|
||||
if l.starts_with("+++") || l.starts_with("---") || l.starts_with("@@") {
|
||||
continue;
|
||||
}
|
||||
match l.as_bytes().first() {
|
||||
Some(b'+') => a += 1,
|
||||
Some(b'-') => d += 1,
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
(a, d)
|
||||
}
|
||||
}
|
||||
38
codex-rs/cloud-tasks/Cargo.toml
Normal file
38
codex-rs/cloud-tasks/Cargo.toml
Normal file
@@ -0,0 +1,38 @@
|
||||
[package]
|
||||
edition = "2024"
|
||||
name = "codex-cloud-tasks"
|
||||
version = { workspace = true }
|
||||
|
||||
[lib]
|
||||
name = "codex_cloud_tasks"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
codex-cloud-tasks-client = { path = "../cloud-tasks-client", features = [
|
||||
"mock",
|
||||
"online",
|
||||
] }
|
||||
codex-common = { path = "../common", features = ["cli"] }
|
||||
codex-core = { path = "../core" }
|
||||
codex-login = { path = "../login" }
|
||||
codex-tui = { path = "../tui" }
|
||||
crossterm = { workspace = true, features = ["event-stream"] }
|
||||
ratatui = { workspace = true }
|
||||
reqwest = { workspace = true, features = ["json"] }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = ["macros", "rt-multi-thread"] }
|
||||
tokio-stream = { workspace = true }
|
||||
tracing = { workspace = true, features = ["log"] }
|
||||
tracing-subscriber = { workspace = true, features = ["env-filter"] }
|
||||
unicode-width = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
async-trait = { workspace = true }
|
||||
481
codex-rs/cloud-tasks/src/app.rs
Normal file
481
codex-rs/cloud-tasks/src/app.rs
Normal file
@@ -0,0 +1,481 @@
|
||||
use std::time::Duration;
|
||||
use std::time::Instant;
|
||||
|
||||
// Environment filter data models for the TUI
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct EnvironmentRow {
|
||||
pub id: String,
|
||||
pub label: Option<String>,
|
||||
pub is_pinned: bool,
|
||||
pub repo_hints: Option<String>, // e.g., "openai/codex"
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct EnvModalState {
|
||||
pub query: String,
|
||||
pub selected: usize,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct BestOfModalState {
|
||||
pub selected: usize,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Copy, PartialEq, Eq)]
|
||||
pub enum ApplyResultLevel {
|
||||
Success,
|
||||
Partial,
|
||||
Error,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ApplyModalState {
|
||||
pub task_id: TaskId,
|
||||
pub title: String,
|
||||
pub result_message: Option<String>,
|
||||
pub result_level: Option<ApplyResultLevel>,
|
||||
pub skipped_paths: Vec<String>,
|
||||
pub conflict_paths: Vec<String>,
|
||||
pub diff_override: Option<String>,
|
||||
}
|
||||
|
||||
use crate::scrollable_diff::ScrollableDiff;
|
||||
use codex_cloud_tasks_client::CloudBackend;
|
||||
use codex_cloud_tasks_client::TaskId;
|
||||
use codex_cloud_tasks_client::TaskSummary;
|
||||
#[derive(Default)]
|
||||
pub struct App {
|
||||
pub tasks: Vec<TaskSummary>,
|
||||
pub selected: usize,
|
||||
pub status: String,
|
||||
pub diff_overlay: Option<DiffOverlay>,
|
||||
pub spinner_start: Option<Instant>,
|
||||
pub refresh_inflight: bool,
|
||||
pub details_inflight: bool,
|
||||
// Environment filter state
|
||||
pub env_filter: Option<String>,
|
||||
pub env_modal: Option<EnvModalState>,
|
||||
pub apply_modal: Option<ApplyModalState>,
|
||||
pub best_of_modal: Option<BestOfModalState>,
|
||||
pub environments: Vec<EnvironmentRow>,
|
||||
pub env_last_loaded: Option<std::time::Instant>,
|
||||
pub env_loading: bool,
|
||||
pub env_error: Option<String>,
|
||||
// New Task page
|
||||
pub new_task: Option<crate::new_task::NewTaskPage>,
|
||||
pub best_of_n: usize,
|
||||
// Apply preflight spinner state
|
||||
pub apply_preflight_inflight: bool,
|
||||
// Apply action spinner state
|
||||
pub apply_inflight: bool,
|
||||
// Background enrichment coordination
|
||||
pub list_generation: u64,
|
||||
pub in_flight: std::collections::HashSet<String>,
|
||||
// Background enrichment caches were planned; currently unused.
|
||||
}
|
||||
|
||||
impl App {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
tasks: Vec::new(),
|
||||
selected: 0,
|
||||
status: "Press r to refresh".to_string(),
|
||||
diff_overlay: None,
|
||||
spinner_start: None,
|
||||
refresh_inflight: false,
|
||||
details_inflight: false,
|
||||
env_filter: None,
|
||||
env_modal: None,
|
||||
apply_modal: None,
|
||||
best_of_modal: None,
|
||||
environments: Vec::new(),
|
||||
env_last_loaded: None,
|
||||
env_loading: false,
|
||||
env_error: None,
|
||||
new_task: None,
|
||||
best_of_n: 1,
|
||||
apply_preflight_inflight: false,
|
||||
apply_inflight: false,
|
||||
list_generation: 0,
|
||||
in_flight: std::collections::HashSet::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn next(&mut self) {
|
||||
if self.tasks.is_empty() {
|
||||
return;
|
||||
}
|
||||
self.selected = (self.selected + 1).min(self.tasks.len().saturating_sub(1));
|
||||
}
|
||||
|
||||
pub fn prev(&mut self) {
|
||||
if self.tasks.is_empty() {
|
||||
return;
|
||||
}
|
||||
if self.selected > 0 {
|
||||
self.selected -= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn load_tasks(
|
||||
backend: &dyn CloudBackend,
|
||||
env: Option<&str>,
|
||||
) -> anyhow::Result<Vec<TaskSummary>> {
|
||||
// In later milestones, add a small debounce, spinner, and error display.
|
||||
let tasks = tokio::time::timeout(Duration::from_secs(5), backend.list_tasks(env)).await??;
|
||||
// Hide review-only tasks from the main list.
|
||||
let filtered: Vec<TaskSummary> = tasks.into_iter().filter(|t| !t.is_review).collect();
|
||||
Ok(filtered)
|
||||
}
|
||||
|
||||
pub struct DiffOverlay {
|
||||
pub title: String,
|
||||
pub task_id: TaskId,
|
||||
pub sd: ScrollableDiff,
|
||||
pub base_can_apply: bool,
|
||||
pub diff_lines: Vec<String>,
|
||||
pub text_lines: Vec<String>,
|
||||
pub prompt: Option<String>,
|
||||
pub attempts: Vec<AttemptView>,
|
||||
pub selected_attempt: usize,
|
||||
pub current_view: DetailView,
|
||||
pub base_turn_id: Option<String>,
|
||||
pub sibling_turn_ids: Vec<String>,
|
||||
pub attempt_total_hint: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct AttemptView {
|
||||
pub turn_id: Option<String>,
|
||||
pub status: codex_cloud_tasks_client::AttemptStatus,
|
||||
pub attempt_placement: Option<i64>,
|
||||
pub diff_lines: Vec<String>,
|
||||
pub text_lines: Vec<String>,
|
||||
pub prompt: Option<String>,
|
||||
pub diff_raw: Option<String>,
|
||||
}
|
||||
|
||||
impl AttemptView {
|
||||
pub fn has_diff(&self) -> bool {
|
||||
!self.diff_lines.is_empty()
|
||||
}
|
||||
|
||||
pub fn has_text(&self) -> bool {
|
||||
!self.text_lines.is_empty() || self.prompt.is_some()
|
||||
}
|
||||
}
|
||||
|
||||
impl DiffOverlay {
|
||||
pub fn new(task_id: TaskId, title: String, attempt_total_hint: Option<usize>) -> Self {
|
||||
let mut sd = ScrollableDiff::new();
|
||||
sd.set_content(Vec::new());
|
||||
Self {
|
||||
title,
|
||||
task_id,
|
||||
sd,
|
||||
base_can_apply: false,
|
||||
diff_lines: Vec::new(),
|
||||
text_lines: Vec::new(),
|
||||
prompt: None,
|
||||
attempts: vec![AttemptView::default()],
|
||||
selected_attempt: 0,
|
||||
current_view: DetailView::Prompt,
|
||||
base_turn_id: None,
|
||||
sibling_turn_ids: Vec::new(),
|
||||
attempt_total_hint,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn current_attempt(&self) -> Option<&AttemptView> {
|
||||
self.attempts.get(self.selected_attempt)
|
||||
}
|
||||
|
||||
pub fn base_attempt_mut(&mut self) -> &mut AttemptView {
|
||||
if self.attempts.is_empty() {
|
||||
self.attempts.push(AttemptView::default());
|
||||
}
|
||||
&mut self.attempts[0]
|
||||
}
|
||||
|
||||
pub fn set_view(&mut self, view: DetailView) {
|
||||
self.current_view = view;
|
||||
self.apply_selection_to_fields();
|
||||
}
|
||||
|
||||
pub fn expected_attempts(&self) -> Option<usize> {
|
||||
self.attempt_total_hint.or({
|
||||
if self.attempts.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(self.attempts.len())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn attempt_count(&self) -> usize {
|
||||
self.attempts.len()
|
||||
}
|
||||
|
||||
pub fn attempt_display_total(&self) -> usize {
|
||||
self.expected_attempts()
|
||||
.unwrap_or_else(|| self.attempts.len().max(1))
|
||||
}
|
||||
|
||||
pub fn step_attempt(&mut self, delta: isize) -> bool {
|
||||
let total = self.attempts.len();
|
||||
if total <= 1 {
|
||||
return false;
|
||||
}
|
||||
let total_isize = total as isize;
|
||||
let current = self.selected_attempt as isize;
|
||||
let mut next = current + delta;
|
||||
next = ((next % total_isize) + total_isize) % total_isize;
|
||||
let next = next as usize;
|
||||
self.selected_attempt = next;
|
||||
self.apply_selection_to_fields();
|
||||
true
|
||||
}
|
||||
|
||||
pub fn current_can_apply(&self) -> bool {
|
||||
matches!(self.current_view, DetailView::Diff)
|
||||
&& self
|
||||
.current_attempt()
|
||||
.and_then(|attempt| attempt.diff_raw.as_ref())
|
||||
.map(|diff| !diff.is_empty())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn apply_selection_to_fields(&mut self) {
|
||||
let (diff_lines, text_lines, prompt) = if let Some(attempt) = self.current_attempt() {
|
||||
(
|
||||
attempt.diff_lines.clone(),
|
||||
attempt.text_lines.clone(),
|
||||
attempt.prompt.clone(),
|
||||
)
|
||||
} else {
|
||||
self.diff_lines.clear();
|
||||
self.text_lines.clear();
|
||||
self.prompt = None;
|
||||
self.sd.set_content(vec!["<loading attempt>".to_string()]);
|
||||
return;
|
||||
};
|
||||
|
||||
self.diff_lines = diff_lines.clone();
|
||||
self.text_lines = text_lines.clone();
|
||||
self.prompt = prompt;
|
||||
|
||||
match self.current_view {
|
||||
DetailView::Diff => {
|
||||
if diff_lines.is_empty() {
|
||||
self.sd.set_content(vec!["<no diff available>".to_string()]);
|
||||
} else {
|
||||
self.sd.set_content(diff_lines);
|
||||
}
|
||||
}
|
||||
DetailView::Prompt => {
|
||||
if text_lines.is_empty() {
|
||||
self.sd.set_content(vec!["<no output>".to_string()]);
|
||||
} else {
|
||||
self.sd.set_content(text_lines);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum DetailView {
|
||||
Diff,
|
||||
Prompt,
|
||||
}
|
||||
|
||||
/// Internal app events delivered from background tasks.
|
||||
/// These let the UI event loop remain responsive and keep the spinner animating.
|
||||
#[derive(Debug)]
|
||||
pub enum AppEvent {
|
||||
TasksLoaded {
|
||||
env: Option<String>,
|
||||
result: anyhow::Result<Vec<TaskSummary>>,
|
||||
},
|
||||
// Background diff summary events were planned; removed for now to keep code minimal.
|
||||
/// Autodetection of a likely environment id finished
|
||||
EnvironmentAutodetected(anyhow::Result<crate::env_detect::AutodetectSelection>),
|
||||
/// Background completion of environment list fetch
|
||||
EnvironmentsLoaded(anyhow::Result<Vec<EnvironmentRow>>),
|
||||
DetailsDiffLoaded {
|
||||
id: TaskId,
|
||||
title: String,
|
||||
diff: String,
|
||||
},
|
||||
DetailsMessagesLoaded {
|
||||
id: TaskId,
|
||||
title: String,
|
||||
messages: Vec<String>,
|
||||
prompt: Option<String>,
|
||||
turn_id: Option<String>,
|
||||
sibling_turn_ids: Vec<String>,
|
||||
attempt_placement: Option<i64>,
|
||||
attempt_status: codex_cloud_tasks_client::AttemptStatus,
|
||||
},
|
||||
DetailsFailed {
|
||||
id: TaskId,
|
||||
title: String,
|
||||
error: String,
|
||||
},
|
||||
AttemptsLoaded {
|
||||
id: TaskId,
|
||||
attempts: Vec<codex_cloud_tasks_client::TurnAttempt>,
|
||||
},
|
||||
/// Background completion of new task submission
|
||||
NewTaskSubmitted(Result<codex_cloud_tasks_client::CreatedTask, String>),
|
||||
/// Background completion of apply preflight when opening modal or on demand
|
||||
ApplyPreflightFinished {
|
||||
id: TaskId,
|
||||
title: String,
|
||||
message: String,
|
||||
level: ApplyResultLevel,
|
||||
skipped: Vec<String>,
|
||||
conflicts: Vec<String>,
|
||||
},
|
||||
/// Background completion of apply action (actual patch application)
|
||||
ApplyFinished {
|
||||
id: TaskId,
|
||||
result: std::result::Result<codex_cloud_tasks_client::ApplyOutcome, String>,
|
||||
},
|
||||
}
|
||||
|
||||
// Convenience aliases; currently unused.
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use chrono::Utc;
|
||||
|
||||
struct FakeBackend {
|
||||
// maps env key to titles
|
||||
by_env: std::collections::HashMap<Option<String>, Vec<&'static str>>,
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl codex_cloud_tasks_client::CloudBackend for FakeBackend {
|
||||
async fn list_tasks(
|
||||
&self,
|
||||
env: Option<&str>,
|
||||
) -> codex_cloud_tasks_client::Result<Vec<TaskSummary>> {
|
||||
let key = env.map(str::to_string);
|
||||
let titles = self
|
||||
.by_env
|
||||
.get(&key)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| vec!["default-a", "default-b"]);
|
||||
let mut out = Vec::new();
|
||||
for (i, t) in titles.into_iter().enumerate() {
|
||||
out.push(TaskSummary {
|
||||
id: TaskId(format!("T-{i}")),
|
||||
title: t.to_string(),
|
||||
status: codex_cloud_tasks_client::TaskStatus::Ready,
|
||||
updated_at: Utc::now(),
|
||||
environment_id: env.map(str::to_string),
|
||||
environment_label: None,
|
||||
summary: codex_cloud_tasks_client::DiffSummary::default(),
|
||||
is_review: false,
|
||||
attempt_total: Some(1),
|
||||
});
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
async fn get_task_diff(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
) -> codex_cloud_tasks_client::Result<Option<String>> {
|
||||
Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
|
||||
"not used in test",
|
||||
))
|
||||
}
|
||||
|
||||
async fn get_task_messages(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
) -> codex_cloud_tasks_client::Result<Vec<String>> {
|
||||
Ok(vec![])
|
||||
}
|
||||
async fn get_task_text(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::TaskText> {
|
||||
Ok(codex_cloud_tasks_client::TaskText {
|
||||
prompt: Some("Example prompt".to_string()),
|
||||
messages: Vec::new(),
|
||||
turn_id: Some("fake-turn".to_string()),
|
||||
sibling_turn_ids: Vec::new(),
|
||||
attempt_placement: Some(0),
|
||||
attempt_status: codex_cloud_tasks_client::AttemptStatus::Completed,
|
||||
})
|
||||
}
|
||||
|
||||
async fn list_sibling_attempts(
|
||||
&self,
|
||||
_task: TaskId,
|
||||
_turn_id: String,
|
||||
) -> codex_cloud_tasks_client::Result<Vec<codex_cloud_tasks_client::TurnAttempt>> {
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn apply_task(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
_diff_override: Option<String>,
|
||||
) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::ApplyOutcome> {
|
||||
Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
|
||||
"not used in test",
|
||||
))
|
||||
}
|
||||
|
||||
async fn apply_task_preflight(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
_diff_override: Option<String>,
|
||||
) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::ApplyOutcome> {
|
||||
Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
|
||||
"not used in test",
|
||||
))
|
||||
}
|
||||
|
||||
async fn create_task(
|
||||
&self,
|
||||
_env_id: &str,
|
||||
_prompt: &str,
|
||||
_git_ref: &str,
|
||||
_qa_mode: bool,
|
||||
_best_of_n: usize,
|
||||
) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::CreatedTask> {
|
||||
Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
|
||||
"not used in test",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn load_tasks_uses_env_parameter() {
|
||||
// Arrange: env-specific task titles
|
||||
let mut by_env = std::collections::HashMap::new();
|
||||
by_env.insert(None, vec!["root-1", "root-2"]);
|
||||
by_env.insert(Some("env-A".to_string()), vec!["A-1"]);
|
||||
by_env.insert(Some("env-B".to_string()), vec!["B-1", "B-2", "B-3"]);
|
||||
let backend = FakeBackend { by_env };
|
||||
|
||||
// Act + Assert
|
||||
let root = load_tasks(&backend, None).await.unwrap();
|
||||
assert_eq!(root.len(), 2);
|
||||
assert_eq!(root[0].title, "root-1");
|
||||
|
||||
let a = load_tasks(&backend, Some("env-A")).await.unwrap();
|
||||
assert_eq!(a.len(), 1);
|
||||
assert_eq!(a[0].title, "A-1");
|
||||
|
||||
let b = load_tasks(&backend, Some("env-B")).await.unwrap();
|
||||
assert_eq!(b.len(), 3);
|
||||
assert_eq!(b[2].title, "B-3");
|
||||
}
|
||||
}
|
||||
49
codex-rs/cloud-tasks/src/cli.rs
Normal file
49
codex-rs/cloud-tasks/src/cli.rs
Normal file
@@ -0,0 +1,49 @@
|
||||
use clap::Args;
|
||||
use clap::Parser;
|
||||
use codex_common::CliConfigOverrides;
|
||||
|
||||
#[derive(Parser, Debug, Default)]
|
||||
#[command(version)]
|
||||
pub struct Cli {
|
||||
#[clap(skip)]
|
||||
pub config_overrides: CliConfigOverrides,
|
||||
|
||||
#[command(subcommand)]
|
||||
pub command: Option<Command>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
pub enum Command {
|
||||
/// Submit a new Codex Cloud task without launching the TUI.
|
||||
Exec(ExecCommand),
|
||||
}
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
pub struct ExecCommand {
|
||||
/// Task prompt to run in Codex Cloud.
|
||||
#[arg(value_name = "QUERY")]
|
||||
pub query: Option<String>,
|
||||
|
||||
/// Target environment identifier (see `codex cloud` to browse).
|
||||
#[arg(long = "env", value_name = "ENV_ID")]
|
||||
pub environment: String,
|
||||
|
||||
/// Number of assistant attempts (best-of-N).
|
||||
#[arg(
|
||||
long = "attempts",
|
||||
default_value_t = 1usize,
|
||||
value_parser = parse_attempts
|
||||
)]
|
||||
pub attempts: usize,
|
||||
}
|
||||
|
||||
fn parse_attempts(input: &str) -> Result<usize, String> {
|
||||
let value: usize = input
|
||||
.parse()
|
||||
.map_err(|_| "attempts must be an integer between 1 and 4".to_string())?;
|
||||
if (1..=4).contains(&value) {
|
||||
Ok(value)
|
||||
} else {
|
||||
Err("attempts must be between 1 and 4".to_string())
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user