Mirror of https://github.com/openai/codex.git, synced 2026-02-02 15:03:38 +00:00

Compare commits: pakrym/pyt ... tab-queue- (1525 commits)
Commit list: 1525 commits, identified by SHA1 only (first listed 35213c318a, last listed a534356fe1); the Author, Date, and message columns are empty in this capture.

.bazelignore (new file, 3 lines)
@@ -0,0 +1,3 @@
# Without this, Bazel will consider BUILD.bazel files in
# .git/sl/origbackups (which can be populated by Sapling SCM).
.git

.bazelrc (new file, 45 lines)
@@ -0,0 +1,45 @@
common --repo_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1

common --disk_cache=~/.cache/bazel-disk-cache
common --repo_contents_cache=~/.cache/bazel-repo-contents-cache
common --repository_cache=~/.cache/bazel-repo-cache

common --experimental_platform_in_output_dir

common --enable_platform_specific_config
# TODO(zbarsky): We need to untangle these libc constraints to get linux remote builds working.
common:linux --host_platform=//:local
common --@rules_cc//cc/toolchains/args/archiver_flags:use_libtool_on_macos=False
common --@toolchains_llvm_bootstrapped//config:experimental_stub_libgcc_s

# We need to use the sh toolchain on windows so we don't send host bash paths to the linux executor.
common:windows --@rules_rust//rust/settings:experimental_use_sh_toolchain_for_bootstrap_process_wrapper

# TODO(zbarsky): rules_rust doesn't implement this flag properly with remote exec...
# common --@rules_rust//rust/settings:pipelined_compilation

common --incompatible_strict_action_env
# Not ideal, but We need to allow dotslash to be found
common --test_env=PATH=/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin

common --test_output=errors
common --bes_results_url=https://app.buildbuddy.io/invocation/
common --bes_backend=grpcs://remote.buildbuddy.io
common --remote_cache=grpcs://remote.buildbuddy.io
common --remote_download_toplevel
common --nobuild_runfile_links
common --remote_timeout=3600
common --noexperimental_throttle_remote_action_building
common --experimental_remote_execution_keepalive
common --grpc_keepalive_time=30s

# This limits both in-flight executions and concurrent downloads. Even with high number
# of jobs execution will still be limited by CPU cores, so this just pays a bit of
# memory in exchange for higher download concurrency.
common --jobs=30

common:remote --extra_execution_platforms=//:rbe
common:remote --remote_executor=grpcs://remote.buildbuddy.io
common:remote --jobs=800
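
For orientation, this is roughly how those settings are exercised; the target patterns below are placeholders, not something this diff defines:

    # Local build/test: picks up the unconditional `common` lines above
    # (BuildBuddy caching, --jobs=30, strict action env).
    bazel test //...

    # Remote execution: --config=remote additionally applies the `common:remote`
    # lines (BuildBuddy remote executor, --jobs=800).
    bazel build --config=remote //...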

@@ -1 +1,3 @@
iTerm
iTerm2
psuedo

@@ -3,4 +3,4 @@
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
check-hidden = true
ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
ignore-words-list = ratatui,ser
ignore-words-list = ratatui,ser,iTerm,iterm2,iterm

.github/ISSUE_TEMPLATE/2-bug-report.yml (vendored, 29 changed lines)
@@ -20,6 +20,14 @@ body:
attributes:
label: What version of Codex is running?
description: Copy the output of `codex --version`
validations:
required: true
- type: input
id: plan
attributes:
label: What subscription do you have?
validations:
required: true
- type: input
id: model
attributes:
@@ -32,11 +40,25 @@ body:
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: input
id: terminal
attributes:
label: What terminal emulator and version are you using (if applicable)?
description: Also note any multiplexer in use (screen / tmux / zellij)
description: |
E.g, VSCode, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
- type: textarea
id: actual
attributes:
label: What issue are you seeing?
description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
id: steps
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
description: Explain the bug and provide a code snippet that can reproduce it. Please include session id, token limit usage, context window usage if applicable.
validations:
required: true
- type: textarea
@@ -44,11 +66,6 @@
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
id: actual
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
id: notes
attributes:

.github/ISSUE_TEMPLATE/4-feature-request.yml (vendored, 6 changed lines)
@@ -2,7 +2,6 @@ name: 🎁 Feature Request
description: Propose a new feature for Codex
labels:
- enhancement
- needs triage
body:
- type: markdown
attributes:
@@ -19,11 +18,6 @@ body:
label: What feature would you like to see?
validations:
required: true
- type: textarea
id: author
attributes:
label: Are you interested in implementing this feature?
description: Please wait for acknowledgement before implementing or opening a PR.
- type: textarea
id: notes
attributes:

.github/ISSUE_TEMPLATE/5-vs-code-extension.yml (vendored, 24 changed lines)
@@ -14,11 +14,21 @@ body:
id: version
attributes:
label: What version of the VS Code extension are you using?
validations:
required: true
- type: input
id: plan
attributes:
label: What subscription do you have?
validations:
required: true
- type: input
id: ide
attributes:
label: Which IDE are you using?
description: Like `VS Code`, `Cursor`, `Windsurf`, etc.
validations:
required: true
- type: input
id: platform
attributes:
@@ -26,11 +36,18 @@ body:
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
id: actual
attributes:
label: What issue are you seeing?
description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
id: steps
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
description: Explain the bug and provide a code snippet that can reproduce it. Please include session id, token limit usage, context window usage if applicable.
validations:
required: true
- type: textarea
@@ -38,11 +55,6 @@
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
id: actual
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
id: notes
attributes:

.github/actions/linux-code-sign/action.yml (vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
name: linux-code-sign
description: Sign Linux artifacts with cosign.
inputs:
target:
description: Target triple for the artifacts to sign.
required: true
artifacts-dir:
description: Absolute path to the directory containing built binaries to sign.
required: true

runs:
using: composite
steps:
- name: Install cosign
uses: sigstore/cosign-installer@v3.7.0

- name: Cosign Linux artifacts
shell: bash
env:
COSIGN_EXPERIMENTAL: "1"
COSIGN_YES: "true"
COSIGN_OIDC_CLIENT_ID: "sigstore"
COSIGN_OIDC_ISSUER: "https://oauth2.sigstore.dev/auth"
run: |
set -euo pipefail

dest="${{ inputs.artifacts-dir }}"
if [[ ! -d "$dest" ]]; then
echo "Destination $dest does not exist"
exit 1
fi

for binary in codex codex-responses-api-proxy; do
artifact="${dest}/${binary}"
if [[ ! -f "$artifact" ]]; then
echo "Binary $artifact not found"
exit 1
fi

cosign sign-blob \
--yes \
--bundle "${artifact}.sigstore" \
"$artifact"
done
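
A bundle produced by `cosign sign-blob --bundle` can later be checked with `cosign verify-blob`; the identity and issuer values below are placeholders that would have to match whatever identity actually signed in CI, not something this diff specifies:

    cosign verify-blob \
      --bundle codex.sigstore \
      --certificate-identity-regexp '<expected signer identity>' \
      --certificate-oidc-issuer '<expected OIDC issuer>' \
      codex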

.github/actions/macos-code-sign/action.yml (vendored, new file, 246 lines)
@@ -0,0 +1,246 @@
name: macos-code-sign
description: Configure, sign, notarize, and clean up macOS code signing artifacts.
inputs:
target:
description: Rust compilation target triple (e.g. aarch64-apple-darwin).
required: true
sign-binaries:
description: Whether to sign and notarize the macOS binaries.
required: false
default: "true"
sign-dmg:
description: Whether to sign and notarize the macOS dmg.
required: false
default: "true"
apple-certificate:
description: Base64-encoded Apple signing certificate (P12).
required: true
apple-certificate-password:
description: Password for the signing certificate.
required: true
apple-notarization-key-p8:
description: Base64-encoded Apple notarization key (P8).
required: true
apple-notarization-key-id:
description: Apple notarization key ID.
required: true
apple-notarization-issuer-id:
description: Apple notarization issuer ID.
required: true
runs:
using: composite
steps:
- name: Configure Apple code signing
shell: bash
env:
KEYCHAIN_PASSWORD: actions
APPLE_CERTIFICATE: ${{ inputs.apple-certificate }}
APPLE_CERTIFICATE_PASSWORD: ${{ inputs.apple-certificate-password }}
run: |
set -euo pipefail

if [[ -z "${APPLE_CERTIFICATE:-}" ]]; then
echo "APPLE_CERTIFICATE is required for macOS signing"
exit 1
fi

if [[ -z "${APPLE_CERTIFICATE_PASSWORD:-}" ]]; then
echo "APPLE_CERTIFICATE_PASSWORD is required for macOS signing"
exit 1
fi

cert_path="${RUNNER_TEMP}/apple_signing_certificate.p12"
echo "$APPLE_CERTIFICATE" | base64 -d > "$cert_path"

keychain_path="${RUNNER_TEMP}/codex-signing.keychain-db"
security create-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
security set-keychain-settings -lut 21600 "$keychain_path"
security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"

keychain_args=()
cleanup_keychain() {
if ((${#keychain_args[@]} > 0)); then
security list-keychains -s "${keychain_args[@]}" || true
security default-keychain -s "${keychain_args[0]}" || true
else
security list-keychains -s || true
fi
if [[ -f "$keychain_path" ]]; then
security delete-keychain "$keychain_path" || true
fi
}

while IFS= read -r keychain; do
[[ -n "$keychain" ]] && keychain_args+=("$keychain")
done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')

if ((${#keychain_args[@]} > 0)); then
security list-keychains -s "$keychain_path" "${keychain_args[@]}"
else
security list-keychains -s "$keychain_path"
fi

security default-keychain -s "$keychain_path"
security import "$cert_path" -k "$keychain_path" -P "$APPLE_CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" "$keychain_path" > /dev/null

codesign_hashes=()
while IFS= read -r hash; do
[[ -n "$hash" ]] && codesign_hashes+=("$hash")
done < <(security find-identity -v -p codesigning "$keychain_path" \
| sed -n 's/.*\([0-9A-F]\{40\}\).*/\1/p' \
| sort -u)

if ((${#codesign_hashes[@]} == 0)); then
echo "No signing identities found in $keychain_path"
cleanup_keychain
rm -f "$cert_path"
exit 1
fi

if ((${#codesign_hashes[@]} > 1)); then
echo "Multiple signing identities found in $keychain_path:"
printf ' %s\n' "${codesign_hashes[@]}"
cleanup_keychain
rm -f "$cert_path"
exit 1
fi

APPLE_CODESIGN_IDENTITY="${codesign_hashes[0]}"

rm -f "$cert_path"

echo "APPLE_CODESIGN_IDENTITY=$APPLE_CODESIGN_IDENTITY" >> "$GITHUB_ENV"
echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
echo "::add-mask::$APPLE_CODESIGN_IDENTITY"

- name: Sign macOS binaries
if: ${{ inputs.sign-binaries == 'true' }}
shell: bash
run: |
set -euo pipefail

if [[ -z "${APPLE_CODESIGN_IDENTITY:-}" ]]; then
echo "APPLE_CODESIGN_IDENTITY is required for macOS signing"
exit 1
fi

keychain_args=()
if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
fi

for binary in codex codex-responses-api-proxy; do
path="codex-rs/target/${{ inputs.target }}/release/${binary}"
codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
done

- name: Notarize macOS binaries
if: ${{ inputs.sign-binaries == 'true' }}
shell: bash
env:
APPLE_NOTARIZATION_KEY_P8: ${{ inputs.apple-notarization-key-p8 }}
APPLE_NOTARIZATION_KEY_ID: ${{ inputs.apple-notarization-key-id }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ inputs.apple-notarization-issuer-id }}
run: |
set -euo pipefail

for var in APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
if [[ -z "${!var:-}" ]]; then
echo "$var is required for notarization"
exit 1
fi
done

notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
cleanup_notary() {
rm -f "$notary_key_path"
}
trap cleanup_notary EXIT

source "$GITHUB_ACTION_PATH/notary_helpers.sh"

notarize_binary() {
local binary="$1"
local source_path="codex-rs/target/${{ inputs.target }}/release/${binary}"
local archive_path="${RUNNER_TEMP}/${binary}.zip"

if [[ ! -f "$source_path" ]]; then
echo "Binary $source_path not found"
exit 1
fi

rm -f "$archive_path"
ditto -c -k --keepParent "$source_path" "$archive_path"

notarize_submission "$binary" "$archive_path" "$notary_key_path"
}

notarize_binary "codex"
notarize_binary "codex-responses-api-proxy"

- name: Sign and notarize macOS dmg
if: ${{ inputs.sign-dmg == 'true' }}
shell: bash
env:
APPLE_NOTARIZATION_KEY_P8: ${{ inputs.apple-notarization-key-p8 }}
APPLE_NOTARIZATION_KEY_ID: ${{ inputs.apple-notarization-key-id }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ inputs.apple-notarization-issuer-id }}
run: |
set -euo pipefail

for var in APPLE_CODESIGN_IDENTITY APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
if [[ -z "${!var:-}" ]]; then
echo "$var is required"
exit 1
fi
done

notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
cleanup_notary() {
rm -f "$notary_key_path"
}
trap cleanup_notary EXIT

source "$GITHUB_ACTION_PATH/notary_helpers.sh"

dmg_path="codex-rs/target/${{ inputs.target }}/release/codex-${{ inputs.target }}.dmg"

if [[ ! -f "$dmg_path" ]]; then
echo "dmg $dmg_path not found"
exit 1
fi

keychain_args=()
if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
fi

codesign --force --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$dmg_path"
notarize_submission "codex-${{ inputs.target }}.dmg" "$dmg_path" "$notary_key_path"
xcrun stapler staple "$dmg_path"

- name: Remove signing keychain
if: ${{ always() }}
shell: bash
env:
APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
run: |
set -euo pipefail
if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" ]]; then
keychain_args=()
while IFS= read -r keychain; do
[[ "$keychain" == "$APPLE_CODESIGN_KEYCHAIN" ]] && continue
[[ -n "$keychain" ]] && keychain_args+=("$keychain")
done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
if ((${#keychain_args[@]} > 0)); then
security list-keychains -s "${keychain_args[@]}"
security default-keychain -s "${keychain_args[0]}"
fi

if [[ -f "$APPLE_CODESIGN_KEYCHAIN" ]]; then
security delete-keychain "$APPLE_CODESIGN_KEYCHAIN"
fi
fi
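
As a rough local sanity check after this action runs, the signature and the stapled notarization ticket can be inspected with standard Apple tooling; `aarch64-apple-darwin` below stands in for whatever `inputs.target` was:

    # Verify the hardened-runtime signature on the signed binary.
    codesign --verify --strict --verbose=2 codex-rs/target/aarch64-apple-darwin/release/codex

    # Confirm the notarization ticket stapled to the dmg.
    xcrun stapler validate codex-rs/target/aarch64-apple-darwin/release/codex-aarch64-apple-darwin.dmg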

.github/actions/macos-code-sign/notary_helpers.sh (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
#!/usr/bin/env bash

notarize_submission() {
local label="$1"
local path="$2"
local notary_key_path="$3"

if [[ -z "${APPLE_NOTARIZATION_KEY_ID:-}" || -z "${APPLE_NOTARIZATION_ISSUER_ID:-}" ]]; then
echo "APPLE_NOTARIZATION_KEY_ID and APPLE_NOTARIZATION_ISSUER_ID are required for notarization"
exit 1
fi

if [[ -z "$notary_key_path" || ! -f "$notary_key_path" ]]; then
echo "Notary key file $notary_key_path not found"
exit 1
fi

if [[ ! -f "$path" ]]; then
echo "Notarization payload $path not found"
exit 1
fi

local submission_json
submission_json=$(xcrun notarytool submit "$path" \
--key "$notary_key_path" \
--key-id "$APPLE_NOTARIZATION_KEY_ID" \
--issuer "$APPLE_NOTARIZATION_ISSUER_ID" \
--output-format json \
--wait)

local status submission_id
status=$(printf '%s\n' "$submission_json" | jq -r '.status // "Unknown"')
submission_id=$(printf '%s\n' "$submission_json" | jq -r '.id // ""')

if [[ -z "$submission_id" ]]; then
echo "Failed to retrieve submission ID for $label"
exit 1
fi

echo "::notice title=Notarization::$label submission ${submission_id} completed with status ${status}"

if [[ "$status" != "Accepted" ]]; then
echo "Notarization failed for ${label} (submission ${submission_id}, status ${status})"
exit 1
fi
}
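
The action above consumes this helper by sourcing it; a minimal standalone invocation would look something like the following sketch, where the paths and IDs are placeholders and `jq` plus Xcode's `notarytool` are assumed to be available:

    source .github/actions/macos-code-sign/notary_helpers.sh
    export APPLE_NOTARIZATION_KEY_ID="<key id>"
    export APPLE_NOTARIZATION_ISSUER_ID="<issuer id>"
    notarize_submission "codex" "$RUNNER_TEMP/codex.zip" "$RUNNER_TEMP/notarytool.key.p8"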

.github/actions/windows-code-sign/action.yml (vendored, new file, 57 lines)
@@ -0,0 +1,57 @@
name: windows-code-sign
description: Sign Windows binaries with Azure Trusted Signing.
inputs:
target:
description: Target triple for the artifacts to sign.
required: true
client-id:
description: Azure Trusted Signing client ID.
required: true
tenant-id:
description: Azure tenant ID for Trusted Signing.
required: true
subscription-id:
description: Azure subscription ID for Trusted Signing.
required: true
endpoint:
description: Azure Trusted Signing endpoint.
required: true
account-name:
description: Azure Trusted Signing account name.
required: true
certificate-profile-name:
description: Certificate profile name for signing.
required: true

runs:
using: composite
steps:
- name: Azure login for Trusted Signing (OIDC)
uses: azure/login@v2
with:
client-id: ${{ inputs.client-id }}
tenant-id: ${{ inputs.tenant-id }}
subscription-id: ${{ inputs.subscription-id }}

- name: Sign Windows binaries with Azure Trusted Signing
uses: azure/trusted-signing-action@v0
with:
endpoint: ${{ inputs.endpoint }}
trusted-signing-account-name: ${{ inputs.account-name }}
certificate-profile-name: ${{ inputs.certificate-profile-name }}
exclude-environment-credential: true
exclude-workload-identity-credential: true
exclude-managed-identity-credential: true
exclude-shared-token-cache-credential: true
exclude-visual-studio-credential: true
exclude-visual-studio-code-credential: true
exclude-azure-cli-credential: false
exclude-azure-powershell-credential: true
exclude-azure-developer-cli-credential: true
exclude-interactive-browser-credential: true
cache-dependencies: false
files: |
${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex.exe
${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-responses-api-proxy.exe
${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-windows-sandbox-setup.exe
${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-command-runner.exe
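
Assuming the Trusted Signing step succeeded, the resulting Authenticode signatures can be spot-checked on a Windows machine with signtool; the target triple in the path is an example, not fixed by this action:

    signtool verify /pa codex-rs\target\x86_64-pc-windows-msvc\release\codex.exe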

.github/codex-cli-login.png (vendored, binary file not shown; before: 2.9 MiB)
.github/codex-cli-permissions.png (vendored, binary file not shown; before: 408 KiB)
.github/codex-cli-splash.png (vendored, binary file not shown; before: 3.1 MiB, after: 818 KiB)

.github/codex/home/config.toml (vendored, 2 changed lines)
@@ -1,3 +1,3 @@
model = "gpt-5"
model = "gpt-5.1"

# Consider setting [mcp_servers] here!
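
For reference, an `[mcp_servers]` entry in this config is typically one table per server describing how to launch it; the server name, command, and arguments below are illustrative placeholders rather than part of this change:

    [mcp_servers.example]
    command = "npx"
    args = ["-y", "some-mcp-server"]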

.github/demo.gif (vendored, binary file not shown; before: 19 MiB)

.github/dotslash-config.json (vendored, 24 changed lines)
@@ -55,6 +55,30 @@
"path": "codex-responses-api-proxy.exe"
}
}
},
"codex-command-runner": {
"platforms": {
"windows-x86_64": {
"regex": "^codex-command-runner-x86_64-pc-windows-msvc\\.exe\\.zst$",
"path": "codex-command-runner.exe"
},
"windows-aarch64": {
"regex": "^codex-command-runner-aarch64-pc-windows-msvc\\.exe\\.zst$",
"path": "codex-command-runner.exe"
}
}
},
"codex-windows-sandbox-setup": {
"platforms": {
"windows-x86_64": {
"regex": "^codex-windows-sandbox-setup-x86_64-pc-windows-msvc\\.exe\\.zst$",
"path": "codex-windows-sandbox-setup.exe"
},
"windows-aarch64": {
"regex": "^codex-windows-sandbox-setup-aarch64-pc-windows-msvc\\.exe\\.zst$",
"path": "codex-windows-sandbox-setup.exe"
}
}
}
}
}

.github/prompts/issue-deduplicator.txt (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
You are an assistant that triages new GitHub issues by identifying potential duplicates.

You will receive the following JSON files located in the current working directory:
- `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body).
- `codex-existing-issues.json`: JSON array of recent issues (each element includes number, title, body, createdAt).

Instructions:
- Load both files as JSON and review their contents carefully. The codex-existing-issues.json file is large, ensure you explore all of it.
- Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request.
- Only consider an issue a potential duplicate if there is a clear overlap in symptoms, feature requests, reproduction steps, or error messages.
- Prioritize newer issues when similarity is comparable.
- Ignore pull requests and issues whose similarity is tenuous.
- When unsure, prefer returning fewer matches.

Output requirements:
- Respond with a JSON array of issue numbers (integers), ordered from most likely duplicate to least.
- Include at most five numbers.
- If you find no plausible duplicates, respond with `[]`.
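
To illustrate the required output shape, a response flagging three likely duplicates would be a bare JSON array of issue numbers (the numbers here are invented), ordered most to least likely:

    [4123, 3987, 2210]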
26  .github/prompts/issue-labeler.txt  (vendored, new file)
@@ -0,0 +1,26 @@
You are an assistant that reviews GitHub issues for the repository.

Your job is to choose the most appropriate existing labels for the issue described later in this prompt.
Follow these rules:
- Only pick labels out of the list below.
- Prefer a small set of precise labels over many broad ones.
- If none of the labels fit, respond with an empty JSON array: []
- Output must be a JSON array of label names (strings) with no additional commentary.

Labels to apply:
1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth).
2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks.
3. extension — VS Code (or other IDE) extension-specific issues.
4. windows-os — Bugs or friction specific to Windows environments (PowerShell behavior, path handling, copy/paste, OS-specific auth or tooling failures).
5. mcp — Topics involving Model Context Protocol servers/clients.
6. codex-web — Issues targeting the Codex web UI/Cloud experience.
8. azure — Problems or requests tied to Azure OpenAI deployments.
9. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).
10. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.

Issue information is available in environment variables:

ISSUE_NUMBER
ISSUE_TITLE
ISSUE_BODY
REPO_FULL_NAME
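The labeler prompt expects a JSON array whose elements come only from the label list above, with the issue exposed through environment variables. A hedged sketch of a consumer that reads those variables and filters candidate labels down to the allowed set; the keyword heuristic is purely illustrative and not how Codex actually chooses labels.

import json
import os

ALLOWED_LABELS = {
    "bug", "enhancement", "extension", "windows-os", "mcp",
    "codex-web", "azure", "documentation", "model-behavior",
}

def filter_labels(candidates: list[str]) -> str:
    # Keep only labels from the allowed set and emit the JSON array the prompt requires.
    chosen = [label for label in candidates if label in ALLOWED_LABELS]
    return json.dumps(chosen)

# The workflow exposes the issue via ISSUE_TITLE / ISSUE_BODY environment variables.
title = os.environ.get("ISSUE_TITLE", "")
body = os.environ.get("ISSUE_BODY", "")
candidates = ["bug"] if "error" in (title + body).lower() else []
print(filter_labels(candidates))  # e.g. ["bug"] or []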
2  .github/pull_request_template.md  (vendored)
@@ -4,3 +4,5 @@ Before opening this Pull Request, please read the dedicated "Contributing" markd
https://github.com/openai/codex/blob/main/docs/contributing.md

If your PR conforms to our contribution guidelines, replace this text with a detailed and high quality description of your changes.

Include a link to a bug report or enhancement request.
20  .github/workflows/Dockerfile.bazel  (vendored, new file)
@@ -0,0 +1,20 @@
FROM ubuntu:24.04

# TODO(mbolin): Published to docker.io/mbolin491/codex-bazel:latest for
# initial debugging, but we should publish to a more proper location.
#
# docker buildx create --use
# docker buildx build --platform linux/amd64,linux/arm64 -f .github/workflows/Dockerfile.bazel -t mbolin491/codex-bazel:latest --push .

RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    curl git python3 ca-certificates && \
    rm -rf /var/lib/apt/lists/*

# Install dotslash.
RUN curl -LSfs "https://github.com/facebook/dotslash/releases/download/v0.5.8/dotslash-ubuntu-22.04.$(uname -m).tar.gz" | tar fxz - -C /usr/local/bin

# Ubuntu 24.04 ships with user 'ubuntu' already created with UID 1000.
USER ubuntu

WORKDIR /workspace
110  .github/workflows/bazel.yml  (vendored, new file)
@@ -0,0 +1,110 @@
name: Bazel (experimental)
|
||||
|
||||
# Note this workflow was originally derived from:
|
||||
# https://github.com/cerisier/toolchains_llvm_bootstrapped/blob/main/.github/workflows/ci.yaml
|
||||
|
||||
on:
|
||||
pull_request: {}
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
# Cancel previous actions from the same PR or branch except 'main' branch.
|
||||
# See https://docs.github.com/en/actions/using-jobs/using-concurrency and https://docs.github.com/en/actions/learn-github-actions/contexts for more info.
|
||||
group: concurrency-group::${{ github.workflow }}::${{ github.event.pull_request.number > 0 && format('pr-{0}', github.event.pull_request.number) || github.ref_name }}${{ github.ref_name == 'main' && format('::{0}', github.run_id) || ''}}
|
||||
cancel-in-progress: ${{ github.ref_name != 'main' }}
|
||||
jobs:
|
||||
test:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# macOS
|
||||
- os: macos-15-xlarge
|
||||
target: aarch64-apple-darwin
|
||||
- os: macos-15-xlarge
|
||||
target: x86_64-apple-darwin
|
||||
|
||||
# Linux
|
||||
- os: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-gnu
|
||||
- os: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
- os: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
- os: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
# TODO: Enable Windows once we fix the toolchain issues there.
|
||||
#- os: windows-latest
|
||||
# target: x86_64-pc-windows-gnullvm
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
# Configure a human readable name for each job
|
||||
name: Local Bazel build on ${{ matrix.os }} for ${{ matrix.target }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
# Some integration tests rely on DotSlash being installed.
|
||||
# See https://github.com/openai/codex/pull/7617.
|
||||
- name: Install DotSlash
|
||||
uses: facebook/install-dotslash@v2
|
||||
|
||||
- name: Make DotSlash available in PATH (Unix)
|
||||
if: runner.os != 'Windows'
|
||||
run: cp "$(which dotslash)" /usr/local/bin
|
||||
|
||||
- name: Make DotSlash available in PATH (Windows)
|
||||
if: runner.os == 'Windows'
|
||||
shell: pwsh
|
||||
run: Copy-Item (Get-Command dotslash).Source -Destination "$env:LOCALAPPDATA\Microsoft\WindowsApps\dotslash.exe"
|
||||
|
||||
# Install Bazel via Bazelisk
|
||||
- name: Set up Bazel
|
||||
uses: bazelbuild/setup-bazelisk@v3
|
||||
|
||||
# TODO(mbolin): Bring this back once we have caching working. Currently,
|
||||
# we never seem to get a cache hit but we still end up paying the cost of
|
||||
# uploading at the end of the build, which takes over a minute!
|
||||
#
|
||||
# Cache build and external artifacts so that the next ci build is incremental.
|
||||
# Because github action caches cannot be updated after a build, we need to
|
||||
# store the contents of each build in a unique cache key, then fall back to loading
|
||||
# it on the next ci run. We use hashFiles(...) in the key and restore-keys- with
|
||||
# the prefix to load the most recent cache for the branch on a cache miss. You
|
||||
# should customize the contents of hashFiles to capture any bazel input sources,
|
||||
# although this doesn't need to be perfect. If none of the input sources change
|
||||
# then a cache hit will load an existing cache and bazel won't have to do any work.
|
||||
# In the case of a cache miss, you want the fallback cache to contain most of the
|
||||
# previously built artifacts to minimize build time. The more precise you are with
|
||||
# hashFiles sources the less work bazel will have to do.
|
||||
# - name: Mount bazel caches
|
||||
# uses: actions/cache@v4
|
||||
# with:
|
||||
# path: |
|
||||
# ~/.cache/bazel-repo-cache
|
||||
# ~/.cache/bazel-repo-contents-cache
|
||||
# key: bazel-cache-${{ matrix.os }}-${{ hashFiles('**/BUILD.bazel', '**/*.bzl', 'MODULE.bazel') }}
|
||||
# restore-keys: |
|
||||
# bazel-cache-${{ matrix.os }}
|
||||
|
||||
- name: Configure Bazel startup args (Windows)
|
||||
if: runner.os == 'Windows'
|
||||
shell: pwsh
|
||||
run: |
|
||||
# Use a very short path to reduce argv/path length issues.
|
||||
"BAZEL_STARTUP_ARGS=--output_user_root=C:\" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
|
||||
|
||||
- name: bazel test //...
|
||||
env:
|
||||
BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }}
|
||||
shell: bash
|
||||
run: |
|
||||
bazel $BAZEL_STARTUP_ARGS --bazelrc=.github/workflows/ci.bazelrc test //... \
|
||||
--build_metadata=REPO_URL=https://github.com/openai/codex.git \
|
||||
--build_metadata=COMMIT_SHA=$(git rev-parse HEAD) \
|
||||
--build_metadata=ROLE=CI \
|
||||
--build_metadata=VISIBILITY=PUBLIC \
|
||||
"--remote_header=x-buildbuddy-api-key=$BUILDBUDDY_API_KEY"
|
||||
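The concurrency group expression in bazel.yml above is dense: pull requests share one group per PR number, other branches group by ref name, and pushes to main append the run id so each run gets its own group and is never cancelled. A small Python sketch of the names that expression produces; the group_name helper and the example values are assumptions used only for illustration.

def group_name(workflow: str, pr_number: int | None, ref_name: str, run_id: int) -> str:
    # Mirrors the GitHub Actions expression: PRs key on the PR number,
    # branches key on the ref name, and main appends the run id so each
    # push to main gets its own (never-cancelled) group.
    scope = f"pr-{pr_number}" if pr_number else ref_name
    suffix = f"::{run_id}" if ref_name == "main" else ""
    return f"concurrency-group::{workflow}::{scope}{suffix}"

print(group_name("Bazel (experimental)", 1234, "feature-branch", 42))  # ...::pr-1234
print(group_name("Bazel (experimental)", None, "main", 42))            # ...::main::42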
26  .github/workflows/cargo-deny.yml  (vendored, new file)
@@ -0,0 +1,26 @@
name: cargo-deny

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  cargo-deny:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./codex-rs
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable

      - name: Run cargo-deny
        uses: EmbarkStudios/cargo-deny-action@v2
        with:
          rust-version: stable
          manifest-path: ./codex-rs/Cargo.toml
20  .github/workflows/ci.bazelrc  (vendored, new file)
@@ -0,0 +1,20 @@
common --remote_download_minimal
common --nobuild_runfile_links
common --keep_going

# We prefer to run the build actions entirely remotely so we can dial up the concurrency.
# We have platform-specific tests, so we want to execute the tests on all platforms using the strongest sandboxing available on each platform.

# On linux, we can do a full remote build/test, by targeting the right (x86/arm) runners, so we have coverage of both.
# Linux crossbuilds don't work until we untangle the libc constraint mess.
common:linux --config=remote
common:linux --strategy=remote
common:linux --platforms=//:rbe

# On mac, we can run all the build actions remotely but test actions locally.
common:macos --config=remote
common:macos --strategy=remote
common:macos --strategy=TestRunner=darwin-sandbox,local

common:windows --strategy=TestRunner=local
22  .github/workflows/ci.yml  (vendored)
@@ -12,7 +12,7 @@ jobs:
NODE_OPTIONS: --max-old-space-size=4096
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
@@ -20,14 +20,14 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
# build_npm_package.py requires DotSlash when staging releases.
|
||||
# stage_npm_packages.py requires DotSlash when staging releases.
|
||||
- uses: facebook/install-dotslash@v2
|
||||
|
||||
- name: Stage npm package
|
||||
@@ -36,15 +36,18 @@ jobs:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
CODEX_VERSION=0.40.0
|
||||
PACK_OUTPUT="${RUNNER_TEMP}/codex-npm.tgz"
|
||||
python3 ./codex-cli/scripts/build_npm_package.py \
|
||||
# Use a rust-release version that includes all native binaries.
|
||||
CODEX_VERSION=0.74.0
|
||||
OUTPUT_DIR="${RUNNER_TEMP}"
|
||||
python3 ./scripts/stage_npm_packages.py \
|
||||
--release-version "$CODEX_VERSION" \
|
||||
--pack-output "$PACK_OUTPUT"
|
||||
--package codex \
|
||||
--output-dir "$OUTPUT_DIR"
|
||||
PACK_OUTPUT="${OUTPUT_DIR}/codex-npm-${CODEX_VERSION}.tgz"
|
||||
echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Upload staged npm package artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: codex-npm-staging
|
||||
path: ${{ steps.stage_npm_package.outputs.pack_output }}
|
||||
@@ -58,3 +61,6 @@ jobs:
|
||||
run: ./scripts/asciicheck.py codex-cli/README.md
|
||||
- name: Check codex-cli/README ToC
|
||||
run: python3 scripts/readme_toc.py codex-cli/README.md
|
||||
|
||||
- name: Prettier (run `pnpm run format:fix` to fix)
|
||||
run: pnpm run format
|
||||
|
||||
28  .github/workflows/cla.yml  (vendored)
@@ -13,17 +13,37 @@ permissions:
|
||||
jobs:
|
||||
cla:
|
||||
# Only run the CLA assistant for the canonical openai repo so forks are not blocked
|
||||
# and contributors who signed previously do not receive duplicate CLA notifications.
|
||||
if: ${{ github.repository_owner == 'openai' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: contributor-assistant/github-action@v2.6.1
|
||||
# Run on close only if the PR was merged. This will lock the PR to preserve
|
||||
# the CLA agreement. We don't want to lock PRs that have been closed without
|
||||
# merging because the contributor may want to respond with additional comments.
|
||||
# This action has a "lock-pullrequest-aftermerge" option that can be set to false,
|
||||
# but that would unconditionally skip locking even in cases where the PR was merged.
|
||||
if: |
|
||||
github.event_name == 'pull_request_target' ||
|
||||
github.event.comment.body == 'recheck' ||
|
||||
github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
|
||||
(
|
||||
github.event_name == 'pull_request_target' &&
|
||||
(
|
||||
github.event.action == 'opened' ||
|
||||
github.event.action == 'synchronize' ||
|
||||
(github.event.action == 'closed' && github.event.pull_request.merged == true)
|
||||
)
|
||||
) ||
|
||||
(
|
||||
github.event_name == 'issue_comment' &&
|
||||
(
|
||||
github.event.comment.body == 'recheck' ||
|
||||
github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
|
||||
)
|
||||
)
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
path-to-document: https://github.com/openai/codex/blob/main/docs/CLA.md
|
||||
path-to-signatures: signatures/cla.json
|
||||
branch: cla-signatures
|
||||
allowlist: dependabot[bot]
|
||||
allowlist: codex,dependabot,dependabot[bot],github-actions[bot]
|
||||
|
||||
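The reworked `if:` condition in cla.yml above only runs the CLA assistant when a PR is opened, synchronized, or closed after being merged, or when a comment says "recheck" or the signing phrase. A hedged Python sketch of that predicate; the should_run helper and its arguments are illustrative and not part of the workflow.

SIGN_PHRASE = "I have read the CLA Document and I hereby sign the CLA"

def should_run(event_name: str, action: str = "", merged: bool = False, comment: str = "") -> bool:
    # Mirrors the cla.yml `if:` expression shown above.
    if event_name == "pull_request_target":
        return action in ("opened", "synchronize") or (action == "closed" and merged)
    if event_name == "issue_comment":
        return comment in ("recheck", SIGN_PHRASE)
    return False

print(should_run("pull_request_target", action="closed", merged=True))   # True
print(should_run("pull_request_target", action="closed", merged=False))  # False
print(should_run("issue_comment", comment="recheck"))                    # True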
107  .github/workflows/close-stale-contributor-prs.yml  (vendored, new file)
@@ -0,0 +1,107 @@
name: Close stale contributor PRs
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 6 * * *"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
close-stale-contributor-prs:
|
||||
# Prevent scheduled runs on forks
|
||||
if: github.repository == 'openai/codex'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Close inactive PRs from contributors
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const DAYS_INACTIVE = 14;
|
||||
const cutoff = new Date(Date.now() - DAYS_INACTIVE * 24 * 60 * 60 * 1000);
|
||||
const { owner, repo } = context.repo;
|
||||
const dryRun = false;
|
||||
const stalePrs = [];
|
||||
|
||||
core.info(`Dry run mode: ${dryRun}`);
|
||||
|
||||
const prs = await github.paginate(github.rest.pulls.list, {
|
||||
owner,
|
||||
repo,
|
||||
state: "open",
|
||||
per_page: 100,
|
||||
sort: "updated",
|
||||
direction: "asc",
|
||||
});
|
||||
|
||||
for (const pr of prs) {
|
||||
const lastUpdated = new Date(pr.updated_at);
|
||||
if (lastUpdated > cutoff) {
|
||||
core.info(`PR ${pr.number} is fresh`);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!pr.user || pr.user.type !== "User") {
|
||||
core.info(`PR ${pr.number} wasn't created by a user`);
|
||||
continue;
|
||||
}
|
||||
|
||||
let permission;
|
||||
try {
|
||||
const permissionResponse = await github.rest.repos.getCollaboratorPermissionLevel({
|
||||
owner,
|
||||
repo,
|
||||
username: pr.user.login,
|
||||
});
|
||||
permission = permissionResponse.data.permission;
|
||||
} catch (error) {
|
||||
if (error.status === 404) {
|
||||
core.info(`Author ${pr.user.login} is not a collaborator; skipping #${pr.number}`);
|
||||
continue;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
const hasContributorAccess = ["admin", "maintain", "write"].includes(permission);
|
||||
if (!hasContributorAccess) {
|
||||
core.info(`Author ${pr.user.login} has ${permission} access; skipping #${pr.number}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
stalePrs.push(pr);
|
||||
}
|
||||
|
||||
if (!stalePrs.length) {
|
||||
core.info("No stale contributor pull requests found.");
|
||||
return;
|
||||
}
|
||||
|
||||
for (const pr of stalePrs) {
|
||||
const issue_number = pr.number;
|
||||
const closeComment = `Closing this pull request because it has had no updates for more than ${DAYS_INACTIVE} days. If you plan to continue working on it, feel free to reopen or open a new PR.`;
|
||||
|
||||
if (dryRun) {
|
||||
core.info(`[dry-run] Would close contributor PR #${issue_number} from ${pr.user.login}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
await github.rest.issues.createComment({
|
||||
owner,
|
||||
repo,
|
||||
issue_number,
|
||||
body: closeComment,
|
||||
});
|
||||
|
||||
await github.rest.pulls.update({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: issue_number,
|
||||
state: "closed",
|
||||
});
|
||||
|
||||
core.info(`Closed contributor PR #${issue_number} from ${pr.user.login}`);
|
||||
}
|
||||
4  .github/workflows/codespell.yml  (vendored)
@@ -18,10 +18,10 @@ jobs:
 
    steps:
      - name: Checkout
-       uses: actions/checkout@v5
+       uses: actions/checkout@v6
      - name: Annotate locations with typos
        uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
      - name: Codespell
-       uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1
+       uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 # v2.2
        with:
          ignore_words_file: .codespellignore
140  .github/workflows/issue-deduplicator.yml  (vendored, new file)
@@ -0,0 +1,140 @@
name: Issue Deduplicator
|
||||
|
||||
on:
|
||||
issues:
|
||||
types:
|
||||
- opened
|
||||
- labeled
|
||||
|
||||
jobs:
|
||||
gather-duplicates:
|
||||
name: Identify potential duplicates
|
||||
# Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
|
||||
if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate'))
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
codex_output: ${{ steps.codex.outputs.final-message }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Prepare Codex inputs
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
set -eo pipefail
|
||||
|
||||
CURRENT_ISSUE_FILE=codex-current-issue.json
|
||||
EXISTING_ISSUES_FILE=codex-existing-issues.json
|
||||
|
||||
gh issue list --repo "${{ github.repository }}" \
|
||||
--json number,title,body,createdAt \
|
||||
--limit 1000 \
|
||||
--state all \
|
||||
--search "sort:created-desc" \
|
||||
| jq '.' \
|
||||
> "$EXISTING_ISSUES_FILE"
|
||||
|
||||
gh issue view "${{ github.event.issue.number }}" \
|
||||
--repo "${{ github.repository }}" \
|
||||
--json number,title,body \
|
||||
| jq '.' \
|
||||
> "$CURRENT_ISSUE_FILE"
|
||||
|
||||
- id: codex
|
||||
uses: openai/codex-action@main
|
||||
with:
|
||||
openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
|
||||
allow-users: "*"
|
||||
prompt: |
|
||||
You are an assistant that triages new GitHub issues by identifying potential duplicates.
|
||||
|
||||
You will receive the following JSON files located in the current working directory:
|
||||
- `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body).
|
||||
- `codex-existing-issues.json`: JSON array of recent issues (each element includes number, title, body, createdAt).
|
||||
|
||||
Instructions:
|
||||
- Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request.
|
||||
- Focus on the underlying intent and context of each issue—such as reported symptoms, feature requests, reproduction steps, or error messages—rather than relying solely on string similarity or synthetic metrics.
|
||||
- After your analysis, validate your results in 1-2 lines explaining your decision to return the selected matches.
|
||||
- When unsure, prefer returning fewer matches.
|
||||
- Include at most five numbers.
|
||||
|
||||
output-schema: |
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"issues": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"reason": { "type": "string" }
|
||||
},
|
||||
"required": ["issues", "reason"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
|
||||
comment-on-issue:
|
||||
name: Comment with potential duplicates
|
||||
needs: gather-duplicates
|
||||
if: ${{ needs.gather-duplicates.result != 'skipped' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
steps:
|
||||
- name: Comment on issue
|
||||
uses: actions/github-script@v8
|
||||
env:
|
||||
CODEX_OUTPUT: ${{ needs.gather-duplicates.outputs.codex_output }}
|
||||
with:
|
||||
github-token: ${{ github.token }}
|
||||
script: |
|
||||
const raw = process.env.CODEX_OUTPUT ?? '';
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(raw);
|
||||
} catch (error) {
|
||||
core.info(`Codex output was not valid JSON. Raw output: ${raw}`);
|
||||
core.info(`Parse error: ${error.message}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const issues = Array.isArray(parsed?.issues) ? parsed.issues : [];
|
||||
const currentIssueNumber = String(context.payload.issue.number);
|
||||
|
||||
console.log(`Current issue number: ${currentIssueNumber}`);
|
||||
console.log(issues);
|
||||
|
||||
const filteredIssues = issues.filter((value) => String(value) !== currentIssueNumber);
|
||||
|
||||
if (filteredIssues.length === 0) {
|
||||
core.info('Codex reported no potential duplicates.');
|
||||
return;
|
||||
}
|
||||
|
||||
const lines = [
|
||||
'Potential duplicates detected. Please review them and close your issue if it is a duplicate.',
|
||||
'',
|
||||
...filteredIssues.map((value) => `- #${String(value)}`),
|
||||
'',
|
||||
'*Powered by [Codex Action](https://github.com/openai/codex-action)*'];
|
||||
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.payload.issue.number,
|
||||
body: lines.join("\n"),
|
||||
});
|
||||
|
||||
- name: Remove codex-deduplicate label
|
||||
if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
GH_REPO: ${{ github.repository }}
|
||||
run: |
|
||||
gh issue edit "${{ github.event.issue.number }}" --remove-label codex-deduplicate || true
|
||||
echo "Attempted to remove label: codex-deduplicate"
|
||||
131  .github/workflows/issue-labeler.yml  (vendored, new file)
@@ -0,0 +1,131 @@
name: Issue Labeler
|
||||
|
||||
on:
|
||||
issues:
|
||||
types:
|
||||
- opened
|
||||
- labeled
|
||||
|
||||
jobs:
|
||||
gather-labels:
|
||||
name: Generate label suggestions
|
||||
# Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
|
||||
if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label'))
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
codex_output: ${{ steps.codex.outputs.final-message }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- id: codex
|
||||
uses: openai/codex-action@main
|
||||
with:
|
||||
openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
|
||||
allow-users: "*"
|
||||
prompt: |
|
||||
You are an assistant that reviews GitHub issues for the repository.
|
||||
|
||||
Your job is to choose the most appropriate labels for the issue described later in this prompt.
|
||||
Follow these rules:
|
||||
|
||||
- Add one (and only one) of the following three labels to distinguish the type of issue. Default to "bug" if unsure.
|
||||
1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth).
|
||||
2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks.
|
||||
3. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).
|
||||
|
||||
- If applicable, add one of the following labels to specify which sub-product or product surface the issue relates to.
|
||||
1. CLI — the Codex command line interface.
|
||||
2. extension — VS Code (or other IDE) extension-specific issues.
|
||||
3. codex-web — Issues targeting the Codex web UI/Cloud experience.
|
||||
4. github-action — Issues with the Codex GitHub action.
|
||||
5. iOS — Issues with the Codex iOS app.
|
||||
|
||||
- Additionally add zero or more of the following labels that are relevant to the issue content. Prefer a small set of precise labels over many broad ones.
|
||||
1. windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures).
|
||||
2. mcp — Topics involving Model Context Protocol servers/clients.
|
||||
3. mcp-server — Problems related to the codex mcp-server command, where codex runs as an MCP server.
|
||||
4. azure — Problems or requests tied to Azure OpenAI deployments.
|
||||
5. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.
|
||||
6. code-review — Issues related to the code review feature or functionality.
|
||||
7. auth - Problems related to authentication, login, or access tokens.
|
||||
8. codex-exec - Problems related to the "codex exec" command or functionality.
|
||||
9. context-management - Problems related to compaction, context windows, or available context reporting.
|
||||
10. custom-model - Problems that involve using custom model providers, local models, or OSS models.
|
||||
11. rate-limits - Problems related to token limits, rate limits, or token usage reporting.
|
||||
12. sandbox - Issues related to local sandbox environments or tool call approvals to override sandbox restrictions.
|
||||
13. tool-calls - Problems related to specific tool call invocations including unexpected errors, failures, or hangs.
|
||||
14. TUI - Problems with the terminal user interface (TUI) including keyboard shortcuts, copy & pasting, menus, or screen update issues.
|
||||
|
||||
Issue number: ${{ github.event.issue.number }}
|
||||
|
||||
Issue title:
|
||||
${{ github.event.issue.title }}
|
||||
|
||||
Issue body:
|
||||
${{ github.event.issue.body }}
|
||||
|
||||
Repository full name:
|
||||
${{ github.repository }}
|
||||
|
||||
output-schema: |
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"labels": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["labels"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
|
||||
apply-labels:
|
||||
name: Apply labels from Codex output
|
||||
needs: gather-labels
|
||||
if: ${{ needs.gather-labels.result != 'skipped' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
GH_REPO: ${{ github.repository }}
|
||||
ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
CODEX_OUTPUT: ${{ needs.gather-labels.outputs.codex_output }}
|
||||
steps:
|
||||
- name: Apply labels
|
||||
run: |
|
||||
json=${CODEX_OUTPUT//$'\r'/}
|
||||
if [ -z "$json" ]; then
|
||||
echo "Codex produced no output. Skipping label application."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if ! printf '%s' "$json" | jq -e 'type == "object" and (.labels | type == "array")' >/dev/null 2>&1; then
|
||||
echo "Codex output did not include a labels array. Raw output: $json"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
labels=$(printf '%s' "$json" | jq -r '.labels[] | tostring')
|
||||
if [ -z "$labels" ]; then
|
||||
echo "Codex returned an empty array. Nothing to do."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
cmd=(gh issue edit "$ISSUE_NUMBER")
|
||||
while IFS= read -r label; do
|
||||
cmd+=(--add-label "$label")
|
||||
done <<< "$labels"
|
||||
|
||||
"${cmd[@]}" || true
|
||||
|
||||
- name: Remove codex-label trigger
|
||||
if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-label' }}
|
||||
run: |
|
||||
gh issue edit "$ISSUE_NUMBER" --remove-label codex-label || true
|
||||
echo "Attempted to remove label: codex-label"
|
||||
417  .github/workflows/rust-ci.yml  (vendored)
@@ -9,7 +9,7 @@ on:
# CI builds in debug (dev) for faster signal.
|
||||
|
||||
jobs:
|
||||
# --- Detect what changed (always runs) -------------------------------------
|
||||
# --- Detect what changed to detect which tests to run (always runs) -------------------------------------
|
||||
changed:
|
||||
name: Detect changed areas
|
||||
runs-on: ubuntu-24.04
|
||||
@@ -17,7 +17,7 @@ jobs:
|
||||
codex: ${{ steps.detect.outputs.codex }}
|
||||
workflows: ${{ steps.detect.outputs.workflows }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Detect changed paths (no external action)
|
||||
@@ -28,9 +28,11 @@ jobs:
|
||||
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
BASE_SHA='${{ github.event.pull_request.base.sha }}'
|
||||
HEAD_SHA='${{ github.event.pull_request.head.sha }}'
|
||||
echo "Base SHA: $BASE_SHA"
|
||||
# List files changed between base and current HEAD (merge-base aware)
|
||||
mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA"...HEAD)
|
||||
echo "Head SHA: $HEAD_SHA"
|
||||
# List files changed between base and PR head
|
||||
mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA" "$HEAD_SHA")
|
||||
else
|
||||
# On push / manual runs, default to running everything
|
||||
files=("codex-rs/force" ".github/force")
|
||||
@@ -56,8 +58,8 @@ jobs:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
with:
|
||||
components: rustfmt
|
||||
- name: cargo fmt
|
||||
@@ -74,9 +76,9 @@ jobs:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: cargo-shear
|
||||
version: 1.5.1
|
||||
@@ -84,9 +86,9 @@ jobs:
|
||||
run: cargo shear
|
||||
|
||||
# --- CI to validate on different os/targets --------------------------------
|
||||
lint_build_test:
|
||||
name: ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
lint_build:
|
||||
name: Lint/Build — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
|
||||
runs-on: ${{ matrix.runs_on || matrix.runner }}
|
||||
timeout-minutes: 30
|
||||
needs: changed
|
||||
# Keep job-level if to avoid spinning up runners when not needed
|
||||
@@ -94,74 +96,196 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
env:
|
||||
# Speed up repeated builds across CI runs by caching compiled objects (non-Windows).
|
||||
USE_SCCACHE: ${{ startsWith(matrix.runner, 'windows') && 'false' || 'true' }}
|
||||
CARGO_INCREMENTAL: "0"
|
||||
SCCACHE_CACHE_SIZE: 10G
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: macos-14
|
||||
- runner: macos-15-xlarge
|
||||
target: aarch64-apple-darwin
|
||||
profile: dev
|
||||
- runner: macos-14
|
||||
- runner: macos-15-xlarge
|
||||
target: x86_64-apple-darwin
|
||||
profile: dev
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-x64
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-x64
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-arm64
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-gnu
|
||||
profile: dev
|
||||
- runner: windows-latest
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-arm64
|
||||
- runner: windows-x64
|
||||
target: x86_64-pc-windows-msvc
|
||||
profile: dev
|
||||
- runner: windows-11-arm
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-x64
|
||||
- runner: windows-arm64
|
||||
target: aarch64-pc-windows-msvc
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-arm64
|
||||
|
||||
# Also run representative release builds on Mac and Linux because
|
||||
# there could be release-only build errors we want to catch.
|
||||
# Hopefully this also pre-populates the build cache to speed up
|
||||
# releases.
|
||||
- runner: macos-14
|
||||
- runner: macos-15-xlarge
|
||||
target: aarch64-apple-darwin
|
||||
profile: release
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
profile: release
|
||||
- runner: windows-latest
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-x64
|
||||
- runner: windows-x64
|
||||
target: x86_64-pc-windows-msvc
|
||||
profile: release
|
||||
- runner: windows-11-arm
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-x64
|
||||
- runner: windows-arm64
|
||||
target: aarch64-pc-windows-msvc
|
||||
profile: release
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-arm64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
components: clippy
|
||||
|
||||
- uses: actions/cache@v4
|
||||
- name: Compute lockfile hash
|
||||
id: lockhash
|
||||
working-directory: codex-rs
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
|
||||
echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# Explicit cache restore: split cargo home vs target, so we can
|
||||
# avoid caching the large target dir on the gnu-dev job.
|
||||
- name: Restore cargo home cache
|
||||
id: cache_cargo_home_restore
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
${{ github.workspace }}/codex-rs/target/
|
||||
key: cargo-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
|
||||
restore-keys: |
|
||||
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
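The "Compute lockfile hash" step above feeds sha256 digests of Cargo.lock and rust-toolchain.toml into the cargo-home cache key, so the cache only rolls over when dependencies or the toolchain change. A minimal Python sketch of that key construction, assuming it is run from the repository root; the cache_key helper and example inputs are illustrative, and the real workflow does this with sha256sum in bash.

import hashlib
from pathlib import Path

def sha256_of(path: str) -> str:
    return hashlib.sha256(Path(path).read_bytes()).hexdigest()

def cache_key(runner: str, target: str, profile: str) -> str:
    # Mirrors: cargo-home-<runner>-<target>-<profile>-<lock hash>-<toolchain hash>
    lock = sha256_of("codex-rs/Cargo.lock")
    toolchain = sha256_of("codex-rs/rust-toolchain.toml")
    return f"cargo-home-{runner}-{target}-{profile}-{lock}-{toolchain}"

# Example: the key a Linux x64 dev job would restore and save.
print(cache_key("ubuntu-24.04", "x86_64-unknown-linux-gnu", "dev"))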
|
||||
# Install and restore sccache cache
|
||||
- name: Install sccache
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: sccache
|
||||
version: 0.7.5
|
||||
|
||||
- name: Configure sccache backend
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then
|
||||
echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV"
|
||||
echo "Using sccache GitHub backend"
|
||||
else
|
||||
echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV"
|
||||
echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV"
|
||||
echo "Using sccache local disk + actions/cache fallback"
|
||||
fi
|
||||
|
||||
- name: Enable sccache wrapper
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Restore sccache cache (fallback)
|
||||
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
id: cache_sccache_restore
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
path: ${{ github.workspace }}/.sccache/
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
|
||||
restore-keys: |
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Prepare APT cache directories (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
sudo mkdir -p /var/cache/apt/archives /var/lib/apt/lists
|
||||
sudo chown -R "$USER:$USER" /var/cache/apt /var/lib/apt/lists
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Restore APT cache (musl)
|
||||
id: cache_apt_restore
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
path: |
|
||||
/var/cache/apt
|
||||
key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Install musl build tools
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt install -y musl-tools pkg-config && sudo rm -rf /var/lib/apt/lists/*
|
||||
set -euo pipefail
|
||||
sudo apt-get -y update -o Acquire::Retries=3
|
||||
sudo apt-get -y install --no-install-recommends musl-tools pkg-config
|
||||
|
||||
- name: Install cargo-chef
|
||||
if: ${{ matrix.profile == 'release' }}
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: cargo-chef
|
||||
version: 0.1.71
|
||||
|
||||
- name: Pre-warm dependency cache (cargo-chef)
|
||||
if: ${{ matrix.profile == 'release' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
RECIPE="${RUNNER_TEMP}/chef-recipe.json"
|
||||
cargo chef prepare --recipe-path "$RECIPE"
|
||||
cargo chef cook --recipe-path "$RECIPE" --target ${{ matrix.target }} --release --all-features
|
||||
|
||||
- name: cargo clippy
|
||||
id: clippy
|
||||
@@ -180,34 +304,240 @@ jobs:
|
||||
find . -name Cargo.toml -mindepth 2 -maxdepth 2 -print0 \
|
||||
| xargs -0 -n1 -I{} bash -c 'cd "$(dirname "{}")" && cargo check --profile ${{ matrix.profile }}'
|
||||
|
||||
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
|
||||
# Save caches explicitly; make non-fatal so cache packaging
|
||||
# never fails the overall job. Only save when key wasn't hit.
|
||||
- name: Save cargo home cache
|
||||
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v5
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
|
||||
|
||||
- name: Save sccache cache (fallback)
|
||||
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v5
|
||||
with:
|
||||
path: ${{ github.workspace }}/.sccache/
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
|
||||
|
||||
- name: sccache stats
|
||||
if: always() && env.USE_SCCACHE == 'true'
|
||||
continue-on-error: true
|
||||
run: sccache --show-stats || true
|
||||
|
||||
- name: sccache summary
|
||||
if: always() && env.USE_SCCACHE == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
{
|
||||
echo "### sccache stats — ${{ matrix.target }} (${{ matrix.profile }})";
|
||||
echo;
|
||||
echo '```';
|
||||
sccache --show-stats || true;
|
||||
echo '```';
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Save APT cache (musl)
|
||||
if: always() && !cancelled() && (matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl') && steps.cache_apt_restore.outputs.cache-hit != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v5
|
||||
with:
|
||||
path: |
|
||||
/var/cache/apt
|
||||
key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1
|
||||
|
||||
# Fail the job if any of the previous steps failed.
|
||||
- name: verify all steps passed
|
||||
if: |
|
||||
steps.clippy.outcome == 'failure' ||
|
||||
steps.cargo_check_all_crates.outcome == 'failure'
|
||||
run: |
|
||||
echo "One or more checks failed (clippy or cargo_check_all_crates). See logs for details."
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}
|
||||
runs-on: ${{ matrix.runs_on || matrix.runner }}
|
||||
timeout-minutes: 30
|
||||
needs: changed
|
||||
if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
|
||||
defaults:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
env:
|
||||
# Speed up repeated builds across CI runs by caching compiled objects (non-Windows).
|
||||
USE_SCCACHE: ${{ startsWith(matrix.runner, 'windows') && 'false' || 'true' }}
|
||||
CARGO_INCREMENTAL: "0"
|
||||
SCCACHE_CACHE_SIZE: 10G
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: macos-15-xlarge
|
||||
target: aarch64-apple-darwin
|
||||
profile: dev
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-x64
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-gnu
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-arm64
|
||||
- runner: windows-x64
|
||||
target: x86_64-pc-windows-msvc
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-x64
|
||||
- runner: windows-arm64
|
||||
target: aarch64-pc-windows-msvc
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-arm64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
# Some integration tests rely on DotSlash being installed.
|
||||
# See https://github.com/openai/codex/pull/7617.
|
||||
- name: Install DotSlash
|
||||
uses: facebook/install-dotslash@v2
|
||||
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Compute lockfile hash
|
||||
id: lockhash
|
||||
working-directory: codex-rs
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
|
||||
echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Restore cargo home cache
|
||||
id: cache_cargo_home_restore
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
|
||||
restore-keys: |
|
||||
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- name: Install sccache
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: sccache
|
||||
version: 0.7.5
|
||||
|
||||
- name: Configure sccache backend
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then
|
||||
echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV"
|
||||
echo "Using sccache GitHub backend"
|
||||
else
|
||||
echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV"
|
||||
echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV"
|
||||
echo "Using sccache local disk + actions/cache fallback"
|
||||
fi
|
||||
|
||||
- name: Enable sccache wrapper
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Restore sccache cache (fallback)
|
||||
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
id: cache_sccache_restore
|
||||
uses: actions/cache/restore@v5
|
||||
with:
|
||||
path: ${{ github.workspace }}/.sccache/
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
|
||||
restore-keys: |
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: nextest
|
||||
version: 0.9.103
|
||||
|
||||
- name: tests
|
||||
id: test
|
||||
# Tests take too long for release builds to run them on every PR.
|
||||
if: ${{ matrix.profile != 'release' }}
|
||||
continue-on-error: true
|
||||
run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }}
|
||||
run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
NEXTEST_STATUS_LEVEL: leak
|
||||
|
||||
# Fail the job if any of the previous steps failed.
|
||||
- name: verify all steps passed
|
||||
if: |
|
||||
steps.clippy.outcome == 'failure' ||
|
||||
steps.cargo_check_all_crates.outcome == 'failure' ||
|
||||
steps.test.outcome == 'failure'
|
||||
- name: Save cargo home cache
|
||||
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v5
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
|
||||
|
||||
- name: Save sccache cache (fallback)
|
||||
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
|
||||
continue-on-error: true
|
||||
uses: actions/cache/save@v5
|
||||
with:
|
||||
path: ${{ github.workspace }}/.sccache/
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
|
||||
|
||||
- name: sccache stats
|
||||
if: always() && env.USE_SCCACHE == 'true'
|
||||
continue-on-error: true
|
||||
run: sccache --show-stats || true
|
||||
|
||||
- name: sccache summary
|
||||
if: always() && env.USE_SCCACHE == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
echo "One or more checks failed (clippy, cargo_check_all_crates, or test). See logs for details."
|
||||
{
|
||||
echo "### sccache stats — ${{ matrix.target }} (tests)";
|
||||
echo;
|
||||
echo '```';
|
||||
sccache --show-stats || true;
|
||||
echo '```';
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: verify tests passed
|
||||
if: steps.test.outcome == 'failure'
|
||||
run: |
|
||||
echo "Tests failed. See logs for details."
|
||||
exit 1
|
||||
|
||||
# --- Gatherer job that you mark as the ONLY required status -----------------
|
||||
results:
|
||||
name: CI results (required)
|
||||
needs: [changed, general, cargo_shear, lint_build_test]
|
||||
needs: [changed, general, cargo_shear, lint_build, tests]
|
||||
if: always()
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
@@ -216,7 +546,8 @@ jobs:
|
||||
run: |
|
||||
echo "general: ${{ needs.general.result }}"
|
||||
echo "shear : ${{ needs.cargo_shear.result }}"
|
||||
echo "matrix : ${{ needs.lint_build_test.result }}"
|
||||
echo "lint : ${{ needs.lint_build.result }}"
|
||||
echo "tests : ${{ needs.tests.result }}"
|
||||
|
||||
# If nothing relevant changed (PR touching only root README, etc.),
|
||||
# declare success regardless of other jobs.
|
||||
@@ -228,4 +559,10 @@ jobs:
|
||||
# Otherwise require the jobs to have succeeded
|
||||
[[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; }
|
||||
[[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; }
|
||||
[[ '${{ needs.lint_build_test.result }}' == 'success' ]] || { echo 'matrix failed'; exit 1; }
|
||||
[[ '${{ needs.lint_build.result }}' == 'success' ]] || { echo 'lint_build failed'; exit 1; }
|
||||
[[ '${{ needs.tests.result }}' == 'success' ]] || { echo 'tests failed'; exit 1; }
|
||||
|
||||
- name: sccache summary note
|
||||
if: always()
|
||||
run: |
|
||||
echo "Per-job sccache stats are attached to each matrix job's Step Summary."
|
||||
|
||||
53  .github/workflows/rust-release-prepare.yml  (vendored, new file)
@@ -0,0 +1,53 @@
name: rust-release-prepare
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 */4 * * *"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
# Prevent scheduled runs on forks (no secrets, wastes Actions minutes)
|
||||
if: github.repository == 'openai/codex'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
ref: main
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Update models.json
|
||||
env:
|
||||
OPENAI_API_KEY: ${{ secrets.CODEX_OPENAI_API_KEY }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
client_version="99.99.99"
|
||||
terminal_info="github-actions"
|
||||
user_agent="codex_cli_rs/99.99.99 (Linux $(uname -r); $(uname -m)) ${terminal_info}"
|
||||
base_url="${OPENAI_BASE_URL:-https://chatgpt.com/backend-api/codex}"
|
||||
|
||||
headers=(
|
||||
-H "Authorization: Bearer ${OPENAI_API_KEY}"
|
||||
-H "User-Agent: ${user_agent}"
|
||||
)
|
||||
|
||||
url="${base_url%/}/models?client_version=${client_version}"
|
||||
curl --http1.1 --fail --show-error --location "${headers[@]}" "${url}" | jq '.' > codex-rs/core/models.json
|
||||
|
||||
- name: Open pull request (if changed)
|
||||
uses: peter-evans/create-pull-request@v8
|
||||
with:
|
||||
commit-message: "Update models.json"
|
||||
title: "Update models.json"
|
||||
body: "Automated update of models.json."
|
||||
branch: "bot/update-models-json"
|
||||
reviewers: "pakrym-oai,aibrahim-oai"
|
||||
delete-branch: true
|
||||
248  .github/workflows/rust-release.yml  (vendored)
@@ -19,7 +19,7 @@ jobs:
tag-check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Validate tag matches Cargo.toml version
|
||||
shell: bash
|
||||
@@ -47,9 +47,12 @@ jobs:
|
||||
|
||||
build:
|
||||
needs: tag-check
|
||||
name: ${{ matrix.runner }} - ${{ matrix.target }}
|
||||
name: Build - ${{ matrix.runner }} - ${{ matrix.target }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 60
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
defaults:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
@@ -58,9 +61,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: macos-14
|
||||
- runner: macos-15-xlarge
|
||||
target: aarch64-apple-darwin
|
||||
- runner: macos-14
|
||||
- runner: macos-15-xlarge
|
||||
target: x86_64-apple-darwin
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
@@ -76,12 +79,12 @@ jobs:
|
||||
target: aarch64-pc-windows-msvc
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- uses: actions/cache@v4
|
||||
- uses: actions/cache@v5
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
@@ -94,10 +97,108 @@ jobs:
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Install musl build tools
|
||||
run: |
|
||||
sudo apt install -y musl-tools pkg-config
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y musl-tools pkg-config
|
||||
|
||||
- name: Cargo build
|
||||
run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy
|
||||
shell: bash
|
||||
run: |
|
||||
if [[ "${{ contains(matrix.target, 'windows') }}" == 'true' ]]; then
|
||||
cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy --bin codex-windows-sandbox-setup --bin codex-command-runner
|
||||
else
|
||||
cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy
|
||||
fi
|
||||
|
||||
- if: ${{ contains(matrix.target, 'linux') }}
|
||||
name: Cosign Linux artifacts
|
||||
uses: ./.github/actions/linux-code-sign
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
artifacts-dir: ${{ github.workspace }}/codex-rs/target/${{ matrix.target }}/release
|
||||
|
||||
- if: ${{ contains(matrix.target, 'windows') }}
|
||||
name: Sign Windows binaries with Azure Trusted Signing
|
||||
uses: ./.github/actions/windows-code-sign
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
client-id: ${{ secrets.AZURE_TRUSTED_SIGNING_CLIENT_ID }}
|
||||
tenant-id: ${{ secrets.AZURE_TRUSTED_SIGNING_TENANT_ID }}
|
||||
subscription-id: ${{ secrets.AZURE_TRUSTED_SIGNING_SUBSCRIPTION_ID }}
|
||||
endpoint: ${{ secrets.AZURE_TRUSTED_SIGNING_ENDPOINT }}
|
||||
account-name: ${{ secrets.AZURE_TRUSTED_SIGNING_ACCOUNT_NAME }}
|
||||
certificate-profile-name: ${{ secrets.AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE_NAME }}
|
||||
|
||||
- if: ${{ runner.os == 'macOS' }}
|
||||
name: MacOS code signing (binaries)
|
||||
uses: ./.github/actions/macos-code-sign
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
sign-binaries: "true"
|
||||
sign-dmg: "false"
|
||||
apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }}
|
||||
apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
apple-notarization-key-p8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
|
||||
apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
|
||||
- if: ${{ runner.os == 'macOS' }}
|
||||
name: Build macOS dmg
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
target="${{ matrix.target }}"
|
||||
release_dir="target/${target}/release"
|
||||
dmg_root="${RUNNER_TEMP}/codex-dmg-root"
|
||||
volname="Codex (${target})"
|
||||
dmg_path="${release_dir}/codex-${target}.dmg"
|
||||
|
||||
# The previous "MacOS code signing (binaries)" step signs + notarizes the
|
||||
# built artifacts in `${release_dir}`. This step packages *those same*
|
||||
# signed binaries into a dmg.
|
||||
codex_binary_path="${release_dir}/codex"
|
||||
proxy_binary_path="${release_dir}/codex-responses-api-proxy"
|
||||
|
||||
rm -rf "$dmg_root"
|
||||
mkdir -p "$dmg_root"
|
||||
|
||||
if [[ ! -f "$codex_binary_path" ]]; then
|
||||
echo "Binary $codex_binary_path not found"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! -f "$proxy_binary_path" ]]; then
|
||||
echo "Binary $proxy_binary_path not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ditto "$codex_binary_path" "${dmg_root}/codex"
|
||||
ditto "$proxy_binary_path" "${dmg_root}/codex-responses-api-proxy"
|
||||
|
||||
rm -f "$dmg_path"
|
||||
hdiutil create \
|
||||
-volname "$volname" \
|
||||
-srcfolder "$dmg_root" \
|
||||
-format UDZO \
|
||||
-ov \
|
||||
"$dmg_path"
|
||||
|
||||
if [[ ! -f "$dmg_path" ]]; then
|
||||
echo "dmg $dmg_path not found after build"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- if: ${{ runner.os == 'macOS' }}
|
||||
name: MacOS code signing (dmg)
|
||||
uses: ./.github/actions/macos-code-sign
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
sign-binaries: "false"
|
||||
sign-dmg: "true"
|
||||
apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }}
|
||||
apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
apple-notarization-key-p8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
|
||||
apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
|
||||
- name: Stage artifacts
|
||||
shell: bash
|
||||
@@ -108,11 +209,22 @@ jobs:
|
||||
if [[ "${{ matrix.runner }}" == windows* ]]; then
|
||||
cp target/${{ matrix.target }}/release/codex.exe "$dest/codex-${{ matrix.target }}.exe"
|
||||
cp target/${{ matrix.target }}/release/codex-responses-api-proxy.exe "$dest/codex-responses-api-proxy-${{ matrix.target }}.exe"
|
||||
cp target/${{ matrix.target }}/release/codex-windows-sandbox-setup.exe "$dest/codex-windows-sandbox-setup-${{ matrix.target }}.exe"
|
||||
cp target/${{ matrix.target }}/release/codex-command-runner.exe "$dest/codex-command-runner-${{ matrix.target }}.exe"
|
||||
else
|
||||
cp target/${{ matrix.target }}/release/codex "$dest/codex-${{ matrix.target }}"
|
||||
cp target/${{ matrix.target }}/release/codex-responses-api-proxy "$dest/codex-responses-api-proxy-${{ matrix.target }}"
|
||||
fi
|
||||
|
||||
if [[ "${{ matrix.target }}" == *linux* ]]; then
|
||||
cp target/${{ matrix.target }}/release/codex.sigstore "$dest/codex-${{ matrix.target }}.sigstore"
|
||||
cp target/${{ matrix.target }}/release/codex-responses-api-proxy.sigstore "$dest/codex-responses-api-proxy-${{ matrix.target }}.sigstore"
|
||||
fi
|
||||
|
||||
if [[ "${{ matrix.target }}" == *apple-darwin ]]; then
|
||||
cp target/${{ matrix.target }}/release/codex-${{ matrix.target }}.dmg "$dest/codex-${{ matrix.target }}.dmg"
|
||||
fi
|
||||
|
||||
- if: ${{ matrix.runner == 'windows-11-arm' }}
|
||||
name: Install zstd
|
||||
shell: powershell
|
||||
@@ -125,6 +237,15 @@ jobs:
|
||||
# ${{ matrix.target }}
|
||||
dest="dist/${{ matrix.target }}"
|
||||
|
||||
# We want to ship the raw Windows executables in the GitHub Release
|
||||
# in addition to the compressed archives. Keep the originals for
|
||||
# Windows targets; remove them elsewhere to limit the number of
|
||||
# artifacts that end up in the GitHub Release.
|
||||
keep_originals=false
|
||||
if [[ "${{ matrix.runner }}" == windows* ]]; then
|
||||
keep_originals=true
|
||||
fi
|
||||
|
||||
# For compatibility with environments that lack the `zstd` tool we
|
||||
# additionally create a `.tar.gz` for all platforms and `.zip` for
|
||||
# Windows alongside every single binary that we publish. The end result is:
|
||||
@@ -138,7 +259,12 @@ jobs:
|
||||
base="$(basename "$f")"
|
||||
# Skip files that are already archives (shouldn't happen, but be
|
||||
# safe).
|
||||
if [[ "$base" == *.tar.gz || "$base" == *.zip ]]; then
|
||||
if [[ "$base" == *.tar.gz || "$base" == *.zip || "$base" == *.dmg ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# Don't try to compress signature bundles.
|
||||
if [[ "$base" == *.sigstore ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
@@ -154,10 +280,14 @@ jobs:
|
||||
|
||||
# Also create .zst (existing behaviour) *and* remove the original
|
||||
# uncompressed binary to keep the directory small.
|
||||
zstd -T0 -19 --rm "$dest/$base"
|
||||
zstd_args=(-T0 -19)
|
||||
if [[ "${keep_originals}" == false ]]; then
|
||||
zstd_args+=(--rm)
|
||||
fi
|
||||
zstd "${zstd_args[@]}" "$dest/$base"
|
||||
done
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
- uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: ${{ matrix.target }}
|
||||
# Upload the per-binary .zst files as well as the new .tar.gz
|
||||
@@ -165,8 +295,19 @@ jobs:
|
||||
path: |
|
||||
codex-rs/dist/${{ matrix.target }}/*
|
||||
|
||||
shell-tool-mcp:
|
||||
name: shell-tool-mcp
|
||||
needs: tag-check
|
||||
uses: ./.github/workflows/shell-tool-mcp.yml
|
||||
with:
|
||||
release-tag: ${{ github.ref_name }}
|
||||
publish: true
|
||||
secrets: inherit
|
||||
|
||||
release:
|
||||
needs: build
|
||||
needs:
|
||||
- build
|
||||
- shell-tool-mcp
|
||||
name: release
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
@@ -180,15 +321,43 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
- name: Generate release notes from tag commit message
|
||||
id: release_notes
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# On tag pushes, GITHUB_SHA may be a tag object for annotated tags;
|
||||
# peel it to the underlying commit.
|
||||
commit="$(git rev-parse "${GITHUB_SHA}^{commit}")"
|
||||
notes_path="${RUNNER_TEMP}/release-notes.md"
|
||||
|
||||
# Use the commit message for the commit the tag points at (not the
|
||||
# annotated tag message).
|
||||
git log -1 --format=%B "${commit}" > "${notes_path}"
|
||||
# Ensure trailing newline so GitHub's markdown renderer doesn't
|
||||
# occasionally run the last line into subsequent content.
|
||||
echo >> "${notes_path}"
|
||||
|
||||
echo "path=${notes_path}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- uses: actions/download-artifact@v7
|
||||
with:
|
||||
path: dist
|
||||
|
||||
- name: List
|
||||
run: ls -R dist/
|
||||
|
||||
# This is a temporary fix: we should modify shell-tool-mcp.yml so these
|
||||
# files do not end up in dist/ in the first place.
|
||||
- name: Delete entries from dist/ that should not go in the release
|
||||
run: |
|
||||
rm -rf dist/shell-tool-mcp*
|
||||
|
||||
ls -R dist/
|
||||
|
||||
- name: Define release name
|
||||
id: release_name
|
||||
run: |
|
||||
@@ -216,37 +385,37 @@ jobs:
|
||||
echo "npm_tag=" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
# build_npm_package.py requires DotSlash when staging releases.
|
||||
- uses: facebook/install-dotslash@v2
|
||||
- name: Stage codex CLI npm package
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
TMP_DIR="${RUNNER_TEMP}/npm-stage"
|
||||
./codex-cli/scripts/build_npm_package.py \
|
||||
--package codex \
|
||||
--release-version "${{ steps.release_name.outputs.name }}" \
|
||||
--staging-dir "${TMP_DIR}" \
|
||||
--pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Stage responses API proxy npm package
|
||||
- name: Setup Node.js for npm packaging
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
# stage_npm_packages.py requires DotSlash when staging releases.
|
||||
- uses: facebook/install-dotslash@v2
|
||||
- name: Stage npm packages
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
TMP_DIR="${RUNNER_TEMP}/npm-stage-responses"
|
||||
./codex-cli/scripts/build_npm_package.py \
|
||||
--package codex-responses-api-proxy \
|
||||
./scripts/stage_npm_packages.py \
|
||||
--release-version "${{ steps.release_name.outputs.name }}" \
|
||||
--staging-dir "${TMP_DIR}" \
|
||||
--pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-responses-api-proxy-npm-${{ steps.release_name.outputs.name }}.tgz"
|
||||
--package codex \
|
||||
--package codex-responses-api-proxy \
|
||||
--package codex-sdk
|
||||
|
||||
- name: Create GitHub Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
name: ${{ steps.release_name.outputs.name }}
|
||||
tag_name: ${{ github.ref_name }}
|
||||
body_path: ${{ steps.release_notes.outputs.path }}
|
||||
files: dist/**
|
||||
# Mark as prerelease only when the version has a suffix after x.y.z
|
||||
# (e.g. -alpha, -beta). Otherwise publish a normal release.
|
||||
@@ -274,7 +443,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
@@ -300,6 +469,10 @@ jobs:
|
||||
--repo "${GITHUB_REPOSITORY}" \
|
||||
--pattern "codex-responses-api-proxy-npm-${version}.tgz" \
|
||||
--dir dist/npm
|
||||
gh release download "$tag" \
|
||||
--repo "${GITHUB_REPOSITORY}" \
|
||||
--pattern "codex-sdk-npm-${version}.tgz" \
|
||||
--dir dist/npm
|
||||
|
||||
# No NODE_AUTH_TOKEN needed because we use OIDC.
|
||||
- name: Publish to npm
|
||||
@@ -316,6 +489,7 @@ jobs:
|
||||
tarballs=(
|
||||
"codex-npm-${VERSION}.tgz"
|
||||
"codex-responses-api-proxy-npm-${VERSION}.tgz"
|
||||
"codex-sdk-npm-${VERSION}.tgz"
|
||||
)
|
||||
|
||||
for tarball in "${tarballs[@]}"; do
|
||||
|
||||
6 .github/workflows/sdk.yml vendored
@@ -11,7 +11,7 @@ jobs:
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
@@ -19,12 +19,12 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22
|
||||
cache: pnpm
|
||||
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
|
||||
- name: build codex
|
||||
run: cargo build --bin codex
|
||||
|
||||
48 .github/workflows/shell-tool-mcp-ci.yml vendored Normal file
@@ -0,0 +1,48 @@
|
||||
name: shell-tool-mcp CI
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "shell-tool-mcp/**"
|
||||
- ".github/workflows/shell-tool-mcp-ci.yml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "pnpm-workspace.yaml"
|
||||
pull_request:
|
||||
paths:
|
||||
- "shell-tool-mcp/**"
|
||||
- ".github/workflows/shell-tool-mcp-ci.yml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "pnpm-workspace.yaml"
|
||||
|
||||
env:
|
||||
NODE_VERSION: 22
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Format check
|
||||
run: pnpm --filter @openai/codex-shell-tool-mcp run format
|
||||
|
||||
- name: Run tests
|
||||
run: pnpm --filter @openai/codex-shell-tool-mcp test
|
||||
|
||||
- name: Build
|
||||
run: pnpm --filter @openai/codex-shell-tool-mcp run build
|
||||
405 .github/workflows/shell-tool-mcp.yml vendored Normal file
@@ -0,0 +1,405 @@
|
||||
name: shell-tool-mcp
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
release-version:
|
||||
description: Version to publish (x.y.z or x.y.z-alpha.N). Defaults to GITHUB_REF_NAME when it starts with rust-v.
|
||||
required: false
|
||||
type: string
|
||||
release-tag:
|
||||
description: Tag name to use when downloading release artifacts (defaults to rust-v<version>).
|
||||
required: false
|
||||
type: string
|
||||
publish:
|
||||
description: Whether to publish to npm when the version is releasable.
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
NODE_VERSION: 22
|
||||
|
||||
jobs:
|
||||
metadata:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
version: ${{ steps.compute.outputs.version }}
|
||||
release_tag: ${{ steps.compute.outputs.release_tag }}
|
||||
should_publish: ${{ steps.compute.outputs.should_publish }}
|
||||
npm_tag: ${{ steps.compute.outputs.npm_tag }}
|
||||
steps:
|
||||
- name: Compute version and tags
|
||||
id: compute
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
version="${{ inputs.release-version }}"
|
||||
release_tag="${{ inputs.release-tag }}"
|
||||
|
||||
if [[ -z "$version" ]]; then
|
||||
if [[ -n "$release_tag" && "$release_tag" =~ ^rust-v.+ ]]; then
|
||||
version="${release_tag#rust-v}"
|
||||
elif [[ "${GITHUB_REF_NAME:-}" =~ ^rust-v.+ ]]; then
|
||||
version="${GITHUB_REF_NAME#rust-v}"
|
||||
release_tag="${GITHUB_REF_NAME}"
|
||||
else
|
||||
echo "release-version is required when GITHUB_REF_NAME is not a rust-v tag."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "$release_tag" ]]; then
|
||||
release_tag="rust-v${version}"
|
||||
fi
|
||||
|
||||
npm_tag=""
|
||||
should_publish="false"
|
||||
if [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
should_publish="true"
|
||||
elif [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
|
||||
should_publish="true"
|
||||
npm_tag="alpha"
|
||||
fi
|
||||
|
||||
echo "version=${version}" >> "$GITHUB_OUTPUT"
|
||||
echo "release_tag=${release_tag}" >> "$GITHUB_OUTPUT"
|
||||
echo "npm_tag=${npm_tag}" >> "$GITHUB_OUTPUT"
|
||||
echo "should_publish=${should_publish}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
rust-binaries:
|
||||
name: Build Rust - ${{ matrix.target }}
|
||||
needs: metadata
|
||||
runs-on: ${{ matrix.runner }}
|
||||
timeout-minutes: 30
|
||||
defaults:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: macos-15-xlarge
|
||||
target: aarch64-apple-darwin
|
||||
- runner: macos-15-xlarge
|
||||
target: x86_64-apple-darwin
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
install_musl: true
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
install_musl: true
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- if: ${{ matrix.install_musl }}
|
||||
name: Install musl build dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y musl-tools pkg-config
|
||||
|
||||
- name: Build exec server binaries
|
||||
run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper
|
||||
|
||||
- name: Stage exec server binaries
|
||||
run: |
|
||||
dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}"
|
||||
mkdir -p "$dest"
|
||||
cp "target/${{ matrix.target }}/release/codex-exec-mcp-server" "$dest/"
|
||||
cp "target/${{ matrix.target }}/release/codex-execve-wrapper" "$dest/"
|
||||
|
||||
- uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: shell-tool-mcp-rust-${{ matrix.target }}
|
||||
path: artifacts/**
|
||||
if-no-files-found: error
|
||||
|
||||
bash-linux:
|
||||
name: Build Bash (Linux) - ${{ matrix.variant }} - ${{ matrix.target }}
|
||||
needs: metadata
|
||||
runs-on: ${{ matrix.runner }}
|
||||
timeout-minutes: 30
|
||||
container:
|
||||
image: ${{ matrix.image }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
variant: ubuntu-24.04
|
||||
image: ubuntu:24.04
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
variant: ubuntu-22.04
|
||||
image: ubuntu:22.04
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
variant: debian-12
|
||||
image: debian:12
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
variant: debian-11
|
||||
image: debian:11
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
variant: centos-9
|
||||
image: quay.io/centos/centos:stream9
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
variant: ubuntu-24.04
|
||||
image: arm64v8/ubuntu:24.04
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
variant: ubuntu-22.04
|
||||
image: arm64v8/ubuntu:22.04
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
variant: ubuntu-20.04
|
||||
image: arm64v8/ubuntu:20.04
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
variant: debian-12
|
||||
image: arm64v8/debian:12
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
variant: debian-11
|
||||
image: arm64v8/debian:11
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
variant: centos-9
|
||||
image: quay.io/centos/centos:stream9
|
||||
steps:
|
||||
- name: Install build prerequisites
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
apt-get update
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y git build-essential bison autoconf gettext
|
||||
elif command -v dnf >/dev/null 2>&1; then
|
||||
dnf install -y git gcc gcc-c++ make bison autoconf gettext
|
||||
elif command -v yum >/dev/null 2>&1; then
|
||||
yum install -y git gcc gcc-c++ make bison autoconf gettext
|
||||
else
|
||||
echo "Unsupported package manager in container"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Build patched Bash
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
git clone --depth 1 https://github.com/bminor/bash /tmp/bash
|
||||
cd /tmp/bash
|
||||
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
|
||||
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
|
||||
git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
|
||||
./configure --without-bash-malloc
|
||||
cores="$(command -v nproc >/dev/null 2>&1 && nproc || getconf _NPROCESSORS_ONLN)"
|
||||
make -j"${cores}"
|
||||
|
||||
dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
|
||||
mkdir -p "$dest"
|
||||
cp bash "$dest/bash"
|
||||
|
||||
- uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
|
||||
path: artifacts/**
|
||||
if-no-files-found: error
|
||||
|
||||
bash-darwin:
|
||||
name: Build Bash (macOS) - ${{ matrix.variant }} - ${{ matrix.target }}
|
||||
needs: metadata
|
||||
runs-on: ${{ matrix.runner }}
|
||||
timeout-minutes: 30
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: macos-15-xlarge
|
||||
target: aarch64-apple-darwin
|
||||
variant: macos-15
|
||||
- runner: macos-14
|
||||
target: aarch64-apple-darwin
|
||||
variant: macos-14
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Build patched Bash
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
git clone --depth 1 https://github.com/bminor/bash /tmp/bash
|
||||
cd /tmp/bash
|
||||
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
|
||||
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
|
||||
git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
|
||||
./configure --without-bash-malloc
|
||||
cores="$(getconf _NPROCESSORS_ONLN)"
|
||||
make -j"${cores}"
|
||||
|
||||
dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
|
||||
mkdir -p "$dest"
|
||||
cp bash "$dest/bash"
|
||||
|
||||
- uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
|
||||
path: artifacts/**
|
||||
if-no-files-found: error
|
||||
|
||||
package:
|
||||
name: Package npm module
|
||||
needs:
|
||||
- metadata
|
||||
- rust-binaries
|
||||
- bash-linux
|
||||
- bash-darwin
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
PACKAGE_VERSION: ${{ needs.metadata.outputs.version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10.8.1
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
|
||||
- name: Install JavaScript dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build (shell-tool-mcp)
|
||||
run: pnpm --filter @openai/codex-shell-tool-mcp run build
|
||||
|
||||
- name: Download build artifacts
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
path: artifacts
|
||||
|
||||
- name: Assemble staging directory
|
||||
id: staging
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
staging="${STAGING_DIR}"
|
||||
mkdir -p "$staging" "$staging/vendor"
|
||||
cp shell-tool-mcp/README.md "$staging/"
|
||||
cp shell-tool-mcp/package.json "$staging/"
|
||||
cp -R shell-tool-mcp/bin "$staging/"
|
||||
|
||||
found_vendor="false"
|
||||
shopt -s nullglob
|
||||
for vendor_dir in artifacts/*/vendor; do
|
||||
rsync -av "$vendor_dir/" "$staging/vendor/"
|
||||
found_vendor="true"
|
||||
done
|
||||
if [[ "$found_vendor" == "false" ]]; then
|
||||
echo "No vendor payloads were downloaded."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
node - <<'NODE'
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
const stagingDir = process.env.STAGING_DIR;
|
||||
const version = process.env.PACKAGE_VERSION;
|
||||
const pkgPath = path.join(stagingDir, "package.json");
|
||||
const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
|
||||
pkg.version = version;
|
||||
fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n");
|
||||
NODE
|
||||
|
||||
echo "dir=$staging" >> "$GITHUB_OUTPUT"
|
||||
env:
|
||||
STAGING_DIR: ${{ runner.temp }}/shell-tool-mcp
|
||||
|
||||
- name: Ensure binaries are executable
|
||||
run: |
|
||||
set -euo pipefail
|
||||
staging="${{ steps.staging.outputs.dir }}"
|
||||
chmod +x \
|
||||
"$staging"/vendor/*/codex-exec-mcp-server \
|
||||
"$staging"/vendor/*/codex-execve-wrapper \
|
||||
"$staging"/vendor/*/bash/*/bash
|
||||
|
||||
- name: Create npm tarball
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p dist/npm
|
||||
staging="${{ steps.staging.outputs.dir }}"
|
||||
pack_info=$(cd "$staging" && npm pack --ignore-scripts --json --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
|
||||
filename=$(PACK_INFO="$pack_info" node -e 'const data = JSON.parse(process.env.PACK_INFO); console.log(data[0].filename);')
|
||||
mv "dist/npm/${filename}" "dist/npm/codex-shell-tool-mcp-npm-${PACKAGE_VERSION}.tgz"
|
||||
|
||||
- uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: codex-shell-tool-mcp-npm
|
||||
path: dist/npm/codex-shell-tool-mcp-npm-${{ env.PACKAGE_VERSION }}.tgz
|
||||
if-no-files-found: error
|
||||
|
||||
publish:
|
||||
name: Publish npm package
|
||||
needs:
|
||||
- metadata
|
||||
- package
|
||||
if: ${{ inputs.publish && needs.metadata.outputs.should_publish == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
steps:
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10.8.1
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
registry-url: https://registry.npmjs.org
|
||||
scope: "@openai"
|
||||
|
||||
- name: Update npm
|
||||
run: npm install -g npm@latest
|
||||
|
||||
- name: Download npm tarball
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: codex-shell-tool-mcp-npm
|
||||
path: dist/npm
|
||||
|
||||
- name: Publish to npm
|
||||
env:
|
||||
NPM_TAG: ${{ needs.metadata.outputs.npm_tag }}
|
||||
VERSION: ${{ needs.metadata.outputs.version }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
tag_args=()
|
||||
if [[ -n "${NPM_TAG}" ]]; then
|
||||
tag_args+=(--tag "${NPM_TAG}")
|
||||
fi
|
||||
npm publish "dist/npm/codex-shell-tool-mcp-npm-${VERSION}.tgz" "${tag_args[@]}"
|
||||
10 .gitignore vendored
@@ -9,6 +9,7 @@ node_modules
|
||||
|
||||
# build
|
||||
dist/
|
||||
bazel-*
|
||||
build/
|
||||
out/
|
||||
storybook-static/
|
||||
@@ -30,6 +31,7 @@ result
|
||||
# cli tools
|
||||
CLAUDE.md
|
||||
.claude/
|
||||
AGENTS.override.md
|
||||
|
||||
# caches
|
||||
.cache/
|
||||
@@ -63,6 +65,9 @@ apply_patch/
|
||||
# coverage
|
||||
coverage/
|
||||
|
||||
# personal files
|
||||
personal/
|
||||
|
||||
# os
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
@@ -81,3 +86,8 @@ CHANGELOG.ignore.md
|
||||
# nix related
|
||||
.direnv
|
||||
.envrc
|
||||
|
||||
# Python bytecode files
|
||||
__pycache__/
|
||||
*.pyc
|
||||
|
||||
|
||||
6 .markdownlint-cli2.yaml Normal file
@@ -0,0 +1,6 @@
|
||||
config:
|
||||
MD013:
|
||||
line_length: 100
|
||||
|
||||
globs:
|
||||
- "docs/tui-chat-composer.md"
|
||||
1 .vscode/settings.json vendored
@@ -3,6 +3,7 @@
|
||||
"rust-analyzer.check.command": "clippy",
|
||||
"rust-analyzer.check.extraArgs": ["--all-features", "--tests"],
|
||||
"rust-analyzer.rustfmt.extraArgs": ["--config", "imports_granularity=Item"],
|
||||
"rust-analyzer.cargo.targetDir": "${workspaceFolder}/codex-rs/target/rust-analyzer",
|
||||
"[rust]": {
|
||||
"editor.defaultFormatter": "rust-lang.rust-analyzer",
|
||||
"editor.formatOnSave": true,
|
||||
|
||||
47 AGENTS.md
@@ -8,11 +8,18 @@ In the codex-rs folder where the rust code lives:
|
||||
- Never add or modify any code related to `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` or `CODEX_SANDBOX_ENV_VAR`.
|
||||
- You operate in a sandbox where `CODEX_SANDBOX_NETWORK_DISABLED=1` will be set whenever you use the `shell` tool. Any existing code that uses `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` was authored with this fact in mind. It is often used to early exit out of tests that the author knew you would not be able to run given your sandbox limitations.
|
||||
- Similarly, when you spawn a process using Seatbelt (`/usr/bin/sandbox-exec`), `CODEX_SANDBOX=seatbelt` will be set on the child process. Integration tests that want to run Seatbelt themselves cannot be run under Seatbelt, so checks for `CODEX_SANDBOX=seatbelt` are also often used to early exit out of tests, as appropriate.
|
||||
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if (see the sketch after this list)
|
||||
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
|
||||
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
|
||||
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
|
||||
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
|
||||
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
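A minimal sketch of the Clippy guidance above (collapsed `if`, inlined `format!` args, and a method reference instead of a closure); the file names and values are illustrative only:

```rust
fn main() {
    let paths = vec!["tui/src/app.rs".to_string(), "core/src/lib.rs".to_string()];
    let verbose = true;
    let limit = 1;

    // collapsible_if: a single `if` with `&&` instead of nested `if` statements.
    if verbose && paths.len() > limit {
        // uninlined_format_args: inline the variable directly in the format string.
        println!("showing only the first {limit} paths");
    }

    // redundant_closure_for_method_calls: `String::len` instead of `|p| p.len()`.
    let lengths: Vec<usize> = paths.iter().map(String::len).collect();
    println!("{lengths:?}");
}
```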
|
||||
|
||||
Run `just fmt` (in `codex-rs` directory) automatically after making Rust code changes; do not ask for approval to run it. Before finalizing a change to `codex-rs`, run `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates. Additionally, run the tests:
|
||||
|
||||
1. Run the test for the specific project that was changed. For example, if changes were made in `codex-rs/tui`, run `cargo test -p codex-tui`.
|
||||
2. Once those pass, if any changes were made in common, core, or protocol, run the complete test suite with `cargo test --all-features`.
|
||||
When running interactively, ask the user before running `just fix` to finalize. `just fmt` does not require approval. project-specific or individual tests can be run without asking the user, but do ask the user before running the complete test suite.
|
||||
|
||||
|
||||
## TUI style conventions
|
||||
|
||||
@@ -28,6 +35,7 @@ See `codex-rs/tui/styles.md`.
|
||||
- Desired: vec![" └ ".into(), "M".red(), " ".dim(), "tui/src/app.rs".dim()]
|
||||
|
||||
### TUI Styling (ratatui)
|
||||
|
||||
- Prefer Stylize helpers: use "text".dim(), .bold(), .cyan(), .italic(), .underlined() instead of manual Style where possible.
|
||||
- Prefer simple conversions: use "text".into() for spans and vec![…].into() for lines; when inference is ambiguous (e.g., Paragraph::new/Cell::from), use Line::from(spans) or Span::from(text).
|
||||
- Computed styles: if the Style is computed at runtime, using `Span::styled` is OK (`Span::from(text).set_style(style)` is also acceptable).
|
||||
@@ -39,6 +47,7 @@ See `codex-rs/tui/styles.md`.
|
||||
- Compactness: prefer the form that stays on one line after rustfmt; if only one of Line::from(vec![…]) or vec![…].into() avoids wrapping, choose that. If both wrap, pick the one with fewer wrapped lines.
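A small compilable sketch of the styling guidance above, assuming a recent ratatui that provides the `Stylize` helpers and the `Vec<Span>` to `Line` conversion; the path value is illustrative:

```rust
use ratatui::style::Stylize;
use ratatui::text::Line;

fn status_line(path: String) -> Line<'static> {
    // Stylize helpers and `.into()` conversions instead of manual `Style` construction.
    vec![" └ ".into(), "M".red(), " ".dim(), path.dim()].into()
}

fn main() {
    println!("{:?}", status_line("tui/src/app.rs".to_string()));
}
```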
|
||||
|
||||
### Text wrapping
|
||||
|
||||
- Always use textwrap::wrap to wrap plain strings.
|
||||
- If you have a ratatui Line and you want to wrap it, use the helpers in tui/src/wrapping.rs, e.g. word_wrap_lines / word_wrap_line.
|
||||
- If you need to indent wrapped lines, use the initial_indent / subsequent_indent options from RtOptions if you can, rather than writing custom logic.
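A hedged sketch of the plain-string case with `textwrap` (the width and indents here are arbitrary; wrapping ratatui `Line`s should still go through the helpers in `tui/src/wrapping.rs`):

```rust
fn main() {
    let text = "Always use textwrap::wrap to wrap plain strings before rendering them.";

    // Plain wrapping at a fixed width.
    for line in textwrap::wrap(text, 30) {
        println!("{line}");
    }

    // Indented wrapping: the first line gets a bullet, continuation lines are indented.
    let options = textwrap::Options::new(30)
        .initial_indent("- ")
        .subsequent_indent("  ");
    for line in textwrap::wrap(text, options) {
        println!("{line}");
    }
}
```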
|
||||
@@ -60,8 +69,44 @@ This repo uses snapshot tests (via `insta`), especially in `codex-rs/tui`, to va
|
||||
- `cargo insta accept -p codex-tui`
|
||||
|
||||
If you don’t have the tool:
|
||||
|
||||
- `cargo install cargo-insta`
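For orientation, the general shape of an `insta` snapshot test looks like the sketch below; the module and function names are made up, not taken from `codex-tui`:

```rust
#[cfg(test)]
mod tests {
    #[test]
    fn renders_greeting() {
        let name = "codex";
        let rendered = format!("Hello, {name}!");
        // The first run writes a pending snapshot; review it with `cargo insta review`
        // or accept everything with `cargo insta accept -p <crate>`.
        insta::assert_snapshot!(rendered);
    }
}
```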
|
||||
|
||||
### Test assertions
|
||||
|
||||
- Tests should use pretty_assertions::assert_eq for clearer diffs. Import this at the top of the test module if it isn't already.
|
||||
- Prefer deep equals comparisons whenever possible. Perform `assert_eq!()` on entire objects, rather than individual fields.
|
||||
- Avoid mutating process environment in tests; prefer passing environment-derived flags or dependencies from above.
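A minimal example of the deep-equality style with `pretty_assertions`; `Config` here is a stand-in struct, not the real config type:

```rust
#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;

    #[derive(Debug, PartialEq)]
    struct Config {
        model: String,
        approval_policy: String,
    }

    #[test]
    fn compares_whole_objects() {
        let actual = Config {
            model: "example-model".to_string(),
            approval_policy: "on-request".to_string(),
        };
        let expected = Config {
            model: "example-model".to_string(),
            approval_policy: "on-request".to_string(),
        };
        // One deep comparison instead of field-by-field asserts; pretty_assertions
        // prints a readable diff when the two values differ.
        assert_eq!(actual, expected);
    }
}
```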
|
||||
|
||||
### Spawning workspace binaries in tests (Cargo vs Bazel)
|
||||
|
||||
- Prefer `codex_utils_cargo_bin::cargo_bin("...")` over `assert_cmd::Command::cargo_bin(...)` or `escargot` when tests need to spawn first-party binaries.
|
||||
- Under Bazel, binaries and resources may live under runfiles; use `codex_utils_cargo_bin::cargo_bin` to resolve absolute paths that remain stable after `chdir`.
|
||||
- When locating fixture files or test resources under Bazel, avoid `env!("CARGO_MANIFEST_DIR")`. Prefer `codex_utils_cargo_bin::find_resource!` so paths resolve correctly under both Cargo and Bazel runfiles.
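A sketch of spawning a first-party binary this way; it assumes `cargo_bin` returns a path-like value that `std::process::Command` accepts, so check the helper's actual signature before copying:

```rust
use std::process::{Command, Output};

fn run_codex_help() -> std::io::Result<Output> {
    // Resolves the binary under both Cargo and Bazel runfiles (assumed behaviour).
    let codex = codex_utils_cargo_bin::cargo_bin("codex");
    Command::new(codex).arg("--help").output()
}
```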
|
||||
|
||||
### Integration tests (core)
|
||||
|
||||
- Prefer the utilities in `core_test_support::responses` when writing end-to-end Codex tests.
|
||||
|
||||
- All `mount_sse*` helpers return a `ResponseMock`; hold onto it so you can assert against outbound `/responses` POST bodies.
|
||||
- Use `ResponseMock::single_request()` when a test should only issue one POST, or `ResponseMock::requests()` to inspect every captured `ResponsesRequest`.
|
||||
- `ResponsesRequest` exposes helpers (`body_json`, `input`, `function_call_output`, `custom_tool_call_output`, `call_output`, `header`, `path`, `query_param`) so assertions can target structured payloads instead of manual JSON digging.
|
||||
- Build SSE payloads with the provided `ev_*` constructors and the `sse(...)`.
|
||||
- Prefer `wait_for_event` over `wait_for_event_with_timeout`.
|
||||
- Prefer `mount_sse_once` over `mount_sse_once_match` or `mount_sse_sequence`
|
||||
|
||||
- Typical pattern:
|
||||
|
||||
```rust
|
||||
let mock = responses::mount_sse_once(&server, responses::sse(vec![
|
||||
responses::ev_response_created("resp-1"),
|
||||
responses::ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
|
||||
responses::ev_completed("resp-1"),
|
||||
])).await;
|
||||
|
||||
codex.submit(Op::UserTurn { ... }).await?;
|
||||
|
||||
// Assert request body if needed.
|
||||
let request = mock.single_request();
|
||||
// assert using request.function_call_output(call_id) or request.json_body() or other helpers.
|
||||
```
|
||||
|
||||
19 BUILD.bazel Normal file
@@ -0,0 +1,19 @@
|
||||
# We mark the local platform as glibc-compatible so that rust can grab a toolchain for us.
|
||||
# TODO(zbarsky): Upstream a better libc constraint into rules_rust.
|
||||
# We only enable this on linux though for sanity, and because it breaks remote execution.
|
||||
platform(
|
||||
name = "local",
|
||||
constraint_values = [
|
||||
"@toolchains_llvm_bootstrapped//constraints/libc:gnu.2.28",
|
||||
],
|
||||
parents = [
|
||||
"@platforms//host",
|
||||
],
|
||||
)
|
||||
|
||||
alias(
|
||||
name = "rbe",
|
||||
actual = "@rbe_platform",
|
||||
)
|
||||
|
||||
exports_files(["AGENTS.md"])
|
||||
@@ -1 +1 @@
|
||||
The changelog can be found on the [releases page](https://github.com/openai/codex/releases)
|
||||
The changelog can be found on the [releases page](https://github.com/openai/codex/releases).
|
||||
|
||||
128 MODULE.bazel Normal file
@@ -0,0 +1,128 @@
|
||||
bazel_dep(name = "platforms", version = "1.0.0")
|
||||
bazel_dep(name = "toolchains_llvm_bootstrapped", version = "0.3.1")
|
||||
archive_override(
|
||||
module_name = "toolchains_llvm_bootstrapped",
|
||||
integrity = "sha256-9ks21bgEqbQWmwUIvqeLA64+Jk6o4ZVjC8KxjVa2Vw8=",
|
||||
strip_prefix = "toolchains_llvm_bootstrapped-e3775e66a7b6d287c705ca0cd24497ef4a77c503",
|
||||
urls = ["https://github.com/cerisier/toolchains_llvm_bootstrapped/archive/e3775e66a7b6d287c705ca0cd24497ef4a77c503/master.tar.gz"],
|
||||
patch_strip = 1,
|
||||
patches = [
|
||||
"//patches:llvm_toolchain_archive_params.patch",
|
||||
],
|
||||
)
|
||||
|
||||
osx = use_extension("@toolchains_llvm_bootstrapped//toolchain/extension:osx.bzl", "osx")
|
||||
osx.framework(name = "ApplicationServices")
|
||||
osx.framework(name = "AppKit")
|
||||
osx.framework(name = "ColorSync")
|
||||
osx.framework(name = "CoreFoundation")
|
||||
osx.framework(name = "CoreGraphics")
|
||||
osx.framework(name = "CoreServices")
|
||||
osx.framework(name = "CoreText")
|
||||
osx.framework(name = "CFNetwork")
|
||||
osx.framework(name = "Foundation")
|
||||
osx.framework(name = "ImageIO")
|
||||
osx.framework(name = "Kernel")
|
||||
osx.framework(name = "OSLog")
|
||||
osx.framework(name = "Security")
|
||||
osx.framework(name = "SystemConfiguration")
|
||||
|
||||
register_toolchains(
|
||||
"@toolchains_llvm_bootstrapped//toolchain:all",
|
||||
)
|
||||
|
||||
bazel_dep(name = "rules_cc", version = "0.2.16")
|
||||
bazel_dep(name = "rules_platform", version = "0.1.0")
|
||||
bazel_dep(name = "rules_rust", version = "0.68.1")
|
||||
single_version_override(
|
||||
module_name = "rules_rust",
|
||||
patch_strip = 1,
|
||||
patches = [
|
||||
"//patches:rules_rust.patch",
|
||||
"//patches:rules_rust_windows_gnu.patch",
|
||||
"//patches:rules_rust_musl.patch",
|
||||
],
|
||||
)
|
||||
|
||||
RUST_TRIPLES = [
|
||||
"aarch64-unknown-linux-musl",
|
||||
"aarch64-apple-darwin",
|
||||
"aarch64-pc-windows-gnullvm",
|
||||
"x86_64-unknown-linux-musl",
|
||||
"x86_64-apple-darwin",
|
||||
"x86_64-pc-windows-gnullvm",
|
||||
]
|
||||
|
||||
rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
|
||||
rust.toolchain(
|
||||
edition = "2024",
|
||||
extra_target_triples = RUST_TRIPLES,
|
||||
versions = ["1.90.0"],
|
||||
)
|
||||
use_repo(rust, "rust_toolchains")
|
||||
|
||||
register_toolchains("@rust_toolchains//:all")
|
||||
|
||||
bazel_dep(name = "rules_rs", version = "0.0.23")
|
||||
|
||||
crate = use_extension("@rules_rs//rs:extensions.bzl", "crate")
|
||||
crate.from_cargo(
|
||||
cargo_lock = "//codex-rs:Cargo.lock",
|
||||
cargo_toml = "//codex-rs:Cargo.toml",
|
||||
platform_triples = RUST_TRIPLES,
|
||||
)
|
||||
|
||||
bazel_dep(name = "openssl", version = "3.5.4.bcr.0")
|
||||
|
||||
crate.annotation(
|
||||
build_script_data = [
|
||||
"@openssl//:gen_dir",
|
||||
],
|
||||
build_script_env = {
|
||||
"OPENSSL_DIR": "$(execpath @openssl//:gen_dir)",
|
||||
"OPENSSL_NO_VENDOR": "1",
|
||||
"OPENSSL_STATIC": "1",
|
||||
},
|
||||
crate = "openssl-sys",
|
||||
data = ["@openssl//:gen_dir"],
|
||||
)
|
||||
|
||||
inject_repo(crate, "openssl")
|
||||
|
||||
# Fix readme inclusions
|
||||
crate.annotation(
|
||||
crate = "windows-link",
|
||||
patch_args = ["-p1"],
|
||||
patches = [
|
||||
"//patches:windows-link.patch"
|
||||
],
|
||||
)
|
||||
|
||||
WINDOWS_IMPORT_LIB = """
|
||||
load("@rules_cc//cc:defs.bzl", "cc_import")
|
||||
|
||||
cc_import(
|
||||
name = "windows_import_lib",
|
||||
static_library = glob(["lib/*.a"])[0],
|
||||
)
|
||||
"""
|
||||
|
||||
crate.annotation(
|
||||
additive_build_file_content = WINDOWS_IMPORT_LIB,
|
||||
crate = "windows_x86_64_gnullvm",
|
||||
gen_build_script = "off",
|
||||
deps = [":windows_import_lib"],
|
||||
)
|
||||
crate.annotation(
|
||||
additive_build_file_content = WINDOWS_IMPORT_LIB,
|
||||
crate = "windows_aarch64_gnullvm",
|
||||
gen_build_script = "off",
|
||||
deps = [":windows_import_lib"],
|
||||
)
|
||||
use_repo(crate, "crates")
|
||||
|
||||
rbe_platform_repository = use_repo_rule("//:rbe.bzl", "rbe_platform_repository")
|
||||
|
||||
rbe_platform_repository(
|
||||
name = "rbe_platform",
|
||||
)
|
||||
1101 MODULE.bazel.lock generated Normal file (file diff suppressed because one or more lines are too long)
72 README.md
@@ -1,14 +1,11 @@
|
||||
|
||||
<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install codex</code></p>
|
||||
|
||||
<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install --cask codex</code></p>
|
||||
<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.
|
||||
</br>
|
||||
</br>If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE</a>
|
||||
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a></p>
|
||||
|
||||
<p align="center">
|
||||
<img src="./.github/codex-cli-splash.png" alt="Codex CLI splash" width="80%" />
|
||||
</p>
|
||||
</p>
|
||||
</br>
|
||||
If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE.</a>
|
||||
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a>.</p>
|
||||
|
||||
---
|
||||
|
||||
@@ -16,23 +13,19 @@
|
||||
|
||||
### Installing and running Codex CLI
|
||||
|
||||
Install globally with your preferred package manager. If you use npm:
|
||||
Install globally with your preferred package manager:
|
||||
|
||||
```shell
|
||||
# Install using npm
|
||||
npm install -g @openai/codex
|
||||
```
|
||||
|
||||
Alternatively, if you use Homebrew:
|
||||
|
||||
```shell
|
||||
brew install codex
|
||||
# Install using Homebrew
|
||||
brew install --cask codex
|
||||
```
|
||||
|
||||
Then simply run `codex` to get started:
|
||||
|
||||
```shell
|
||||
codex
|
||||
```
|
||||
Then simply run `codex` to get started.
|
||||
|
||||
<details>
|
||||
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>
|
||||
@@ -52,52 +45,15 @@ Each archive contains a single entry with the platform baked into the name (e.g.
|
||||
|
||||
### Using Codex with your ChatGPT plan
|
||||
|
||||
<p align="center">
|
||||
<img src="./.github/codex-cli-login.png" alt="Codex CLI login" width="80%" />
|
||||
</p>
|
||||
|
||||
Run `codex` and select **Sign in with ChatGPT**. We recommend signing into your ChatGPT account to use Codex as part of your Plus, Pro, Team, Edu, or Enterprise plan. [Learn more about what's included in your ChatGPT plan](https://help.openai.com/en/articles/11369540-codex-in-chatgpt).
|
||||
|
||||
You can also use Codex with an API key, but this requires [additional setup](./docs/authentication.md#usage-based-billing-alternative-use-an-openai-api-key). If you previously used an API key for usage-based billing, see the [migration steps](./docs/authentication.md#migrating-from-usage-based-billing-api-key). If you're having trouble with login, please comment on [this issue](https://github.com/openai/codex/issues/1243).
|
||||
You can also use Codex with an API key, but this requires [additional setup](https://developers.openai.com/codex/auth#sign-in-with-an-api-key).
|
||||
|
||||
### Model Context Protocol (MCP)
|
||||
## Docs
|
||||
|
||||
Codex CLI supports [MCP servers](./docs/advanced.md#model-context-protocol-mcp). Enable by adding an `mcp_servers` section to your `~/.codex/config.toml`.
|
||||
|
||||
|
||||
### Configuration
|
||||
|
||||
Codex CLI supports a rich set of configuration options, with preferences stored in `~/.codex/config.toml`. For full configuration options, see [Configuration](./docs/config.md).
|
||||
|
||||
---
|
||||
|
||||
### Docs & FAQ
|
||||
|
||||
- [**Getting started**](./docs/getting-started.md)
|
||||
- [CLI usage](./docs/getting-started.md#cli-usage)
|
||||
- [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
|
||||
- [Example prompts](./docs/getting-started.md#example-prompts)
|
||||
- [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
|
||||
- [Configuration](./docs/config.md)
|
||||
- [**Sandbox & approvals**](./docs/sandbox.md)
|
||||
- [**Authentication**](./docs/authentication.md)
|
||||
- [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
|
||||
- [Login on a "Headless" machine](./docs/authentication.md#connecting-on-a-headless-machine)
|
||||
- [**Advanced**](./docs/advanced.md)
|
||||
- [Non-interactive / CI mode](./docs/advanced.md#non-interactive--ci-mode)
|
||||
- [Tracing / verbose logging](./docs/advanced.md#tracing--verbose-logging)
|
||||
- [Model Context Protocol (MCP)](./docs/advanced.md#model-context-protocol-mcp)
|
||||
- [**Zero data retention (ZDR)**](./docs/zdr.md)
|
||||
- [**Codex Documentation**](https://developers.openai.com/codex)
|
||||
- [**Contributing**](./docs/contributing.md)
|
||||
- [**Install & build**](./docs/install.md)
|
||||
- [System Requirements](./docs/install.md#system-requirements)
|
||||
- [DotSlash](./docs/install.md#dotslash)
|
||||
- [Build from source](./docs/install.md#build-from-source)
|
||||
- [**FAQ**](./docs/faq.md)
|
||||
- [**Installing & building**](./docs/install.md)
|
||||
- [**Open source fund**](./docs/open-source-fund.md)
|
||||
|
||||
---
|
||||
|
||||
## License
|
||||
|
||||
This repository is licensed under the [Apache-2.0 License](LICENSE).
|
||||
|
||||
17 announcement_tip.toml Normal file
@@ -0,0 +1,17 @@
|
||||
# Example announcement tips for Codex TUI.
|
||||
# Each [[announcements]] entry is evaluated in order; the last matching one is shown.
|
||||
# Dates are UTC, formatted as YYYY-MM-DD. The from_date is inclusive and the to_date is exclusive.
|
||||
# version_regex matches against the CLI version (env!("CARGO_PKG_VERSION")); omit to apply to all versions.
|
||||
# target_app specify which app should display the announcement (cli, vsce, ...).
|
||||
|
||||
[[announcements]]
|
||||
content = "Welcome to Codex! Check out the new onboarding flow."
|
||||
from_date = "2024-10-01"
|
||||
to_date = "2024-10-15"
|
||||
target_app = "cli"
|
||||
|
||||
# Test announcement only for local build version until 2026-01-10 excluded (past)
|
||||
[[announcements]]
|
||||
content = "This is a test announcement"
|
||||
version_regex = "^0\\.0\\.0$"
|
||||
to_date = "2026-01-10"
|
||||
@@ -4,7 +4,7 @@
|
||||
header = """
|
||||
# Changelog
|
||||
|
||||
You can install any of these versions: `npm install -g codex@version`
|
||||
You can install any of these versions: `npm install -g @openai/codex@<version>`
|
||||
"""
|
||||
|
||||
body = """
|
||||
|
||||
34 codex-cli/bin/codex.js Executable file → Normal file
@@ -80,6 +80,31 @@ function getUpdatedPath(newDirs) {
|
||||
return updatedPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use heuristics to detect the package manager that was used to install Codex
|
||||
* in order to give the user a hint about how to update it.
|
||||
*/
|
||||
function detectPackageManager() {
|
||||
const userAgent = process.env.npm_config_user_agent || "";
|
||||
if (/\bbun\//.test(userAgent)) {
|
||||
return "bun";
|
||||
}
|
||||
|
||||
const execPath = process.env.npm_execpath || "";
|
||||
if (execPath.includes("bun")) {
|
||||
return "bun";
|
||||
}
|
||||
|
||||
if (
|
||||
__dirname.includes(".bun/install/global") ||
|
||||
__dirname.includes(".bun\\install\\global")
|
||||
) {
|
||||
return "bun";
|
||||
}
|
||||
|
||||
return userAgent ? "npm" : null;
|
||||
}
|
||||
|
||||
const additionalDirs = [];
|
||||
const pathDir = path.join(archRoot, "path");
|
||||
if (existsSync(pathDir)) {
|
||||
@@ -87,9 +112,16 @@ if (existsSync(pathDir)) {
|
||||
}
|
||||
const updatedPath = getUpdatedPath(additionalDirs);
|
||||
|
||||
const env = { ...process.env, PATH: updatedPath };
|
||||
const packageManagerEnvVar =
|
||||
detectPackageManager() === "bun"
|
||||
? "CODEX_MANAGED_BY_BUN"
|
||||
: "CODEX_MANAGED_BY_NPM";
|
||||
env[packageManagerEnvVar] = "1";
|
||||
|
||||
const child = spawn(binaryPath, process.argv.slice(2), {
|
||||
stdio: "inherit",
|
||||
env: { ...process.env, PATH: updatedPath, CODEX_MANAGED_BY_NPM: "1" },
|
||||
env,
|
||||
});
|
||||
|
||||
child.on("error", (err) => {
|
||||
|
||||
@@ -1,11 +1,19 @@
|
||||
# npm releases
|
||||
|
||||
Run the following:
|
||||
|
||||
To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, build it as follows:
|
||||
Use the staging helper in the repo root to generate npm tarballs for a release. For
|
||||
example, to stage the CLI, responses proxy, and SDK packages for version `0.6.0`:
|
||||
|
||||
```bash
|
||||
./codex-cli/scripts/build_npm_package.py --release-version 0.6.0
|
||||
./scripts/stage_npm_packages.py \
|
||||
--release-version 0.6.0 \
|
||||
--package codex \
|
||||
--package codex-responses-api-proxy \
|
||||
--package codex-sdk
|
||||
```
|
||||
|
||||
Note this will create `./codex-cli/vendor/` as a side-effect.
|
||||
This downloads the native artifacts once, hydrates `vendor/` for each package, and writes
|
||||
tarballs to `dist/npm/`.
|
||||
|
||||
If you need to invoke `build_npm_package.py` directly, run
|
||||
`codex-cli/scripts/install_native_deps.py` first and pass `--vendor-src` pointing to the
|
||||
directory that contains the populated `vendor/` tree.
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
@@ -14,19 +13,30 @@ SCRIPT_DIR = Path(__file__).resolve().parent
|
||||
CODEX_CLI_ROOT = SCRIPT_DIR.parent
|
||||
REPO_ROOT = CODEX_CLI_ROOT.parent
|
||||
RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm"
|
||||
GITHUB_REPO = "openai/codex"
|
||||
CODEX_SDK_ROOT = REPO_ROOT / "sdk" / "typescript"
|
||||
|
||||
# The docs are not clear on what the expected value/format of
|
||||
# workflow/workflowName is:
|
||||
# https://cli.github.com/manual/gh_run_list
|
||||
WORKFLOW_NAME = ".github/workflows/rust-release.yml"
|
||||
PACKAGE_NATIVE_COMPONENTS: dict[str, list[str]] = {
|
||||
"codex": ["codex", "rg"],
|
||||
"codex-responses-api-proxy": ["codex-responses-api-proxy"],
|
||||
"codex-sdk": ["codex"],
|
||||
}
|
||||
WINDOWS_ONLY_COMPONENTS: dict[str, list[str]] = {
|
||||
"codex": ["codex-windows-sandbox-setup", "codex-command-runner"],
|
||||
}
|
||||
COMPONENT_DEST_DIR: dict[str, str] = {
|
||||
"codex": "codex",
|
||||
"codex-responses-api-proxy": "codex-responses-api-proxy",
|
||||
"codex-windows-sandbox-setup": "codex",
|
||||
"codex-command-runner": "codex",
|
||||
"rg": "path",
|
||||
}
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
|
||||
parser.add_argument(
|
||||
"--package",
|
||||
choices=("codex", "codex-responses-api-proxy"),
|
||||
choices=("codex", "codex-responses-api-proxy", "codex-sdk"),
|
||||
default="codex",
|
||||
help="Which npm package to stage (default: codex).",
|
||||
)
|
||||
@@ -37,14 +47,9 @@ def parse_args() -> argparse.Namespace:
|
||||
parser.add_argument(
|
||||
"--release-version",
|
||||
help=(
|
||||
"Version to stage for npm release. When provided, the script also resolves the "
|
||||
"matching rust-release workflow unless --workflow-url is supplied."
|
||||
"Version to stage for npm release."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--workflow-url",
|
||||
help="Optional GitHub Actions workflow run URL used to download native binaries.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--staging-dir",
|
||||
type=Path,
|
||||
@@ -64,6 +69,11 @@ def parse_args() -> argparse.Namespace:
|
||||
type=Path,
|
||||
help="Path where the generated npm tarball should be written.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--vendor-src",
|
||||
type=Path,
|
||||
help="Directory containing pre-installed native binaries to bundle (vendor root).",
|
||||
)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
@@ -86,29 +96,19 @@ def main() -> int:
|
||||
try:
|
||||
stage_sources(staging_dir, version, package)
|
||||
|
||||
workflow_url = args.workflow_url
|
||||
resolved_head_sha: str | None = None
|
||||
if not workflow_url:
|
||||
if release_version:
|
||||
workflow = resolve_release_workflow(version)
|
||||
workflow_url = workflow["url"]
|
||||
resolved_head_sha = workflow.get("headSha")
|
||||
else:
|
||||
workflow_url = resolve_latest_alpha_workflow_url()
|
||||
elif release_version:
|
||||
try:
|
||||
workflow = resolve_release_workflow(version)
|
||||
resolved_head_sha = workflow.get("headSha")
|
||||
except Exception:
|
||||
resolved_head_sha = None
|
||||
vendor_src = args.vendor_src.resolve() if args.vendor_src else None
|
||||
native_components = PACKAGE_NATIVE_COMPONENTS.get(package, [])
|
||||
|
||||
if release_version and resolved_head_sha:
|
||||
print(f"should `git checkout {resolved_head_sha}`")
|
||||
if native_components:
|
||||
if vendor_src is None:
|
||||
components_str = ", ".join(native_components)
|
||||
raise RuntimeError(
|
||||
"Native components "
|
||||
f"({components_str}) required for package '{package}'. Provide --vendor-src "
|
||||
"pointing to a directory containing pre-installed binaries."
|
||||
)
|
||||
|
||||
if not workflow_url:
|
||||
raise RuntimeError("Unable to determine workflow URL for native binaries.")
|
||||
|
||||
install_native_binaries(staging_dir, workflow_url, package)
|
||||
copy_native_binaries(vendor_src, staging_dir, package, native_components)
|
||||
|
||||
if release_version:
|
||||
staging_dir_str = str(staging_dir)
|
||||
@@ -119,12 +119,20 @@ def main() -> int:
|
||||
f" node {staging_dir_str}/bin/codex.js --version\n"
|
||||
f" node {staging_dir_str}/bin/codex.js --help\n\n"
|
||||
)
|
||||
else:
|
||||
elif package == "codex-responses-api-proxy":
|
||||
print(
|
||||
f"Staged version {version} for release in {staging_dir_str}\n\n"
|
||||
"Verify the responses API proxy:\n"
|
||||
f" node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n"
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"Staged version {version} for release in {staging_dir_str}\n\n"
|
||||
"Verify the SDK contents:\n"
|
||||
f" ls {staging_dir_str}/dist\n"
|
||||
f" ls {staging_dir_str}/vendor\n"
|
||||
" node -e \"import('./dist/index.js').then(() => console.log('ok'))\"\n\n"
|
||||
)
|
||||
else:
|
||||
print(f"Staged package in {staging_dir}")
|
||||
|
||||
@@ -152,10 +160,9 @@ def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
|
||||
|
||||
|
||||
def stage_sources(staging_dir: Path, version: str, package: str) -> None:
|
||||
bin_dir = staging_dir / "bin"
|
||||
bin_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if package == "codex":
|
||||
bin_dir = staging_dir / "bin"
|
||||
bin_dir.mkdir(parents=True, exist_ok=True)
|
||||
shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
|
||||
rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
|
||||
if rg_manifest.exists():
|
||||
@@ -167,6 +174,8 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
|
||||
|
||||
package_json_path = CODEX_CLI_ROOT / "package.json"
|
||||
elif package == "codex-responses-api-proxy":
|
||||
bin_dir = staging_dir / "bin"
|
||||
bin_dir.mkdir(parents=True, exist_ok=True)
|
||||
launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js"
|
||||
shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js")
|
||||
|
||||
@@ -175,6 +184,9 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
|
||||
shutil.copy2(readme_src, staging_dir / "README.md")
|
||||
|
||||
package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json"
|
||||
elif package == "codex-sdk":
|
||||
package_json_path = CODEX_SDK_ROOT / "package.json"
|
||||
stage_codex_sdk_sources(staging_dir)
|
||||
else:
|
||||
raise RuntimeError(f"Unknown package '{package}'.")
|
||||
|
||||
@@ -182,91 +194,93 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
|
||||
package_json = json.load(fh)
|
||||
package_json["version"] = version
|
||||
|
||||
if package == "codex-sdk":
|
||||
scripts = package_json.get("scripts")
|
||||
if isinstance(scripts, dict):
|
||||
scripts.pop("prepare", None)
|
||||
|
||||
files = package_json.get("files")
|
||||
if isinstance(files, list):
|
||||
if "vendor" not in files:
|
||||
files.append("vendor")
|
||||
else:
|
||||
package_json["files"] = ["dist", "vendor"]
|
||||
|
||||
with open(staging_dir / "package.json", "w", encoding="utf-8") as out:
|
||||
json.dump(package_json, out, indent=2)
|
||||
out.write("\n")
|
||||
|
||||
|
||||
def install_native_binaries(staging_dir: Path, workflow_url: str, package: str) -> None:
|
||||
package_components = {
|
||||
"codex": ["codex", "rg"],
|
||||
"codex-responses-api-proxy": ["codex-responses-api-proxy"],
|
||||
}
|
||||
|
||||
components = package_components.get(package)
|
||||
if components is None:
|
||||
raise RuntimeError(f"Unknown package '{package}'.")
|
||||
|
||||
cmd = ["./scripts/install_native_deps.py", "--workflow-url", workflow_url]
|
||||
for component in components:
|
||||
cmd.extend(["--component", component])
|
||||
cmd.append(str(staging_dir))
|
||||
subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT)
|
||||
def run_command(cmd: list[str], cwd: Path | None = None) -> None:
|
||||
print("+", " ".join(cmd))
|
||||
subprocess.run(cmd, cwd=cwd, check=True)
|
||||
|
||||
|
||||
def resolve_latest_alpha_workflow_url() -> str:
|
||||
version = determine_latest_alpha_version()
|
||||
workflow = resolve_release_workflow(version)
|
||||
return workflow["url"]
|
||||
def stage_codex_sdk_sources(staging_dir: Path) -> None:
|
||||
package_root = CODEX_SDK_ROOT
|
||||
|
||||
run_command(["pnpm", "install", "--frozen-lockfile"], cwd=package_root)
|
||||
run_command(["pnpm", "run", "build"], cwd=package_root)
|
||||
|
||||
dist_src = package_root / "dist"
|
||||
if not dist_src.exists():
|
||||
raise RuntimeError("codex-sdk build did not produce a dist directory.")
|
||||
|
||||
shutil.copytree(dist_src, staging_dir / "dist")
|
||||
|
||||
readme_src = package_root / "README.md"
|
||||
if readme_src.exists():
|
||||
shutil.copy2(readme_src, staging_dir / "README.md")
|
||||
|
||||
license_src = REPO_ROOT / "LICENSE"
|
||||
if license_src.exists():
|
||||
shutil.copy2(license_src, staging_dir / "LICENSE")
|
||||
|
||||
|
||||
def determine_latest_alpha_version() -> str:
|
||||
releases = list_releases()
|
||||
best_key: tuple[int, int, int, int] | None = None
|
||||
best_version: str | None = None
|
||||
pattern = re.compile(r"^rust-v(\d+)\.(\d+)\.(\d+)-alpha\.(\d+)$")
|
||||
for release in releases:
|
||||
tag = release.get("tag_name", "")
|
||||
match = pattern.match(tag)
|
||||
if not match:
|
||||
def copy_native_binaries(
|
||||
vendor_src: Path,
|
||||
staging_dir: Path,
|
||||
package: str,
|
||||
components: list[str],
|
||||
) -> None:
|
||||
vendor_src = vendor_src.resolve()
|
||||
if not vendor_src.exists():
|
||||
raise RuntimeError(f"Vendor source directory not found: {vendor_src}")
|
||||
|
||||
components_set = {component for component in components if component in COMPONENT_DEST_DIR}
|
||||
if not components_set:
|
||||
return
|
||||
|
||||
vendor_dest = staging_dir / "vendor"
|
||||
if vendor_dest.exists():
|
||||
shutil.rmtree(vendor_dest)
|
||||
vendor_dest.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for target_dir in vendor_src.iterdir():
|
||||
if not target_dir.is_dir():
|
||||
continue
|
||||
key = tuple(int(match.group(i)) for i in range(1, 5))
|
||||
if best_key is None or key > best_key:
|
||||
best_key = key
|
||||
best_version = (
|
||||
f"{match.group(1)}.{match.group(2)}.{match.group(3)}-alpha.{match.group(4)}"
|
||||
)
|
||||
|
||||
if best_version is None:
|
||||
raise RuntimeError("No alpha releases found when resolving workflow URL.")
|
||||
return best_version
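For illustration (not part of the script), this is how the captured version components order alpha tags; comparing the four integers as a tuple makes `alpha.10` rank above `alpha.2`:

```python
# Illustration only: ranking of rust-v*-alpha.* tags by numeric tuple.
import re

pattern = re.compile(r"^rust-v(\d+)\.(\d+)\.(\d+)-alpha\.(\d+)$")
tags = ["rust-v0.9.0-alpha.12", "rust-v0.10.0-alpha.2", "rust-v0.10.0-alpha.10"]

keys = [tuple(int(g) for g in m.groups()) for m in map(pattern.match, tags) if m]
print(max(keys))  # (0, 10, 0, 10), i.e. version "0.10.0-alpha.10"
```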
|
||||
if "windows" in target_dir.name:
|
||||
components_set.update(WINDOWS_ONLY_COMPONENTS.get(package, []))
|
||||
|
||||
dest_target_dir = vendor_dest / target_dir.name
|
||||
dest_target_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def list_releases() -> list[dict]:
|
||||
stdout = subprocess.check_output(
|
||||
["gh", "api", f"/repos/{GITHUB_REPO}/releases?per_page=100"],
|
||||
text=True,
|
||||
)
|
||||
try:
|
||||
releases = json.loads(stdout or "[]")
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RuntimeError("Unable to parse releases JSON.") from exc
|
||||
if not isinstance(releases, list):
|
||||
raise RuntimeError("Unexpected response when listing releases.")
|
||||
return releases
|
||||
for component in components_set:
|
||||
dest_dir_name = COMPONENT_DEST_DIR.get(component)
|
||||
if dest_dir_name is None:
|
||||
continue
|
||||
|
||||
src_component_dir = target_dir / dest_dir_name
|
||||
if not src_component_dir.exists():
|
||||
raise RuntimeError(
|
||||
f"Missing native component '{component}' in vendor source: {src_component_dir}"
|
||||
)
|
||||
|
||||
def resolve_release_workflow(version: str) -> dict:
|
||||
stdout = subprocess.check_output(
|
||||
[
|
||||
"gh",
|
||||
"run",
|
||||
"list",
|
||||
"--branch",
|
||||
f"rust-v{version}",
|
||||
"--json",
|
||||
"workflowName,url,headSha",
|
||||
"--workflow",
|
||||
WORKFLOW_NAME,
|
||||
"--jq",
|
||||
"first(.[])",
|
||||
],
|
||||
text=True,
|
||||
)
|
||||
workflow = json.loads(stdout or "[]")
|
||||
if not workflow:
|
||||
raise RuntimeError(f"Unable to find rust-release workflow for version {version}.")
|
||||
return workflow
|
||||
dest_component_dir = dest_target_dir / dest_dir_name
|
||||
if dest_component_dir.exists():
|
||||
shutil.rmtree(dest_component_dir)
|
||||
shutil.copytree(src_component_dir, dest_component_dir)
|
||||
|
||||
|
||||
def run_npm_pack(staging_dir: Path, output_path: Path) -> Path:
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""
|
||||
|
||||
import argparse
|
||||
from contextlib import contextmanager
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
@@ -12,6 +13,7 @@ import zipfile
|
||||
from dataclasses import dataclass
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from pathlib import Path
|
||||
import sys
|
||||
from typing import Iterable, Sequence
|
||||
from urllib.parse import urlparse
|
||||
from urllib.request import urlopen
|
||||
@@ -36,8 +38,11 @@ class BinaryComponent:
|
||||
artifact_prefix: str # matches the artifact filename prefix (e.g. codex-<target>.zst)
|
||||
dest_dir: str # directory under vendor/<target>/ where the binary is installed
|
||||
binary_basename: str # executable name inside dest_dir (before optional .exe)
|
||||
targets: tuple[str, ...] | None = None # limit installation to specific targets
|
||||
|
||||
|
||||
WINDOWS_TARGETS = tuple(target for target in BINARY_TARGETS if "windows" in target)
|
||||
|
||||
BINARY_COMPONENTS = {
|
||||
"codex": BinaryComponent(
|
||||
artifact_prefix="codex",
|
||||
@@ -49,6 +54,18 @@ BINARY_COMPONENTS = {
|
||||
dest_dir="codex-responses-api-proxy",
|
||||
binary_basename="codex-responses-api-proxy",
|
||||
),
|
||||
"codex-windows-sandbox-setup": BinaryComponent(
|
||||
artifact_prefix="codex-windows-sandbox-setup",
|
||||
dest_dir="codex",
|
||||
binary_basename="codex-windows-sandbox-setup",
|
||||
targets=WINDOWS_TARGETS,
|
||||
),
|
||||
"codex-command-runner": BinaryComponent(
|
||||
artifact_prefix="codex-command-runner",
|
||||
dest_dir="codex",
|
||||
binary_basename="codex-command-runner",
|
||||
targets=WINDOWS_TARGETS,
|
||||
),
|
||||
}
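A small sketch of how the optional `targets` field is expected to interact with the default target list; the target triples below are assumed examples, not the script's actual `BINARY_TARGETS`:

```python
# Sketch with assumed target triples: components without an explicit `targets`
# tuple install for every binary target, Windows-only components do not.
BINARY_TARGETS = [
    "x86_64-unknown-linux-musl",   # assumed example
    "aarch64-apple-darwin",        # assumed example
    "x86_64-pc-windows-msvc",      # assumed example
]
WINDOWS_TARGETS = tuple(t for t in BINARY_TARGETS if "windows" in t)

def targets_for(component_targets):
    # Mirrors `component.targets or BINARY_TARGETS` in install_binary_components().
    return list(component_targets or BINARY_TARGETS)

print(targets_for(None))             # all targets, e.g. the "codex" component
print(targets_for(WINDOWS_TARGETS))  # only the Windows target(s)
```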
|
||||
|
||||
RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
|
||||
@@ -62,6 +79,45 @@ RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
|
||||
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
|
||||
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]
|
||||
|
||||
# urllib.request.urlopen() defaults to no timeout (can hang indefinitely), which is painful in CI.
|
||||
DOWNLOAD_TIMEOUT_SECS = 60
|
||||
|
||||
|
||||
def _gha_enabled() -> bool:
|
||||
# GitHub Actions supports "workflow commands" (e.g. ::group:: / ::error::) that make logs
|
||||
# much easier to scan: groups collapse noisy sections and error annotations surface the
|
||||
# failure in the UI without changing the actual exception/traceback output.
|
||||
return os.environ.get("GITHUB_ACTIONS") == "true"
|
||||
|
||||
|
||||
def _gha_escape(value: str) -> str:
|
||||
# Workflow commands require percent/newline escaping.
|
||||
return value.replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A")
|
||||
|
||||
|
||||
def _gha_error(*, title: str, message: str) -> None:
|
||||
# Emit a GitHub Actions error annotation. This does not replace stdout/stderr logs; it just
|
||||
# adds a prominent summary line to the job UI so the root cause is easier to spot.
|
||||
if not _gha_enabled():
|
||||
return
|
||||
print(
|
||||
f"::error title={_gha_escape(title)}::{_gha_escape(message)}",
|
||||
flush=True,
|
||||
)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _gha_group(title: str):
|
||||
# Wrap a block in a collapsible log group on GitHub Actions. Outside of GHA this is a no-op
|
||||
# so local output remains unchanged.
|
||||
if _gha_enabled():
|
||||
print(f"::group::{_gha_escape(title)}", flush=True)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if _gha_enabled():
|
||||
print("::endgroup::", flush=True)
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description="Install native Codex binaries.")
|
||||
@@ -79,7 +135,8 @@ def parse_args() -> argparse.Namespace:
|
||||
choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
|
||||
help=(
|
||||
"Limit installation to the specified components."
|
||||
" May be repeated. Defaults to 'codex' and 'rg'."
|
||||
" May be repeated. Defaults to codex, codex-windows-sandbox-setup,"
|
||||
" codex-command-runner, and rg."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
@@ -101,7 +158,12 @@ def main() -> int:
|
||||
vendor_dir = codex_cli_root / VENDOR_DIR_NAME
|
||||
vendor_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
components = args.components or ["codex", "rg"]
|
||||
components = args.components or [
|
||||
"codex",
|
||||
"codex-windows-sandbox-setup",
|
||||
"codex-command-runner",
|
||||
"rg",
|
||||
]
|
||||
|
||||
workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
|
||||
if not workflow_url:
|
||||
@@ -110,19 +172,20 @@ def main() -> int:
|
||||
workflow_id = workflow_url.rstrip("/").split("/")[-1]
|
||||
print(f"Downloading native artifacts from workflow {workflow_id}...")
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
|
||||
artifacts_dir = Path(artifacts_dir_str)
|
||||
_download_artifacts(workflow_id, artifacts_dir)
|
||||
install_binary_components(
|
||||
artifacts_dir,
|
||||
vendor_dir,
|
||||
BINARY_TARGETS,
|
||||
[name for name in components if name in BINARY_COMPONENTS],
|
||||
)
|
||||
with _gha_group(f"Download native artifacts from workflow {workflow_id}"):
|
||||
with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
|
||||
artifacts_dir = Path(artifacts_dir_str)
|
||||
_download_artifacts(workflow_id, artifacts_dir)
|
||||
install_binary_components(
|
||||
artifacts_dir,
|
||||
vendor_dir,
|
||||
[BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS],
|
||||
)
|
||||
|
||||
if "rg" in components:
|
||||
print("Fetching ripgrep binaries...")
|
||||
fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
|
||||
with _gha_group("Fetch ripgrep binaries"):
|
||||
print("Fetching ripgrep binaries...")
|
||||
fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
|
||||
|
||||
print(f"Installed native dependencies into {vendor_dir}")
|
||||
return 0
|
||||
@@ -183,7 +246,14 @@ def fetch_rg(
|
||||
|
||||
for future in as_completed(future_map):
|
||||
target = future_map[future]
|
||||
results[target] = future.result()
|
||||
try:
|
||||
results[target] = future.result()
|
||||
except Exception as exc:
|
||||
_gha_error(
|
||||
title="ripgrep install failed",
|
||||
message=f"target={target} error={exc!r}",
|
||||
)
|
||||
raise RuntimeError(f"Failed to install ripgrep for target {target}.") from exc
|
||||
print(f" installed ripgrep for {target}")
|
||||
|
||||
return [results[target] for target in targets]
|
||||
@@ -206,23 +276,19 @@ def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
|
||||
def install_binary_components(
|
||||
artifacts_dir: Path,
|
||||
vendor_dir: Path,
|
||||
targets: Iterable[str],
|
||||
component_names: Sequence[str],
|
||||
selected_components: Sequence[BinaryComponent],
|
||||
) -> None:
|
||||
selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS]
|
||||
if not selected_components:
|
||||
return
|
||||
|
||||
targets = list(targets)
|
||||
if not targets:
|
||||
return
|
||||
|
||||
for component in selected_components:
|
||||
component_targets = list(component.targets or BINARY_TARGETS)
|
||||
|
||||
print(
|
||||
f"Installing {component.binary_basename} binaries for targets: "
|
||||
+ ", ".join(targets)
|
||||
+ ", ".join(component_targets)
|
||||
)
|
||||
max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
|
||||
max_workers = min(len(component_targets), max(1, (os.cpu_count() or 1)))
|
||||
with ThreadPoolExecutor(max_workers=max_workers) as executor:
|
||||
futures = {
|
||||
executor.submit(
|
||||
@@ -232,7 +298,7 @@ def install_binary_components(
|
||||
target,
|
||||
component,
|
||||
): target
|
||||
for target in targets
|
||||
for target in component_targets
|
||||
}
|
||||
for future in as_completed(futures):
|
||||
installed_path = future.result()
|
||||
@@ -285,6 +351,8 @@ def _fetch_single_rg(
|
||||
url = providers[0]["url"]
|
||||
archive_format = platform_info.get("format", "zst")
|
||||
archive_member = platform_info.get("path")
|
||||
digest = platform_info.get("digest")
|
||||
expected_size = platform_info.get("size")
|
||||
|
||||
dest_dir = vendor_dir / target / "path"
|
||||
dest_dir.mkdir(parents=True, exist_ok=True)
|
||||
@@ -297,10 +365,32 @@ def _fetch_single_rg(
|
||||
tmp_dir = Path(tmp_dir_str)
|
||||
archive_filename = os.path.basename(urlparse(url).path)
|
||||
download_path = tmp_dir / archive_filename
|
||||
_download_file(url, download_path)
|
||||
print(
|
||||
f" downloading ripgrep for {target} ({platform_key}) from {url}",
|
||||
flush=True,
|
||||
)
|
||||
try:
|
||||
_download_file(url, download_path)
|
||||
except Exception as exc:
|
||||
_gha_error(
|
||||
title="ripgrep download failed",
|
||||
message=f"target={target} platform={platform_key} url={url} error={exc!r}",
|
||||
)
|
||||
raise RuntimeError(
|
||||
"Failed to download ripgrep "
|
||||
f"(target={target}, platform={platform_key}, format={archive_format}, "
|
||||
f"expected_size={expected_size!r}, digest={digest!r}, url={url}, dest={download_path})."
|
||||
) from exc
|
||||
|
||||
dest.unlink(missing_ok=True)
|
||||
extract_archive(download_path, archive_format, archive_member, dest)
|
||||
try:
|
||||
extract_archive(download_path, archive_format, archive_member, dest)
|
||||
except Exception as exc:
|
||||
raise RuntimeError(
|
||||
"Failed to extract ripgrep "
|
||||
f"(target={target}, platform={platform_key}, format={archive_format}, "
|
||||
f"member={archive_member!r}, url={url}, archive={download_path})."
|
||||
) from exc
|
||||
|
||||
if not is_windows:
|
||||
dest.chmod(0o755)
|
||||
@@ -310,7 +400,9 @@ def _fetch_single_rg(
|
||||
|
||||
def _download_file(url: str, dest: Path) -> None:
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
with urlopen(url) as response, open(dest, "wb") as out:
|
||||
dest.unlink(missing_ok=True)
|
||||
|
||||
with urlopen(url, timeout=DOWNLOAD_TIMEOUT_SECS) as response, open(dest, "wb") as out:
|
||||
shutil.copyfileobj(response, out)
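Because `urlopen` now gets an explicit timeout, a caller that wants resilience in CI could wrap the download in a retry loop. This is a sketch of one possible caller, not something `install_native_deps.py` does:

```python
# Sketch only: retrying a download that now raises after DOWNLOAD_TIMEOUT_SECS
# instead of hanging forever. Not part of install_native_deps.py.
import shutil
import time
from pathlib import Path
from urllib.request import urlopen

DOWNLOAD_TIMEOUT_SECS = 60

def download_with_retry(url: str, dest: Path, attempts: int = 3) -> None:
    dest.parent.mkdir(parents=True, exist_ok=True)
    for attempt in range(1, attempts + 1):
        try:
            dest.unlink(missing_ok=True)
            with urlopen(url, timeout=DOWNLOAD_TIMEOUT_SECS) as response, open(dest, "wb") as out:
                shutil.copyfileobj(response, out)
            return
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(2 * attempt)  # simple backoff before retrying
```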
|
||||
|
||||
|
||||
|
||||
6  codex-rs/.cargo/audit.toml  Normal file
@@ -0,0 +1,6 @@
[advisories]
ignore = [
    "RUSTSEC-2024-0388", # derivative 2.2.0 via starlark; upstream crate is unmaintained
    "RUSTSEC-2025-0057", # fxhash 0.2.1 via starlark_map; upstream crate is unmaintained
    "RUSTSEC-2024-0436", # paste 1.0.15 via starlark/ratatui; upstream crate is unmaintained
]

5  codex-rs/.cargo/config.toml  Normal file
@@ -0,0 +1,5 @@
[target.'cfg(all(windows, target_env = "msvc"))']
rustflags = ["-C", "link-arg=/STACK:8388608"]

[target.'cfg(all(windows, target_env = "gnu"))']
rustflags = ["-C", "link-arg=-Wl,--stack,8388608"]
13  codex-rs/.config/nextest.toml  Normal file
@@ -0,0 +1,13 @@
[profile.default]
# Do not increase, fix your test instead
slow-timeout = { period = "15s", terminate-after = 2 }

[[profile.default.overrides]]
# Do not add new tests here
filter = 'test(rmcp_client) | test(humanlike_typing_1000_chars_appears_live_no_placeholder)'
slow-timeout = { period = "1m", terminate-after = 4 }

[[profile.default.overrides]]
filter = 'test(approval_matrix_covers_all_modes)'
slow-timeout = { period = "30s", terminate-after = 2 }
26  codex-rs/.github/workflows/cargo-audit.yml  vendored  Normal file
@@ -0,0 +1,26 @@
name: Cargo audit

on:
  pull_request:
  push:
    branches:
      - main

permissions:
  contents: read

jobs:
  audit:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: codex-rs
    steps:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@stable
      - name: Install cargo-audit
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-audit
      - name: Run cargo audit
        run: cargo audit --deny warnings
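To reproduce the same audit step locally, assuming `cargo-audit` is installed (e.g. via `cargo install cargo-audit`), a minimal sketch:

```python
# Sketch: run the same audit the workflow runs, from the repository root.
import subprocess

subprocess.run(
    ["cargo", "audit", "--deny", "warnings"],
    cwd="codex-rs",  # same working directory the workflow uses
    check=True,
)
```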
1  codex-rs/.gitignore  vendored
@@ -1,4 +1,5 @@
/target/
/target-*/

# Recommended value of CARGO_TARGET_DIR when using Docker as explained in .devcontainer/README.md.
/target-amd64/

1  codex-rs/BUILD.bazel  Normal file
@@ -0,0 +1 @@

3621  codex-rs/Cargo.lock  generated
File diff suppressed because it is too large Load Diff
@@ -2,9 +2,14 @@
|
||||
members = [
|
||||
"backend-client",
|
||||
"ansi-escape",
|
||||
"async-utils",
|
||||
"app-server",
|
||||
"app-server-protocol",
|
||||
"app-server-test-client",
|
||||
"debug-client",
|
||||
"apply-patch",
|
||||
"arg0",
|
||||
"feedback",
|
||||
"codex-backend-openapi-models",
|
||||
"cloud-tasks",
|
||||
"cloud-tasks-client",
|
||||
@@ -12,25 +17,36 @@ members = [
|
||||
"common",
|
||||
"core",
|
||||
"exec",
|
||||
"exec-server",
|
||||
"execpolicy",
|
||||
"execpolicy-legacy",
|
||||
"keyring-store",
|
||||
"file-search",
|
||||
"git-tooling",
|
||||
"linux-sandbox",
|
||||
"lmstudio",
|
||||
"login",
|
||||
"mcp-client",
|
||||
"mcp-server",
|
||||
"mcp-types",
|
||||
"ollama",
|
||||
"process-hardening",
|
||||
"protocol",
|
||||
"protocol-ts",
|
||||
"rmcp-client",
|
||||
"responses-api-proxy",
|
||||
"stdio-to-uds",
|
||||
"otel",
|
||||
"tui",
|
||||
"git-apply",
|
||||
"tui2",
|
||||
"utils/absolute-path",
|
||||
"utils/cargo-bin",
|
||||
"utils/git",
|
||||
"utils/cache",
|
||||
"utils/image",
|
||||
"utils/json-to-toml",
|
||||
"utils/pty",
|
||||
"utils/readiness",
|
||||
"utils/string",
|
||||
"codex-client",
|
||||
"codex-api",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
@@ -41,35 +57,54 @@ version = "0.0.0"
|
||||
# crates created with `cargo new -w ...` automatically inherit the 2024
|
||||
# edition.
|
||||
edition = "2024"
|
||||
license = "Apache-2.0"
|
||||
|
||||
[workspace.dependencies]
|
||||
# Internal
|
||||
app_test_support = { path = "app-server/tests/common" }
|
||||
codex-ansi-escape = { path = "ansi-escape" }
|
||||
codex-api = { path = "codex-api" }
|
||||
codex-app-server = { path = "app-server" }
|
||||
codex-app-server-protocol = { path = "app-server-protocol" }
|
||||
codex-apply-patch = { path = "apply-patch" }
|
||||
codex-arg0 = { path = "arg0" }
|
||||
codex-async-utils = { path = "async-utils" }
|
||||
codex-backend-client = { path = "backend-client" }
|
||||
codex-chatgpt = { path = "chatgpt" }
|
||||
codex-cli = { path = "cli"}
|
||||
codex-client = { path = "codex-client" }
|
||||
codex-common = { path = "common" }
|
||||
codex-core = { path = "core" }
|
||||
codex-exec = { path = "exec" }
|
||||
codex-execpolicy = { path = "execpolicy" }
|
||||
codex-feedback = { path = "feedback" }
|
||||
codex-file-search = { path = "file-search" }
|
||||
codex-git-tooling = { path = "git-tooling" }
|
||||
codex-git = { path = "utils/git" }
|
||||
codex-keyring-store = { path = "keyring-store" }
|
||||
codex-linux-sandbox = { path = "linux-sandbox" }
|
||||
codex-lmstudio = { path = "lmstudio" }
|
||||
codex-login = { path = "login" }
|
||||
codex-mcp-client = { path = "mcp-client" }
|
||||
codex-mcp-server = { path = "mcp-server" }
|
||||
codex-ollama = { path = "ollama" }
|
||||
codex-otel = { path = "otel" }
|
||||
codex-process-hardening = { path = "process-hardening" }
|
||||
codex-protocol = { path = "protocol" }
|
||||
codex-protocol-ts = { path = "protocol-ts" }
|
||||
codex-responses-api-proxy = { path = "responses-api-proxy" }
|
||||
codex-rmcp-client = { path = "rmcp-client" }
|
||||
codex-stdio-to-uds = { path = "stdio-to-uds" }
|
||||
codex-tui = { path = "tui" }
|
||||
codex-tui2 = { path = "tui2" }
|
||||
codex-utils-absolute-path = { path = "utils/absolute-path" }
|
||||
codex-utils-cache = { path = "utils/cache" }
|
||||
codex-utils-cargo-bin = { path = "utils/cargo-bin" }
|
||||
codex-utils-image = { path = "utils/image" }
|
||||
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
|
||||
codex-utils-pty = { path = "utils/pty" }
|
||||
codex-utils-readiness = { path = "utils/readiness" }
|
||||
codex-utils-string = { path = "utils/string" }
|
||||
codex-windows-sandbox = { path = "windows-sandbox-rs" }
|
||||
core_test_support = { path = "core/tests/common" }
|
||||
exec_server_test_support = { path = "exec-server/tests/common" }
|
||||
mcp-types = { path = "mcp-types" }
|
||||
mcp_test_support = { path = "mcp-server/tests/common" }
|
||||
|
||||
@@ -77,54 +112,65 @@ mcp_test_support = { path = "mcp-server/tests/common" }
|
||||
allocative = "0.3.3"
|
||||
ansi-to-tui = "7.0.0"
|
||||
anyhow = "1"
|
||||
arboard = "3"
|
||||
askama = "0.12"
|
||||
arboard = { version = "3", features = ["wayland-data-control"] }
|
||||
assert_cmd = "2"
|
||||
assert_matches = "1.5.0"
|
||||
async-channel = "2.3.1"
|
||||
async-stream = "0.3.6"
|
||||
async-trait = "0.1.89"
|
||||
axum = { version = "0.8", default-features = false }
|
||||
base64 = "0.22.1"
|
||||
bytes = "1.10.1"
|
||||
chrono = "0.4.42"
|
||||
chardetng = "0.1.17"
|
||||
chrono = "0.4.43"
|
||||
clap = "4"
|
||||
clap_complete = "4"
|
||||
color-eyre = "0.6.3"
|
||||
crossterm = "0.28.1"
|
||||
ctor = "0.5.0"
|
||||
ctor = "0.6.3"
|
||||
derive_more = "2"
|
||||
diffy = "0.4.2"
|
||||
dirs = "6"
|
||||
dotenvy = "0.15.7"
|
||||
dunce = "1.0.4"
|
||||
encoding_rs = "0.8.35"
|
||||
env-flags = "0.1.1"
|
||||
env_logger = "0.11.5"
|
||||
escargot = "0.5"
|
||||
eventsource-stream = "0.2.3"
|
||||
futures = "0.3"
|
||||
icu_decimal = "2.0.0"
|
||||
icu_locale_core = "2.0.0"
|
||||
futures = { version = "0.3", default-features = false }
|
||||
http = "1.3.1"
|
||||
icu_decimal = "2.1"
|
||||
icu_locale_core = "2.1"
|
||||
icu_provider = { version = "2.1", features = ["sync"] }
|
||||
ignore = "0.4.23"
|
||||
image = { version = "^0.25.8", default-features = false }
|
||||
indexmap = "2.6.0"
|
||||
insta = "1.43.2"
|
||||
indoc = "2.0"
|
||||
image = { version = "^0.25.9", default-features = false }
|
||||
include_dir = "0.7.4"
|
||||
indexmap = "2.12.0"
|
||||
insta = "1.46.0"
|
||||
itertools = "0.14.0"
|
||||
landlock = "0.4.1"
|
||||
keyring = { version = "3.6", default-features = false }
|
||||
landlock = "0.4.4"
|
||||
lazy_static = "1"
|
||||
libc = "0.2.175"
|
||||
libc = "0.2.177"
|
||||
log = "0.4"
|
||||
lru = "0.16.3"
|
||||
maplit = "1.0.2"
|
||||
mime_guess = "2.0.5"
|
||||
multimap = "0.10.0"
|
||||
notify = "8.2.0"
|
||||
nucleo-matcher = "0.3.1"
|
||||
once_cell = "1.20.2"
|
||||
openssl-sys = "*"
|
||||
opentelemetry = "0.30.0"
|
||||
opentelemetry-appender-tracing = "0.30.0"
|
||||
opentelemetry-otlp = "0.30.0"
|
||||
opentelemetry-semantic-conventions = "0.30.0"
|
||||
opentelemetry_sdk = "0.30.0"
|
||||
opentelemetry = "0.31.0"
|
||||
opentelemetry-appender-tracing = "0.31.0"
|
||||
opentelemetry-otlp = "0.31.0"
|
||||
opentelemetry-semantic-conventions = "0.31.0"
|
||||
opentelemetry_sdk = "0.31.0"
|
||||
tracing-opentelemetry = "0.32.0"
|
||||
os_info = "3.12.0"
|
||||
owo-colors = "4.2.0"
|
||||
path-absolutize = "3.1.1"
|
||||
path-clean = "1.0.1"
|
||||
pathdiff = "0.2"
|
||||
portable-pty = "0.9.0"
|
||||
predicates = "3"
|
||||
@@ -132,41 +178,55 @@ pretty_assertions = "1.4.1"
|
||||
pulldown-cmark = "0.10"
|
||||
rand = "0.9"
|
||||
ratatui = "0.29.0"
|
||||
regex-lite = "0.1.7"
|
||||
ratatui-core = "0.1.0"
|
||||
ratatui-macros = "0.6.0"
|
||||
regex = "1.12.2"
|
||||
regex-lite = "0.1.8"
|
||||
reqwest = "0.12"
|
||||
rmcp = { version = "0.12.0", default-features = false }
|
||||
schemars = "0.8.22"
|
||||
seccompiler = "0.5.0"
|
||||
sentry = "0.46.0"
|
||||
serde = "1"
|
||||
serde_json = "1"
|
||||
serde_with = "3.14"
|
||||
serde_with = "3.16"
|
||||
serde_yaml = "0.9"
|
||||
serial_test = "3.2.0"
|
||||
sha1 = "0.10.6"
|
||||
sha2 = "0.10"
|
||||
semver = "1.0"
|
||||
shlex = "1.3.0"
|
||||
similar = "2.7.0"
|
||||
socket2 = "0.6.1"
|
||||
starlark = "0.13.0"
|
||||
strum = "0.27.2"
|
||||
strum_macros = "0.27.2"
|
||||
supports-color = "3.0.2"
|
||||
sys-locale = "0.3.2"
|
||||
tempfile = "3.23.0"
|
||||
test-log = "0.2.19"
|
||||
textwrap = "0.16.2"
|
||||
thiserror = "2.0.16"
|
||||
thiserror = "2.0.17"
|
||||
time = "0.3"
|
||||
tiny_http = "0.12"
|
||||
tokio = "1"
|
||||
tokio-stream = "0.1.17"
|
||||
tokio-stream = "0.1.18"
|
||||
tokio-test = "0.4"
|
||||
tokio-util = "0.7.16"
|
||||
tokio-tungstenite = "0.21.0"
|
||||
tokio-util = "0.7.18"
|
||||
toml = "0.9.5"
|
||||
toml_edit = "0.23.4"
|
||||
tonic = "0.13.1"
|
||||
tracing = "0.1.41"
|
||||
toml_edit = "0.24.0"
|
||||
tracing = "0.1.43"
|
||||
tracing-appender = "0.2.3"
|
||||
tracing-subscriber = "0.3.20"
|
||||
tracing-subscriber = "0.3.22"
|
||||
tracing-test = "0.2.5"
|
||||
tree-sitter = "0.25.9"
|
||||
tree-sitter-bash = "0.25.0"
|
||||
tree-sitter = "0.25.10"
|
||||
tree-sitter-bash = "0.25"
|
||||
zstd = "0.13"
|
||||
tree-sitter-highlight = "0.25.10"
|
||||
ts-rs = "11"
|
||||
tui-scrollbar = "0.2.2"
|
||||
uds_windows = "1.1.0"
|
||||
unicode-segmentation = "1.12.0"
|
||||
unicode-width = "0.2"
|
||||
url = "2"
|
||||
@@ -175,10 +235,11 @@ uuid = "1"
|
||||
vt100 = "0.16.2"
|
||||
walkdir = "2.5.0"
|
||||
webbrowser = "1.0"
|
||||
which = "6"
|
||||
wildmatch = "2.5.0"
|
||||
which = "8"
|
||||
wildmatch = "2.6.1"
|
||||
|
||||
wiremock = "0.6"
|
||||
zeroize = "1.8.1"
|
||||
zeroize = "1.8.2"
|
||||
|
||||
[workspace.lints]
|
||||
rust = {}
|
||||
@@ -221,7 +282,7 @@ unwrap_used = "deny"
|
||||
# cargo-shear cannot see the platform-specific openssl-sys usage, so we
|
||||
# silence the false positive here instead of deleting a real dependency.
|
||||
[workspace.metadata.cargo-shear]
|
||||
ignored = ["openssl-sys", "codex-utils-readiness"]
|
||||
ignored = ["icu_provider", "openssl-sys", "codex-utils-readiness"]
|
||||
|
||||
[profile.release]
|
||||
lto = "fat"
|
||||
@@ -232,6 +293,16 @@ strip = "symbols"
|
||||
# See https://github.com/openai/codex/issues/1411 for details.
|
||||
codegen-units = 1
|
||||
|
||||
[profile.ci-test]
|
||||
debug = 1 # Reduce debug symbol size
|
||||
inherits = "test"
|
||||
opt-level = 0
|
||||
|
||||
[patch.crates-io]
|
||||
# Uncomment to debug local changes.
|
||||
# ratatui = { path = "../../ratatui" }
|
||||
crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }
|
||||
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }
|
||||
|
||||
# Uncomment to debug local changes.
|
||||
# rmcp = { path = "../../rust-sdk/crates/rmcp" }
|
||||
|
||||
@@ -11,7 +11,12 @@ npm i -g @openai/codex
codex
```

You can also install via Homebrew (`brew install codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).
You can also install via Homebrew (`brew install --cask codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).

## Documentation quickstart

- First run with Codex? Start with [`docs/getting-started.md`](../docs/getting-started.md) (links to the walkthrough for prompts, keyboard shortcuts, and session management).
- Want deeper control? See [`docs/config.md`](../docs/config.md) and [`docs/install.md`](../docs/install.md).

## What's new in the Rust CLI

@@ -23,9 +28,15 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses

### Model Context Protocol Support

Codex CLI functions as an MCP client that can connect to MCP servers on startup. See the [`mcp_servers`](../docs/config.md#mcp_servers) section in the configuration documentation for details.
#### MCP client

It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp-server`. Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:
Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#connecting-to-mcp-servers) for details.

#### MCP server (experimental)

Codex can be launched as an MCP _server_ by running `codex mcp-server`. This allows _other_ MCP clients to use Codex as a tool for another agent.

Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:

```shell
npx @modelcontextprotocol/inspector codex mcp-server
@@ -35,45 +46,28 @@ Use `codex mcp` to add/list/get/remove MCP server launchers defined in `config.t

### Notifications

You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS.
You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS. When Codex detects that it is running under WSL 2 inside Windows Terminal (`WT_SESSION` is set), the TUI automatically falls back to native Windows toast notifications so approval prompts and completed turns surface even though Windows Terminal does not implement OSC 9.

### `codex exec` to run Codex programmatically/non-interactively

To run Codex non-interactively, run `codex exec PROMPT` (you can also pass the prompt via `stdin`) and Codex will work on your task until it decides that it is done and exits. Output is printed to the terminal directly. You can set the `RUST_LOG` environment variable to see more about what's going on.
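For example, one way to drive the non-interactive mode from a script (a sketch; the prompt and `RUST_LOG` value are arbitrary examples):

```python
# Sketch: running `codex exec` non-interactively from Python.
import os
import subprocess

env = dict(os.environ, RUST_LOG="info")
result = subprocess.run(
    ["codex", "exec", "Summarize the TODO comments in this repo"],
    env=env,
    text=True,
    capture_output=True,
)
print(result.stdout)
```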
### Use `@` for file search

Typing `@` triggers a fuzzy-filename search over the workspace root. Use up/down to select among the results and Tab or Enter to replace the `@` with the selected path. You can use Esc to cancel the search.

### Esc–Esc to edit a previous message

When the chat composer is empty, press Esc to prime “backtrack” mode. Press Esc again to open a transcript preview highlighting the last user message; press Esc repeatedly to step to older user messages. Press Enter to confirm and Codex will fork the conversation from that point, trim the visible transcript accordingly, and pre‑fill the composer with the selected user message so you can edit and resubmit it.

In the transcript preview, the footer shows an `Esc edit prev` hint while editing is active.

### `--cd`/`-C` flag

Sometimes it is not convenient to `cd` to the directory you want Codex to use as the "working root" before running Codex. Fortunately, `codex` supports a `--cd` option so you can specify whatever folder you want. You can confirm that Codex is honoring `--cd` by double-checking the **workdir** it reports in the TUI at the start of a new session.

### Shell completions

Generate shell completion scripts via:

```shell
codex completion bash
codex completion zsh
codex completion fish
```

### Experimenting with the Codex Sandbox

To test to see what happens when a command is run under the sandbox provided by Codex, we provide the following subcommands in Codex CLI:

```
# macOS
codex debug seatbelt [--full-auto] [COMMAND]...
codex sandbox macos [--full-auto] [--log-denials] [COMMAND]...

# Linux
codex sandbox linux [--full-auto] [COMMAND]...

# Windows
codex sandbox windows [--full-auto] [COMMAND]...

# Legacy aliases
codex debug seatbelt [--full-auto] [--log-denials] [COMMAND]...
codex debug landlock [--full-auto] [COMMAND]...
```

6  codex-rs/ansi-escape/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "ansi-escape",
    crate_name = "codex_ansi_escape",
)

@@ -1,7 +1,8 @@
[package]
edition = "2024"
name = "codex-ansi-escape"
version = { workspace = true }
version.workspace = true
edition.workspace = true
license.workspace = true

[lib]
name = "codex_ansi_escape"

@@ -3,11 +3,30 @@ use ansi_to_tui::IntoText;
use ratatui::text::Line;
use ratatui::text::Text;

// Expand tabs in a best-effort way for transcript rendering.
// Tabs can interact poorly with left-gutter prefixes in our TUI and CLI
// transcript views (e.g., `nl` separates line numbers from content with a tab).
// Replacing tabs with spaces avoids odd visual artifacts without changing
// semantics for our use cases.
fn expand_tabs(s: &str) -> std::borrow::Cow<'_, str> {
    if s.contains('\t') {
        // Keep it simple: replace each tab with 4 spaces.
        // We do not try to align to tab stops since most usages (like `nl`)
        // look acceptable with a fixed substitution and this avoids stateful math
        // across spans.
        std::borrow::Cow::Owned(s.replace('\t', "    "))
    } else {
        std::borrow::Cow::Borrowed(s)
    }
}

/// This function should be used when the contents of `s` are expected to match
/// a single line. If multiple lines are found, a warning is logged and only the
/// first line is returned.
pub fn ansi_escape_line(s: &str) -> Line<'static> {
    let text = ansi_escape(s);
    // Normalize tabs to spaces to avoid odd gutter collisions in transcript mode.
    let s = expand_tabs(s);
    let text = ansi_escape(&s);
    match text.lines.as_slice() {
        [] => "".into(),
        [only] => only.clone(),
6
codex-rs/app-server-protocol/BUILD.bazel
Normal file
6
codex-rs/app-server-protocol/BUILD.bazel
Normal file
@@ -0,0 +1,6 @@
|
||||
load("//:defs.bzl", "codex_rust_crate")
|
||||
|
||||
codex_rust_crate(
|
||||
name = "app-server-protocol",
|
||||
crate_name = "codex_app_server_protocol",
|
||||
)
|
||||
30
codex-rs/app-server-protocol/Cargo.toml
Normal file
30
codex-rs/app-server-protocol/Cargo.toml
Normal file
@@ -0,0 +1,30 @@
|
||||
[package]
|
||||
name = "codex-app-server-protocol"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "codex_app_server_protocol"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-utils-absolute-path = { workspace = true }
|
||||
mcp-types = { workspace = true }
|
||||
schemars = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
strum_macros = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
ts-rs = { workspace = true }
|
||||
uuid = { workspace = true, features = ["serde", "v7"] }
|
||||
|
||||
[dev-dependencies]
|
||||
anyhow = { workspace = true }
|
||||
pretty_assertions = { workspace = true }
|
||||
@@ -3,18 +3,20 @@ use clap::Parser;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(about = "Generate TypeScript bindings for the Codex protocol")]
|
||||
#[command(
|
||||
about = "Generate TypeScript bindings and JSON Schemas for the Codex app-server protocol"
|
||||
)]
|
||||
struct Args {
|
||||
/// Output directory where .ts files will be written
|
||||
/// Output directory where generated files will be written
|
||||
#[arg(short = 'o', long = "out", value_name = "DIR")]
|
||||
out_dir: PathBuf,
|
||||
|
||||
/// Optional path to the Prettier executable to format generated files
|
||||
/// Optional Prettier executable path to format generated TypeScript files
|
||||
#[arg(short = 'p', long = "prettier", value_name = "PRETTIER_BIN")]
|
||||
prettier: Option<PathBuf>,
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
let args = Args::parse();
|
||||
codex_protocol_ts::generate_ts(&args.out_dir, args.prettier.as_deref())
|
||||
codex_app_server_protocol::generate_types(&args.out_dir, args.prettier.as_deref())
|
||||
}
|
||||
946
codex-rs/app-server-protocol/src/export.rs
Normal file
946
codex-rs/app-server-protocol/src/export.rs
Normal file
@@ -0,0 +1,946 @@
|
||||
use crate::ClientNotification;
|
||||
use crate::ClientRequest;
|
||||
use crate::ServerNotification;
|
||||
use crate::ServerRequest;
|
||||
use crate::export_client_notification_schemas;
|
||||
use crate::export_client_param_schemas;
|
||||
use crate::export_client_response_schemas;
|
||||
use crate::export_client_responses;
|
||||
use crate::export_server_notification_schemas;
|
||||
use crate::export_server_param_schemas;
|
||||
use crate::export_server_response_schemas;
|
||||
use crate::export_server_responses;
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use schemars::JsonSchema;
|
||||
use schemars::schema_for;
|
||||
use serde::Serialize;
|
||||
use serde_json::Map;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::io::Read;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use ts_rs::TS;
|
||||
|
||||
const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
|
||||
const IGNORED_DEFINITIONS: &[&str] = &["Option<()>"];
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct GeneratedSchema {
|
||||
namespace: Option<String>,
|
||||
logical_name: String,
|
||||
value: Value,
|
||||
in_v1_dir: bool,
|
||||
}
|
||||
|
||||
impl GeneratedSchema {
|
||||
fn namespace(&self) -> Option<&str> {
|
||||
self.namespace.as_deref()
|
||||
}
|
||||
|
||||
fn logical_name(&self) -> &str {
|
||||
&self.logical_name
|
||||
}
|
||||
|
||||
fn value(&self) -> &Value {
|
||||
&self.value
|
||||
}
|
||||
}
|
||||
|
||||
type JsonSchemaEmitter = fn(&Path) -> Result<GeneratedSchema>;
|
||||
pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
generate_ts(out_dir, prettier)?;
|
||||
generate_json(out_dir)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct GenerateTsOptions {
|
||||
pub generate_indices: bool,
|
||||
pub ensure_headers: bool,
|
||||
pub run_prettier: bool,
|
||||
}
|
||||
|
||||
impl Default for GenerateTsOptions {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
generate_indices: true,
|
||||
ensure_headers: true,
|
||||
run_prettier: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
generate_ts_with_options(out_dir, prettier, GenerateTsOptions::default())
|
||||
}
|
||||
|
||||
pub fn generate_ts_with_options(
|
||||
out_dir: &Path,
|
||||
prettier: Option<&Path>,
|
||||
options: GenerateTsOptions,
|
||||
) -> Result<()> {
|
||||
let v2_out_dir = out_dir.join("v2");
|
||||
ensure_dir(out_dir)?;
|
||||
ensure_dir(&v2_out_dir)?;
|
||||
|
||||
ClientRequest::export_all_to(out_dir)?;
|
||||
export_client_responses(out_dir)?;
|
||||
ClientNotification::export_all_to(out_dir)?;
|
||||
|
||||
ServerRequest::export_all_to(out_dir)?;
|
||||
export_server_responses(out_dir)?;
|
||||
ServerNotification::export_all_to(out_dir)?;
|
||||
|
||||
if options.generate_indices {
|
||||
generate_index_ts(out_dir)?;
|
||||
generate_index_ts(&v2_out_dir)?;
|
||||
}
|
||||
|
||||
// Ensure our header is present on all TS files (root + subdirs like v2/).
|
||||
let mut ts_files = Vec::new();
|
||||
let should_collect_ts_files =
|
||||
options.ensure_headers || (options.run_prettier && prettier.is_some());
|
||||
if should_collect_ts_files {
|
||||
ts_files = ts_files_in_recursive(out_dir)?;
|
||||
}
|
||||
|
||||
if options.ensure_headers {
|
||||
for file in &ts_files {
|
||||
prepend_header_if_missing(file)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Optionally run Prettier on all generated TS files.
|
||||
if options.run_prettier
|
||||
&& let Some(prettier_bin) = prettier
|
||||
&& !ts_files.is_empty()
|
||||
{
|
||||
let status = Command::new(prettier_bin)
|
||||
.arg("--write")
|
||||
.arg("--log-level")
|
||||
.arg("warn")
|
||||
.args(ts_files.iter().map(|p| p.as_os_str()))
|
||||
.status()
|
||||
.with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
|
||||
if !status.success() {
|
||||
return Err(anyhow!("Prettier failed with status {status}"));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn generate_json(out_dir: &Path) -> Result<()> {
|
||||
ensure_dir(out_dir)?;
|
||||
let envelope_emitters: &[JsonSchemaEmitter] = &[
|
||||
|d| write_json_schema_with_return::<crate::RequestId>(d, "RequestId"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCMessage>(d, "JSONRPCMessage"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCRequest>(d, "JSONRPCRequest"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCNotification>(d, "JSONRPCNotification"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCResponse>(d, "JSONRPCResponse"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCError>(d, "JSONRPCError"),
|
||||
|d| write_json_schema_with_return::<crate::JSONRPCErrorError>(d, "JSONRPCErrorError"),
|
||||
|d| write_json_schema_with_return::<crate::ClientRequest>(d, "ClientRequest"),
|
||||
|d| write_json_schema_with_return::<crate::ServerRequest>(d, "ServerRequest"),
|
||||
|d| write_json_schema_with_return::<crate::ClientNotification>(d, "ClientNotification"),
|
||||
|d| write_json_schema_with_return::<crate::ServerNotification>(d, "ServerNotification"),
|
||||
|d| write_json_schema_with_return::<EventMsg>(d, "EventMsg"),
|
||||
];
|
||||
|
||||
let mut schemas: Vec<GeneratedSchema> = Vec::new();
|
||||
for emit in envelope_emitters {
|
||||
schemas.push(emit(out_dir)?);
|
||||
}
|
||||
|
||||
schemas.extend(export_client_param_schemas(out_dir)?);
|
||||
schemas.extend(export_client_response_schemas(out_dir)?);
|
||||
schemas.extend(export_server_param_schemas(out_dir)?);
|
||||
schemas.extend(export_server_response_schemas(out_dir)?);
|
||||
schemas.extend(export_client_notification_schemas(out_dir)?);
|
||||
schemas.extend(export_server_notification_schemas(out_dir)?);
|
||||
|
||||
let bundle = build_schema_bundle(schemas)?;
|
||||
write_pretty_json(
|
||||
out_dir.join("codex_app_server_protocol.schemas.json"),
|
||||
&bundle,
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
|
||||
const SPECIAL_DEFINITIONS: &[&str] = &[
|
||||
"ClientNotification",
|
||||
"ClientRequest",
|
||||
"EventMsg",
|
||||
"ServerNotification",
|
||||
"ServerRequest",
|
||||
];
|
||||
|
||||
let namespaced_types = collect_namespaced_types(&schemas);
|
||||
let mut definitions = Map::new();
|
||||
|
||||
for schema in schemas {
|
||||
let GeneratedSchema {
|
||||
namespace,
|
||||
logical_name,
|
||||
mut value,
|
||||
in_v1_dir,
|
||||
} = schema;
|
||||
|
||||
if IGNORED_DEFINITIONS.contains(&logical_name.as_str()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(ref ns) = namespace {
|
||||
rewrite_refs_to_namespace(&mut value, ns);
|
||||
}
|
||||
|
||||
let mut forced_namespace_refs: Vec<(String, String)> = Vec::new();
|
||||
if let Value::Object(ref mut obj) = value
|
||||
&& let Some(defs) = obj.remove("definitions")
|
||||
&& let Value::Object(defs_obj) = defs
|
||||
{
|
||||
for (def_name, mut def_schema) in defs_obj {
|
||||
if IGNORED_DEFINITIONS.contains(&def_name.as_str()) {
|
||||
continue;
|
||||
}
|
||||
if SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
|
||||
continue;
|
||||
}
|
||||
annotate_schema(&mut def_schema, Some(def_name.as_str()));
|
||||
let target_namespace = match namespace {
|
||||
Some(ref ns) => Some(ns.clone()),
|
||||
None => namespace_for_definition(&def_name, &namespaced_types)
|
||||
.cloned()
|
||||
.filter(|_| !in_v1_dir),
|
||||
};
|
||||
if let Some(ref ns) = target_namespace {
|
||||
if namespace.as_deref() == Some(ns.as_str()) {
|
||||
rewrite_refs_to_namespace(&mut def_schema, ns);
|
||||
insert_into_namespace(&mut definitions, ns, def_name.clone(), def_schema)?;
|
||||
} else if !forced_namespace_refs
|
||||
.iter()
|
||||
.any(|(name, existing_ns)| name == &def_name && existing_ns == ns)
|
||||
{
|
||||
forced_namespace_refs.push((def_name.clone(), ns.clone()));
|
||||
}
|
||||
} else {
|
||||
definitions.insert(def_name, def_schema);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (name, ns) in forced_namespace_refs {
|
||||
rewrite_named_ref_to_namespace(&mut value, &ns, &name);
|
||||
}
|
||||
|
||||
if let Some(ref ns) = namespace {
|
||||
insert_into_namespace(&mut definitions, ns, logical_name.clone(), value)?;
|
||||
} else {
|
||||
definitions.insert(logical_name, value);
|
||||
}
|
||||
}
|
||||
|
||||
let mut root = Map::new();
|
||||
root.insert(
|
||||
"$schema".to_string(),
|
||||
Value::String("http://json-schema.org/draft-07/schema#".into()),
|
||||
);
|
||||
root.insert(
|
||||
"title".to_string(),
|
||||
Value::String("CodexAppServerProtocol".into()),
|
||||
);
|
||||
root.insert("type".to_string(), Value::String("object".into()));
|
||||
root.insert("definitions".to_string(), Value::Object(definitions));
|
||||
|
||||
Ok(Value::Object(root))
|
||||
}
|
||||
|
||||
fn insert_into_namespace(
|
||||
definitions: &mut Map<String, Value>,
|
||||
namespace: &str,
|
||||
name: String,
|
||||
schema: Value,
|
||||
) -> Result<()> {
|
||||
let entry = definitions
|
||||
.entry(namespace.to_string())
|
||||
.or_insert_with(|| Value::Object(Map::new()));
|
||||
match entry {
|
||||
Value::Object(map) => {
|
||||
map.insert(name, schema);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(anyhow!("expected namespace {namespace} to be an object")),
|
||||
}
|
||||
}
|
||||
|
||||
fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
|
||||
where
|
||||
T: JsonSchema,
|
||||
{
|
||||
let file_stem = name.trim();
|
||||
let schema = schema_for!(T);
|
||||
let mut schema_value = serde_json::to_value(schema)?;
|
||||
annotate_schema(&mut schema_value, Some(file_stem));
|
||||
// If the name looks like a namespaced path (e.g., "v2::Type"), mirror
|
||||
// the TypeScript layout and write to out_dir/v2/Type.json. Otherwise
|
||||
// write alongside the legacy files.
|
||||
let (raw_namespace, logical_name) = split_namespace(file_stem);
|
||||
let out_path = if let Some(ns) = raw_namespace {
|
||||
let dir = out_dir.join(ns);
|
||||
ensure_dir(&dir)?;
|
||||
dir.join(format!("{logical_name}.json"))
|
||||
} else {
|
||||
out_dir.join(format!("{file_stem}.json"))
|
||||
};
|
||||
|
||||
if !IGNORED_DEFINITIONS.contains(&logical_name) {
|
||||
write_pretty_json(out_path, &schema_value)
|
||||
.with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
|
||||
}
|
||||
|
||||
let namespace = match raw_namespace {
|
||||
Some("v1") | None => None,
|
||||
Some(ns) => Some(ns.to_string()),
|
||||
};
|
||||
Ok(GeneratedSchema {
|
||||
in_v1_dir: raw_namespace == Some("v1"),
|
||||
namespace,
|
||||
logical_name: logical_name.to_string(),
|
||||
value: schema_value,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
|
||||
where
|
||||
T: JsonSchema,
|
||||
{
|
||||
write_json_schema_with_return::<T>(out_dir, name)
|
||||
}
|
||||
|
||||
fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
|
||||
let json = serde_json::to_vec_pretty(value)
|
||||
.with_context(|| format!("Failed to serialize JSON schema to {}", path.display()))?;
|
||||
fs::write(&path, json).with_context(|| format!("Failed to write {}", path.display()))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Split a fully-qualified type name like "v2::Type" into its namespace and logical name.
|
||||
fn split_namespace(name: &str) -> (Option<&str>, &str) {
|
||||
name.split_once("::")
|
||||
.map_or((None, name), |(ns, rest)| (Some(ns), rest))
|
||||
}
|
||||
|
||||
/// Recursively rewrite $ref values that point at "#/definitions/..." so that
|
||||
/// they point to a namespaced location under the bundle.
|
||||
fn rewrite_refs_to_namespace(value: &mut Value, ns: &str) {
|
||||
match value {
|
||||
Value::Object(obj) => {
|
||||
if let Some(Value::String(r)) = obj.get_mut("$ref")
|
||||
&& let Some(suffix) = r.strip_prefix("#/definitions/")
|
||||
{
|
||||
let prefix = format!("{ns}/");
|
||||
if !suffix.starts_with(&prefix) {
|
||||
*r = format!("#/definitions/{ns}/{suffix}");
|
||||
}
|
||||
}
|
||||
for v in obj.values_mut() {
|
||||
rewrite_refs_to_namespace(v, ns);
|
||||
}
|
||||
}
|
||||
Value::Array(items) => {
|
||||
for v in items.iter_mut() {
|
||||
rewrite_refs_to_namespace(v, ns);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_namespaced_types(schemas: &[GeneratedSchema]) -> HashMap<String, String> {
|
||||
let mut types = HashMap::new();
|
||||
for schema in schemas {
|
||||
if let Some(ns) = schema.namespace() {
|
||||
types
|
||||
.entry(schema.logical_name().to_string())
|
||||
.or_insert_with(|| ns.to_string());
|
||||
if let Some(Value::Object(defs)) = schema.value().get("definitions") {
|
||||
for key in defs.keys() {
|
||||
types.entry(key.clone()).or_insert_with(|| ns.to_string());
|
||||
}
|
||||
}
|
||||
if let Some(Value::Object(defs)) = schema.value().get("$defs") {
|
||||
for key in defs.keys() {
|
||||
types.entry(key.clone()).or_insert_with(|| ns.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
types
|
||||
}
|
||||
|
||||
fn namespace_for_definition<'a>(
|
||||
name: &str,
|
||||
types: &'a HashMap<String, String>,
|
||||
) -> Option<&'a String> {
|
||||
if let Some(ns) = types.get(name) {
|
||||
return Some(ns);
|
||||
}
|
||||
let trimmed = name.trim_end_matches(|c: char| c.is_ascii_digit());
|
||||
if trimmed != name {
|
||||
return types.get(trimmed);
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
|
||||
if let Some(props) = variant.get("properties").and_then(Value::as_object) {
|
||||
if let Some(method_literal) = literal_from_property(props, "method") {
|
||||
let pascal = to_pascal_case(method_literal);
|
||||
return Some(match base {
|
||||
"ClientRequest" | "ServerRequest" => format!("{pascal}Request"),
|
||||
"ClientNotification" | "ServerNotification" => format!("{pascal}Notification"),
|
||||
_ => format!("{pascal}{base}"),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(type_literal) = literal_from_property(props, "type") {
|
||||
let pascal = to_pascal_case(type_literal);
|
||||
return Some(match base {
|
||||
"EventMsg" => format!("{pascal}EventMsg"),
|
||||
_ => format!("{pascal}{base}"),
|
||||
});
|
||||
}
|
||||
|
||||
if props.len() == 1
|
||||
&& let Some(key) = props.keys().next()
|
||||
{
|
||||
let pascal = to_pascal_case(key);
|
||||
return Some(format!("{pascal}{base}"));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(required) = variant.get("required").and_then(Value::as_array)
|
||||
&& required.len() == 1
|
||||
&& let Some(key) = required[0].as_str()
|
||||
{
|
||||
let pascal = to_pascal_case(key);
|
||||
return Some(format!("{pascal}{base}"));
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn literal_from_property<'a>(props: &'a Map<String, Value>, key: &str) -> Option<&'a str> {
|
||||
props.get(key).and_then(string_literal)
|
||||
}
|
||||
|
||||
fn string_literal(value: &Value) -> Option<&str> {
|
||||
value.get("const").and_then(Value::as_str).or_else(|| {
|
||||
value
|
||||
.get("enum")
|
||||
.and_then(Value::as_array)
|
||||
.and_then(|arr| arr.first())
|
||||
.and_then(Value::as_str)
|
||||
})
|
||||
}
|
||||
|
||||
fn annotate_schema(value: &mut Value, base: Option<&str>) {
    match value {
        Value::Object(map) => annotate_object(map, base),
        Value::Array(items) => {
            for item in items {
                annotate_schema(item, base);
            }
        }
        _ => {}
    }
}

fn annotate_object(map: &mut Map<String, Value>, base: Option<&str>) {
    let owner = map.get("title").and_then(Value::as_str).map(str::to_owned);
    if let Some(owner) = owner.as_deref()
        && let Some(Value::Object(props)) = map.get_mut("properties")
    {
        set_discriminator_titles(props, owner);
    }

    if let Some(Value::Array(variants)) = map.get_mut("oneOf") {
        annotate_variant_list(variants, base);
    }
    if let Some(Value::Array(variants)) = map.get_mut("anyOf") {
        annotate_variant_list(variants, base);
    }

    if let Some(Value::Object(defs)) = map.get_mut("definitions") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }

    if let Some(Value::Object(defs)) = map.get_mut("$defs") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }

    if let Some(Value::Object(props)) = map.get_mut("properties") {
        for value in props.values_mut() {
            annotate_schema(value, base);
        }
    }

    if let Some(items) = map.get_mut("items") {
        annotate_schema(items, base);
    }

    if let Some(additional) = map.get_mut("additionalProperties") {
        annotate_schema(additional, base);
    }

    for (key, child) in map.iter_mut() {
        match key.as_str() {
            "oneOf"
            | "anyOf"
            | "definitions"
            | "$defs"
            | "properties"
            | "items"
            | "additionalProperties" => {}
            _ => annotate_schema(child, base),
        }
    }
}

fn annotate_variant_list(variants: &mut [Value], base: Option<&str>) {
    let mut seen = HashSet::new();

    for variant in variants.iter() {
        if let Some(name) = variant_title(variant) {
            seen.insert(name.to_owned());
        }
    }

    for variant in variants.iter_mut() {
        let mut variant_name = variant_title(variant).map(str::to_owned);

        if variant_name.is_none()
            && let Some(base_name) = base
            && let Some(name) = variant_definition_name(base_name, variant)
        {
            let mut candidate = name.clone();
            let mut index = 2;
            while seen.contains(&candidate) {
                candidate = format!("{name}{index}");
                index += 1;
            }
            if let Some(obj) = variant.as_object_mut() {
                obj.insert("title".into(), Value::String(candidate.clone()));
            }
            seen.insert(candidate.clone());
            variant_name = Some(candidate);
        }

        if let Some(name) = variant_name.as_deref()
            && let Some(obj) = variant.as_object_mut()
            && let Some(Value::Object(props)) = obj.get_mut("properties")
        {
            set_discriminator_titles(props, name);
        }

        annotate_schema(variant, base);
    }
}

const DISCRIMINATOR_KEYS: &[&str] = &["type", "method", "mode", "status", "role", "reason"];

fn set_discriminator_titles(props: &mut Map<String, Value>, owner: &str) {
    for key in DISCRIMINATOR_KEYS {
        if let Some(prop_schema) = props.get_mut(*key)
            && string_literal(prop_schema).is_some()
            && let Value::Object(prop_obj) = prop_schema
        {
            if prop_obj.contains_key("title") {
                continue;
            }
            let suffix = to_pascal_case(key);
            prop_obj.insert("title".into(), Value::String(format!("{owner}{suffix}")));
        }
    }
}

fn variant_title(value: &Value) -> Option<&str> {
    value
        .as_object()
        .and_then(|obj| obj.get("title"))
        .and_then(Value::as_str)
}

fn to_pascal_case(input: &str) -> String {
    let mut result = String::new();
    let mut capitalize_next = true;

    for c in input.chars() {
        if c == '_' || c == '-' {
            capitalize_next = true;
            continue;
        }

        if capitalize_next {
            result.extend(c.to_uppercase());
            capitalize_next = false;
        } else {
            result.push(c);
        }
    }

    result
}
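
// A minimal sketch of the casing rule above (hypothetical test, not in the
// original file): '_' and '-' are dropped and the following character is
// upper-cased, so discriminator keys map onto PascalCase title suffixes.
#[cfg(test)]
mod to_pascal_case_example {
    use super::to_pascal_case;

    #[test]
    fn converts_snake_and_kebab_case() {
        assert_eq!(to_pascal_case("type"), "Type");
        assert_eq!(to_pascal_case("plan_type"), "PlanType");
        assert_eq!(to_pascal_case("rate-limits"), "RateLimits");
    }
}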

fn ensure_dir(dir: &Path) -> Result<()> {
    fs::create_dir_all(dir)
        .with_context(|| format!("Failed to create output directory {}", dir.display()))
}

fn rewrite_named_ref_to_namespace(value: &mut Value, ns: &str, name: &str) {
    let direct = format!("#/definitions/{name}");
    let prefixed = format!("{direct}/");
    let replacement = format!("#/definitions/{ns}/{name}");
    let replacement_prefixed = format!("{replacement}/");
    match value {
        Value::Object(obj) => {
            if let Some(Value::String(reference)) = obj.get_mut("$ref") {
                if reference == &direct {
                    *reference = replacement;
                } else if let Some(rest) = reference.strip_prefix(&prefixed) {
                    *reference = format!("{replacement_prefixed}{rest}");
                }
            }
            for child in obj.values_mut() {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        Value::Array(items) => {
            for child in items {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        _ => {}
    }
}
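
// Minimal sketch (hypothetical test, not part of the original file) of the
// `$ref` rewrite above: a bare reference and a reference that points into the
// definition are both re-pointed at the namespaced copy.
#[cfg(test)]
mod rewrite_named_ref_example {
    use super::rewrite_named_ref_to_namespace;
    use serde_json::json;

    #[test]
    fn repoints_direct_and_prefixed_refs() {
        let mut schema = json!({
            "properties": {
                "turn": { "$ref": "#/definitions/Turn" },
                "status": { "$ref": "#/definitions/Turn/status" }
            }
        });
        rewrite_named_ref_to_namespace(&mut schema, "v2", "Turn");
        assert_eq!(schema["properties"]["turn"]["$ref"], "#/definitions/v2/Turn");
        assert_eq!(
            schema["properties"]["status"]["$ref"],
            "#/definitions/v2/Turn/status"
        );
    }
}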

fn prepend_header_if_missing(path: &Path) -> Result<()> {
    let mut content = String::new();
    {
        let mut f = fs::File::open(path)
            .with_context(|| format!("Failed to open {} for reading", path.display()))?;
        f.read_to_string(&mut content)
            .with_context(|| format!("Failed to read {}", path.display()))?;
    }

    if content.starts_with(HEADER) {
        return Ok(());
    }

    let mut f = fs::File::create(path)
        .with_context(|| format!("Failed to open {} for writing", path.display()))?;
    f.write_all(HEADER.as_bytes())
        .with_context(|| format!("Failed to write header to {}", path.display()))?;
    f.write_all(content.as_bytes())
        .with_context(|| format!("Failed to write content to {}", path.display()))?;
    Ok(())
}

fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
    let mut files = Vec::new();
    for entry in
        fs::read_dir(dir).with_context(|| format!("Failed to read dir {}", dir.display()))?
    {
        let entry = entry?;
        let path = entry.path();
        if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
            files.push(path);
        }
    }
    files.sort();
    Ok(files)
}

fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
    let mut files = Vec::new();
    let mut stack = vec![dir.to_path_buf()];
    while let Some(d) = stack.pop() {
        for entry in
            fs::read_dir(&d).with_context(|| format!("Failed to read dir {}", d.display()))?
        {
            let entry = entry?;
            let path = entry.path();
            if path.is_dir() {
                stack.push(path);
            } else if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
                files.push(path);
            }
        }
    }
    files.sort();
    Ok(files)
}

/// Generate an index.ts file that re-exports all generated types.
/// This allows consumers to import all types from a single file.
fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
    let mut entries: Vec<String> = Vec::new();
    let mut stems: Vec<String> = ts_files_in(out_dir)?
        .into_iter()
        .filter_map(|p| {
            let stem = p.file_stem()?.to_string_lossy().into_owned();
            if stem == "index" { None } else { Some(stem) }
        })
        .collect();
    stems.sort();
    stems.dedup();

    for name in stems {
        entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
    }

    // If this is the root out_dir and a ./v2 folder exists with TS files,
    // expose it as a namespace to avoid symbol collisions at the root.
    let v2_dir = out_dir.join("v2");
    let has_v2_ts = ts_files_in(&v2_dir).map(|v| !v.is_empty()).unwrap_or(false);
    if has_v2_ts {
        entries.push("export * as v2 from \"./v2\";\n".to_string());
    }

    let mut content =
        String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
    content.push_str(HEADER);
    for line in &entries {
        content.push_str(line);
    }

    let index_path = out_dir.join("index.ts");
    let mut f = fs::File::create(&index_path)
        .with_context(|| format!("Failed to create {}", index_path.display()))?;
    f.write_all(content.as_bytes())
        .with_context(|| format!("Failed to write {}", index_path.display()))?;
    Ok(index_path)
}
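
// For reference, the emitted index.ts has roughly this shape (illustrative,
// assuming generated files ThreadItem.ts and Turn.ts plus a v2/ folder):
//
//     export type { ThreadItem } from "./ThreadItem";
//     export type { Turn } from "./Turn";
//     export * as v2 from "./v2";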

#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;
    use std::collections::BTreeSet;
    use std::fs;
    use std::path::PathBuf;
    use uuid::Uuid;

    #[test]
    fn generated_ts_has_no_optional_nullable_fields() -> Result<()> {
        // Assert that there are no types of the form "?: T | null" in the generated TS files.
        let output_dir = std::env::temp_dir().join(format!("codex_ts_types_{}", Uuid::now_v7()));
        fs::create_dir(&output_dir)?;

        struct TempDirGuard(PathBuf);

        impl Drop for TempDirGuard {
            fn drop(&mut self) {
                let _ = fs::remove_dir_all(&self.0);
            }
        }

        let _guard = TempDirGuard(output_dir.clone());

        // Avoid doing more work than necessary to keep the test from timing out.
        let options = GenerateTsOptions {
            generate_indices: false,
            ensure_headers: false,
            run_prettier: false,
        };
        generate_ts_with_options(&output_dir, None, options)?;

        let mut undefined_offenders = Vec::new();
        let mut optional_nullable_offenders = BTreeSet::new();
        let mut stack = vec![output_dir];
        while let Some(dir) = stack.pop() {
            for entry in fs::read_dir(&dir)? {
                let entry = entry?;
                let path = entry.path();
                if path.is_dir() {
                    stack.push(path);
                    continue;
                }

                if matches!(path.extension().and_then(|ext| ext.to_str()), Some("ts")) {
                    let contents = fs::read_to_string(&path)?;
                    if contents.contains("| undefined") {
                        undefined_offenders.push(path.clone());
                    }

                    const SKIP_PREFIXES: &[&str] = &[
                        "const ",
                        "let ",
                        "var ",
                        "export const ",
                        "export let ",
                        "export var ",
                    ];

                    let mut search_start = 0;
                    while let Some(idx) = contents[search_start..].find("| null") {
                        let abs_idx = search_start + idx;
                        // Find the property-colon for this field by scanning forward
                        // from the start of the segment and ignoring nested braces,
                        // brackets, and parens. This avoids colons inside nested
                        // type literals like `{ [k in string]?: string }`.

                        let line_start_idx =
                            contents[..abs_idx].rfind('\n').map(|i| i + 1).unwrap_or(0);

                        let mut segment_start_idx = line_start_idx;
                        if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind(',') {
                            segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
                        }
                        if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('{') {
                            segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
                        }
                        if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('}') {
                            segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
                        }

                        // Scan forward for the colon that separates the field name from its type.
                        let mut level_brace = 0_i32;
                        let mut level_brack = 0_i32;
                        let mut level_paren = 0_i32;
                        let mut in_single = false;
                        let mut in_double = false;
                        let mut escape = false;
                        let mut prop_colon_idx = None;
                        for (i, ch) in contents[segment_start_idx..abs_idx].char_indices() {
                            let idx_abs = segment_start_idx + i;
                            if escape {
                                escape = false;
                                continue;
                            }
                            match ch {
                                '\\' => {
                                    // Only treat as escape when inside a string.
                                    if in_single || in_double {
                                        escape = true;
                                    }
                                }
                                '\'' => {
                                    if !in_double {
                                        in_single = !in_single;
                                    }
                                }
                                '"' => {
                                    if !in_single {
                                        in_double = !in_double;
                                    }
                                }
                                '{' if !in_single && !in_double => level_brace += 1,
                                '}' if !in_single && !in_double => level_brace -= 1,
                                '[' if !in_single && !in_double => level_brack += 1,
                                ']' if !in_single && !in_double => level_brack -= 1,
                                '(' if !in_single && !in_double => level_paren += 1,
                                ')' if !in_single && !in_double => level_paren -= 1,
                                ':' if !in_single
                                    && !in_double
                                    && level_brace == 0
                                    && level_brack == 0
                                    && level_paren == 0 =>
                                {
                                    prop_colon_idx = Some(idx_abs);
                                    break;
                                }
                                _ => {}
                            }
                        }

                        let Some(colon_idx) = prop_colon_idx else {
                            search_start = abs_idx + 5;
                            continue;
                        };

                        let mut field_prefix = contents[segment_start_idx..colon_idx].trim();
                        if field_prefix.is_empty() {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        if let Some(comment_idx) = field_prefix.rfind("*/") {
                            field_prefix = field_prefix[comment_idx + 2..].trim_start();
                        }

                        if field_prefix.is_empty() {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        if SKIP_PREFIXES
                            .iter()
                            .any(|prefix| field_prefix.starts_with(prefix))
                        {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        if field_prefix.contains('(') {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        // If the last non-whitespace before ':' is '?', then this is an
                        // optional field with a nullable type (i.e., "?: T | null"),
                        // which we explicitly disallow.
                        if field_prefix.chars().rev().find(|c| !c.is_whitespace()) == Some('?') {
                            let line_number =
                                contents[..abs_idx].chars().filter(|c| *c == '\n').count() + 1;
                            let offending_line_end = contents[line_start_idx..]
                                .find('\n')
                                .map(|i| line_start_idx + i)
                                .unwrap_or(contents.len());
                            let offending_snippet =
                                contents[line_start_idx..offending_line_end].trim();

                            optional_nullable_offenders.insert(format!(
                                "{}:{}: {offending_snippet}",
                                path.display(),
                                line_number
                            ));
                        }

                        search_start = abs_idx + 5;
                    }
                }
            }
        }

        assert!(
            undefined_offenders.is_empty(),
            "Generated TypeScript still includes unions with `undefined` in {undefined_offenders:?}"
        );

        // If this assertion fails, it means a field was generated as
        // "?: T | null", i.e., both optional (undefined) and nullable (null).
        // We only want either "?: T" or ": T | null".
        assert!(
            optional_nullable_offenders.is_empty(),
            "Generated TypeScript has optional fields with nullable types (disallowed '?: T | null'), add #[ts(optional)] to fix:\n{optional_nullable_offenders:?}"
        );

        Ok(())
    }
}
codex-rs/app-server-protocol/src/jsonrpc_lite.rs (Normal file, 71 lines)
@@ -0,0 +1,71 @@
//! We do not do true JSON-RPC 2.0, as we neither send nor expect the
//! "jsonrpc": "2.0" field.

use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;

pub const JSONRPC_VERSION: &str = "2.0";

#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Hash, Eq, JsonSchema, TS)]
#[serde(untagged)]
pub enum RequestId {
    String(String),
    #[ts(type = "number")]
    Integer(i64),
}

pub type Result = serde_json::Value;

/// Refers to any valid JSON-RPC object that can be decoded off the wire, or encoded to be sent.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
#[serde(untagged)]
pub enum JSONRPCMessage {
    Request(JSONRPCRequest),
    Notification(JSONRPCNotification),
    Response(JSONRPCResponse),
    Error(JSONRPCError),
}

/// A request that expects a response.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCRequest {
    pub id: RequestId,
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}

/// A notification which does not expect a response.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCNotification {
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}

/// A successful (non-error) response to a request.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCResponse {
    pub id: RequestId,
    pub result: Result,
}

/// A response to a request that indicates an error occurred.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCError {
    pub error: JSONRPCErrorError,
    pub id: RequestId,
}

#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCErrorError {
    pub code: i64,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub data: Option<serde_json::Value>,
    pub message: String,
}
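
// Minimal wire-shape sketch (hypothetical test, not part of the original
// file): `RequestId` is untagged and `params` is skipped when `None`, so a
// request serializes to a flat object with no "jsonrpc" version field.
#[cfg(test)]
mod wire_shape_example {
    use super::*;

    #[test]
    fn request_serializes_without_version_field() {
        let request = JSONRPCRequest {
            id: RequestId::Integer(7),
            method: "ping".to_string(),
            params: None,
        };
        assert_eq!(
            serde_json::json!({ "id": 7, "method": "ping" }),
            serde_json::to_value(&request).unwrap(),
        );
    }
}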
codex-rs/app-server-protocol/src/lib.rs (Normal file, 12 lines)
@@ -0,0 +1,12 @@
mod export;
mod jsonrpc_lite;
mod protocol;

pub use export::generate_json;
pub use export::generate_ts;
pub use export::generate_types;
pub use jsonrpc_lite::*;
pub use protocol::common::*;
pub use protocol::thread_history::*;
pub use protocol::v1::*;
pub use protocol::v2::*;
codex-rs/app-server-protocol/src/protocol/common.rs (Normal file, 909 lines)
@@ -0,0 +1,909 @@
|
||||
use std::path::Path;
|
||||
|
||||
use crate::JSONRPCNotification;
|
||||
use crate::JSONRPCRequest;
|
||||
use crate::RequestId;
|
||||
use crate::export::GeneratedSchema;
|
||||
use crate::export::write_json_schema;
|
||||
use crate::protocol::v1;
|
||||
use crate::protocol::v2;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use strum_macros::Display;
|
||||
use ts_rs::TS;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
|
||||
#[ts(type = "string")]
|
||||
pub struct GitSha(pub String);
|
||||
|
||||
impl GitSha {
|
||||
pub fn new(sha: &str) -> Self {
|
||||
Self(sha.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum AuthMode {
|
||||
ApiKey,
|
||||
ChatGPT,
|
||||
}
|
||||
|
||||
/// Generates an `enum ClientRequest` where each variant is a request that the
|
||||
/// client can send to the server. Each variant has associated `params` and
|
||||
/// `response` types. Also generates an `export_client_responses()` function to
|
||||
/// export all response types to TypeScript.
|
||||
macro_rules! client_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $(=> $wire:literal)? {
|
||||
params: $(#[$params_meta:meta])* $params:ty,
|
||||
response: $response:ty,
|
||||
}
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Request from the client to the server.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ClientRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)] #[ts(rename = $wire)])?
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
$(#[$params_meta])*
|
||||
params: $params,
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
pub fn export_client_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
$(
|
||||
<$response as ::ts_rs::TS>::export_all_to(out_dir)?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_client_response_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(write_json_schema::<$response>(out_dir, stringify!($response))?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_client_param_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(write_json_schema::<$params>(out_dir, stringify!($params))?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
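
// Rough expansion sketch (illustrative, not generated verbatim): an entry such
// as `ThreadStart => "thread/start" { params: ..., response: ... }` becomes a
// tagged variant along the lines of
//
//     #[serde(rename = "thread/start")]
//     ThreadStart {
//         #[serde(rename = "id")]
//         request_id: RequestId,
//         params: v2::ThreadStartParams,
//     },
//
// so the wire form is {"method": "thread/start", "id": ..., "params": {...}}.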
|
||||
client_request_definitions! {
|
||||
Initialize {
|
||||
params: v1::InitializeParams,
|
||||
response: v1::InitializeResponse,
|
||||
},
|
||||
|
||||
/// NEW APIs
|
||||
// Thread lifecycle
|
||||
ThreadStart => "thread/start" {
|
||||
params: v2::ThreadStartParams,
|
||||
response: v2::ThreadStartResponse,
|
||||
},
|
||||
ThreadResume => "thread/resume" {
|
||||
params: v2::ThreadResumeParams,
|
||||
response: v2::ThreadResumeResponse,
|
||||
},
|
||||
ThreadFork => "thread/fork" {
|
||||
params: v2::ThreadForkParams,
|
||||
response: v2::ThreadForkResponse,
|
||||
},
|
||||
ThreadArchive => "thread/archive" {
|
||||
params: v2::ThreadArchiveParams,
|
||||
response: v2::ThreadArchiveResponse,
|
||||
},
|
||||
ThreadRollback => "thread/rollback" {
|
||||
params: v2::ThreadRollbackParams,
|
||||
response: v2::ThreadRollbackResponse,
|
||||
},
|
||||
ThreadList => "thread/list" {
|
||||
params: v2::ThreadListParams,
|
||||
response: v2::ThreadListResponse,
|
||||
},
|
||||
ThreadLoadedList => "thread/loaded/list" {
|
||||
params: v2::ThreadLoadedListParams,
|
||||
response: v2::ThreadLoadedListResponse,
|
||||
},
|
||||
SkillsList => "skills/list" {
|
||||
params: v2::SkillsListParams,
|
||||
response: v2::SkillsListResponse,
|
||||
},
|
||||
SkillsConfigWrite => "skills/config/write" {
|
||||
params: v2::SkillsConfigWriteParams,
|
||||
response: v2::SkillsConfigWriteResponse,
|
||||
},
|
||||
TurnStart => "turn/start" {
|
||||
params: v2::TurnStartParams,
|
||||
response: v2::TurnStartResponse,
|
||||
},
|
||||
TurnInterrupt => "turn/interrupt" {
|
||||
params: v2::TurnInterruptParams,
|
||||
response: v2::TurnInterruptResponse,
|
||||
},
|
||||
ReviewStart => "review/start" {
|
||||
params: v2::ReviewStartParams,
|
||||
response: v2::ReviewStartResponse,
|
||||
},
|
||||
|
||||
ModelList => "model/list" {
|
||||
params: v2::ModelListParams,
|
||||
response: v2::ModelListResponse,
|
||||
},
|
||||
/// EXPERIMENTAL - list collaboration mode presets.
|
||||
CollaborationModeList => "collaborationMode/list" {
|
||||
params: v2::CollaborationModeListParams,
|
||||
response: v2::CollaborationModeListResponse,
|
||||
},
|
||||
|
||||
McpServerOauthLogin => "mcpServer/oauth/login" {
|
||||
params: v2::McpServerOauthLoginParams,
|
||||
response: v2::McpServerOauthLoginResponse,
|
||||
},
|
||||
|
||||
McpServerRefresh => "config/mcpServer/reload" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::McpServerRefreshResponse,
|
||||
},
|
||||
|
||||
McpServerStatusList => "mcpServerStatus/list" {
|
||||
params: v2::ListMcpServerStatusParams,
|
||||
response: v2::ListMcpServerStatusResponse,
|
||||
},
|
||||
|
||||
LoginAccount => "account/login/start" {
|
||||
params: v2::LoginAccountParams,
|
||||
response: v2::LoginAccountResponse,
|
||||
},
|
||||
|
||||
CancelLoginAccount => "account/login/cancel" {
|
||||
params: v2::CancelLoginAccountParams,
|
||||
response: v2::CancelLoginAccountResponse,
|
||||
},
|
||||
|
||||
LogoutAccount => "account/logout" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::LogoutAccountResponse,
|
||||
},
|
||||
|
||||
GetAccountRateLimits => "account/rateLimits/read" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::GetAccountRateLimitsResponse,
|
||||
},
|
||||
|
||||
FeedbackUpload => "feedback/upload" {
|
||||
params: v2::FeedbackUploadParams,
|
||||
response: v2::FeedbackUploadResponse,
|
||||
},
|
||||
|
||||
/// Execute a command (argv vector) under the server's sandbox.
|
||||
OneOffCommandExec => "command/exec" {
|
||||
params: v2::CommandExecParams,
|
||||
response: v2::CommandExecResponse,
|
||||
},
|
||||
|
||||
ConfigRead => "config/read" {
|
||||
params: v2::ConfigReadParams,
|
||||
response: v2::ConfigReadResponse,
|
||||
},
|
||||
ConfigValueWrite => "config/value/write" {
|
||||
params: v2::ConfigValueWriteParams,
|
||||
response: v2::ConfigWriteResponse,
|
||||
},
|
||||
ConfigBatchWrite => "config/batchWrite" {
|
||||
params: v2::ConfigBatchWriteParams,
|
||||
response: v2::ConfigWriteResponse,
|
||||
},
|
||||
|
||||
ConfigRequirementsRead => "configRequirements/read" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::ConfigRequirementsReadResponse,
|
||||
},
|
||||
|
||||
GetAccount => "account/read" {
|
||||
params: v2::GetAccountParams,
|
||||
response: v2::GetAccountResponse,
|
||||
},
|
||||
|
||||
/// DEPRECATED APIs below
|
||||
NewConversation {
|
||||
params: v1::NewConversationParams,
|
||||
response: v1::NewConversationResponse,
|
||||
},
|
||||
GetConversationSummary {
|
||||
params: v1::GetConversationSummaryParams,
|
||||
response: v1::GetConversationSummaryResponse,
|
||||
},
|
||||
/// List recorded Codex conversations (rollouts) with optional pagination and search.
|
||||
ListConversations {
|
||||
params: v1::ListConversationsParams,
|
||||
response: v1::ListConversationsResponse,
|
||||
},
|
||||
/// Resume a recorded Codex conversation from a rollout file.
|
||||
ResumeConversation {
|
||||
params: v1::ResumeConversationParams,
|
||||
response: v1::ResumeConversationResponse,
|
||||
},
|
||||
/// Fork a recorded Codex conversation into a new session.
|
||||
ForkConversation {
|
||||
params: v1::ForkConversationParams,
|
||||
response: v1::ForkConversationResponse,
|
||||
},
|
||||
ArchiveConversation {
|
||||
params: v1::ArchiveConversationParams,
|
||||
response: v1::ArchiveConversationResponse,
|
||||
},
|
||||
SendUserMessage {
|
||||
params: v1::SendUserMessageParams,
|
||||
response: v1::SendUserMessageResponse,
|
||||
},
|
||||
SendUserTurn {
|
||||
params: v1::SendUserTurnParams,
|
||||
response: v1::SendUserTurnResponse,
|
||||
},
|
||||
InterruptConversation {
|
||||
params: v1::InterruptConversationParams,
|
||||
response: v1::InterruptConversationResponse,
|
||||
},
|
||||
AddConversationListener {
|
||||
params: v1::AddConversationListenerParams,
|
||||
response: v1::AddConversationSubscriptionResponse,
|
||||
},
|
||||
RemoveConversationListener {
|
||||
params: v1::RemoveConversationListenerParams,
|
||||
response: v1::RemoveConversationSubscriptionResponse,
|
||||
},
|
||||
GitDiffToRemote {
|
||||
params: v1::GitDiffToRemoteParams,
|
||||
response: v1::GitDiffToRemoteResponse,
|
||||
},
|
||||
LoginApiKey {
|
||||
params: v1::LoginApiKeyParams,
|
||||
response: v1::LoginApiKeyResponse,
|
||||
},
|
||||
LoginChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::LoginChatGptResponse,
|
||||
},
|
||||
// DEPRECATED in favor of CancelLoginAccount
|
||||
CancelLoginChatGpt {
|
||||
params: v1::CancelLoginChatGptParams,
|
||||
response: v1::CancelLoginChatGptResponse,
|
||||
},
|
||||
LogoutChatGpt {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::LogoutChatGptResponse,
|
||||
},
|
||||
/// DEPRECATED in favor of GetAccount
|
||||
GetAuthStatus {
|
||||
params: v1::GetAuthStatusParams,
|
||||
response: v1::GetAuthStatusResponse,
|
||||
},
|
||||
GetUserSavedConfig {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::GetUserSavedConfigResponse,
|
||||
},
|
||||
SetDefaultModel {
|
||||
params: v1::SetDefaultModelParams,
|
||||
response: v1::SetDefaultModelResponse,
|
||||
},
|
||||
GetUserAgent {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::GetUserAgentResponse,
|
||||
},
|
||||
UserInfo {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v1::UserInfoResponse,
|
||||
},
|
||||
FuzzyFileSearch {
|
||||
params: FuzzyFileSearchParams,
|
||||
response: FuzzyFileSearchResponse,
|
||||
},
|
||||
/// Execute a command (argv vector) under the server's sandbox.
|
||||
ExecOneOffCommand {
|
||||
params: v1::ExecOneOffCommandParams,
|
||||
response: v1::ExecOneOffCommandResponse,
|
||||
},
|
||||
}
|
||||
|
||||
/// Generates an `enum ServerRequest` where each variant is a request that the
|
||||
/// server can send to the client along with the corresponding params and
|
||||
/// response types. It also generates helper types used by the app/server
|
||||
/// infrastructure (payload enum, request constructor, and export helpers).
|
||||
macro_rules! server_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $(=> $wire:literal)? {
|
||||
params: $params:ty,
|
||||
response: $response:ty,
|
||||
}
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Request initiated from the server and sent to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ServerRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)] #[ts(rename = $wire)])?
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
params: $params,
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, JsonSchema)]
|
||||
pub enum ServerRequestPayload {
|
||||
$( $variant($params), )*
|
||||
}
|
||||
|
||||
impl ServerRequestPayload {
|
||||
pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
|
||||
match self {
|
||||
$(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn export_server_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
$(
|
||||
<$response as ::ts_rs::TS>::export_all_to(out_dir)?;
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_server_response_schemas(
|
||||
out_dir: &Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(crate::export::write_json_schema::<$response>(
|
||||
out_dir,
|
||||
concat!(stringify!($variant), "Response"),
|
||||
)?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_server_param_schemas(
|
||||
out_dir: &Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(crate::export::write_json_schema::<$params>(
|
||||
out_dir,
|
||||
concat!(stringify!($variant), "Params"),
|
||||
)?);
|
||||
)*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Generates `ServerNotification` enum and helpers, including a JSON Schema
|
||||
/// exporter for each notification.
|
||||
macro_rules! server_notification_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $(=> $wire:literal)? ( $payload:ty )
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Notification sent from the server to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ServerNotification {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)] #[ts(rename = $wire)] #[strum(serialize = $wire)])?
|
||||
$variant($payload),
|
||||
)*
|
||||
}
|
||||
|
||||
impl ServerNotification {
|
||||
pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
|
||||
match self {
|
||||
$(Self::$variant(params) => serde_json::to_value(params),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCNotification> for ServerNotification {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCNotification) -> Result<Self, serde_json::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
pub fn export_server_notification_schemas(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(schemas.push(crate::export::write_json_schema::<$payload>(out_dir, stringify!($payload))?);)*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
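
// Wire-shape note (illustrative): because the enum carries
// `#[serde(tag = "method", content = "params")]`, a value such as
// `ServerNotification::ThreadStarted(...)` serializes as
//
//     { "method": "thread/started", "params": { ... } }
//
// and `to_params()` recovers just the inner params object when forwarding.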
/// Notifications sent from the client to the server.
|
||||
macro_rules! client_notification_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $( ( $payload:ty ) )?
|
||||
),* $(,)?
|
||||
) => {
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
|
||||
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
|
||||
#[strum(serialize_all = "camelCase")]
|
||||
pub enum ClientNotification {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$variant $( ( $payload ) )?,
|
||||
)*
|
||||
}
|
||||
|
||||
pub fn export_client_notification_schemas(
|
||||
_out_dir: &::std::path::Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let schemas = Vec::new();
|
||||
$( $(schemas.push(crate::export::write_json_schema::<$payload>(_out_dir, stringify!($payload))?);)? )*
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl TryFrom<JSONRPCRequest> for ServerRequest {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
|
||||
server_request_definitions! {
|
||||
/// NEW APIs
|
||||
/// Sent when approval is requested for a specific command execution.
|
||||
/// This request is used for Turns started via turn/start.
|
||||
CommandExecutionRequestApproval => "item/commandExecution/requestApproval" {
|
||||
params: v2::CommandExecutionRequestApprovalParams,
|
||||
response: v2::CommandExecutionRequestApprovalResponse,
|
||||
},
|
||||
|
||||
/// Sent when approval is requested for a specific file change.
|
||||
/// This request is used for Turns started via turn/start.
|
||||
FileChangeRequestApproval => "item/fileChange/requestApproval" {
|
||||
params: v2::FileChangeRequestApprovalParams,
|
||||
response: v2::FileChangeRequestApprovalResponse,
|
||||
},
|
||||
|
||||
/// EXPERIMENTAL - Request input from the user for a tool call.
|
||||
ToolRequestUserInput => "item/tool/requestUserInput" {
|
||||
params: v2::ToolRequestUserInputParams,
|
||||
response: v2::ToolRequestUserInputResponse,
|
||||
},
|
||||
|
||||
/// DEPRECATED APIs below
|
||||
/// Request to approve a patch.
|
||||
/// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
|
||||
ApplyPatchApproval {
|
||||
params: v1::ApplyPatchApprovalParams,
|
||||
response: v1::ApplyPatchApprovalResponse,
|
||||
},
|
||||
/// Request to exec a command.
|
||||
/// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
|
||||
ExecCommandApproval {
|
||||
params: v1::ExecCommandApprovalParams,
|
||||
response: v1::ExecCommandApprovalResponse,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename_all = "camelCase")]
|
||||
pub struct FuzzyFileSearchParams {
|
||||
pub query: String,
|
||||
pub roots: Vec<String>,
|
||||
// if provided, will cancel any previous request that used the same value
|
||||
pub cancellation_token: Option<String>,
|
||||
}
|
||||
|
||||
/// Superset of [`codex_file_search::FileMatch`]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResult {
|
||||
pub root: String,
|
||||
pub path: String,
|
||||
pub file_name: String,
|
||||
pub score: u32,
|
||||
pub indices: Option<Vec<u32>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct FuzzyFileSearchResponse {
|
||||
pub files: Vec<FuzzyFileSearchResult>,
|
||||
}
|
||||
|
||||
server_notification_definitions! {
|
||||
/// NEW NOTIFICATIONS
|
||||
Error => "error" (v2::ErrorNotification),
|
||||
ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
|
||||
ThreadTokenUsageUpdated => "thread/tokenUsage/updated" (v2::ThreadTokenUsageUpdatedNotification),
|
||||
TurnStarted => "turn/started" (v2::TurnStartedNotification),
|
||||
TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
|
||||
TurnDiffUpdated => "turn/diff/updated" (v2::TurnDiffUpdatedNotification),
|
||||
TurnPlanUpdated => "turn/plan/updated" (v2::TurnPlanUpdatedNotification),
|
||||
ItemStarted => "item/started" (v2::ItemStartedNotification),
|
||||
ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
|
||||
/// This event is internal-only. Used by Codex Cloud.
|
||||
RawResponseItemCompleted => "rawResponseItem/completed" (v2::RawResponseItemCompletedNotification),
|
||||
AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
|
||||
CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
|
||||
TerminalInteraction => "item/commandExecution/terminalInteraction" (v2::TerminalInteractionNotification),
|
||||
FileChangeOutputDelta => "item/fileChange/outputDelta" (v2::FileChangeOutputDeltaNotification),
|
||||
McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
|
||||
McpServerOauthLoginCompleted => "mcpServer/oauthLogin/completed" (v2::McpServerOauthLoginCompletedNotification),
|
||||
AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
|
||||
AccountRateLimitsUpdated => "account/rateLimits/updated" (v2::AccountRateLimitsUpdatedNotification),
|
||||
ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
|
||||
ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
|
||||
ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
|
||||
ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
|
||||
DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
|
||||
ConfigWarning => "configWarning" (v2::ConfigWarningNotification),
|
||||
|
||||
/// Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.
|
||||
WindowsWorldWritableWarning => "windows/worldWritableWarning" (v2::WindowsWorldWritableWarningNotification),
|
||||
|
||||
#[serde(rename = "account/login/completed")]
|
||||
#[ts(rename = "account/login/completed")]
|
||||
#[strum(serialize = "account/login/completed")]
|
||||
AccountLoginCompleted(v2::AccountLoginCompletedNotification),
|
||||
|
||||
/// DEPRECATED NOTIFICATIONS below
|
||||
AuthStatusChange(v1::AuthStatusChangeNotification),
|
||||
|
||||
/// Deprecated: use `account/login/completed` instead.
|
||||
LoginChatGptComplete(v1::LoginChatGptCompleteNotification),
|
||||
SessionConfigured(v1::SessionConfiguredNotification),
|
||||
}
|
||||
|
||||
client_notification_definitions! {
|
||||
Initialized,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::account::PlanType;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn serialize_new_conversation() -> Result<()> {
|
||||
let request = ClientRequest::NewConversation {
|
||||
request_id: RequestId::Integer(42),
|
||||
params: v1::NewConversationParams {
|
||||
model: Some("gpt-5.1-codex-max".to_string()),
|
||||
model_provider: None,
|
||||
profile: None,
|
||||
cwd: None,
|
||||
approval_policy: Some(AskForApproval::OnRequest),
|
||||
sandbox: None,
|
||||
config: None,
|
||||
base_instructions: None,
|
||||
developer_instructions: None,
|
||||
compact_prompt: None,
|
||||
include_apply_patch_tool: None,
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "newConversation",
|
||||
"id": 42,
|
||||
"params": {
|
||||
"model": "gpt-5.1-codex-max",
|
||||
"modelProvider": null,
|
||||
"profile": null,
|
||||
"cwd": null,
|
||||
"approvalPolicy": "on-request",
|
||||
"sandbox": null,
|
||||
"config": null,
|
||||
"baseInstructions": null,
|
||||
"includeApplyPatchTool": null
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_serializes_as_plain_string() -> Result<()> {
|
||||
let id = ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
|
||||
assert_eq!(
|
||||
json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
|
||||
serde_json::to_value(id)?
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conversation_id_deserializes_from_plain_string() -> Result<()> {
|
||||
let id: ThreadId = serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
|
||||
|
||||
assert_eq!(
|
||||
ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
|
||||
id,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_client_notification() -> Result<()> {
|
||||
let notification = ClientNotification::Initialized;
|
||||
// Note there is no "params" field for this notification.
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "initialized",
|
||||
}),
|
||||
serde_json::to_value(¬ification)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_server_request() -> Result<()> {
|
||||
let conversation_id = ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
|
||||
let params = v1::ExecCommandApprovalParams {
|
||||
conversation_id,
|
||||
call_id: "call-42".to_string(),
|
||||
command: vec!["echo".to_string(), "hello".to_string()],
|
||||
cwd: PathBuf::from("/tmp"),
|
||||
reason: Some("because tests".to_string()),
|
||||
parsed_cmd: vec![ParsedCommand::Unknown {
|
||||
cmd: "echo hello".to_string(),
|
||||
}],
|
||||
};
|
||||
let request = ServerRequest::ExecCommandApproval {
|
||||
request_id: RequestId::Integer(7),
|
||||
params: params.clone(),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "execCommandApproval",
|
||||
"id": 7,
|
||||
"params": {
|
||||
"conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
|
||||
"callId": "call-42",
|
||||
"command": ["echo", "hello"],
|
||||
"cwd": "/tmp",
|
||||
"reason": "because tests",
|
||||
"parsedCmd": [
|
||||
{
|
||||
"type": "unknown",
|
||||
"cmd": "echo hello"
|
||||
}
|
||||
]
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
|
||||
let payload = ServerRequestPayload::ExecCommandApproval(params);
|
||||
assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account_rate_limits() -> Result<()> {
|
||||
let request = ClientRequest::GetAccountRateLimits {
|
||||
request_id: RequestId::Integer(1),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/rateLimits/read",
|
||||
"id": 1,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_config_requirements_read() -> Result<()> {
|
||||
let request = ClientRequest::ConfigRequirementsRead {
|
||||
request_id: RequestId::Integer(1),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "configRequirements/read",
|
||||
"id": 1,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_api_key() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
request_id: RequestId::Integer(2),
|
||||
params: v2::LoginAccountParams::ApiKey {
|
||||
api_key: "secret".to_string(),
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/start",
|
||||
"id": 2,
|
||||
"params": {
|
||||
"type": "apiKey",
|
||||
"apiKey": "secret"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_login_chatgpt() -> Result<()> {
|
||||
let request = ClientRequest::LoginAccount {
|
||||
request_id: RequestId::Integer(3),
|
||||
params: v2::LoginAccountParams::Chatgpt,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/start",
|
||||
"id": 3,
|
||||
"params": {
|
||||
"type": "chatgpt"
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_account_logout() -> Result<()> {
|
||||
let request = ClientRequest::LogoutAccount {
|
||||
request_id: RequestId::Integer(4),
|
||||
params: None,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/logout",
|
||||
"id": 4,
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_get_account() -> Result<()> {
|
||||
let request = ClientRequest::GetAccount {
|
||||
request_id: RequestId::Integer(5),
|
||||
params: v2::GetAccountParams {
|
||||
refresh_token: false,
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/read",
|
||||
"id": 5,
|
||||
"params": {
|
||||
"refreshToken": false
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn account_serializes_fields_in_camel_case() -> Result<()> {
|
||||
let api_key = v2::Account::ApiKey {};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"type": "apiKey",
|
||||
}),
|
||||
serde_json::to_value(&api_key)?,
|
||||
);
|
||||
|
||||
let chatgpt = v2::Account::Chatgpt {
|
||||
email: "user@example.com".to_string(),
|
||||
plan_type: PlanType::Plus,
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"type": "chatgpt",
|
||||
"email": "user@example.com",
|
||||
"planType": "plus",
|
||||
}),
|
||||
serde_json::to_value(&chatgpt)?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_list_models() -> Result<()> {
|
||||
let request = ClientRequest::ModelList {
|
||||
request_id: RequestId::Integer(6),
|
||||
params: v2::ModelListParams::default(),
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "model/list",
|
||||
"id": 6,
|
||||
"params": {
|
||||
"limit": null,
|
||||
"cursor": null
|
||||
}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_list_collaboration_modes() -> Result<()> {
|
||||
let request = ClientRequest::CollaborationModeList {
|
||||
request_id: RequestId::Integer(7),
|
||||
params: v2::CollaborationModeListParams::default(),
|
||||
};
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "collaborationMode/list",
|
||||
"id": 7,
|
||||
"params": {}
|
||||
}),
|
||||
serde_json::to_value(&request)?,
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
codex-rs/app-server-protocol/src/protocol/mappers.rs (Normal file, 15 lines)
@@ -0,0 +1,15 @@
use crate::protocol::v1;
use crate::protocol::v2;

impl From<v1::ExecOneOffCommandParams> for v2::CommandExecParams {
    fn from(value: v1::ExecOneOffCommandParams) -> Self {
        Self {
            command: value.command,
            timeout_ms: value
                .timeout_ms
                .map(|timeout| i64::try_from(timeout).unwrap_or(60_000)),
            cwd: value.cwd,
            sandbox_policy: value.sandbox_policy.map(std::convert::Into::into),
        }
    }
}
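
// Conversion note (illustrative; assumes the v1 timeout field is unsigned): a
// timeout that does not fit in i64 falls back to the 60_000 ms default.
//
//     let v1_params = v1::ExecOneOffCommandParams { timeout_ms: Some(u64::MAX), /* ... */ };
//     let v2_params: v2::CommandExecParams = v1_params.into();
//     assert_eq!(v2_params.timeout_ms, Some(60_000));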
codex-rs/app-server-protocol/src/protocol/mod.rs (Normal file, 8 lines)
@@ -0,0 +1,8 @@
// Module declarations for the app-server protocol namespace.
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.

pub mod common;
mod mappers;
pub mod thread_history;
pub mod v1;
pub mod v2;
codex-rs/app-server-protocol/src/protocol/thread_history.rs (Normal file, 559 lines)
@@ -0,0 +1,559 @@
use crate::protocol::v2::ThreadItem;
use crate::protocol::v2::Turn;
use crate::protocol::v2::TurnError;
use crate::protocol::v2::TurnStatus;
use crate::protocol::v2::UserInput;
use codex_protocol::protocol::AgentReasoningEvent;
use codex_protocol::protocol::AgentReasoningRawContentEvent;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::ThreadRolledBackEvent;
use codex_protocol::protocol::TurnAbortedEvent;
use codex_protocol::protocol::UserMessageEvent;

/// Convert persisted [`EventMsg`] entries into a sequence of [`Turn`] values.
///
/// The purpose of this is to convert the EventMsgs persisted in a rollout file
/// into a sequence of Turns and ThreadItems, which allows the client to render
/// the historical messages when resuming a thread.
pub fn build_turns_from_event_msgs(events: &[EventMsg]) -> Vec<Turn> {
    let mut builder = ThreadHistoryBuilder::new();
    for event in events {
        builder.handle_event(event);
    }
    builder.finish()
}
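
// Minimal usage sketch (hypothetical; event fields mirror the tests below):
//
//     let events = vec![
//         EventMsg::UserMessage(UserMessageEvent {
//             message: "hi".into(),
//             images: None,
//             text_elements: Vec::new(),
//             local_images: Vec::new(),
//         }),
//         EventMsg::AgentMessage(AgentMessageEvent { message: "hello".into() }),
//     ];
//     let turns = build_turns_from_event_msgs(&events);
//     assert_eq!(turns.len(), 1);          // one turn, opened by the user message
//     assert_eq!(turns[0].items.len(), 2); // a UserMessage item and an AgentMessage item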

struct ThreadHistoryBuilder {
    turns: Vec<Turn>,
    current_turn: Option<PendingTurn>,
    next_turn_index: i64,
    next_item_index: i64,
}

impl ThreadHistoryBuilder {
    fn new() -> Self {
        Self {
            turns: Vec::new(),
            current_turn: None,
            next_turn_index: 1,
            next_item_index: 1,
        }
    }

    fn finish(mut self) -> Vec<Turn> {
        self.finish_current_turn();
        self.turns
    }

    /// This function should handle all EventMsg variants that can be persisted in a rollout file.
    /// See `should_persist_event_msg` in `codex-rs/core/rollout/policy.rs`.
    fn handle_event(&mut self, event: &EventMsg) {
        match event {
            EventMsg::UserMessage(payload) => self.handle_user_message(payload),
            EventMsg::AgentMessage(payload) => self.handle_agent_message(payload.message.clone()),
            EventMsg::AgentReasoning(payload) => self.handle_agent_reasoning(payload),
            EventMsg::AgentReasoningRawContent(payload) => {
                self.handle_agent_reasoning_raw_content(payload)
            }
            EventMsg::TokenCount(_) => {}
            EventMsg::EnteredReviewMode(_) => {}
            EventMsg::ExitedReviewMode(_) => {}
            EventMsg::ThreadRolledBack(payload) => self.handle_thread_rollback(payload),
            EventMsg::UndoCompleted(_) => {}
            EventMsg::TurnAborted(payload) => self.handle_turn_aborted(payload),
            _ => {}
        }
    }

    fn handle_user_message(&mut self, payload: &UserMessageEvent) {
        self.finish_current_turn();
        let mut turn = self.new_turn();
        let id = self.next_item_id();
        let content = self.build_user_inputs(payload);
        turn.items.push(ThreadItem::UserMessage { id, content });
        self.current_turn = Some(turn);
    }

    fn handle_agent_message(&mut self, text: String) {
        if text.is_empty() {
            return;
        }

        let id = self.next_item_id();
        self.ensure_turn()
            .items
            .push(ThreadItem::AgentMessage { id, text });
    }

    fn handle_agent_reasoning(&mut self, payload: &AgentReasoningEvent) {
        if payload.text.is_empty() {
            return;
        }

        // If the last item is a reasoning item, add the new text to the summary.
        if let Some(ThreadItem::Reasoning { summary, .. }) = self.ensure_turn().items.last_mut() {
            summary.push(payload.text.clone());
            return;
        }

        // Otherwise, create a new reasoning item.
        let id = self.next_item_id();
        self.ensure_turn().items.push(ThreadItem::Reasoning {
            id,
            summary: vec![payload.text.clone()],
            content: Vec::new(),
        });
    }

    fn handle_agent_reasoning_raw_content(&mut self, payload: &AgentReasoningRawContentEvent) {
        if payload.text.is_empty() {
            return;
        }

        // If the last item is a reasoning item, add the new text to the content.
        if let Some(ThreadItem::Reasoning { content, .. }) = self.ensure_turn().items.last_mut() {
            content.push(payload.text.clone());
            return;
        }

        // Otherwise, create a new reasoning item.
        let id = self.next_item_id();
        self.ensure_turn().items.push(ThreadItem::Reasoning {
            id,
            summary: Vec::new(),
            content: vec![payload.text.clone()],
        });
    }

    fn handle_turn_aborted(&mut self, _payload: &TurnAbortedEvent) {
        let Some(turn) = self.current_turn.as_mut() else {
            return;
        };
        turn.status = TurnStatus::Interrupted;
    }

    fn handle_thread_rollback(&mut self, payload: &ThreadRolledBackEvent) {
        self.finish_current_turn();

        let n = usize::try_from(payload.num_turns).unwrap_or(usize::MAX);
        if n >= self.turns.len() {
            self.turns.clear();
        } else {
            self.turns.truncate(self.turns.len().saturating_sub(n));
        }

        // Re-number subsequent synthetic ids so the pruned history is consistent.
        self.next_turn_index =
            i64::try_from(self.turns.len().saturating_add(1)).unwrap_or(i64::MAX);
        let item_count: usize = self.turns.iter().map(|t| t.items.len()).sum();
        self.next_item_index = i64::try_from(item_count.saturating_add(1)).unwrap_or(i64::MAX);
    }

    fn finish_current_turn(&mut self) {
        if let Some(turn) = self.current_turn.take() {
            if turn.items.is_empty() {
                return;
            }
            self.turns.push(turn.into());
        }
    }

    fn new_turn(&mut self) -> PendingTurn {
        PendingTurn {
            id: self.next_turn_id(),
            items: Vec::new(),
            error: None,
            status: TurnStatus::Completed,
        }
    }

    fn ensure_turn(&mut self) -> &mut PendingTurn {
        if self.current_turn.is_none() {
            let turn = self.new_turn();
            return self.current_turn.insert(turn);
        }

        if let Some(turn) = self.current_turn.as_mut() {
            return turn;
        }

        unreachable!("current turn must exist after initialization");
    }

    fn next_turn_id(&mut self) -> String {
        let id = format!("turn-{}", self.next_turn_index);
        self.next_turn_index += 1;
        id
    }

    fn next_item_id(&mut self) -> String {
        let id = format!("item-{}", self.next_item_index);
        self.next_item_index += 1;
        id
    }

    fn build_user_inputs(&self, payload: &UserMessageEvent) -> Vec<UserInput> {
        let mut content = Vec::new();
        if !payload.message.trim().is_empty() {
            content.push(UserInput::Text {
                text: payload.message.clone(),
                text_elements: payload
                    .text_elements
                    .iter()
                    .cloned()
                    .map(Into::into)
                    .collect(),
            });
        }
        if let Some(images) = &payload.images {
            for image in images {
                content.push(UserInput::Image { url: image.clone() });
            }
        }
        for path in &payload.local_images {
            content.push(UserInput::LocalImage { path: path.clone() });
        }
        content
    }
}

struct PendingTurn {
    id: String,
    items: Vec<ThreadItem>,
    error: Option<TurnError>,
    status: TurnStatus,
}

impl From<PendingTurn> for Turn {
    fn from(value: PendingTurn) -> Self {
        Self {
            id: value.id,
            items: value.items,
            error: value.error,
            status: value.status,
        }
    }
}

#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use codex_protocol::protocol::AgentMessageEvent;
|
||||
use codex_protocol::protocol::AgentReasoningEvent;
|
||||
use codex_protocol::protocol::AgentReasoningRawContentEvent;
|
||||
use codex_protocol::protocol::ThreadRolledBackEvent;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use codex_protocol::protocol::TurnAbortedEvent;
|
||||
use codex_protocol::protocol::UserMessageEvent;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn builds_multiple_turns_with_reasoning_items() {
|
||||
let events = vec![
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "First turn".into(),
|
||||
images: Some(vec!["https://example.com/one.png".into()]),
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "Hi there".into(),
|
||||
}),
|
||||
EventMsg::AgentReasoning(AgentReasoningEvent {
|
||||
text: "thinking".into(),
|
||||
}),
|
||||
EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
|
||||
text: "full reasoning".into(),
|
||||
}),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Second turn".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "Reply two".into(),
|
||||
}),
|
||||
];
|
||||
|
||||
let turns = build_turns_from_event_msgs(&events);
|
||||
assert_eq!(turns.len(), 2);
|
||||
|
||||
let first = &turns[0];
|
||||
assert_eq!(first.id, "turn-1");
|
||||
assert_eq!(first.status, TurnStatus::Completed);
|
||||
assert_eq!(first.items.len(), 3);
|
||||
assert_eq!(
|
||||
first.items[0],
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-1".into(),
|
||||
content: vec![
|
||||
UserInput::Text {
|
||||
text: "First turn".into(),
|
||||
text_elements: Vec::new(),
|
||||
},
|
||||
UserInput::Image {
|
||||
url: "https://example.com/one.png".into(),
|
||||
}
|
||||
],
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
first.items[1],
|
||||
ThreadItem::AgentMessage {
|
||||
id: "item-2".into(),
|
||||
text: "Hi there".into(),
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
first.items[2],
|
||||
ThreadItem::Reasoning {
|
||||
id: "item-3".into(),
|
||||
summary: vec!["thinking".into()],
|
||||
content: vec!["full reasoning".into()],
|
||||
}
|
||||
);
|
||||
|
||||
let second = &turns[1];
|
||||
assert_eq!(second.id, "turn-2");
|
||||
assert_eq!(second.items.len(), 2);
|
||||
assert_eq!(
|
||||
second.items[0],
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-4".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "Second turn".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
second.items[1],
|
||||
ThreadItem::AgentMessage {
|
||||
id: "item-5".into(),
|
||||
text: "Reply two".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn splits_reasoning_when_interleaved() {
|
||||
let events = vec![
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Turn start".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentReasoning(AgentReasoningEvent {
|
||||
text: "first summary".into(),
|
||||
}),
|
||||
EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
|
||||
text: "first content".into(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "interlude".into(),
|
||||
}),
|
||||
EventMsg::AgentReasoning(AgentReasoningEvent {
|
||||
text: "second summary".into(),
|
||||
}),
|
||||
];
|
||||
|
||||
let turns = build_turns_from_event_msgs(&events);
|
||||
assert_eq!(turns.len(), 1);
|
||||
let turn = &turns[0];
|
||||
assert_eq!(turn.items.len(), 4);
|
||||
|
||||
assert_eq!(
|
||||
turn.items[1],
|
||||
ThreadItem::Reasoning {
|
||||
id: "item-2".into(),
|
||||
summary: vec!["first summary".into()],
|
||||
content: vec!["first content".into()],
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
turn.items[3],
|
||||
ThreadItem::Reasoning {
|
||||
id: "item-4".into(),
|
||||
summary: vec!["second summary".into()],
|
||||
content: Vec::new(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn marks_turn_as_interrupted_when_aborted() {
|
||||
let events = vec![
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Please do the thing".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "Working...".into(),
|
||||
}),
|
||||
EventMsg::TurnAborted(TurnAbortedEvent {
|
||||
reason: TurnAbortReason::Replaced,
|
||||
}),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Let's try again".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "Second attempt complete.".into(),
|
||||
}),
|
||||
];
|
||||
|
||||
let turns = build_turns_from_event_msgs(&events);
|
||||
assert_eq!(turns.len(), 2);
|
||||
|
||||
let first_turn = &turns[0];
|
||||
assert_eq!(first_turn.status, TurnStatus::Interrupted);
|
||||
assert_eq!(first_turn.items.len(), 2);
|
||||
assert_eq!(
|
||||
first_turn.items[0],
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-1".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "Please do the thing".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
first_turn.items[1],
|
||||
ThreadItem::AgentMessage {
|
||||
id: "item-2".into(),
|
||||
text: "Working...".into(),
|
||||
}
|
||||
);
|
||||
|
||||
let second_turn = &turns[1];
|
||||
assert_eq!(second_turn.status, TurnStatus::Completed);
|
||||
assert_eq!(second_turn.items.len(), 2);
|
||||
assert_eq!(
|
||||
second_turn.items[0],
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-3".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "Let's try again".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
second_turn.items[1],
|
||||
ThreadItem::AgentMessage {
|
||||
id: "item-4".into(),
|
||||
text: "Second attempt complete.".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn drops_last_turns_on_thread_rollback() {
|
||||
let events = vec![
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "First".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A1".into(),
|
||||
}),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Second".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A2".into(),
|
||||
}),
|
||||
EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 1 }),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Third".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A3".into(),
|
||||
}),
|
||||
];
|
||||
|
||||
let turns = build_turns_from_event_msgs(&events);
|
||||
let expected = vec![
|
||||
Turn {
|
||||
id: "turn-1".into(),
|
||||
status: TurnStatus::Completed,
|
||||
error: None,
|
||||
items: vec![
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-1".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "First".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
},
|
||||
ThreadItem::AgentMessage {
|
||||
id: "item-2".into(),
|
||||
text: "A1".into(),
|
||||
},
|
||||
],
|
||||
},
|
||||
Turn {
|
||||
id: "turn-2".into(),
|
||||
status: TurnStatus::Completed,
|
||||
error: None,
|
||||
items: vec![
|
||||
ThreadItem::UserMessage {
|
||||
id: "item-3".into(),
|
||||
content: vec![UserInput::Text {
|
||||
text: "Third".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
},
|
||||
ThreadItem::AgentMessage {
|
||||
id: "item-4".into(),
|
||||
text: "A3".into(),
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
assert_eq!(turns, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn thread_rollback_clears_all_turns_when_num_turns_exceeds_history() {
|
||||
let events = vec![
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "One".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A1".into(),
|
||||
}),
|
||||
EventMsg::UserMessage(UserMessageEvent {
|
||||
message: "Two".into(),
|
||||
images: None,
|
||||
text_elements: Vec::new(),
|
||||
local_images: Vec::new(),
|
||||
}),
|
||||
EventMsg::AgentMessage(AgentMessageEvent {
|
||||
message: "A2".into(),
|
||||
}),
|
||||
EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 99 }),
|
||||
];
|
||||
|
||||
let turns = build_turns_from_event_msgs(&events);
|
||||
assert_eq!(turns, Vec::<Turn>::new());
|
||||
}
|
||||
}
|
||||
548
codex-rs/app-server-protocol/src/protocol/v1.rs
Normal file
@@ -0,0 +1,548 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::openai_models::ReasoningEffort;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
use codex_protocol::protocol::ReviewDecision;
|
||||
use codex_protocol::protocol::SandboxPolicy;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use codex_protocol::user_input::ByteRange as CoreByteRange;
|
||||
use codex_protocol::user_input::TextElement as CoreTextElement;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use ts_rs::TS;
|
||||
use uuid::Uuid;
|
||||
|
||||
// Reuse shared types defined in `common.rs`.
|
||||
use crate::protocol::common::AuthMode;
|
||||
use crate::protocol::common::GitSha;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeParams {
|
||||
pub client_info: ClientInfo,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ClientInfo {
|
||||
pub name: String,
|
||||
pub title: Option<String>,
|
||||
pub version: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeResponse {
|
||||
pub user_agent: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NewConversationParams {
|
||||
pub model: Option<String>,
|
||||
pub model_provider: Option<String>,
|
||||
pub profile: Option<String>,
|
||||
pub cwd: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox: Option<SandboxMode>,
|
||||
pub config: Option<HashMap<String, serde_json::Value>>,
|
||||
pub base_instructions: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub developer_instructions: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub compact_prompt: Option<String>,
|
||||
pub include_apply_patch_tool: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NewConversationResponse {
|
||||
pub conversation_id: ThreadId,
|
||||
pub model: String,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationResponse {
|
||||
pub conversation_id: ThreadId,
|
||||
pub model: String,
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ForkConversationResponse {
|
||||
pub conversation_id: ThreadId,
|
||||
pub model: String,
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(untagged)]
|
||||
pub enum GetConversationSummaryParams {
|
||||
RolloutPath {
|
||||
#[serde(rename = "rolloutPath")]
|
||||
rollout_path: PathBuf,
|
||||
},
|
||||
ThreadId {
|
||||
#[serde(rename = "conversationId")]
|
||||
conversation_id: ThreadId,
|
||||
},
|
||||
}
|
||||
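Note on the untagged enum above: which variant is parsed depends only on the key present in the request object, so a summary can be addressed either by rollout file path or by conversation id. A minimal sketch of the path-keyed form (hypothetical values, assuming serde_json; illustrative only, not part of this diff):

// Illustrative only: the "rolloutPath" key selects the RolloutPath variant.
let by_path: GetConversationSummaryParams =
    serde_json::from_str(r#"{"rolloutPath": "/tmp/rollout.jsonl"}"#)
        .expect("valid summary params");
assert!(matches!(
    by_path,
    GetConversationSummaryParams::RolloutPath { .. }
));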
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetConversationSummaryResponse {
|
||||
pub summary: ConversationSummary,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListConversationsParams {
|
||||
pub page_size: Option<usize>,
|
||||
pub cursor: Option<String>,
|
||||
pub model_providers: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ConversationSummary {
|
||||
pub conversation_id: ThreadId,
|
||||
pub path: PathBuf,
|
||||
pub preview: String,
|
||||
pub timestamp: Option<String>,
|
||||
pub updated_at: Option<String>,
|
||||
pub model_provider: String,
|
||||
pub cwd: PathBuf,
|
||||
pub cli_version: String,
|
||||
pub source: SessionSource,
|
||||
pub git_info: Option<ConversationGitInfo>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub struct ConversationGitInfo {
|
||||
pub sha: Option<String>,
|
||||
pub branch: Option<String>,
|
||||
pub origin_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListConversationsResponse {
|
||||
pub items: Vec<ConversationSummary>,
|
||||
pub next_cursor: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResumeConversationParams {
|
||||
pub path: Option<PathBuf>,
|
||||
pub conversation_id: Option<ThreadId>,
|
||||
pub history: Option<Vec<ResponseItem>>,
|
||||
pub overrides: Option<NewConversationParams>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ForkConversationParams {
|
||||
pub path: Option<PathBuf>,
|
||||
pub conversation_id: Option<ThreadId>,
|
||||
pub overrides: Option<NewConversationParams>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationSubscriptionResponse {
|
||||
#[schemars(with = "String")]
|
||||
pub subscription_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ArchiveConversationParams {
|
||||
pub conversation_id: ThreadId,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ArchiveConversationResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RemoveConversationSubscriptionResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginApiKeyParams {
|
||||
pub api_key: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginApiKeyResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoginChatGptResponse {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
pub auth_url: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GitDiffToRemoteResponse {
|
||||
pub sha: GitSha,
|
||||
pub diff: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ApplyPatchApprovalParams {
|
||||
pub conversation_id: ThreadId,
|
||||
/// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
|
||||
/// and [codex_core::protocol::PatchApplyEndEvent].
|
||||
pub call_id: String,
|
||||
pub file_changes: HashMap<PathBuf, FileChange>,
|
||||
/// Optional explanatory reason (e.g. request for extra write access).
|
||||
pub reason: Option<String>,
|
||||
/// When set, the agent is asking the user to allow writes under this root
|
||||
/// for the remainder of the session (unclear if this is honored today).
|
||||
pub grant_root: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ApplyPatchApprovalResponse {
|
||||
pub decision: ReviewDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecCommandApprovalParams {
|
||||
pub conversation_id: ThreadId,
|
||||
/// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
|
||||
/// and [codex_core::protocol::ExecCommandEndEvent].
|
||||
pub call_id: String,
|
||||
pub command: Vec<String>,
|
||||
pub cwd: PathBuf,
|
||||
pub reason: Option<String>,
|
||||
pub parsed_cmd: Vec<ParsedCommand>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
pub struct ExecCommandApprovalResponse {
|
||||
pub decision: ReviewDecision,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CancelLoginChatGptParams {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GitDiffToRemoteParams {
|
||||
pub cwd: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CancelLoginChatGptResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LogoutChatGptParams {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LogoutChatGptResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAuthStatusParams {
|
||||
pub include_token: Option<bool>,
|
||||
pub refresh_token: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecOneOffCommandParams {
|
||||
pub command: Vec<String>,
|
||||
pub timeout_ms: Option<u64>,
|
||||
pub cwd: Option<PathBuf>,
|
||||
pub sandbox_policy: Option<SandboxPolicy>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExecOneOffCommandResponse {
|
||||
pub exit_code: i32,
|
||||
pub stdout: String,
|
||||
pub stderr: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetAuthStatusResponse {
|
||||
pub auth_method: Option<AuthMode>,
|
||||
pub auth_token: Option<String>,
|
||||
pub requires_openai_auth: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetUserAgentResponse {
|
||||
pub user_agent: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UserInfoResponse {
|
||||
pub alleged_user_email: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct GetUserSavedConfigResponse {
|
||||
pub config: UserSavedConfig,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetDefaultModelParams {
|
||||
pub model: Option<String>,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetDefaultModelResponse {}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UserSavedConfig {
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub sandbox_mode: Option<SandboxMode>,
|
||||
pub sandbox_settings: Option<SandboxSettings>,
|
||||
pub forced_chatgpt_workspace_id: Option<String>,
|
||||
pub forced_login_method: Option<ForcedLoginMethod>,
|
||||
pub model: Option<String>,
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub tools: Option<Tools>,
|
||||
pub profile: Option<String>,
|
||||
pub profiles: HashMap<String, Profile>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Profile {
|
||||
pub model: Option<String>,
|
||||
pub model_provider: Option<String>,
|
||||
pub approval_policy: Option<AskForApproval>,
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub chatgpt_base_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Tools {
|
||||
pub web_search: Option<bool>,
|
||||
pub view_image: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SandboxSettings {
|
||||
#[serde(default)]
|
||||
pub writable_roots: Vec<AbsolutePathBuf>,
|
||||
pub network_access: Option<bool>,
|
||||
pub exclude_tmpdir_env_var: Option<bool>,
|
||||
pub exclude_slash_tmp: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserMessageParams {
|
||||
pub conversation_id: ThreadId,
|
||||
pub items: Vec<InputItem>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserTurnParams {
|
||||
pub conversation_id: ThreadId,
|
||||
pub items: Vec<InputItem>,
|
||||
pub cwd: PathBuf,
|
||||
pub approval_policy: AskForApproval,
|
||||
pub sandbox_policy: SandboxPolicy,
|
||||
pub model: String,
|
||||
pub effort: Option<ReasoningEffort>,
|
||||
pub summary: ReasoningSummary,
|
||||
/// Optional JSON Schema used to constrain the final assistant message for this turn.
|
||||
pub output_schema: Option<serde_json::Value>,
|
||||
}
|
||||
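The output_schema field above carries a plain JSON Schema document. A minimal sketch of what a caller might supply (hypothetical schema, assuming the serde_json::json! macro; illustrative only, not part of this diff):

// Hypothetical: force the final assistant message to be an object with one string field.
let output_schema = Some(serde_json::json!({
    "type": "object",
    "properties": { "answer": { "type": "string" } },
    "required": ["answer"],
    "additionalProperties": false
}));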
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserTurnResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InterruptConversationParams {
|
||||
pub conversation_id: ThreadId,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InterruptConversationResponse {
|
||||
pub abort_reason: TurnAbortReason,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendUserMessageResponse {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AddConversationListenerParams {
|
||||
pub conversation_id: ThreadId,
|
||||
#[serde(default)]
|
||||
pub experimental_raw_events: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RemoveConversationListenerParams {
|
||||
#[schemars(with = "String")]
|
||||
pub subscription_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[serde(tag = "type", content = "data")]
|
||||
pub enum InputItem {
|
||||
Text {
|
||||
text: String,
|
||||
/// UI-defined spans within `text` used to render or persist special elements.
|
||||
#[serde(default)]
|
||||
text_elements: Vec<V1TextElement>,
|
||||
},
|
||||
Image {
|
||||
image_url: String,
|
||||
},
|
||||
LocalImage {
|
||||
path: PathBuf,
|
||||
},
|
||||
}
|
||||
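A turn's input is simply a list of these items. A minimal construction sketch mixing text with a local image (hypothetical paths and strings; illustrative only, not part of this diff):

// Hypothetical user message: a text prompt plus a local screenshot attachment.
let items = vec![
    InputItem::Text {
        text: "Describe this screenshot".to_string(),
        text_elements: Vec::new(),
    },
    InputItem::LocalImage {
        path: PathBuf::from("/tmp/screenshot.png"),
    },
];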
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename = "ByteRange")]
|
||||
pub struct V1ByteRange {
|
||||
/// Start byte offset (inclusive) within the UTF-8 text buffer.
|
||||
pub start: usize,
|
||||
/// End byte offset (exclusive) within the UTF-8 text buffer.
|
||||
pub end: usize,
|
||||
}
|
||||
|
||||
impl From<CoreByteRange> for V1ByteRange {
|
||||
fn from(value: CoreByteRange) -> Self {
|
||||
Self {
|
||||
start: value.start,
|
||||
end: value.end,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<V1ByteRange> for CoreByteRange {
|
||||
fn from(value: V1ByteRange) -> Self {
|
||||
Self {
|
||||
start: value.start,
|
||||
end: value.end,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(rename = "TextElement")]
|
||||
pub struct V1TextElement {
|
||||
/// Byte range in the parent `text` buffer that this element occupies.
|
||||
pub byte_range: V1ByteRange,
|
||||
/// Optional human-readable placeholder for the element, displayed in the UI.
|
||||
pub placeholder: Option<String>,
|
||||
}
|
||||
|
||||
impl From<CoreTextElement> for V1TextElement {
|
||||
fn from(value: CoreTextElement) -> Self {
|
||||
Self {
|
||||
byte_range: value.byte_range.into(),
|
||||
placeholder: value.placeholder,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<V1TextElement> for CoreTextElement {
|
||||
fn from(value: V1TextElement) -> Self {
|
||||
Self {
|
||||
byte_range: value.byte_range.into(),
|
||||
placeholder: value.placeholder,
|
||||
}
|
||||
}
|
||||
}
|
||||
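Because byte_range indexes the UTF-8 buffer of the parent text, offsets should be byte positions rather than character counts. A minimal sketch (hypothetical prompt and placeholder; illustrative only, not part of this diff):

// Hypothetical: mark the "@notes.md" span so a UI can render it as a pill.
let text = "Summarize @notes.md for me";
let start = text.find("@notes.md").expect("substring present"); // byte offset
let element = V1TextElement {
    byte_range: V1ByteRange {
        start,
        end: start + "@notes.md".len(),
    },
    placeholder: Some("notes.md".to_string()),
};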
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// Deprecated in favor of AccountLoginCompletedNotification.
|
||||
pub struct LoginChatGptCompleteNotification {
|
||||
#[schemars(with = "String")]
|
||||
pub login_id: Uuid,
|
||||
pub success: bool,
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SessionConfiguredNotification {
|
||||
pub session_id: ThreadId,
|
||||
pub model: String,
|
||||
pub reasoning_effort: Option<ReasoningEffort>,
|
||||
pub history_log_id: u64,
|
||||
#[ts(type = "number")]
|
||||
pub history_entry_count: usize,
|
||||
pub initial_messages: Option<Vec<EventMsg>>,
|
||||
pub rollout_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// Deprecated notification. Use AccountUpdatedNotification instead.
|
||||
pub struct AuthStatusChangeNotification {
|
||||
pub auth_method: Option<AuthMode>,
|
||||
}
|
||||
2593
codex-rs/app-server-protocol/src/protocol/v2.rs
Normal file
File diff suppressed because it is too large
6
codex-rs/app-server-test-client/BUILD.bazel
Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "codex-app-server-test-client",
    crate_name = "codex_app_server_test_client",
)
1298
codex-rs/app-server-test-client/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
17
codex-rs/app-server-test-client/Cargo.toml
Normal file
@@ -0,0 +1,17 @@
[package]
name = "codex-app-server-test-client"
version.workspace = true
edition.workspace = true
license.workspace = true

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive", "env"] }
codex-app-server-protocol = { workspace = true }
codex-protocol = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
uuid = { workspace = true, features = ["v4"] }
2
codex-rs/app-server-test-client/README.md
Normal file
@@ -0,0 +1,2 @@
# App Server Test Client

Exercises simple `codex app-server` flows end-to-end, logging JSON-RPC messages sent between client and server to stdout.
947
codex-rs/app-server-test-client/src/main.rs
Normal file
@@ -0,0 +1,947 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::io::BufRead;
|
||||
use std::io::BufReader;
|
||||
use std::io::Write;
|
||||
use std::process::Child;
|
||||
use std::process::ChildStdin;
|
||||
use std::process::ChildStdout;
|
||||
use std::process::Command;
|
||||
use std::process::Stdio;
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::bail;
|
||||
use clap::ArgAction;
|
||||
use clap::Parser;
|
||||
use clap::Subcommand;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AddConversationSubscriptionResponse;
|
||||
use codex_app_server_protocol::AskForApproval;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::CommandExecutionApprovalDecision;
|
||||
use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
|
||||
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
|
||||
use codex_app_server_protocol::FileChangeApprovalDecision;
|
||||
use codex_app_server_protocol::FileChangeRequestApprovalParams;
|
||||
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
|
||||
use codex_app_server_protocol::GetAccountRateLimitsResponse;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::InitializeResponse;
|
||||
use codex_app_server_protocol::InputItem;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::LoginChatGptCompleteNotification;
|
||||
use codex_app_server_protocol::LoginChatGptResponse;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
use codex_app_server_protocol::ModelListResponse;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::NewConversationResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::SandboxPolicy;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadStartResponse;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use serde::Serialize;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde_json::Value;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Minimal launcher that initializes the Codex app-server and logs the handshake.
|
||||
#[derive(Parser)]
|
||||
#[command(author = "Codex", version, about = "Bootstrap Codex app-server", long_about = None)]
|
||||
struct Cli {
|
||||
/// Path to the `codex` CLI binary.
|
||||
#[arg(long, env = "CODEX_BIN", default_value = "codex")]
|
||||
codex_bin: String,
|
||||
|
||||
/// Forwarded to the `codex` CLI as `--config key=value`. Repeatable.
|
||||
///
|
||||
/// Example:
|
||||
/// `--config 'model_providers.mock.base_url="http://localhost:4010/v2"'`
|
||||
#[arg(
|
||||
short = 'c',
|
||||
long = "config",
|
||||
value_name = "key=value",
|
||||
action = ArgAction::Append,
|
||||
global = true
|
||||
)]
|
||||
config_overrides: Vec<String>,
|
||||
|
||||
#[command(subcommand)]
|
||||
command: CliCommand,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum CliCommand {
|
||||
/// Send a user message through the Codex app-server.
|
||||
SendMessage {
|
||||
/// User message to send to Codex.
|
||||
#[arg()]
|
||||
user_message: String,
|
||||
},
|
||||
/// Send a user message through the app-server V2 thread/turn APIs.
|
||||
SendMessageV2 {
|
||||
/// User message to send to Codex.
|
||||
#[arg()]
|
||||
user_message: String,
|
||||
},
|
||||
/// Start a V2 turn that elicits an ExecCommand approval.
|
||||
#[command(name = "trigger-cmd-approval")]
|
||||
TriggerCmdApproval {
|
||||
/// Optional prompt; defaults to a simple python command.
|
||||
#[arg()]
|
||||
user_message: Option<String>,
|
||||
},
|
||||
/// Start a V2 turn that elicits an ApplyPatch approval.
|
||||
#[command(name = "trigger-patch-approval")]
|
||||
TriggerPatchApproval {
|
||||
/// Optional prompt; defaults to creating a file via apply_patch.
|
||||
#[arg()]
|
||||
user_message: Option<String>,
|
||||
},
|
||||
/// Start a V2 turn that should not elicit an ExecCommand approval.
|
||||
#[command(name = "no-trigger-cmd-approval")]
|
||||
NoTriggerCmdApproval,
|
||||
/// Send two sequential V2 turns in the same thread to test follow-up behavior.
|
||||
SendFollowUpV2 {
|
||||
/// Initial user message for the first turn.
|
||||
#[arg()]
|
||||
first_message: String,
|
||||
/// Follow-up user message for the second turn.
|
||||
#[arg()]
|
||||
follow_up_message: String,
|
||||
},
|
||||
/// Trigger the ChatGPT login flow and wait for completion.
|
||||
TestLogin,
|
||||
/// Fetch the current account rate limits from the Codex app-server.
|
||||
GetAccountRateLimits,
|
||||
/// List the available models from the Codex app-server.
|
||||
#[command(name = "model-list")]
|
||||
ModelList,
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
let Cli {
|
||||
codex_bin,
|
||||
config_overrides,
|
||||
command,
|
||||
} = Cli::parse();
|
||||
|
||||
match command {
|
||||
CliCommand::SendMessage { user_message } => {
|
||||
send_message(&codex_bin, &config_overrides, user_message)
|
||||
}
|
||||
CliCommand::SendMessageV2 { user_message } => {
|
||||
send_message_v2(&codex_bin, &config_overrides, user_message)
|
||||
}
|
||||
CliCommand::TriggerCmdApproval { user_message } => {
|
||||
trigger_cmd_approval(&codex_bin, &config_overrides, user_message)
|
||||
}
|
||||
CliCommand::TriggerPatchApproval { user_message } => {
|
||||
trigger_patch_approval(&codex_bin, &config_overrides, user_message)
|
||||
}
|
||||
CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(&codex_bin, &config_overrides),
|
||||
CliCommand::SendFollowUpV2 {
|
||||
first_message,
|
||||
follow_up_message,
|
||||
} => send_follow_up_v2(
|
||||
&codex_bin,
|
||||
&config_overrides,
|
||||
first_message,
|
||||
follow_up_message,
|
||||
),
|
||||
CliCommand::TestLogin => test_login(&codex_bin, &config_overrides),
|
||||
CliCommand::GetAccountRateLimits => get_account_rate_limits(&codex_bin, &config_overrides),
|
||||
CliCommand::ModelList => model_list(&codex_bin, &config_overrides),
|
||||
}
|
||||
}
|
||||
|
||||
fn send_message(codex_bin: &str, config_overrides: &[String], user_message: String) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let conversation = client.start_thread()?;
|
||||
println!("< newConversation response: {conversation:?}");
|
||||
|
||||
let subscription = client.add_conversation_listener(&conversation.conversation_id)?;
|
||||
println!("< addConversationListener response: {subscription:?}");
|
||||
|
||||
let send_response = client.send_user_message(&conversation.conversation_id, &user_message)?;
|
||||
println!("< sendUserMessage response: {send_response:?}");
|
||||
|
||||
client.stream_conversation(&conversation.conversation_id)?;
|
||||
|
||||
client.remove_thread_listener(subscription.subscription_id)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_message_v2(
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
user_message: String,
|
||||
) -> Result<()> {
|
||||
send_message_v2_with_policies(codex_bin, config_overrides, user_message, None, None)
|
||||
}
|
||||
|
||||
fn trigger_cmd_approval(
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
user_message: Option<String>,
|
||||
) -> Result<()> {
|
||||
let default_prompt =
|
||||
"Run `touch /tmp/should-trigger-approval` so I can confirm the file exists.";
|
||||
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
|
||||
send_message_v2_with_policies(
|
||||
codex_bin,
|
||||
config_overrides,
|
||||
message,
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(SandboxPolicy::ReadOnly),
|
||||
)
|
||||
}
|
||||
|
||||
fn trigger_patch_approval(
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
user_message: Option<String>,
|
||||
) -> Result<()> {
|
||||
let default_prompt =
|
||||
"Create a file named APPROVAL_DEMO.txt containing a short hello message using apply_patch.";
|
||||
let message = user_message.unwrap_or_else(|| default_prompt.to_string());
|
||||
send_message_v2_with_policies(
|
||||
codex_bin,
|
||||
config_overrides,
|
||||
message,
|
||||
Some(AskForApproval::OnRequest),
|
||||
Some(SandboxPolicy::ReadOnly),
|
||||
)
|
||||
}
|
||||
|
||||
fn no_trigger_cmd_approval(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
|
||||
let prompt = "Run `touch should_not_trigger_approval.txt`";
|
||||
send_message_v2_with_policies(codex_bin, config_overrides, prompt.to_string(), None, None)
|
||||
}
|
||||
|
||||
fn send_message_v2_with_policies(
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
user_message: String,
|
||||
approval_policy: Option<AskForApproval>,
|
||||
sandbox_policy: Option<SandboxPolicy>,
|
||||
) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let thread_response = client.thread_start(ThreadStartParams::default())?;
|
||||
println!("< thread/start response: {thread_response:?}");
|
||||
let mut turn_params = TurnStartParams {
|
||||
thread_id: thread_response.thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: user_message,
|
||||
// Plain text conversion has no UI element ranges.
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
};
|
||||
turn_params.approval_policy = approval_policy;
|
||||
turn_params.sandbox_policy = sandbox_policy;
|
||||
|
||||
let turn_response = client.turn_start(turn_params)?;
|
||||
println!("< turn/start response: {turn_response:?}");
|
||||
|
||||
client.stream_turn(&thread_response.thread.id, &turn_response.turn.id)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_follow_up_v2(
|
||||
codex_bin: &str,
|
||||
config_overrides: &[String],
|
||||
first_message: String,
|
||||
follow_up_message: String,
|
||||
) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let thread_response = client.thread_start(ThreadStartParams::default())?;
|
||||
println!("< thread/start response: {thread_response:?}");
|
||||
|
||||
let first_turn_params = TurnStartParams {
|
||||
thread_id: thread_response.thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: first_message,
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
};
|
||||
let first_turn_response = client.turn_start(first_turn_params)?;
|
||||
println!("< turn/start response (initial): {first_turn_response:?}");
|
||||
client.stream_turn(&thread_response.thread.id, &first_turn_response.turn.id)?;
|
||||
|
||||
let follow_up_params = TurnStartParams {
|
||||
thread_id: thread_response.thread.id.clone(),
|
||||
input: vec![V2UserInput::Text {
|
||||
text: follow_up_message,
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
..Default::default()
|
||||
};
|
||||
let follow_up_response = client.turn_start(follow_up_params)?;
|
||||
println!("< turn/start response (follow-up): {follow_up_response:?}");
|
||||
client.stream_turn(&thread_response.thread.id, &follow_up_response.turn.id)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn test_login(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let login_response = client.login_chat_gpt()?;
|
||||
println!("< loginChatGpt response: {login_response:?}");
|
||||
println!(
|
||||
"Open the following URL in your browser to continue:\n{}",
|
||||
login_response.auth_url
|
||||
);
|
||||
|
||||
let completion = client.wait_for_login_completion(&login_response.login_id)?;
|
||||
println!("< loginChatGptComplete notification: {completion:?}");
|
||||
|
||||
if completion.success {
|
||||
println!("Login succeeded.");
|
||||
Ok(())
|
||||
} else {
|
||||
bail!(
|
||||
"login failed: {}",
|
||||
completion
|
||||
.error
|
||||
.as_deref()
|
||||
.unwrap_or("unknown error from loginChatGptComplete")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn get_account_rate_limits(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let response = client.get_account_rate_limits()?;
|
||||
println!("< account/rateLimits/read response: {response:?}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn model_list(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
|
||||
let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
|
||||
|
||||
let initialize = client.initialize()?;
|
||||
println!("< initialize response: {initialize:?}");
|
||||
|
||||
let response = client.model_list(ModelListParams::default())?;
|
||||
println!("< model/list response: {response:?}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct CodexClient {
|
||||
child: Child,
|
||||
stdin: Option<ChildStdin>,
|
||||
stdout: BufReader<ChildStdout>,
|
||||
pending_notifications: VecDeque<JSONRPCNotification>,
|
||||
}
|
||||
|
||||
impl CodexClient {
|
||||
fn spawn(codex_bin: &str, config_overrides: &[String]) -> Result<Self> {
|
||||
let mut cmd = Command::new(codex_bin);
|
||||
for override_kv in config_overrides {
|
||||
cmd.arg("--config").arg(override_kv);
|
||||
}
|
||||
let mut codex_app_server = cmd
|
||||
.arg("app-server")
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.with_context(|| format!("failed to start `{codex_bin}` app-server"))?;
|
||||
|
||||
let stdin = codex_app_server
|
||||
.stdin
|
||||
.take()
|
||||
.context("codex app-server stdin unavailable")?;
|
||||
let stdout = codex_app_server
|
||||
.stdout
|
||||
.take()
|
||||
.context("codex app-server stdout unavailable")?;
|
||||
|
||||
Ok(Self {
|
||||
child: codex_app_server,
|
||||
stdin: Some(stdin),
|
||||
stdout: BufReader::new(stdout),
|
||||
pending_notifications: VecDeque::new(),
|
||||
})
|
||||
}
|
||||
|
||||
fn initialize(&mut self) -> Result<InitializeResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::Initialize {
|
||||
request_id: request_id.clone(),
|
||||
params: InitializeParams {
|
||||
client_info: ClientInfo {
|
||||
name: "codex-toy-app-server".to_string(),
|
||||
title: Some("Codex Toy App Server".to_string()),
|
||||
version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "initialize")
|
||||
}
|
||||
|
||||
fn start_thread(&mut self) -> Result<NewConversationResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::NewConversation {
|
||||
request_id: request_id.clone(),
|
||||
params: NewConversationParams::default(),
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "newConversation")
|
||||
}
|
||||
|
||||
fn add_conversation_listener(
|
||||
&mut self,
|
||||
conversation_id: &ThreadId,
|
||||
) -> Result<AddConversationSubscriptionResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::AddConversationListener {
|
||||
request_id: request_id.clone(),
|
||||
params: AddConversationListenerParams {
|
||||
conversation_id: *conversation_id,
|
||||
experimental_raw_events: false,
|
||||
},
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "addConversationListener")
|
||||
}
|
||||
|
||||
fn remove_thread_listener(&mut self, subscription_id: Uuid) -> Result<()> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::RemoveConversationListener {
|
||||
request_id: request_id.clone(),
|
||||
params: codex_app_server_protocol::RemoveConversationListenerParams { subscription_id },
|
||||
};
|
||||
|
||||
self.send_request::<codex_app_server_protocol::RemoveConversationSubscriptionResponse>(
|
||||
request,
|
||||
request_id,
|
||||
"removeConversationListener",
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_user_message(
|
||||
&mut self,
|
||||
conversation_id: &ThreadId,
|
||||
message: &str,
|
||||
) -> Result<SendUserMessageResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::SendUserMessage {
|
||||
request_id: request_id.clone(),
|
||||
params: SendUserMessageParams {
|
||||
conversation_id: *conversation_id,
|
||||
items: vec![InputItem::Text {
|
||||
text: message.to_string(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
},
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "sendUserMessage")
|
||||
}
|
||||
|
||||
fn thread_start(&mut self, params: ThreadStartParams) -> Result<ThreadStartResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::ThreadStart {
|
||||
request_id: request_id.clone(),
|
||||
params,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "thread/start")
|
||||
}
|
||||
|
||||
fn turn_start(&mut self, params: TurnStartParams) -> Result<TurnStartResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::TurnStart {
|
||||
request_id: request_id.clone(),
|
||||
params,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "turn/start")
|
||||
}
|
||||
|
||||
fn login_chat_gpt(&mut self) -> Result<LoginChatGptResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::LoginChatGpt {
|
||||
request_id: request_id.clone(),
|
||||
params: None,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "loginChatGpt")
|
||||
}
|
||||
|
||||
fn get_account_rate_limits(&mut self) -> Result<GetAccountRateLimitsResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::GetAccountRateLimits {
|
||||
request_id: request_id.clone(),
|
||||
params: None,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "account/rateLimits/read")
|
||||
}
|
||||
|
||||
fn model_list(&mut self, params: ModelListParams) -> Result<ModelListResponse> {
|
||||
let request_id = self.request_id();
|
||||
let request = ClientRequest::ModelList {
|
||||
request_id: request_id.clone(),
|
||||
params,
|
||||
};
|
||||
|
||||
self.send_request(request, request_id, "model/list")
|
||||
}
|
||||
|
||||
fn stream_conversation(&mut self, conversation_id: &ThreadId) -> Result<()> {
|
||||
loop {
|
||||
let notification = self.next_notification()?;
|
||||
|
||||
if !notification.method.starts_with("codex/event/") {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(event) = self.extract_event(notification, conversation_id)? {
|
||||
match &event.msg {
|
||||
EventMsg::AgentMessage(event) => {
|
||||
println!("{}", event.message);
|
||||
}
|
||||
EventMsg::AgentMessageDelta(event) => {
|
||||
print!("{}", event.delta);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
EventMsg::TurnComplete(event) => {
|
||||
println!("\n[task complete: {event:?}]");
|
||||
break;
|
||||
}
|
||||
EventMsg::TurnAborted(event) => {
|
||||
println!("\n[turn aborted: {:?}]", event.reason);
|
||||
break;
|
||||
}
|
||||
EventMsg::Error(event) => {
|
||||
println!("[error] {event:?}");
|
||||
}
|
||||
_ => {
|
||||
println!("[UNKNOWN EVENT] {:?}", event.msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn wait_for_login_completion(
|
||||
&mut self,
|
||||
expected_login_id: &Uuid,
|
||||
) -> Result<LoginChatGptCompleteNotification> {
|
||||
loop {
|
||||
let notification = self.next_notification()?;
|
||||
|
||||
if let Ok(server_notification) = ServerNotification::try_from(notification) {
|
||||
match server_notification {
|
||||
ServerNotification::LoginChatGptComplete(completion) => {
|
||||
if &completion.login_id == expected_login_id {
|
||||
return Ok(completion);
|
||||
}
|
||||
|
||||
println!(
|
||||
"[ignoring loginChatGptComplete for unexpected login_id: {}]",
|
||||
completion.login_id
|
||||
);
|
||||
}
|
||||
ServerNotification::AuthStatusChange(status) => {
|
||||
println!("< authStatusChange notification: {status:?}");
|
||||
}
|
||||
ServerNotification::AccountRateLimitsUpdated(snapshot) => {
|
||||
println!("< accountRateLimitsUpdated notification: {snapshot:?}");
|
||||
}
|
||||
ServerNotification::SessionConfigured(_) => {
|
||||
// SessionConfigured notifications are unrelated to login; skip.
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Not a server notification (likely a conversation event); keep waiting.
|
||||
}
|
||||
}
|
||||
|
||||
fn stream_turn(&mut self, thread_id: &str, turn_id: &str) -> Result<()> {
|
||||
loop {
|
||||
let notification = self.next_notification()?;
|
||||
|
||||
let Ok(server_notification) = ServerNotification::try_from(notification) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
match server_notification {
|
||||
ServerNotification::ThreadStarted(payload) => {
|
||||
if payload.thread.id == thread_id {
|
||||
println!("< thread/started notification: {:?}", payload.thread);
|
||||
}
|
||||
}
|
||||
ServerNotification::TurnStarted(payload) => {
|
||||
if payload.turn.id == turn_id {
|
||||
println!("< turn/started notification: {:?}", payload.turn.status);
|
||||
}
|
||||
}
|
||||
ServerNotification::AgentMessageDelta(delta) => {
|
||||
print!("{}", delta.delta);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
ServerNotification::CommandExecutionOutputDelta(delta) => {
|
||||
print!("{}", delta.delta);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
ServerNotification::TerminalInteraction(delta) => {
|
||||
println!("[stdin sent: {}]", delta.stdin);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
ServerNotification::ItemStarted(payload) => {
|
||||
println!("\n< item started: {:?}", payload.item);
|
||||
}
|
||||
ServerNotification::ItemCompleted(payload) => {
|
||||
println!("< item completed: {:?}", payload.item);
|
||||
}
|
||||
ServerNotification::TurnCompleted(payload) => {
|
||||
if payload.turn.id == turn_id {
|
||||
println!("\n< turn/completed notification: {:?}", payload.turn.status);
|
||||
if payload.turn.status == TurnStatus::Failed
|
||||
&& let Some(error) = payload.turn.error
|
||||
{
|
||||
println!("[turn error] {}", error.message);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
ServerNotification::McpToolCallProgress(payload) => {
|
||||
println!("< MCP tool progress: {}", payload.message);
|
||||
}
|
||||
_ => {
|
||||
println!("[UNKNOWN SERVER NOTIFICATION] {server_notification:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn extract_event(
|
||||
&self,
|
||||
notification: JSONRPCNotification,
|
||||
conversation_id: &ThreadId,
|
||||
) -> Result<Option<Event>> {
|
||||
let params = notification
|
||||
.params
|
||||
.context("event notification missing params")?;
|
||||
|
||||
let mut map = match params {
|
||||
Value::Object(map) => map,
|
||||
other => bail!("unexpected params shape: {other:?}"),
|
||||
};
|
||||
|
||||
let conversation_value = map
|
||||
.remove("conversationId")
|
||||
.context("event missing conversationId")?;
|
||||
let notification_conversation: ThreadId = serde_json::from_value(conversation_value)
|
||||
.context("conversationId was not a valid UUID")?;
|
||||
|
||||
if ¬ification_conversation != conversation_id {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let event_value = Value::Object(map);
|
||||
let event: Event =
|
||||
serde_json::from_value(event_value).context("failed to decode event payload")?;
|
||||
Ok(Some(event))
|
||||
}
|
||||
|
||||
fn send_request<T>(
|
||||
&mut self,
|
||||
request: ClientRequest,
|
||||
request_id: RequestId,
|
||||
method: &str,
|
||||
) -> Result<T>
|
||||
where
|
||||
T: DeserializeOwned,
|
||||
{
|
||||
self.write_request(&request)?;
|
||||
self.wait_for_response(request_id, method)
|
||||
}
|
||||
|
||||
fn write_request(&mut self, request: &ClientRequest) -> Result<()> {
|
||||
let request_json = serde_json::to_string(request)?;
|
||||
let request_pretty = serde_json::to_string_pretty(request)?;
|
||||
print_multiline_with_prefix("> ", &request_pretty);
|
||||
|
||||
if let Some(stdin) = self.stdin.as_mut() {
|
||||
writeln!(stdin, "{request_json}")?;
|
||||
stdin
|
||||
.flush()
|
||||
.context("failed to flush request to codex app-server")?;
|
||||
} else {
|
||||
bail!("codex app-server stdin closed");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn wait_for_response<T>(&mut self, request_id: RequestId, method: &str) -> Result<T>
|
||||
where
|
||||
T: DeserializeOwned,
|
||||
{
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message()?;
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Response(JSONRPCResponse { id, result }) => {
|
||||
if id == request_id {
|
||||
return serde_json::from_value(result)
|
||||
.with_context(|| format!("{method} response missing payload"));
|
||||
}
|
||||
}
|
||||
JSONRPCMessage::Error(err) => {
|
||||
if err.id == request_id {
|
||||
bail!("{method} failed: {err:?}");
|
||||
}
|
||||
}
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
self.pending_notifications.push_back(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(request) => {
|
||||
self.handle_server_request(request)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn next_notification(&mut self) -> Result<JSONRPCNotification> {
|
||||
if let Some(notification) = self.pending_notifications.pop_front() {
|
||||
return Ok(notification);
|
||||
}
|
||||
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message()?;
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => return Ok(notification),
|
||||
JSONRPCMessage::Response(_) | JSONRPCMessage::Error(_) => {
|
||||
// No outstanding requests, so ignore stray responses/errors for now.
|
||||
continue;
|
||||
}
|
||||
JSONRPCMessage::Request(request) => {
|
||||
self.handle_server_request(request)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_jsonrpc_message(&mut self) -> Result<JSONRPCMessage> {
|
||||
loop {
|
||||
let mut response_line = String::new();
|
||||
let bytes = self
|
||||
.stdout
|
||||
.read_line(&mut response_line)
|
||||
.context("failed to read from codex app-server")?;
|
||||
|
||||
if bytes == 0 {
|
||||
bail!("codex app-server closed stdout");
|
||||
}
|
||||
|
||||
let trimmed = response_line.trim();
|
||||
if trimmed.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let parsed: Value =
|
||||
serde_json::from_str(trimmed).context("response was not valid JSON-RPC")?;
|
||||
let pretty = serde_json::to_string_pretty(&parsed)?;
|
||||
print_multiline_with_prefix("< ", &pretty);
|
||||
let message: JSONRPCMessage = serde_json::from_value(parsed)
|
||||
.context("response was not a valid JSON-RPC message")?;
|
||||
return Ok(message);
|
||||
}
|
||||
}
|
||||
|
||||
fn request_id(&self) -> RequestId {
|
||||
RequestId::String(Uuid::new_v4().to_string())
|
||||
}
|
||||
|
||||
fn handle_server_request(&mut self, request: JSONRPCRequest) -> Result<()> {
|
||||
let server_request = ServerRequest::try_from(request)
|
||||
.context("failed to deserialize ServerRequest from JSONRPCRequest")?;
|
||||
|
||||
match server_request {
|
||||
ServerRequest::CommandExecutionRequestApproval { request_id, params } => {
|
||||
self.handle_command_execution_request_approval(request_id, params)?;
|
||||
}
|
||||
ServerRequest::FileChangeRequestApproval { request_id, params } => {
|
||||
self.approve_file_change_request(request_id, params)?;
|
||||
}
|
||||
other => {
|
||||
bail!("received unsupported server request: {other:?}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_command_execution_request_approval(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
params: CommandExecutionRequestApprovalParams,
|
||||
) -> Result<()> {
|
||||
let CommandExecutionRequestApprovalParams {
|
||||
thread_id,
|
||||
turn_id,
|
||||
item_id,
|
||||
reason,
|
||||
proposed_execpolicy_amendment,
|
||||
} = params;
|
||||
|
||||
println!(
|
||||
"\n< commandExecution approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
|
||||
);
|
||||
if let Some(reason) = reason.as_deref() {
|
||||
println!("< reason: {reason}");
|
||||
}
|
||||
if let Some(execpolicy_amendment) = proposed_execpolicy_amendment.as_ref() {
|
||||
println!("< proposed execpolicy amendment: {execpolicy_amendment:?}");
|
||||
}
|
||||
|
||||
let response = CommandExecutionRequestApprovalResponse {
|
||||
decision: CommandExecutionApprovalDecision::Accept,
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved commandExecution request for item {item_id}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn approve_file_change_request(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
params: FileChangeRequestApprovalParams,
|
||||
) -> Result<()> {
|
||||
let FileChangeRequestApprovalParams {
|
||||
thread_id,
|
||||
turn_id,
|
||||
item_id,
|
||||
reason,
|
||||
grant_root,
|
||||
} = params;
|
||||
|
||||
println!(
|
||||
"\n< fileChange approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
|
||||
);
|
||||
if let Some(reason) = reason.as_deref() {
|
||||
println!("< reason: {reason}");
|
||||
}
|
||||
if let Some(grant_root) = grant_root.as_deref() {
|
||||
println!("< grant root: {}", grant_root.display());
|
||||
}
|
||||
|
||||
let response = FileChangeRequestApprovalResponse {
|
||||
decision: FileChangeApprovalDecision::Accept,
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved fileChange request for item {item_id}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_server_request_response<T>(&mut self, request_id: RequestId, response: &T) -> Result<()>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
let message = JSONRPCMessage::Response(JSONRPCResponse {
|
||||
id: request_id,
|
||||
result: serde_json::to_value(response)?,
|
||||
});
|
||||
self.write_jsonrpc_message(message)
|
||||
}
|
||||
|
||||
fn write_jsonrpc_message(&mut self, message: JSONRPCMessage) -> Result<()> {
|
||||
let payload = serde_json::to_string(&message)?;
|
||||
let pretty = serde_json::to_string_pretty(&message)?;
|
||||
print_multiline_with_prefix("> ", &pretty);
|
||||
|
||||
if let Some(stdin) = self.stdin.as_mut() {
|
||||
writeln!(stdin, "{payload}")?;
|
||||
stdin
|
||||
.flush()
|
||||
.context("failed to flush response to codex app-server")?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
bail!("codex app-server stdin closed")
|
||||
}
|
||||
}
|
||||
|
||||
fn print_multiline_with_prefix(prefix: &str, payload: &str) {
|
||||
for line in payload.lines() {
|
||||
println!("{prefix}{line}");
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for CodexClient {
|
||||
fn drop(&mut self) {
|
||||
let _ = self.stdin.take();
|
||||
|
||||
if let Ok(Some(status)) = self.child.try_wait() {
|
||||
println!("[codex app-server exited: {status}]");
|
||||
return;
|
||||
}
|
||||
|
||||
thread::sleep(Duration::from_millis(100));
|
||||
|
||||
if let Ok(Some(status)) = self.child.try_wait() {
|
||||
println!("[codex app-server exited: {status}]");
|
||||
return;
|
||||
}
|
||||
|
||||
let _ = self.child.kill();
|
||||
let _ = self.child.wait();
|
||||
}
|
||||
}
|
||||
8
codex-rs/app-server/BUILD.bazel
Normal file
@@ -0,0 +1,8 @@
|
||||
load("//:defs.bzl", "codex_rust_crate")
|
||||
|
||||
codex_rust_crate(
|
||||
name = "app-server",
|
||||
crate_name = "codex_app_server",
|
||||
integration_deps_extra = ["//codex-rs/app-server/tests/common:common"],
|
||||
test_tags = ["no-sandbox"],
|
||||
)
|
||||
@@ -1,7 +1,8 @@
|
||||
[package]
|
||||
edition = "2024"
|
||||
name = "codex-app-server"
|
||||
version = { workspace = true }
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[[bin]]
|
||||
name = "codex-app-server"
|
||||
@@ -19,15 +20,21 @@ anyhow = { workspace = true }
|
||||
codex-arg0 = { workspace = true }
|
||||
codex-common = { workspace = true, features = ["cli"] }
|
||||
codex-core = { workspace = true }
|
||||
codex-backend-client = { workspace = true }
|
||||
codex-file-search = { workspace = true }
|
||||
codex-login = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-feedback = { workspace = true }
|
||||
codex-rmcp-client = { workspace = true }
|
||||
codex-utils-absolute-path = { workspace = true }
|
||||
codex-utils-json-to-toml = { workspace = true }
|
||||
# We should only be using mcp-types for JSON-RPC types: it would be nice to
|
||||
# split this out into a separate crate at some point.
|
||||
mcp-types = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
mcp-types = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
"io-std",
|
||||
"macros",
|
||||
@@ -41,11 +48,11 @@ uuid = { workspace = true, features = ["serde", "v7"] }
|
||||
|
||||
[dev-dependencies]
|
||||
app_test_support = { workspace = true }
|
||||
assert_cmd = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
core_test_support = { workspace = true }
|
||||
mcp-types = { workspace = true }
|
||||
os_info = { workspace = true }
|
||||
pretty_assertions = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
serial_test = { workspace = true }
|
||||
wiremock = { workspace = true }
|
||||
shlex = { workspace = true }
|
||||
|
||||
626
codex-rs/app-server/README.md
Normal file
@@ -0,0 +1,626 @@
|
||||
# codex-app-server
|
||||
|
||||
`codex app-server` is the interface Codex uses to power rich clients such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt).
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Protocol](#protocol)
|
||||
- [Message Schema](#message-schema)
|
||||
- [Core Primitives](#core-primitives)
|
||||
- [Lifecycle Overview](#lifecycle-overview)
|
||||
- [Initialization](#initialization)
|
||||
- [API Overview](#api-overview)
|
||||
- [Events](#events)
|
||||
- [Approvals](#approvals)
|
||||
- [Skills](#skills)
|
||||
- [Auth endpoints](#auth-endpoints)
|
||||
|
||||
## Protocol
|
||||
|
||||
Similar to [MCP](https://modelcontextprotocol.io/), `codex app-server` supports bidirectional communication, streaming JSONL over stdio. The protocol is JSON-RPC 2.0, though the `"jsonrpc":"2.0"` header is omitted.
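
For example, a request and its response each occupy a single line on the stream. The payloads below are illustrative and shown un-pretty-printed, as they appear on the wire:

```json
{"method":"thread/start","id":1,"params":{}}
{"id":1,"result":{"thread":{"id":"thr_123","preview":"","modelProvider":"openai","createdAt":1730910000}}}
```

The rest of this document pretty-prints messages for readability, but each message is always one line of JSON on stdio.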
|
||||
|
||||
## Message Schema
|
||||
|
||||
Currently, you can dump a TypeScript version of the schema using `codex app-server generate-ts`, or a JSON Schema bundle via `codex app-server generate-json-schema`. Each output is specific to the version of Codex you used to run the command, so the generated artifacts are guaranteed to match that version.
|
||||
|
||||
```
|
||||
codex app-server generate-ts --out DIR
|
||||
codex app-server generate-json-schema --out DIR
|
||||
```
|
||||
|
||||
## Core Primitives
|
||||
|
||||
The API exposes three top level primitives representing an interaction between a user and Codex:
|
||||
|
||||
- **Thread**: A conversation between a user and the Codex agent. Each thread contains multiple turns.
|
||||
- **Turn**: One turn of the conversation, typically starting with a user message and finishing with an agent message. Each turn contains multiple items.
|
||||
- **Item**: Represents user inputs and agent outputs as part of the turn, persisted and used as the context for future conversations. Example items include user message, agent reasoning, agent message, shell command, file edit, etc.
|
||||
|
||||
Use the thread APIs to create, list, or archive conversations. Drive a conversation with turn APIs and stream progress via turn notifications.
|
||||
|
||||
## Lifecycle Overview
|
||||
|
||||
- Initialize once: Immediately after launching the codex app-server process, send an `initialize` request with your client metadata, then emit an `initialized` notification. Any other request before this handshake gets rejected.
|
||||
- Start (or resume) a thread: Call `thread/start` to open a fresh conversation. The response returns the thread object and you’ll also get a `thread/started` notification. If you’re continuing an existing conversation, call `thread/resume` with its ID instead. If you want to branch from an existing conversation, call `thread/fork` to create a new thread id with copied history.
|
||||
- Begin a turn: To send user input, call `turn/start` with the target `threadId` and the user's input. Optional fields let you override model, cwd, sandbox policy, etc. This immediately returns the new turn object and triggers a `turn/started` notification.
|
||||
- Stream events: After `turn/start`, keep reading JSON-RPC notifications on stdout. You’ll see `item/started`, `item/completed`, deltas like `item/agentMessage/delta`, tool progress, etc. These represent streaming model output plus any side effects (commands, tool calls, reasoning notes).
|
||||
- Finish the turn: When the model is done (or the turn is interrupted via making the `turn/interrupt` call), the server sends `turn/completed` with the final turn state and token usage.
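
Put together, the lifecycle above looks roughly like the following wire trace. This is a sketch: ids, the `my_client` name, and the trimmed payloads (`…`) are illustrative, and each message is a single JSONL line in practice.

```json
{ "method": "initialize", "id": 0, "params": { "clientInfo": { "name": "my_client", "title": "My Client", "version": "0.1.0" } } }
{ "id": 0, "result": { … } }
{ "method": "initialized" }
{ "method": "thread/start", "id": 1, "params": {} }
{ "id": 1, "result": { "thread": { "id": "thr_123", … } } }
{ "method": "thread/started", "params": { "thread": { … } } }
{ "method": "turn/start", "id": 2, "params": { "threadId": "thr_123", "input": [ { "type": "text", "text": "Run tests" } ] } }
{ "id": 2, "result": { "turn": { "id": "turn_456", "status": "inProgress", … } } }
{ "method": "turn/started", "params": { "turn": { … } } }
{ "method": "item/agentMessage/delta", "params": { "itemId": "item_1", "delta": "Running tests…" } }
{ "method": "item/completed", "params": { "item": { "type": "agentMessage", "id": "item_1", "text": "Running tests…" } } }
{ "method": "turn/completed", "params": { "turn": { "id": "turn_456", "status": "completed", … } } }
```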
|
||||
|
||||
## Initialization
|
||||
|
||||
Clients must send a single `initialize` request before invoking any other method, then acknowledge with an `initialized` notification. The server returns the user agent string it will present to upstream services; subsequent requests issued before initialization receive a `"Not initialized"` error, and repeated `initialize` calls receive an `"Already initialized"` error.
|
||||
|
||||
Applications building on top of `codex app-server` should identify themselves via the `clientInfo` parameter.
|
||||
|
||||
**Important**: `clientInfo.name` is used to identify the client for the OpenAI Compliance Logs Platform. If
|
||||
you are developing a new Codex integration that is intended for enterprise use, please contact us to get it
|
||||
added to a known clients list. For more context: https://chatgpt.com/admin/api-reference#tag/Logs:-Codex
|
||||
|
||||
Example (from OpenAI's official VSCode extension):
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "initialize",
|
||||
"id": 0,
|
||||
"params": {
|
||||
"clientInfo": {
|
||||
"name": "codex_vscode",
|
||||
"title": "Codex VS Code Extension",
|
||||
"version": "0.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## API Overview
|
||||
|
||||
- `thread/start` — create a new thread; emits `thread/started` and auto-subscribes you to turn/item events for that thread.
|
||||
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
|
||||
- `thread/fork` — fork an existing thread into a new thread id by copying the stored history; emits `thread/started` and auto-subscribes you to turn/item events for the new thread.
|
||||
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
|
||||
- `thread/loaded/list` — list the thread ids currently loaded in memory.
|
||||
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
|
||||
- `thread/rollback` — drop the last N turns from the agent’s in-memory context and persist a rollback marker in the rollout so future resumes see the pruned history; returns the updated `thread` (with `turns` populated) on success.
|
||||
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
|
||||
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
|
||||
- `review/start` — kick off Codex’s automated reviewer for a thread; responds like `turn/start` and emits `item/started`/`item/completed` notifications with `enteredReviewMode` and `exitedReviewMode` items, plus a final assistant `agentMessage` containing the review.
|
||||
- `command/exec` — run a single command under the server sandbox without starting a thread/turn (handy for utilities and validation).
|
||||
- `model/list` — list available models (with reasoning effort options).
|
||||
- `collaborationMode/list` — list available collaboration mode presets (experimental, no pagination).
|
||||
- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
|
||||
- `skills/config/write` — write user-level skill config by path.
|
||||
- `mcpServer/oauth/login` — start an OAuth login for a configured MCP server; returns an `authorization_url` and later emits `mcpServer/oauthLogin/completed` once the browser flow finishes.
|
||||
- `tool/requestUserInput` — prompt the user with 1–3 short questions for a tool call and return their answers (experimental).
|
||||
- `config/mcpServer/reload` — reload MCP server config from disk and queue a refresh for loaded threads (applied on each thread's next active turn); returns `{}`. Use this after editing `config.toml` without restarting the server.
|
||||
- `mcpServerStatus/list` — enumerate configured MCP servers with their tools, resources, resource templates, and auth status; supports cursor+limit pagination.
|
||||
- `feedback/upload` — submit a feedback report (classification + optional reason/logs and conversation_id); returns the tracking thread id.
|
||||
- `config/read` — fetch the effective config on disk after resolving config layering.
|
||||
- `config/value/write` — write a single config key/value to the user's config.toml on disk.
|
||||
- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk.
|
||||
- `configRequirements/read` — fetch the loaded requirements allow-lists from `requirements.toml` and/or MDM (or `null` if none are configured).
|
||||
|
||||
### Example: Start or resume a thread
|
||||
|
||||
Start a fresh thread when you need a new Codex conversation.
|
||||
|
||||
```json
|
||||
{ "method": "thread/start", "id": 10, "params": {
|
||||
// Optionally set config settings. If not specified, will use the user's
|
||||
// current config settings.
|
||||
"model": "gpt-5.1-codex",
|
||||
"cwd": "/Users/me/project",
|
||||
"approvalPolicy": "never",
|
||||
"sandbox": "workspaceWrite",
|
||||
} }
|
||||
{ "id": 10, "result": {
|
||||
"thread": {
|
||||
"id": "thr_123",
|
||||
"preview": "",
|
||||
"modelProvider": "openai",
|
||||
"createdAt": 1730910000
|
||||
}
|
||||
} }
|
||||
{ "method": "thread/started", "params": { "thread": { … } } }
|
||||
```
|
||||
|
||||
To continue a stored session, call `thread/resume` with the `thread.id` you previously recorded. The response shape matches `thread/start`, and no additional notifications are emitted:
|
||||
|
||||
```json
|
||||
{ "method": "thread/resume", "id": 11, "params": { "threadId": "thr_123" } }
|
||||
{ "id": 11, "result": { "thread": { "id": "thr_123", … } } }
|
||||
```
|
||||
|
||||
To branch from a stored session, call `thread/fork` with the `thread.id`. This creates a new thread id and emits a `thread/started` notification for it:
|
||||
|
||||
```json
|
||||
{ "method": "thread/fork", "id": 12, "params": { "threadId": "thr_123" } }
|
||||
{ "id": 12, "result": { "thread": { "id": "thr_456", … } } }
|
||||
{ "method": "thread/started", "params": { "thread": { … } } }
|
||||
```
|
||||
|
||||
### Example: List threads (with pagination & filters)
|
||||
|
||||
`thread/list` lets you render a history UI. Results are sorted by `createdAt` descending (newest first) by default. Pass any combination of:
|
||||
|
||||
- `cursor` — opaque string from a prior response; omit for the first page.
|
||||
- `limit` — server defaults to a reasonable page size if unset.
|
||||
- `sortKey` — `created_at` (default) or `updated_at`.
|
||||
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.
|
||||
|
||||
Example:
|
||||
|
||||
```json
|
||||
{ "method": "thread/list", "id": 20, "params": {
|
||||
"cursor": null,
|
||||
"limit": 25,
|
||||
"sortKey": "created_at"
|
||||
} }
|
||||
{ "id": 20, "result": {
|
||||
"data": [
|
||||
{ "id": "thr_a", "preview": "Create a TUI", "modelProvider": "openai", "createdAt": 1730831111, "updatedAt": 1730831111 },
|
||||
{ "id": "thr_b", "preview": "Fix tests", "modelProvider": "openai", "createdAt": 1730750000, "updatedAt": 1730750000 }
|
||||
],
|
||||
"nextCursor": "opaque-token-or-null"
|
||||
} }
|
||||
```
|
||||
|
||||
When `nextCursor` is `null`, you’ve reached the final page.
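
To fetch the next page, pass the returned cursor back unchanged. A sketch, with an illustrative cursor value:

```json
{ "method": "thread/list", "id": 22, "params": { "cursor": "opaque-token", "limit": 25 } }
```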
|
||||
|
||||
### Example: List loaded threads
|
||||
|
||||
`thread/loaded/list` returns thread ids currently loaded in memory. This is useful when you want to check which sessions are active without scanning rollouts on disk.
|
||||
|
||||
```json
|
||||
{ "method": "thread/loaded/list", "id": 21 }
|
||||
{ "id": 21, "result": {
|
||||
"data": ["thr_123", "thr_456"]
|
||||
} }
|
||||
```
|
||||
|
||||
### Example: Archive a thread
|
||||
|
||||
Use `thread/archive` to move the persisted rollout (stored as a JSONL file on disk) into the archived sessions directory.
|
||||
|
||||
```json
|
||||
{ "method": "thread/archive", "id": 21, "params": { "threadId": "thr_b" } }
|
||||
{ "id": 21, "result": {} }
|
||||
```
|
||||
|
||||
An archived thread will not appear in future calls to `thread/list`.
|
||||
|
||||
### Example: Start a turn (send user input)
|
||||
|
||||
Turns attach user input (text or images) to a thread and trigger Codex generation. The `input` field is a list of discriminated unions:
|
||||
|
||||
- `{"type":"text","text":"Explain this diff"}`
|
||||
- `{"type":"image","url":"https://…png"}`
|
||||
- `{"type":"localImage","path":"/tmp/screenshot.png"}`
|
||||
|
||||
You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread. `outputSchema` applies only to the current turn.
|
||||
|
||||
```json
|
||||
{ "method": "turn/start", "id": 30, "params": {
|
||||
"threadId": "thr_123",
|
||||
"input": [ { "type": "text", "text": "Run tests" } ],
|
||||
// Below are optional config overrides
|
||||
"cwd": "/Users/me/project",
|
||||
"approvalPolicy": "unlessTrusted",
|
||||
"sandboxPolicy": {
|
||||
"type": "workspaceWrite",
|
||||
"writableRoots": ["/Users/me/project"],
|
||||
"networkAccess": true
|
||||
},
|
||||
"model": "gpt-5.1-codex",
|
||||
"effort": "medium",
|
||||
"summary": "concise",
|
||||
// Optional JSON Schema to constrain the final assistant message for this turn.
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": { "answer": { "type": "string" } },
|
||||
"required": ["answer"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
} }
|
||||
{ "id": 30, "result": { "turn": {
|
||||
"id": "turn_456",
|
||||
"status": "inProgress",
|
||||
"items": [],
|
||||
"error": null
|
||||
} } }
|
||||
```
|
||||
|
||||
### Example: Start a turn (invoke a skill)
|
||||
|
||||
Invoke a skill explicitly by including `$<skill-name>` in the text input and adding a `skill` input item alongside it.
|
||||
|
||||
```json
|
||||
{ "method": "turn/start", "id": 33, "params": {
|
||||
"threadId": "thr_123",
|
||||
"input": [
|
||||
{ "type": "text", "text": "$skill-creator Add a new skill for triaging flaky CI and include step-by-step usage." },
|
||||
{ "type": "skill", "name": "skill-creator", "path": "/Users/me/.codex/skills/skill-creator/SKILL.md" }
|
||||
]
|
||||
} }
|
||||
{ "id": 33, "result": { "turn": {
|
||||
"id": "turn_457",
|
||||
"status": "inProgress",
|
||||
"items": [],
|
||||
"error": null
|
||||
} } }
|
||||
```
|
||||
|
||||
### Example: Interrupt an active turn
|
||||
|
||||
You can cancel a running Turn with `turn/interrupt`.
|
||||
|
||||
```json
|
||||
{ "method": "turn/interrupt", "id": 31, "params": {
|
||||
"threadId": "thr_123",
|
||||
"turnId": "turn_456"
|
||||
} }
|
||||
{ "id": 31, "result": {} }
|
||||
```
|
||||
|
||||
The server requests cancellation of any running subprocesses, then emits a `turn/completed` event with `status: "interrupted"`. Rely on `turn/completed` to know when Codex-side cleanup is done.
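
The terminal notification for an interrupted turn looks like this (a sketch; the payload is trimmed and values are illustrative):

```json
{ "method": "turn/completed", "params": { "turn": { "id": "turn_456", "status": "interrupted", "items": [], "error": null } } }
```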
|
||||
|
||||
### Example: Request a code review
|
||||
|
||||
Use `review/start` to run Codex’s reviewer on the currently checked-out project. The request takes the thread id plus a `target` describing what should be reviewed:
|
||||
|
||||
- `{"type":"uncommittedChanges"}` — staged, unstaged, and untracked files.
|
||||
- `{"type":"baseBranch","branch":"main"}` — diff against the provided branch’s upstream (see prompt for the exact `git merge-base`/`git diff` instructions Codex will run).
|
||||
- `{"type":"commit","sha":"abc1234","title":"Optional subject"}` — review a specific commit.
|
||||
- `{"type":"custom","instructions":"Free-form reviewer instructions"}` — fallback prompt equivalent to the legacy manual review request.
|
||||
- `delivery` (`"inline"` or `"detached"`, default `"inline"`) — where the review runs:
|
||||
- `"inline"`: run the review as a new turn on the existing thread. The response’s `reviewThreadId` equals the original `threadId`, and no new `thread/started` notification is emitted.
|
||||
- `"detached"`: fork a new review thread from the parent conversation and run the review there. The response’s `reviewThreadId` is the id of this new review thread, and the server emits a `thread/started` notification for it before streaming review items.
|
||||
|
||||
Example request/response:
|
||||
|
||||
```json
|
||||
{ "method": "review/start", "id": 40, "params": {
|
||||
"threadId": "thr_123",
|
||||
"delivery": "inline",
|
||||
"target": { "type": "commit", "sha": "1234567deadbeef", "title": "Polish tui colors" }
|
||||
} }
|
||||
{ "id": 40, "result": {
|
||||
"turn": {
|
||||
"id": "turn_900",
|
||||
"status": "inProgress",
|
||||
"items": [
|
||||
{ "type": "userMessage", "id": "turn_900", "content": [ { "type": "text", "text": "Review commit 1234567: Polish tui colors" } ] }
|
||||
],
|
||||
"error": null
|
||||
},
|
||||
"reviewThreadId": "thr_123"
|
||||
} }
|
||||
```
|
||||
|
||||
For a detached review, use `"delivery": "detached"`. The response is the same shape, but `reviewThreadId` will be the id of the new review thread (different from the original `threadId`). The server also emits a `thread/started` notification for that new thread before streaming the review turn.
|
||||
|
||||
Codex streams the usual `turn/started` notification followed by an `item/started`
|
||||
with an `enteredReviewMode` item so clients can show progress:
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "item/started",
|
||||
"params": {
|
||||
"item": {
|
||||
"type": "enteredReviewMode",
|
||||
"id": "turn_900",
|
||||
"review": "current changes"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
When the reviewer finishes, the server emits `item/started` and `item/completed`
|
||||
containing an `exitedReviewMode` item with the final review text:
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "item/completed",
|
||||
"params": {
|
||||
"item": {
|
||||
"type": "exitedReviewMode",
|
||||
"id": "turn_900",
|
||||
"review": "Looks solid overall...\n\n- Prefer Stylize helpers — app.rs:10-20\n ..."
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The `review` string is plain text that already bundles the overall explanation plus a bullet list for each structured finding (matching `ThreadItem::ExitedReviewMode` in the generated schema). Use this notification to render the reviewer output in your client.
|
||||
|
||||
### Example: One-off command execution
|
||||
|
||||
Run a standalone command (argv vector) in the server’s sandbox without creating a thread or turn:
|
||||
|
||||
```json
|
||||
{ "method": "command/exec", "id": 32, "params": {
|
||||
"command": ["ls", "-la"],
|
||||
"cwd": "/Users/me/project", // optional; defaults to server cwd
|
||||
"sandboxPolicy": { "type": "workspaceWrite" }, // optional; defaults to user config
|
||||
"timeoutMs": 10000 // optional; ms timeout; defaults to server timeout
|
||||
} }
|
||||
{ "id": 32, "result": { "exitCode": 0, "stdout": "...", "stderr": "" } }
|
||||
```
|
||||
|
||||
- For clients that are already sandboxed externally, set `sandboxPolicy` to `{"type":"externalSandbox","networkAccess":"enabled"}` (or omit `networkAccess` to keep it restricted). Codex will not enforce its own sandbox in this mode; it tells the model it has full file-system access and passes the `networkAccess` state through `environment_context`.
|
||||
|
||||
Notes:
|
||||
|
||||
- Empty `command` arrays are rejected.
|
||||
- `sandboxPolicy` accepts the same shape used by `turn/start` (e.g., `dangerFullAccess`, `readOnly`, `workspaceWrite` with flags, `externalSandbox` with `networkAccess` `restricted|enabled`).
|
||||
- When omitted, `timeoutMs` falls back to the server default.
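
For example, an externally sandboxed client might run (values are illustrative):

```json
{ "method": "command/exec", "id": 33, "params": {
  "command": ["cargo", "check"],
  "sandboxPolicy": { "type": "externalSandbox", "networkAccess": "enabled" }
} }
{ "id": 33, "result": { "exitCode": 0, "stdout": "...", "stderr": "" } }
```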
|
||||
|
||||
## Events
|
||||
|
||||
Event notifications are the server-initiated event stream for thread lifecycles, turn lifecycles, and the items within them. After you start or resume a thread, keep reading stdout for `thread/started`, `turn/*`, and `item/*` notifications.
|
||||
|
||||
### Turn events
|
||||
|
||||
The app-server streams JSON-RPC notifications while a turn is running. Each turn starts with `turn/started` (initial `turn`) and ends with `turn/completed` (final `turn` status). Token usage events stream separately via `thread/tokenUsage/updated`. Clients subscribe to the events they care about, rendering each item incrementally as updates arrive. The per-item lifecycle is always: `item/started` → zero or more item-specific deltas → `item/completed`.
|
||||
|
||||
- `turn/started` — `{ turn }` with the turn id, empty `items`, and `status: "inProgress"`.
|
||||
- `turn/completed` — `{ turn }` where `turn.status` is `completed`, `interrupted`, or `failed`; failures carry `{ error: { message, codexErrorInfo?, additionalDetails? } }`.
|
||||
- `turn/diff/updated` — `{ threadId, turnId, diff }`, emitted after every `fileChange` item; `diff` is the latest aggregated unified diff across every file change in the turn. UIs can render this to show the full "what changed" view without stitching individual `fileChange` items.
|
||||
- `turn/plan/updated` — `{ turnId, explanation?, plan }` whenever the agent shares or changes its plan; each `plan` entry is `{ step, status }` with `status` in `pending`, `inProgress`, or `completed`.
|
||||
|
||||
Today both `turn/started` and `turn/completed` carry an empty `items` array even when item events were streamed; rely on `item/*` notifications for the canonical item list until this is fixed.
|
||||
|
||||
#### Items
|
||||
|
||||
`ThreadItem` is the tagged union carried in turn responses and `item/*` notifications. Currently we support events for the following items:
|
||||
|
||||
- `userMessage` — `{id, content}` where `content` is a list of user inputs (`text`, `image`, or `localImage`).
|
||||
- `agentMessage` — `{id, text}` containing the accumulated agent reply.
|
||||
- `reasoning` — `{id, summary, content}` where `summary` holds streamed reasoning summaries (applicable for most OpenAI models) and `content` holds raw reasoning blocks (applicable for e.g. open source models).
|
||||
- `commandExecution` — `{id, command, cwd, status, commandActions, aggregatedOutput?, exitCode?, durationMs?}` for sandboxed commands; `status` is `inProgress`, `completed`, `failed`, or `declined`.
|
||||
- `fileChange` — `{id, changes, status}` describing proposed edits; `changes` list `{path, kind, diff}` and `status` is `inProgress`, `completed`, `failed`, or `declined`.
|
||||
- `mcpToolCall` — `{id, server, tool, status, arguments, result?, error?}` describing MCP calls; `status` is `inProgress`, `completed`, or `failed`.
|
||||
- `collabToolCall` — `{id, tool, status, senderThreadId, receiverThreadId?, newThreadId?, prompt?, agentStatus?}` describing collab tool calls (`spawn_agent`, `send_input`, `wait`, `close_agent`); `status` is `inProgress`, `completed`, or `failed`.
|
||||
- `webSearch` — `{id, query}` for a web search request issued by the agent.
|
||||
- `imageView` — `{id, path}` emitted when the agent invokes the image viewer tool.
|
||||
- `enteredReviewMode` — `{id, review}` sent when the reviewer starts; `review` is a short user-facing label such as `"current changes"` or the requested target description.
|
||||
- `exitedReviewMode` — `{id, review}` emitted when the reviewer finishes; `review` is the full plain-text review (usually, overall notes plus bullet point findings).
|
||||
- `compacted` - `{threadId, turnId}` emitted when Codex compacts the conversation history. This can happen automatically.
|
||||
|
||||
All items emit two shared lifecycle events:
|
||||
|
||||
- `item/started` — emits the full `item` when a new unit of work begins so the UI can render it immediately; the `item.id` in this payload matches the `itemId` used by deltas.
|
||||
- `item/completed` — sends the final `item` once that work finishes (e.g., after a tool call or message completes); treat this as the authoritative state.
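
For example, a completed `commandExecution` item arrives as an `item/completed` notification along these lines. This is a sketch: the optional fields are trimmed to those listed above, the values are illustrative, and the exact encoding of fields like `command` may vary by version.

```json
{ "method": "item/completed", "params": { "item": {
  "type": "commandExecution",
  "id": "item_3",
  "command": "cargo test",
  "cwd": "/Users/me/project",
  "status": "completed",
  "aggregatedOutput": "running 12 tests…",
  "exitCode": 0,
  "durationMs": 4200
} } }
```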
|
||||
|
||||
There are additional item-specific events:
|
||||
|
||||
#### agentMessage
|
||||
|
||||
- `item/agentMessage/delta` — appends streamed text for the agent message; concatenate `delta` values for the same `itemId` in order to reconstruct the full reply.
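
For example (a sketch; ids and text are illustrative), these two deltas for the same `itemId` reconstruct the reply "Hello world":

```json
{ "method": "item/agentMessage/delta", "params": { "itemId": "item_2", "delta": "Hello " } }
{ "method": "item/agentMessage/delta", "params": { "itemId": "item_2", "delta": "world" } }
```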
|
||||
|
||||
#### reasoning
|
||||
|
||||
- `item/reasoning/summaryTextDelta` — streams readable reasoning summaries; `summaryIndex` increments when a new summary section opens.
|
||||
- `item/reasoning/summaryPartAdded` — marks the boundary between reasoning summary sections for an `itemId`; subsequent `summaryTextDelta` entries share the same `summaryIndex`.
|
||||
- `item/reasoning/textDelta` — streams raw reasoning text (only applicable for e.g. open source models); use `contentIndex` to group deltas that belong together before showing them in the UI.
|
||||
|
||||
#### commandExecution
|
||||
|
||||
- `item/commandExecution/outputDelta` — streams stdout/stderr for the command; append deltas in order to render live output alongside `aggregatedOutput` in the final item.
|
||||
Final `commandExecution` items include parsed `commandActions`, `status`, `exitCode`, and `durationMs` so the UI can summarize what ran and whether it succeeded.
|
||||
|
||||
#### fileChange
|
||||
|
||||
- `item/fileChange/outputDelta` - streams the response of the underlying `apply_patch` tool call.
|
||||
|
||||
### Errors
|
||||
|
||||
The `error` event is emitted whenever the server hits an error mid-turn (for example, upstream model errors or quota limits). It carries the same `{ error: { message, codexErrorInfo?, additionalDetails? } }` payload as a turn that ends with `status: "failed"` and may precede that terminal notification.
|
||||
|
||||
`codexErrorInfo` maps to the `CodexErrorInfo` enum. Common values:
|
||||
|
||||
- `ContextWindowExceeded`
|
||||
- `UsageLimitExceeded`
|
||||
- `HttpConnectionFailed { httpStatusCode? }`: upstream HTTP failures including 4xx/5xx
|
||||
- `ResponseStreamConnectionFailed { httpStatusCode? }`: failure to connect to the response SSE stream
|
||||
- `ResponseStreamDisconnected { httpStatusCode? }`: disconnect of the response SSE stream in the middle of a turn before completion
|
||||
- `ResponseTooManyFailedAttempts { httpStatusCode? }`
|
||||
- `BadRequest`
|
||||
- `Unauthorized`
|
||||
- `SandboxError`
|
||||
- `InternalServerError`
|
||||
- `Other`: all unclassified errors
|
||||
|
||||
When an upstream HTTP status is available (for example, from the Responses API or a provider), it is forwarded in `httpStatusCode` on the relevant `codexErrorInfo` variant.
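
A mid-turn error notification might look like the following. This is a sketch: the exact envelope and the serialization of the `codexErrorInfo` enum variants are illustrative and may differ by version.

```json
{ "method": "error", "params": { "error": {
  "message": "failed to connect to upstream model",
  "codexErrorInfo": { "type": "HttpConnectionFailed", "httpStatusCode": 502 },
  "additionalDetails": null
} } }
```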
|
||||
|
||||
## Approvals
|
||||
|
||||
Certain actions (shell commands or modifying files) may require explicit user approval depending on the user's config. When `turn/start` is used, the app-server drives an approval flow by sending a server-initiated JSON-RPC request to the client. The client must respond to tell Codex whether to proceed. UIs should present these requests inline with the active turn so users can review the proposed command or diff before choosing.
|
||||
|
||||
- Requests include `threadId` and `turnId`—use them to scope UI state to the active conversation.
|
||||
- Respond with a single `{ "decision": "accept" | "decline" }` payload (plus optional `acceptSettings` on command executions). The server resumes or declines the work and ends the item with `item/completed`.
|
||||
|
||||
### Command execution approvals
|
||||
|
||||
Order of messages:
|
||||
|
||||
1. `item/started` — shows the pending `commandExecution` item with `command`, `cwd`, and other fields so you can render the proposed action.
|
||||
2. `item/commandExecution/requestApproval` (request) — carries the same `itemId`, `threadId`, `turnId`, optionally `reason` or `risk`, plus `parsedCmd` for friendly display.
|
||||
3. Client response — `{ "decision": "accept", "acceptSettings": { "forSession": false } }` or `{ "decision": "decline" }`.
|
||||
4. `item/completed` — final `commandExecution` item with `status: "completed" | "failed" | "declined"` and execution output. Render this as the authoritative result.
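
A wire-level sketch of steps 2 and 3 (ids and field values are illustrative). The approval request is a server-initiated JSON-RPC request, so it carries an `id` that the client echoes in its response:

```json
{ "method": "item/commandExecution/requestApproval", "id": 7, "params": { "threadId": "thr_123", "turnId": "turn_456", "itemId": "item_3", "reason": "command touches files outside the workspace" } }
{ "id": 7, "result": { "decision": "accept", "acceptSettings": { "forSession": false } } }
```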
|
||||
|
||||
### File change approvals
|
||||
|
||||
Order of messages:
|
||||
|
||||
1. `item/started` — emits a `fileChange` item with `changes` (diff chunk summaries) and `status: "inProgress"`. Show the proposed edits and paths to the user.
|
||||
2. `item/fileChange/requestApproval` (request) — includes `itemId`, `threadId`, `turnId`, and an optional `reason`.
|
||||
3. Client response — `{ "decision": "accept" }` or `{ "decision": "decline" }`.
|
||||
4. `item/completed` — returns the same `fileChange` item with `status` updated to `completed`, `failed`, or `declined` after the patch attempt. Rely on this to show success/failure and finalize the diff state in your UI.
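
The corresponding `fileChange` exchange, as a sketch with illustrative ids:

```json
{ "method": "item/fileChange/requestApproval", "id": 8, "params": { "threadId": "thr_123", "turnId": "turn_456", "itemId": "item_4" } }
{ "id": 8, "result": { "decision": "accept" } }
```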
|
||||
|
||||
UI guidance for IDEs: surface an approval dialog as soon as the request arrives. The turn will proceed after the server receives a response to the approval request. The terminal `item/completed` notification will be sent with the appropriate status.
|
||||
|
||||
## Skills
|
||||
|
||||
Invoke a skill by including `$<skill-name>` in the text input. Add a `skill` input item (recommended) so the backend injects full skill instructions instead of relying on the model to resolve the name.
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "turn/start",
|
||||
"id": 101,
|
||||
"params": {
|
||||
"threadId": "thread-1",
|
||||
"input": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "$skill-creator Add a new skill for triaging flaky CI."
|
||||
},
|
||||
{
|
||||
"type": "skill",
|
||||
"name": "skill-creator",
|
||||
"path": "/Users/me/.codex/skills/skill-creator/SKILL.md"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
If you omit the `skill` item, the model will still parse the `$<skill-name>` marker and try to locate the skill, which can add latency.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
$skill-creator Add a new skill for triaging flaky CI and include step-by-step usage.
|
||||
```
|
||||
|
||||
Use `skills/list` to fetch the available skills (optionally scoped by `cwds`, with `forceReload`).
|
||||
|
||||
```json
|
||||
{ "method": "skills/list", "id": 25, "params": {
|
||||
"cwds": ["/Users/me/project"],
|
||||
"forceReload": false
|
||||
} }
|
||||
{ "id": 25, "result": {
|
||||
"data": [{
|
||||
"cwd": "/Users/me/project",
|
||||
"skills": [
|
||||
{ "name": "skill-creator", "description": "Create or update a Codex skill", "enabled": true }
|
||||
],
|
||||
"errors": []
|
||||
}]
|
||||
} }
|
||||
```
|
||||
|
||||
To enable or disable a skill by path:
|
||||
|
||||
```json
|
||||
{
|
||||
"method": "skills/config/write",
|
||||
"id": 26,
|
||||
"params": {
|
||||
"path": "/Users/me/.codex/skills/skill-creator/SKILL.md",
|
||||
"enabled": false
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Auth endpoints
|
||||
|
||||
The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, logout, and inspect ChatGPT rate limits.
|
||||
|
||||
### API Overview
|
||||
|
||||
- `account/read` — fetch current account info; optionally refresh tokens.
|
||||
- `account/login/start` — begin login (`apiKey` or `chatgpt`).
|
||||
- `account/login/completed` (notify) — emitted when a login attempt finishes (success or error).
|
||||
- `account/login/cancel` — cancel a pending ChatGPT login by `loginId`.
|
||||
- `account/logout` — sign out; triggers `account/updated`.
|
||||
- `account/updated` (notify) — emitted whenever auth mode changes (`authMode`: `apikey`, `chatgpt`, or `null`).
|
||||
- `account/rateLimits/read` — fetch ChatGPT rate limits; updates arrive via `account/rateLimits/updated` (notify).
|
||||
- `account/rateLimits/updated` (notify) — emitted whenever a user's ChatGPT rate limits change.
|
||||
- `mcpServer/oauthLogin/completed` (notify) — emitted after a `mcpServer/oauth/login` flow finishes for a server; payload includes `{ name, success, error? }`.
|
||||
|
||||
### 1) Check auth state
|
||||
|
||||
Request:
|
||||
|
||||
```json
|
||||
{ "method": "account/read", "id": 1, "params": { "refreshToken": false } }
|
||||
```
|
||||
|
||||
Response examples:
|
||||
|
||||
```json
|
||||
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": false } } // No OpenAI auth needed (e.g., OSS/local models)
|
||||
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": true } } // OpenAI auth required (typical for OpenAI-hosted models)
|
||||
{ "id": 1, "result": { "account": { "type": "apiKey" }, "requiresOpenaiAuth": true } }
|
||||
{ "id": 1, "result": { "account": { "type": "chatgpt", "email": "user@example.com", "planType": "pro" }, "requiresOpenaiAuth": true } }
|
||||
```
|
||||
|
||||
Field notes:
|
||||
|
||||
- `refreshToken` (bool): set `true` to force a token refresh.
|
||||
- `requiresOpenaiAuth` reflects the active provider; when `false`, Codex can run without OpenAI credentials.
|
||||
|
||||
### 2) Log in with an API key
|
||||
|
||||
1. Send:
|
||||
```json
|
||||
{
|
||||
"method": "account/login/start",
|
||||
"id": 2,
|
||||
"params": { "type": "apiKey", "apiKey": "sk-…" }
|
||||
}
|
||||
```
|
||||
2. Expect:
|
||||
```json
|
||||
{ "id": 2, "result": { "type": "apiKey" } }
|
||||
```
|
||||
3. Notifications:
|
||||
```json
|
||||
{ "method": "account/login/completed", "params": { "loginId": null, "success": true, "error": null } }
|
||||
{ "method": "account/updated", "params": { "authMode": "apikey" } }
|
||||
```
|
||||
|
||||
### 3) Log in with ChatGPT (browser flow)
|
||||
|
||||
1. Start:
|
||||
```json
|
||||
{ "method": "account/login/start", "id": 3, "params": { "type": "chatgpt" } }
|
||||
{ "id": 3, "result": { "type": "chatgpt", "loginId": "<uuid>", "authUrl": "https://chatgpt.com/…&redirect_uri=http%3A%2F%2Flocalhost%3A<port>%2Fauth%2Fcallback" } }
|
||||
```
|
||||
2. Open `authUrl` in a browser; the app-server hosts the local callback.
|
||||
3. Wait for notifications:
|
||||
```json
|
||||
{ "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": true, "error": null } }
|
||||
{ "method": "account/updated", "params": { "authMode": "chatgpt" } }
|
||||
```
|
||||
|
||||
### 4) Cancel a ChatGPT login
|
||||
|
||||
```json
|
||||
{ "method": "account/login/cancel", "id": 4, "params": { "loginId": "<uuid>" } }
|
||||
{ "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": false, "error": "…" } }
|
||||
```
|
||||
|
||||
### 5) Logout
|
||||
|
||||
```json
|
||||
{ "method": "account/logout", "id": 5 }
|
||||
{ "id": 5, "result": {} }
|
||||
{ "method": "account/updated", "params": { "authMode": null } }
|
||||
```
|
||||
|
||||
### 6) Rate limits (ChatGPT)
|
||||
|
||||
```json
|
||||
{ "method": "account/rateLimits/read", "id": 6 }
|
||||
{ "id": 6, "result": { "rateLimits": { "primary": { "usedPercent": 25, "windowDurationMins": 15, "resetsAt": 1730947200 }, "secondary": null } } }
|
||||
{ "method": "account/rateLimits/updated", "params": { "rateLimits": { … } } }
|
||||
```
|
||||
|
||||
Field notes:
|
||||
|
||||
- `usedPercent` is current usage within the OpenAI quota window.
|
||||
- `windowDurationMins` is the quota window length.
|
||||
- `resetsAt` is a Unix timestamp (seconds) for the next reset.
|
||||
2451
codex-rs/app-server/src/bespoke_event_handling.rs
Normal file
File diff suppressed because it is too large
155
codex-rs/app-server/src/config_api.rs
Normal file
@@ -0,0 +1,155 @@
|
||||
use crate::error_code::INTERNAL_ERROR_CODE;
|
||||
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
|
||||
use codex_app_server_protocol::ConfigBatchWriteParams;
|
||||
use codex_app_server_protocol::ConfigReadParams;
|
||||
use codex_app_server_protocol::ConfigReadResponse;
|
||||
use codex_app_server_protocol::ConfigRequirements;
|
||||
use codex_app_server_protocol::ConfigRequirementsReadResponse;
|
||||
use codex_app_server_protocol::ConfigValueWriteParams;
|
||||
use codex_app_server_protocol::ConfigWriteErrorCode;
|
||||
use codex_app_server_protocol::ConfigWriteResponse;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::SandboxMode;
|
||||
use codex_core::config::ConfigService;
|
||||
use codex_core::config::ConfigServiceError;
|
||||
use codex_core::config_loader::ConfigRequirementsToml;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_core::config_loader::SandboxModeRequirement as CoreSandboxModeRequirement;
|
||||
use serde_json::json;
|
||||
use std::path::PathBuf;
|
||||
use toml::Value as TomlValue;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct ConfigApi {
|
||||
service: ConfigService,
|
||||
}
|
||||
|
||||
impl ConfigApi {
|
||||
pub(crate) fn new(
|
||||
codex_home: PathBuf,
|
||||
cli_overrides: Vec<(String, TomlValue)>,
|
||||
loader_overrides: LoaderOverrides,
|
||||
) -> Self {
|
||||
Self {
|
||||
service: ConfigService::new(codex_home, cli_overrides, loader_overrides),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn read(
|
||||
&self,
|
||||
params: ConfigReadParams,
|
||||
) -> Result<ConfigReadResponse, JSONRPCErrorError> {
|
||||
self.service.read(params).await.map_err(map_error)
|
||||
}
|
||||
|
||||
pub(crate) async fn config_requirements_read(
|
||||
&self,
|
||||
) -> Result<ConfigRequirementsReadResponse, JSONRPCErrorError> {
|
||||
let requirements = self
|
||||
.service
|
||||
.read_requirements()
|
||||
.await
|
||||
.map_err(map_error)?
|
||||
.map(map_requirements_toml_to_api);
|
||||
|
||||
Ok(ConfigRequirementsReadResponse { requirements })
|
||||
}
|
||||
|
||||
pub(crate) async fn write_value(
|
||||
&self,
|
||||
params: ConfigValueWriteParams,
|
||||
) -> Result<ConfigWriteResponse, JSONRPCErrorError> {
|
||||
self.service.write_value(params).await.map_err(map_error)
|
||||
}
|
||||
|
||||
pub(crate) async fn batch_write(
|
||||
&self,
|
||||
params: ConfigBatchWriteParams,
|
||||
) -> Result<ConfigWriteResponse, JSONRPCErrorError> {
|
||||
self.service.batch_write(params).await.map_err(map_error)
|
||||
}
|
||||
}
|
||||
|
||||
fn map_requirements_toml_to_api(requirements: ConfigRequirementsToml) -> ConfigRequirements {
|
||||
ConfigRequirements {
|
||||
allowed_approval_policies: requirements.allowed_approval_policies.map(|policies| {
|
||||
policies
|
||||
.into_iter()
|
||||
.map(codex_app_server_protocol::AskForApproval::from)
|
||||
.collect()
|
||||
}),
|
||||
allowed_sandbox_modes: requirements.allowed_sandbox_modes.map(|modes| {
|
||||
modes
|
||||
.into_iter()
|
||||
.filter_map(map_sandbox_mode_requirement_to_api)
|
||||
.collect()
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn map_sandbox_mode_requirement_to_api(mode: CoreSandboxModeRequirement) -> Option<SandboxMode> {
|
||||
match mode {
|
||||
CoreSandboxModeRequirement::ReadOnly => Some(SandboxMode::ReadOnly),
|
||||
CoreSandboxModeRequirement::WorkspaceWrite => Some(SandboxMode::WorkspaceWrite),
|
||||
CoreSandboxModeRequirement::DangerFullAccess => Some(SandboxMode::DangerFullAccess),
|
||||
CoreSandboxModeRequirement::ExternalSandbox => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn map_error(err: ConfigServiceError) -> JSONRPCErrorError {
|
||||
if let Some(code) = err.write_error_code() {
|
||||
return config_write_error(code, err.to_string());
|
||||
}
|
||||
|
||||
JSONRPCErrorError {
|
||||
code: INTERNAL_ERROR_CODE,
|
||||
message: err.to_string(),
|
||||
data: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn config_write_error(code: ConfigWriteErrorCode, message: impl Into<String>) -> JSONRPCErrorError {
|
||||
JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: message.into(),
|
||||
data: Some(json!({
|
||||
"config_write_error_code": code,
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn map_requirements_toml_to_api_converts_core_enums() {
|
||||
let requirements = ConfigRequirementsToml {
|
||||
allowed_approval_policies: Some(vec![
|
||||
CoreAskForApproval::Never,
|
||||
CoreAskForApproval::OnRequest,
|
||||
]),
|
||||
allowed_sandbox_modes: Some(vec![
|
||||
CoreSandboxModeRequirement::ReadOnly,
|
||||
CoreSandboxModeRequirement::ExternalSandbox,
|
||||
]),
|
||||
mcp_servers: None,
|
||||
};
|
||||
|
||||
let mapped = map_requirements_toml_to_api(requirements);
|
||||
|
||||
assert_eq!(
|
||||
mapped.allowed_approval_policies,
|
||||
Some(vec![
|
||||
codex_app_server_protocol::AskForApproval::Never,
|
||||
codex_app_server_protocol::AskForApproval::OnRequest,
|
||||
])
|
||||
);
|
||||
assert_eq!(
|
||||
mapped.allowed_sandbox_modes,
|
||||
Some(vec![SandboxMode::ReadOnly]),
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -4,8 +4,8 @@ use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
use codex_app_server_protocol::FuzzyFileSearchResult;
|
||||
use codex_file_search as file_search;
|
||||
use codex_protocol::mcp_protocol::FuzzyFileSearchResult;
|
||||
use tokio::task::JoinSet;
|
||||
use tracing::warn;
|
||||
|
||||
@@ -18,6 +18,10 @@ pub(crate) async fn run_fuzzy_file_search(
|
||||
roots: Vec<String>,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
) -> Vec<FuzzyFileSearchResult> {
|
||||
if roots.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
#[expect(clippy::expect_used)]
|
||||
let limit_per_root =
|
||||
NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize");
|
||||
@@ -45,6 +49,7 @@ pub(crate) async fn run_fuzzy_file_search(
|
||||
threads,
|
||||
cancel_flag,
|
||||
COMPUTE_INDICES,
|
||||
true,
|
||||
) {
|
||||
Ok(res) => Ok((root, res)),
|
||||
Err(err) => Err((root, err)),
|
||||
@@ -56,9 +61,12 @@ pub(crate) async fn run_fuzzy_file_search(
|
||||
match res {
|
||||
Ok(Ok((root, res))) => {
|
||||
for m in res.matches {
|
||||
let path = m.path;
|
||||
let file_name = file_search::file_name_from_path(&path);
|
||||
let result = FuzzyFileSearchResult {
|
||||
root: root.clone(),
|
||||
path: m.path,
|
||||
path,
|
||||
file_name,
|
||||
score: m.score,
|
||||
indices: m.indices,
|
||||
};
|
||||
|
||||
@@ -1,32 +1,42 @@
|
||||
#![deny(clippy::print_stdout, clippy::print_stderr)]
|
||||
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigBuilder;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use std::io::ErrorKind;
|
||||
use std::io::Result as IoResult;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
|
||||
use mcp_types::JSONRPCMessage;
|
||||
use crate::message_processor::MessageProcessor;
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use codex_app_server_protocol::ConfigWarningNotification;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_core::check_execpolicy_for_warnings;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::io::BufReader;
|
||||
use tokio::io::{self};
|
||||
use tokio::sync::mpsc;
|
||||
use toml::Value as TomlValue;
|
||||
use tracing::debug;
|
||||
use tracing::error;
|
||||
use tracing::info;
|
||||
use tracing::warn;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
use tracing_subscriber::Layer;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
|
||||
use crate::message_processor::MessageProcessor;
|
||||
use crate::outgoing_message::OutgoingMessage;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
|
||||
mod bespoke_event_handling;
|
||||
mod codex_message_processor;
|
||||
mod config_api;
|
||||
mod error_code;
|
||||
mod fuzzy_file_search;
|
||||
mod message_processor;
|
||||
mod models;
|
||||
mod outgoing_message;
|
||||
|
||||
/// Size of the bounded channels used to communicate between tasks. The value
|
||||
@@ -37,17 +47,12 @@ const CHANNEL_CAPACITY: usize = 128;
|
||||
pub async fn run_main(
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
cli_config_overrides: CliConfigOverrides,
|
||||
loader_overrides: LoaderOverrides,
|
||||
default_analytics_enabled: bool,
|
||||
) -> IoResult<()> {
|
||||
// Install a simple subscriber so `tracing` output is visible. Users can
|
||||
// control the log level with `RUST_LOG`.
|
||||
tracing_subscriber::fmt()
|
||||
.with_writer(std::io::stderr)
|
||||
.with_env_filter(EnvFilter::from_default_env())
|
||||
.init();
|
||||
|
||||
// Set up channels.
|
||||
let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
|
||||
let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded_channel::<OutgoingMessage>();
|
||||
let (outgoing_tx, mut outgoing_rx) = mpsc::channel::<OutgoingMessage>(CHANNEL_CAPACITY);
|
||||
|
||||
// Task: read from stdin, push to `incoming_tx`.
|
||||
let stdin_reader_handle = tokio::spawn({
|
||||
@@ -80,26 +85,130 @@ pub async fn run_main(
|
||||
format!("error parsing -c overrides: {e}"),
|
||||
)
|
||||
})?;
|
||||
let config = Config::load_with_cli_overrides(cli_kv_overrides, ConfigOverrides::default())
|
||||
.map_err(|e| {
|
||||
std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}"))
|
||||
})?;
|
||||
let loader_overrides_for_config_api = loader_overrides.clone();
|
||||
let mut config_warnings = Vec::new();
|
||||
let config = match ConfigBuilder::default()
|
||||
.cli_overrides(cli_kv_overrides.clone())
|
||||
.loader_overrides(loader_overrides)
|
||||
.build()
|
||||
.await
|
||||
{
|
||||
Ok(config) => config,
|
||||
Err(err) => {
|
||||
let message = ConfigWarningNotification {
|
||||
summary: "Invalid configuration; using defaults.".to_string(),
|
||||
details: Some(err.to_string()),
|
||||
};
|
||||
config_warnings.push(message);
|
||||
Config::load_default_with_cli_overrides(cli_kv_overrides.clone()).map_err(|e| {
|
||||
std::io::Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("error loading default config after config error: {e}"),
|
||||
)
|
||||
})?
|
||||
}
|
||||
};
|
||||
|
||||
if let Ok(Some(err)) =
|
||||
check_execpolicy_for_warnings(&config.features, &config.config_layer_stack).await
|
||||
{
|
||||
let message = ConfigWarningNotification {
|
||||
summary: "Error parsing rules; custom rules not applied.".to_string(),
|
||||
details: Some(err.to_string()),
|
||||
};
|
||||
config_warnings.push(message);
|
||||
}
|
||||
|
||||
let feedback = CodexFeedback::new();
|
||||
|
||||
let otel = codex_core::otel_init::build_provider(
|
||||
&config,
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
Some("codex_app_server"),
|
||||
default_analytics_enabled,
|
||||
)
|
||||
.map_err(|e| {
|
||||
std::io::Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("error loading otel config: {e}"),
|
||||
)
|
||||
})?;
|
||||
|
||||
// Install a simple subscriber so `tracing` output is visible. Users can
|
||||
// control the log level with `RUST_LOG`.
|
||||
let stderr_fmt = tracing_subscriber::fmt::layer()
|
||||
.with_writer(std::io::stderr)
|
||||
.with_span_events(tracing_subscriber::fmt::format::FmtSpan::FULL)
|
||||
.with_filter(EnvFilter::from_default_env());
|
||||
|
||||
let feedback_layer = feedback.logger_layer();
|
||||
let feedback_metadata_layer = feedback.metadata_layer();
|
||||
|
||||
let otel_logger_layer = otel.as_ref().and_then(|o| o.logger_layer());
|
||||
|
||||
let otel_tracing_layer = otel.as_ref().and_then(|o| o.tracing_layer());
|
||||
|
||||
let _ = tracing_subscriber::registry()
|
||||
.with(stderr_fmt)
|
||||
.with(feedback_layer)
|
||||
.with(feedback_metadata_layer)
|
||||
.with(otel_logger_layer)
|
||||
.with(otel_tracing_layer)
|
||||
.try_init();
|
||||
for warning in &config_warnings {
|
||||
match &warning.details {
|
||||
Some(details) => error!("{} {}", warning.summary, details),
|
||||
None => error!("{}", warning.summary),
|
||||
}
|
||||
}
|
||||
|
||||
// Task: process incoming messages.
|
||||
let processor_handle = tokio::spawn({
|
||||
let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
|
||||
let cli_overrides: Vec<(String, TomlValue)> = cli_kv_overrides.clone();
|
||||
let loader_overrides = loader_overrides_for_config_api;
|
||||
let mut processor = MessageProcessor::new(
|
||||
outgoing_message_sender,
|
||||
codex_linux_sandbox_exe,
|
||||
std::sync::Arc::new(config),
|
||||
cli_overrides,
|
||||
loader_overrides,
|
||||
feedback.clone(),
|
||||
config_warnings,
|
||||
);
|
||||
let mut thread_created_rx = processor.thread_created_receiver();
|
||||
async move {
|
||||
while let Some(msg) = incoming_rx.recv().await {
|
||||
match msg {
|
||||
JSONRPCMessage::Request(r) => processor.process_request(r).await,
|
||||
JSONRPCMessage::Response(r) => processor.process_response(r).await,
|
||||
JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
|
||||
JSONRPCMessage::Error(e) => processor.process_error(e),
|
||||
let mut listen_for_threads = true;
|
||||
loop {
|
||||
tokio::select! {
|
||||
msg = incoming_rx.recv() => {
|
||||
let Some(msg) = msg else {
|
||||
break;
|
||||
};
|
||||
match msg {
|
||||
JSONRPCMessage::Request(r) => processor.process_request(r).await,
|
||||
JSONRPCMessage::Response(r) => processor.process_response(r).await,
|
||||
JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
|
||||
JSONRPCMessage::Error(e) => processor.process_error(e),
|
||||
}
|
||||
}
|
||||
created = thread_created_rx.recv(), if listen_for_threads => {
|
||||
match created {
|
||||
Ok(thread_id) => {
|
||||
processor.try_attach_thread_listener(thread_id).await;
|
||||
}
|
||||
Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
|
||||
// TODO(jif) handle lag.
|
||||
// Assumes thread creation volume is low enough that lag never happens.
|
||||
// If it does, we log and continue without resyncing to avoid attaching
|
||||
// listeners for threads that should remain unsubscribed.
|
||||
warn!("thread_created receiver lagged; skipping resync");
|
||||
}
|
||||
Err(tokio::sync::broadcast::error::RecvError::Closed) => {
|
||||
listen_for_threads = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -111,17 +220,17 @@ pub async fn run_main(
|
||||
let stdout_writer_handle = tokio::spawn(async move {
|
||||
let mut stdout = io::stdout();
|
||||
while let Some(outgoing_message) = outgoing_rx.recv().await {
|
||||
let msg: JSONRPCMessage = outgoing_message.into();
|
||||
match serde_json::to_string(&msg) {
|
||||
Ok(json) => {
|
||||
let Ok(value) = serde_json::to_value(outgoing_message) else {
|
||||
error!("Failed to convert OutgoingMessage to JSON value");
|
||||
continue;
|
||||
};
|
||||
match serde_json::to_string(&value) {
|
||||
Ok(mut json) => {
|
||||
json.push('\n');
|
||||
if let Err(e) = stdout.write_all(json.as_bytes()).await {
|
||||
error!("Failed to write to stdout: {e}");
|
||||
break;
|
||||
}
|
||||
if let Err(e) = stdout.write_all(b"\n").await {
|
||||
error!("Failed to write newline to stdout: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(e) => error!("Failed to serialize JSONRPCMessage: {e}"),
|
||||
}
|
||||
|
||||
@@ -1,10 +1,43 @@
use codex_app_server::run_main;
use codex_arg0::arg0_dispatch_or_else;
use codex_common::CliConfigOverrides;
use codex_core::config_loader::LoaderOverrides;
use std::path::PathBuf;

// Debug-only test hook: lets integration tests point the server at a temporary
// managed config file without writing to /etc.
const MANAGED_CONFIG_PATH_ENV_VAR: &str = "CODEX_APP_SERVER_MANAGED_CONFIG_PATH";

fn main() -> anyhow::Result<()> {
    arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
        run_main(codex_linux_sandbox_exe, CliConfigOverrides::default()).await?;
        let managed_config_path = managed_config_path_from_debug_env();
        let loader_overrides = LoaderOverrides {
            managed_config_path,
            ..Default::default()
        };

        run_main(
            codex_linux_sandbox_exe,
            CliConfigOverrides::default(),
            loader_overrides,
            false,
        )
        .await?;
        Ok(())
    })
}

fn managed_config_path_from_debug_env() -> Option<PathBuf> {
    #[cfg(debug_assertions)]
    {
        if let Ok(value) = std::env::var(MANAGED_CONFIG_PATH_ENV_VAR) {
            return if value.is_empty() {
                None
            } else {
                Some(PathBuf::from(value))
            };
        }
    }

    None
}

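// Illustrative sketch (not part of this diff): an integration test could exercise the
// debug-only hook above by pointing CODEX_APP_SERVER_MANAGED_CONFIG_PATH at a temp file
// before spawning the server. The tempfile usage, CARGO_BIN_EXE lookup, and the final
// assertions are assumptions, not code from this change.
#[cfg(all(test, debug_assertions))]
#[tokio::test]
async fn example_spawns_server_with_temp_managed_config() -> anyhow::Result<()> {
    let dir = tempfile::tempdir()?;
    let managed_config = dir.path().join("managed_config.toml");
    std::fs::write(&managed_config, "")?;

    // Point the debug-only hook at the temporary managed config file.
    let mut cmd = tokio::process::Command::new(env!("CARGO_BIN_EXE_codex-app-server"));
    cmd.env("CODEX_APP_SERVER_MANAGED_CONFIG_PATH", &managed_config);
    // ...spawn the process, drive the JSON-RPC handshake over stdio, then assert on config/read.
    Ok(())
}
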
@@ -1,28 +1,44 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::codex_message_processor::CodexMessageProcessor;
|
||||
use crate::config_api::ConfigApi;
|
||||
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use codex_protocol::mcp_protocol::ClientInfo;
|
||||
use codex_protocol::mcp_protocol::ClientRequest;
|
||||
use codex_protocol::mcp_protocol::InitializeResponse;
|
||||
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientRequest;
|
||||
use codex_app_server_protocol::ConfigBatchWriteParams;
|
||||
use codex_app_server_protocol::ConfigReadParams;
|
||||
use codex_app_server_protocol::ConfigValueWriteParams;
|
||||
use codex_app_server_protocol::ConfigWarningNotification;
|
||||
use codex_app_server_protocol::InitializeResponse;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_core::AuthManager;
|
||||
use codex_core::ConversationManager;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config_loader::LoaderOverrides;
|
||||
use codex_core::default_client::SetOriginatorError;
|
||||
use codex_core::default_client::USER_AGENT_SUFFIX;
|
||||
use codex_core::default_client::get_codex_user_agent;
|
||||
use mcp_types::JSONRPCError;
|
||||
use mcp_types::JSONRPCErrorError;
|
||||
use mcp_types::JSONRPCNotification;
|
||||
use mcp_types::JSONRPCRequest;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use std::sync::Arc;
|
||||
use codex_core::default_client::set_default_originator;
|
||||
use codex_feedback::CodexFeedback;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use tokio::sync::broadcast;
|
||||
use toml::Value as TomlValue;
|
||||
|
||||
pub(crate) struct MessageProcessor {
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
codex_message_processor: CodexMessageProcessor,
|
||||
config_api: ConfigApi,
|
||||
initialized: bool,
|
||||
config_warnings: Vec<ConfigWarningNotification>,
|
||||
}
|
||||
|
||||
impl MessageProcessor {
|
||||
@@ -32,84 +48,164 @@ impl MessageProcessor {
|
||||
outgoing: OutgoingMessageSender,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
config: Arc<Config>,
|
||||
cli_overrides: Vec<(String, TomlValue)>,
|
||||
loader_overrides: LoaderOverrides,
|
||||
feedback: CodexFeedback,
|
||||
config_warnings: Vec<ConfigWarningNotification>,
|
||||
) -> Self {
|
||||
let outgoing = Arc::new(outgoing);
|
||||
let auth_manager = AuthManager::shared(config.codex_home.clone());
|
||||
let conversation_manager = Arc::new(ConversationManager::new(auth_manager.clone()));
|
||||
let auth_manager = AuthManager::shared(
|
||||
config.codex_home.clone(),
|
||||
false,
|
||||
config.cli_auth_credentials_store_mode,
|
||||
);
|
||||
let thread_manager = Arc::new(ThreadManager::new(
|
||||
config.codex_home.clone(),
|
||||
auth_manager.clone(),
|
||||
SessionSource::VSCode,
|
||||
));
|
||||
let codex_message_processor = CodexMessageProcessor::new(
|
||||
auth_manager,
|
||||
conversation_manager,
|
||||
thread_manager,
|
||||
outgoing.clone(),
|
||||
codex_linux_sandbox_exe,
|
||||
config,
|
||||
Arc::clone(&config),
|
||||
cli_overrides.clone(),
|
||||
feedback,
|
||||
);
|
||||
let config_api = ConfigApi::new(config.codex_home.clone(), cli_overrides, loader_overrides);
|
||||
|
||||
Self {
|
||||
outgoing,
|
||||
codex_message_processor,
|
||||
config_api,
|
||||
initialized: false,
|
||||
config_warnings,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) {
|
||||
let request_id = request.id.clone();
|
||||
if let Ok(request_json) = serde_json::to_value(request)
|
||||
&& let Ok(codex_request) = serde_json::from_value::<ClientRequest>(request_json)
|
||||
{
|
||||
match codex_request {
|
||||
// Handle Initialize internally so CodexMessageProcessor does not have to concern
|
||||
// itself with the `initialized` bool.
|
||||
ClientRequest::Initialize { request_id, params } => {
|
||||
if self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Already initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
} else {
|
||||
let ClientInfo {
|
||||
name,
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
let request_json = match serde_json::to_value(&request) {
|
||||
Ok(request_json) => request_json,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("Invalid request: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let codex_request = match serde_json::from_value::<ClientRequest>(request_json) {
|
||||
Ok(codex_request) => codex_request,
|
||||
Err(err) => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!("Invalid request: {err}"),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
match codex_request {
|
||||
// Handle Initialize internally so CodexMessageProcessor does not have to concern
|
||||
// itself with the `initialized` bool.
|
||||
ClientRequest::Initialize { request_id, params } => {
|
||||
if self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Already initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
} else {
|
||||
let ClientInfo {
|
||||
name,
|
||||
title: _title,
|
||||
version,
|
||||
} = params.client_info;
|
||||
if let Err(error) = set_default_originator(name.clone()) {
|
||||
match error {
|
||||
SetOriginatorError::InvalidHeaderValue => {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: format!(
|
||||
"Invalid clientInfo.name: '{name}'. Must be a valid HTTP header value."
|
||||
),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
SetOriginatorError::AlreadyInitialized => {
|
||||
// No-op. This is expected to happen if the originator is already set via env var.
|
||||
// TODO(owen): Once we remove support for CODEX_INTERNAL_ORIGINATOR_OVERRIDE,
|
||||
// this will be an unexpected state and we can return a JSON-RPC error indicating
|
||||
// internal server error.
|
||||
}
|
||||
}
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
let response = InitializeResponse { user_agent };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
|
||||
self.initialized = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Not initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
let user_agent_suffix = format!("{name}; {version}");
|
||||
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
|
||||
*suffix = Some(user_agent_suffix);
|
||||
}
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
let response = InitializeResponse { user_agent };
|
||||
self.outgoing.send_response(request_id, response).await;
|
||||
|
||||
self.initialized = true;
|
||||
if !self.config_warnings.is_empty() {
|
||||
for notification in self.config_warnings.drain(..) {
|
||||
self.outgoing
|
||||
.send_server_notification(ServerNotification::ConfigWarning(
|
||||
notification,
|
||||
))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !self.initialized {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Not initialized".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.codex_message_processor
|
||||
.process_request(codex_request)
|
||||
.await;
|
||||
} else {
|
||||
let error = JSONRPCErrorError {
|
||||
code: INVALID_REQUEST_ERROR_CODE,
|
||||
message: "Invalid request".to_string(),
|
||||
data: None,
|
||||
};
|
||||
self.outgoing.send_error(request_id, error).await;
|
||||
match codex_request {
|
||||
ClientRequest::ConfigRead { request_id, params } => {
|
||||
self.handle_config_read(request_id, params).await;
|
||||
}
|
||||
ClientRequest::ConfigValueWrite { request_id, params } => {
|
||||
self.handle_config_value_write(request_id, params).await;
|
||||
}
|
||||
ClientRequest::ConfigBatchWrite { request_id, params } => {
|
||||
self.handle_config_batch_write(request_id, params).await;
|
||||
}
|
||||
ClientRequest::ConfigRequirementsRead {
|
||||
request_id,
|
||||
params: _,
|
||||
} => {
|
||||
self.handle_config_requirements_read(request_id).await;
|
||||
}
|
||||
other => {
|
||||
self.codex_message_processor.process_request(other).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -119,6 +215,19 @@ impl MessageProcessor {
|
||||
tracing::info!("<- notification: {:?}", notification);
|
||||
}
|
||||
|
||||
pub(crate) fn thread_created_receiver(&self) -> broadcast::Receiver<ThreadId> {
|
||||
self.codex_message_processor.thread_created_receiver()
|
||||
}
|
||||
|
||||
pub(crate) async fn try_attach_thread_listener(&mut self, thread_id: ThreadId) {
|
||||
if !self.initialized {
|
||||
return;
|
||||
}
|
||||
self.codex_message_processor
|
||||
.try_attach_thread_listener(thread_id)
|
||||
.await;
|
||||
}
|
||||
|
||||
/// Handle a standalone JSON-RPC response originating from the peer.
|
||||
pub(crate) async fn process_response(&mut self, response: JSONRPCResponse) {
|
||||
tracing::info!("<- response: {:?}", response);
|
||||
@@ -130,4 +239,40 @@ impl MessageProcessor {
|
||||
pub(crate) fn process_error(&mut self, err: JSONRPCError) {
|
||||
tracing::error!("<- error: {:?}", err);
|
||||
}
|
||||
|
||||
async fn handle_config_read(&self, request_id: RequestId, params: ConfigReadParams) {
|
||||
match self.config_api.read(params).await {
|
||||
Ok(response) => self.outgoing.send_response(request_id, response).await,
|
||||
Err(error) => self.outgoing.send_error(request_id, error).await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_config_value_write(
|
||||
&self,
|
||||
request_id: RequestId,
|
||||
params: ConfigValueWriteParams,
|
||||
) {
|
||||
match self.config_api.write_value(params).await {
|
||||
Ok(response) => self.outgoing.send_response(request_id, response).await,
|
||||
Err(error) => self.outgoing.send_error(request_id, error).await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_config_batch_write(
|
||||
&self,
|
||||
request_id: RequestId,
|
||||
params: ConfigBatchWriteParams,
|
||||
) {
|
||||
match self.config_api.batch_write(params).await {
|
||||
Ok(response) => self.outgoing.send_response(request_id, response).await,
|
||||
Err(error) => self.outgoing.send_error(request_id, error).await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_config_requirements_read(&self, request_id: RequestId) {
|
||||
match self.config_api.config_requirements_read().await {
|
||||
Ok(response) => self.outgoing.send_response(request_id, response).await,
|
||||
Err(error) => self.outgoing.send_error(request_id, error).await,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
codex-rs/app-server/src/models.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
use std::sync::Arc;

use codex_app_server_protocol::Model;
use codex_app_server_protocol::ReasoningEffortOption;
use codex_core::ThreadManager;
use codex_core::config::Config;
use codex_core::models_manager::manager::RefreshStrategy;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ReasoningEffortPreset;

pub async fn supported_models(thread_manager: Arc<ThreadManager>, config: &Config) -> Vec<Model> {
    thread_manager
        .list_models(config, RefreshStrategy::OnlineIfUncached)
        .await
        .into_iter()
        .filter(|preset| preset.show_in_picker)
        .map(model_from_preset)
        .collect()
}

fn model_from_preset(preset: ModelPreset) -> Model {
    Model {
        id: preset.id.to_string(),
        model: preset.model.to_string(),
        display_name: preset.display_name.to_string(),
        description: preset.description.to_string(),
        supported_reasoning_efforts: reasoning_efforts_from_preset(
            preset.supported_reasoning_efforts,
        ),
        default_reasoning_effort: preset.default_reasoning_effort,
        is_default: preset.is_default,
    }
}

fn reasoning_efforts_from_preset(
    efforts: Vec<ReasoningEffortPreset>,
) -> Vec<ReasoningEffortOption> {
    efforts
        .iter()
        .map(|preset| ReasoningEffortOption {
            reasoning_effort: preset.effort,
            description: preset.description.to_string(),
        })
        .collect()
}
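// Illustrative usage sketch (not part of this diff): a handler that returns the picker
// list via the helper above. The function name is hypothetical; `supported_models`,
// `ThreadManager`, `Config`, and `Model` are the items defined in this file and its imports.
async fn example_list_picker_models(
    thread_manager: Arc<ThreadManager>,
    config: &Config,
) -> Vec<Model> {
    // Presets hidden from the picker are already filtered out inside supported_models.
    supported_models(thread_manager, config).await
}
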
@@ -2,16 +2,12 @@ use std::collections::HashMap;
|
||||
use std::sync::atomic::AtomicI64;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
use codex_protocol::mcp_protocol::ServerNotification;
|
||||
use mcp_types::JSONRPC_VERSION;
|
||||
use mcp_types::JSONRPCError;
|
||||
use mcp_types::JSONRPCErrorError;
|
||||
use mcp_types::JSONRPCMessage;
|
||||
use mcp_types::JSONRPCNotification;
|
||||
use mcp_types::JSONRPCRequest;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use mcp_types::Result;
|
||||
use codex_app_server_protocol::JSONRPCErrorError;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::Result;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ServerRequestPayload;
|
||||
use serde::Serialize;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio::sync::mpsc;
|
||||
@@ -20,15 +16,18 @@ use tracing::warn;
|
||||
|
||||
use crate::error_code::INTERNAL_ERROR_CODE;
|
||||
|
||||
#[cfg(test)]
|
||||
use codex_protocol::account::PlanType;
|
||||
|
||||
/// Sends messages to the client and manages request callbacks.
|
||||
pub(crate) struct OutgoingMessageSender {
|
||||
next_request_id: AtomicI64,
|
||||
sender: mpsc::UnboundedSender<OutgoingMessage>,
|
||||
sender: mpsc::Sender<OutgoingMessage>,
|
||||
request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<Result>>>,
|
||||
}
|
||||
|
||||
impl OutgoingMessageSender {
|
||||
pub(crate) fn new(sender: mpsc::UnboundedSender<OutgoingMessage>) -> Self {
|
||||
pub(crate) fn new(sender: mpsc::Sender<OutgoingMessage>) -> Self {
|
||||
Self {
|
||||
next_request_id: AtomicI64::new(0),
|
||||
sender,
|
||||
@@ -38,8 +37,7 @@ impl OutgoingMessageSender {
|
||||
|
||||
pub(crate) async fn send_request(
|
||||
&self,
|
||||
method: &str,
|
||||
params: Option<serde_json::Value>,
|
||||
request: ServerRequestPayload,
|
||||
) -> oneshot::Receiver<Result> {
|
||||
let id = RequestId::Integer(self.next_request_id.fetch_add(1, Ordering::Relaxed));
|
||||
let outgoing_message_id = id.clone();
|
||||
@@ -49,12 +47,13 @@ impl OutgoingMessageSender {
|
||||
request_id_to_callback.insert(id, tx_approve);
|
||||
}
|
||||
|
||||
let outgoing_message = OutgoingMessage::Request(OutgoingRequest {
|
||||
id: outgoing_message_id,
|
||||
method: method.to_string(),
|
||||
params,
|
||||
});
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
let outgoing_message =
|
||||
OutgoingMessage::Request(request.request_with_id(outgoing_message_id.clone()));
|
||||
if let Err(err) = self.sender.send(outgoing_message).await {
|
||||
warn!("failed to send request {outgoing_message_id:?} to client: {err:?}");
|
||||
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
|
||||
request_id_to_callback.remove(&outgoing_message_id);
|
||||
}
|
||||
rx_approve
|
||||
}
|
||||
|
||||
@@ -80,7 +79,9 @@ impl OutgoingMessageSender {
|
||||
match serde_json::to_value(response) {
|
||||
Ok(result) => {
|
||||
let outgoing_message = OutgoingMessage::Response(OutgoingResponse { id, result });
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
if let Err(err) = self.sender.send(outgoing_message).await {
|
||||
warn!("failed to send response to client: {err:?}");
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
self.send_error(
|
||||
@@ -97,27 +98,37 @@ impl OutgoingMessageSender {
|
||||
}
|
||||
|
||||
pub(crate) async fn send_server_notification(&self, notification: ServerNotification) {
|
||||
let _ = self
|
||||
if let Err(err) = self
|
||||
.sender
|
||||
.send(OutgoingMessage::AppServerNotification(notification));
|
||||
.send(OutgoingMessage::AppServerNotification(notification))
|
||||
.await
|
||||
{
|
||||
warn!("failed to send server notification to client: {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
/// All notifications should be migrated to [`ServerNotification`] and
|
||||
/// [`OutgoingMessage::Notification`] should be removed.
|
||||
pub(crate) async fn send_notification(&self, notification: OutgoingNotification) {
|
||||
let outgoing_message = OutgoingMessage::Notification(notification);
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
if let Err(err) = self.sender.send(outgoing_message).await {
|
||||
warn!("failed to send notification to client: {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn send_error(&self, id: RequestId, error: JSONRPCErrorError) {
|
||||
let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error });
|
||||
let _ = self.sender.send(outgoing_message);
|
||||
if let Err(err) = self.sender.send(outgoing_message).await {
|
||||
warn!("failed to send error to client: {err:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Outgoing message from the server to the client.
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
#[serde(untagged)]
|
||||
pub(crate) enum OutgoingMessage {
|
||||
Request(OutgoingRequest),
|
||||
Request(ServerRequest),
|
||||
Notification(OutgoingNotification),
|
||||
/// AppServerNotification is specific to the case where this is run as an
|
||||
/// "app server" as opposed to an MCP server.
|
||||
@@ -126,64 +137,6 @@ pub(crate) enum OutgoingMessage {
|
||||
Error(OutgoingError),
|
||||
}
|
||||
|
||||
impl From<OutgoingMessage> for JSONRPCMessage {
|
||||
fn from(val: OutgoingMessage) -> Self {
|
||||
use OutgoingMessage::*;
|
||||
match val {
|
||||
Request(OutgoingRequest { id, method, params }) => {
|
||||
JSONRPCMessage::Request(JSONRPCRequest {
|
||||
jsonrpc: JSONRPC_VERSION.into(),
|
||||
id,
|
||||
method,
|
||||
params,
|
||||
})
|
||||
}
|
||||
Notification(OutgoingNotification { method, params }) => {
|
||||
JSONRPCMessage::Notification(JSONRPCNotification {
|
||||
jsonrpc: JSONRPC_VERSION.into(),
|
||||
method,
|
||||
params,
|
||||
})
|
||||
}
|
||||
AppServerNotification(notification) => {
|
||||
let method = notification.to_string();
|
||||
let params = match notification.to_params() {
|
||||
Ok(params) => Some(params),
|
||||
Err(err) => {
|
||||
warn!("failed to serialize notification params: {err}");
|
||||
None
|
||||
}
|
||||
};
|
||||
JSONRPCMessage::Notification(JSONRPCNotification {
|
||||
jsonrpc: JSONRPC_VERSION.into(),
|
||||
method,
|
||||
params,
|
||||
})
|
||||
}
|
||||
Response(OutgoingResponse { id, result }) => {
|
||||
JSONRPCMessage::Response(JSONRPCResponse {
|
||||
jsonrpc: JSONRPC_VERSION.into(),
|
||||
id,
|
||||
result,
|
||||
})
|
||||
}
|
||||
Error(OutgoingError { id, error }) => JSONRPCMessage::Error(JSONRPCError {
|
||||
jsonrpc: JSONRPC_VERSION.into(),
|
||||
id,
|
||||
error,
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize)]
|
||||
pub(crate) struct OutgoingRequest {
|
||||
pub id: RequestId,
|
||||
pub method: String,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub params: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize)]
|
||||
pub(crate) struct OutgoingNotification {
|
||||
pub method: String,
|
||||
@@ -205,7 +158,14 @@ pub(crate) struct OutgoingError {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use codex_protocol::mcp_protocol::LoginChatGptCompleteNotification;
|
||||
use codex_app_server_protocol::AccountLoginCompletedNotification;
|
||||
use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
|
||||
use codex_app_server_protocol::AccountUpdatedNotification;
|
||||
use codex_app_server_protocol::AuthMode;
|
||||
use codex_app_server_protocol::ConfigWarningNotification;
|
||||
use codex_app_server_protocol::LoginChatGptCompleteNotification;
|
||||
use codex_app_server_protocol::RateLimitSnapshot;
|
||||
use codex_app_server_protocol::RateLimitWindow;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use uuid::Uuid;
|
||||
@@ -221,19 +181,125 @@ mod tests {
|
||||
error: None,
|
||||
});
|
||||
|
||||
let jsonrpc_notification: JSONRPCMessage =
|
||||
OutgoingMessage::AppServerNotification(notification).into();
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
JSONRPCMessage::Notification(JSONRPCNotification {
|
||||
jsonrpc: "2.0".into(),
|
||||
method: "loginChatGptComplete".into(),
|
||||
params: Some(json!({
|
||||
json!({
|
||||
"method": "loginChatGptComplete",
|
||||
"params": {
|
||||
"loginId": Uuid::nil(),
|
||||
"success": true,
|
||||
})),
|
||||
"error": null,
|
||||
},
|
||||
}),
|
||||
jsonrpc_notification,
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the strum macros serialize the method field correctly"),
|
||||
"ensure the strum macros serialize the method field correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_login_completed_notification_serialization() {
|
||||
let notification =
|
||||
ServerNotification::AccountLoginCompleted(AccountLoginCompletedNotification {
|
||||
login_id: Some(Uuid::nil().to_string()),
|
||||
success: true,
|
||||
error: None,
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/login/completed",
|
||||
"params": {
|
||||
"loginId": Uuid::nil().to_string(),
|
||||
"success": true,
|
||||
"error": null,
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_rate_limits_notification_serialization() {
|
||||
let notification =
|
||||
ServerNotification::AccountRateLimitsUpdated(AccountRateLimitsUpdatedNotification {
|
||||
rate_limits: RateLimitSnapshot {
|
||||
primary: Some(RateLimitWindow {
|
||||
used_percent: 25,
|
||||
window_duration_mins: Some(15),
|
||||
resets_at: Some(123),
|
||||
}),
|
||||
secondary: None,
|
||||
credits: None,
|
||||
plan_type: Some(PlanType::Plus),
|
||||
},
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/rateLimits/updated",
|
||||
"params": {
|
||||
"rateLimits": {
|
||||
"primary": {
|
||||
"usedPercent": 25,
|
||||
"windowDurationMins": 15,
|
||||
"resetsAt": 123
|
||||
},
|
||||
"secondary": null,
|
||||
"credits": null,
|
||||
"planType": "plus"
|
||||
}
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_account_updated_notification_serialization() {
|
||||
let notification = ServerNotification::AccountUpdated(AccountUpdatedNotification {
|
||||
auth_mode: Some(AuthMode::ApiKey),
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!({
|
||||
"method": "account/updated",
|
||||
"params": {
|
||||
"authMode": "apikey"
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn verify_config_warning_notification_serialization() {
|
||||
let notification = ServerNotification::ConfigWarning(ConfigWarningNotification {
|
||||
summary: "Config error: using defaults".to_string(),
|
||||
details: Some("error loading config: bad config".to_string()),
|
||||
});
|
||||
|
||||
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
|
||||
assert_eq!(
|
||||
json!( {
|
||||
"method": "configWarning",
|
||||
"params": {
|
||||
"summary": "Config error: using defaults",
|
||||
"details": "error loading config: bad config",
|
||||
},
|
||||
}),
|
||||
serde_json::to_value(jsonrpc_notification)
|
||||
.expect("ensure the notification serializes correctly"),
|
||||
"ensure the notification serializes correctly"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
codex-rs/app-server/tests/common/BUILD.bazel (new file, 7 lines)
@@ -0,0 +1,7 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "common",
    crate_name = "app_test_support",
    crate_srcs = glob(["*.rs"]),
)

@@ -1,16 +1,20 @@
[package]
edition = "2024"
name = "app_test_support"
version = { workspace = true }
version.workspace = true
edition.workspace = true
license.workspace = true

[lib]
path = "lib.rs"

[dependencies]
anyhow = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
chrono = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-core = { workspace = true, features = ["test-support"] }
codex-protocol = { workspace = true }
mcp-types = { workspace = true }
codex-utils-cargo-bin = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
@@ -19,4 +23,7 @@ tokio = { workspace = true, features = [
    "process",
    "rt-multi-thread",
] }
uuid = { workspace = true }
wiremock = { workspace = true }
core_test_support = { path = "../../../core/tests/common" }
shlex = { workspace = true }

codex-rs/app-server/tests/common/auth_fixtures.rs (new file, 135 lines)
@@ -0,0 +1,135 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use base64::Engine;
|
||||
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
|
||||
use chrono::DateTime;
|
||||
use chrono::Utc;
|
||||
use codex_core::auth::AuthCredentialsStoreMode;
|
||||
use codex_core::auth::AuthDotJson;
|
||||
use codex_core::auth::save_auth;
|
||||
use codex_core::token_data::TokenData;
|
||||
use codex_core::token_data::parse_id_token;
|
||||
use serde_json::json;
|
||||
|
||||
/// Builder for writing a fake ChatGPT auth.json in tests.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ChatGptAuthFixture {
|
||||
access_token: String,
|
||||
refresh_token: String,
|
||||
account_id: Option<String>,
|
||||
claims: ChatGptIdTokenClaims,
|
||||
last_refresh: Option<Option<DateTime<Utc>>>,
|
||||
}
|
||||
|
||||
impl ChatGptAuthFixture {
|
||||
pub fn new(access_token: impl Into<String>) -> Self {
|
||||
Self {
|
||||
access_token: access_token.into(),
|
||||
refresh_token: "refresh-token".to_string(),
|
||||
account_id: None,
|
||||
claims: ChatGptIdTokenClaims::default(),
|
||||
last_refresh: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn refresh_token(mut self, refresh_token: impl Into<String>) -> Self {
|
||||
self.refresh_token = refresh_token.into();
|
||||
self
|
||||
}
|
||||
|
||||
pub fn account_id(mut self, account_id: impl Into<String>) -> Self {
|
||||
self.account_id = Some(account_id.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn plan_type(mut self, plan_type: impl Into<String>) -> Self {
|
||||
self.claims.plan_type = Some(plan_type.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn email(mut self, email: impl Into<String>) -> Self {
|
||||
self.claims.email = Some(email.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn last_refresh(mut self, last_refresh: Option<DateTime<Utc>>) -> Self {
|
||||
self.last_refresh = Some(last_refresh);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn claims(mut self, claims: ChatGptIdTokenClaims) -> Self {
|
||||
self.claims = claims;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct ChatGptIdTokenClaims {
|
||||
pub email: Option<String>,
|
||||
pub plan_type: Option<String>,
|
||||
}
|
||||
|
||||
impl ChatGptIdTokenClaims {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn email(mut self, email: impl Into<String>) -> Self {
|
||||
self.email = Some(email.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn plan_type(mut self, plan_type: impl Into<String>) -> Self {
|
||||
self.plan_type = Some(plan_type.into());
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn encode_id_token(claims: &ChatGptIdTokenClaims) -> Result<String> {
|
||||
let header = json!({ "alg": "none", "typ": "JWT" });
|
||||
let mut payload = serde_json::Map::new();
|
||||
if let Some(email) = &claims.email {
|
||||
payload.insert("email".to_string(), json!(email));
|
||||
}
|
||||
if let Some(plan_type) = &claims.plan_type {
|
||||
payload.insert(
|
||||
"https://api.openai.com/auth".to_string(),
|
||||
json!({ "chatgpt_plan_type": plan_type }),
|
||||
);
|
||||
}
|
||||
let payload = serde_json::Value::Object(payload);
|
||||
|
||||
let header_b64 =
|
||||
URL_SAFE_NO_PAD.encode(serde_json::to_vec(&header).context("serialize jwt header")?);
|
||||
let payload_b64 =
|
||||
URL_SAFE_NO_PAD.encode(serde_json::to_vec(&payload).context("serialize jwt payload")?);
|
||||
let signature_b64 = URL_SAFE_NO_PAD.encode(b"signature");
|
||||
Ok(format!("{header_b64}.{payload_b64}.{signature_b64}"))
|
||||
}
|
||||
|
||||
pub fn write_chatgpt_auth(
|
||||
codex_home: &Path,
|
||||
fixture: ChatGptAuthFixture,
|
||||
cli_auth_credentials_store_mode: AuthCredentialsStoreMode,
|
||||
) -> Result<()> {
|
||||
let id_token_raw = encode_id_token(&fixture.claims)?;
|
||||
let id_token = parse_id_token(&id_token_raw).context("parse id token")?;
|
||||
let tokens = TokenData {
|
||||
id_token,
|
||||
access_token: fixture.access_token,
|
||||
refresh_token: fixture.refresh_token,
|
||||
account_id: fixture.account_id,
|
||||
};
|
||||
|
||||
let last_refresh = fixture.last_refresh.unwrap_or_else(|| Some(Utc::now()));
|
||||
|
||||
let auth = AuthDotJson {
|
||||
openai_api_key: None,
|
||||
tokens: Some(tokens),
|
||||
last_refresh,
|
||||
};
|
||||
|
||||
save_auth(codex_home, &auth, cli_auth_credentials_store_mode).context("write auth.json")
|
||||
}
|
||||
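// Illustrative sketch (not part of this diff): composing the builder above in a test.
// The token, account, and plan values are placeholders; the AuthCredentialsStoreMode
// argument is whatever store mode the test wants to exercise.
fn example_write_chatgpt_fixture(
    codex_home: &Path,
    store_mode: AuthCredentialsStoreMode,
) -> Result<()> {
    let fixture = ChatGptAuthFixture::new("access-token")
        .account_id("account-123")
        .email("dev@example.com")
        .plan_type("plus");
    // Persists an auth.json with the encoded (unsigned) id token under codex_home.
    write_chatgpt_auth(codex_home, fixture, store_mode)
}
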
@@ -1,13 +1,37 @@
|
||||
mod auth_fixtures;
|
||||
mod mcp_process;
|
||||
mod mock_model_server;
|
||||
mod models_cache;
|
||||
mod responses;
|
||||
mod rollout;
|
||||
|
||||
pub use auth_fixtures::ChatGptAuthFixture;
|
||||
pub use auth_fixtures::ChatGptIdTokenClaims;
|
||||
pub use auth_fixtures::encode_id_token;
|
||||
pub use auth_fixtures::write_chatgpt_auth;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
pub use core_test_support::format_with_current_shell;
|
||||
pub use core_test_support::format_with_current_shell_display;
|
||||
pub use core_test_support::format_with_current_shell_display_non_login;
|
||||
pub use core_test_support::format_with_current_shell_non_login;
|
||||
pub use core_test_support::test_path_buf_with_windows;
|
||||
pub use core_test_support::test_tmp_path;
|
||||
pub use core_test_support::test_tmp_path_buf;
|
||||
pub use mcp_process::DEFAULT_CLIENT_NAME;
|
||||
pub use mcp_process::McpProcess;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
pub use mock_model_server::create_mock_chat_completions_server;
|
||||
pub use mock_model_server::create_mock_responses_server_repeating_assistant;
|
||||
pub use mock_model_server::create_mock_responses_server_sequence;
|
||||
pub use mock_model_server::create_mock_responses_server_sequence_unchecked;
|
||||
pub use models_cache::write_models_cache;
|
||||
pub use models_cache::write_models_cache_with_models;
|
||||
pub use responses::create_apply_patch_sse_response;
|
||||
pub use responses::create_exec_command_sse_response;
|
||||
pub use responses::create_final_assistant_message_sse_response;
|
||||
pub use responses::create_shell_sse_response;
|
||||
pub use responses::create_request_user_input_sse_response;
|
||||
pub use responses::create_shell_command_sse_response;
|
||||
pub use rollout::create_fake_rollout;
|
||||
pub use rollout::create_fake_rollout_with_text_elements;
|
||||
pub use rollout::rollout_path;
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::path::Path;
|
||||
use std::process::Stdio;
|
||||
use std::sync::atomic::AtomicI64;
|
||||
@@ -10,31 +11,48 @@ use tokio::process::ChildStdin;
|
||||
use tokio::process::ChildStdout;
|
||||
|
||||
use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use codex_protocol::mcp_protocol::AddConversationListenerParams;
|
||||
use codex_protocol::mcp_protocol::ArchiveConversationParams;
|
||||
use codex_protocol::mcp_protocol::CancelLoginChatGptParams;
|
||||
use codex_protocol::mcp_protocol::ClientInfo;
|
||||
use codex_protocol::mcp_protocol::ClientNotification;
|
||||
use codex_protocol::mcp_protocol::GetAuthStatusParams;
|
||||
use codex_protocol::mcp_protocol::InitializeParams;
|
||||
use codex_protocol::mcp_protocol::InterruptConversationParams;
|
||||
use codex_protocol::mcp_protocol::ListConversationsParams;
|
||||
use codex_protocol::mcp_protocol::LoginApiKeyParams;
|
||||
use codex_protocol::mcp_protocol::NewConversationParams;
|
||||
use codex_protocol::mcp_protocol::RemoveConversationListenerParams;
|
||||
use codex_protocol::mcp_protocol::ResumeConversationParams;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageParams;
|
||||
use codex_protocol::mcp_protocol::SendUserTurnParams;
|
||||
use codex_protocol::mcp_protocol::SetDefaultModelParams;
|
||||
|
||||
use mcp_types::JSONRPC_VERSION;
|
||||
use mcp_types::JSONRPCMessage;
|
||||
use mcp_types::JSONRPCNotification;
|
||||
use mcp_types::JSONRPCRequest;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use std::process::Command as StdCommand;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::ArchiveConversationParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
use codex_app_server_protocol::CancelLoginChatGptParams;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientNotification;
|
||||
use codex_app_server_protocol::CollaborationModeListParams;
|
||||
use codex_app_server_protocol::ConfigBatchWriteParams;
|
||||
use codex_app_server_protocol::ConfigReadParams;
|
||||
use codex_app_server_protocol::ConfigValueWriteParams;
|
||||
use codex_app_server_protocol::FeedbackUploadParams;
|
||||
use codex_app_server_protocol::ForkConversationParams;
|
||||
use codex_app_server_protocol::GetAccountParams;
|
||||
use codex_app_server_protocol::GetAuthStatusParams;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
use codex_app_server_protocol::InterruptConversationParams;
|
||||
use codex_app_server_protocol::JSONRPCError;
|
||||
use codex_app_server_protocol::JSONRPCMessage;
|
||||
use codex_app_server_protocol::JSONRPCNotification;
|
||||
use codex_app_server_protocol::JSONRPCRequest;
|
||||
use codex_app_server_protocol::JSONRPCResponse;
|
||||
use codex_app_server_protocol::ListConversationsParams;
|
||||
use codex_app_server_protocol::LoginApiKeyParams;
|
||||
use codex_app_server_protocol::ModelListParams;
|
||||
use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::RemoveConversationListenerParams;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::ReviewStartParams;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::SetDefaultModelParams;
|
||||
use codex_app_server_protocol::ThreadArchiveParams;
|
||||
use codex_app_server_protocol::ThreadForkParams;
|
||||
use codex_app_server_protocol::ThreadListParams;
|
||||
use codex_app_server_protocol::ThreadLoadedListParams;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
use codex_app_server_protocol::ThreadRollbackParams;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::TurnInterruptParams;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use tokio::process::Command;
|
||||
|
||||
pub struct McpProcess {
|
||||
@@ -46,8 +64,11 @@ pub struct McpProcess {
|
||||
process: Child,
|
||||
stdin: ChildStdin,
|
||||
stdout: BufReader<ChildStdout>,
|
||||
pending_messages: VecDeque<JSONRPCMessage>,
|
||||
}
|
||||
|
||||
pub const DEFAULT_CLIENT_NAME: &str = "codex-app-server-tests";
|
||||
|
||||
impl McpProcess {
|
||||
pub async fn new(codex_home: &Path) -> anyhow::Result<Self> {
|
||||
Self::new_with_env(codex_home, &[]).await
|
||||
@@ -62,12 +83,8 @@ impl McpProcess {
|
||||
codex_home: &Path,
|
||||
env_overrides: &[(&str, Option<&str>)],
|
||||
) -> anyhow::Result<Self> {
|
||||
// Use assert_cmd to locate the binary path and then switch to tokio::process::Command
|
||||
let std_cmd = StdCommand::cargo_bin("codex-app-server")
|
||||
.context("should find binary for codex-mcp-server")?;
|
||||
|
||||
let program = std_cmd.get_program().to_owned();
|
||||
|
||||
let program = codex_utils_cargo_bin::cargo_bin("codex-app-server")
|
||||
.context("should find binary for codex-app-server")?;
|
||||
let mut cmd = Command::new(program);
|
||||
|
||||
cmd.stdin(Stdio::piped());
|
||||
@@ -116,38 +133,68 @@ impl McpProcess {
|
||||
process,
|
||||
stdin,
|
||||
stdout,
|
||||
pending_messages: VecDeque::new(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Performs the initialization handshake with the MCP server.
|
||||
pub async fn initialize(&mut self) -> anyhow::Result<()> {
|
||||
let params = Some(serde_json::to_value(InitializeParams {
|
||||
client_info: ClientInfo {
|
||||
name: "codex-app-server-tests".to_string(),
|
||||
let initialized = self
|
||||
.initialize_with_client_info(ClientInfo {
|
||||
name: DEFAULT_CLIENT_NAME.to_string(),
|
||||
title: None,
|
||||
version: "0.1.0".to_string(),
|
||||
},
|
||||
})?);
|
||||
let req_id = self.send_request("initialize", params).await?;
|
||||
let initialized = self.read_jsonrpc_message().await?;
|
||||
let JSONRPCMessage::Response(response) = initialized else {
|
||||
})
|
||||
.await?;
|
||||
let JSONRPCMessage::Response(_) = initialized else {
|
||||
unreachable!("expected JSONRPCMessage::Response for initialize, got {initialized:?}");
|
||||
};
|
||||
if response.id != RequestId::Integer(req_id) {
|
||||
anyhow::bail!(
|
||||
"initialize response id mismatch: expected {}, got {:?}",
|
||||
req_id,
|
||||
response.id
|
||||
);
|
||||
}
|
||||
|
||||
// Send notifications/initialized to ack the response.
|
||||
self.send_notification(ClientNotification::Initialized)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sends initialize with the provided client info and returns the response/error message.
|
||||
pub async fn initialize_with_client_info(
|
||||
&mut self,
|
||||
client_info: ClientInfo,
|
||||
) -> anyhow::Result<JSONRPCMessage> {
|
||||
let params = Some(serde_json::to_value(InitializeParams { client_info })?);
|
||||
let request_id = self.send_request("initialize", params).await?;
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Response(response) => {
|
||||
if response.id != RequestId::Integer(request_id) {
|
||||
anyhow::bail!(
|
||||
"initialize response id mismatch: expected {}, got {:?}",
|
||||
request_id,
|
||||
response.id
|
||||
);
|
||||
}
|
||||
|
||||
// Send notifications/initialized to ack the response.
|
||||
self.send_notification(ClientNotification::Initialized)
|
||||
.await?;
|
||||
|
||||
Ok(JSONRPCMessage::Response(response))
|
||||
}
|
||||
JSONRPCMessage::Error(error) => {
|
||||
if error.id != RequestId::Integer(request_id) {
|
||||
anyhow::bail!(
|
||||
"initialize error id mismatch: expected {}, got {:?}",
|
||||
request_id,
|
||||
error.id
|
||||
);
|
||||
}
|
||||
Ok(JSONRPCMessage::Error(error))
|
||||
}
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Notification: {notification:?}");
|
||||
}
|
||||
JSONRPCMessage::Request(request) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {request:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Send a `newConversation` JSON-RPC request.
|
||||
pub async fn send_new_conversation_request(
|
||||
&mut self,
|
||||
@@ -186,7 +233,7 @@ impl McpProcess {
|
||||
}
|
||||
|
||||
/// Send a `removeConversationListener` JSON-RPC request.
|
||||
pub async fn send_remove_conversation_listener_request(
|
||||
pub async fn send_remove_thread_listener_request(
|
||||
&mut self,
|
||||
params: RemoveConversationListenerParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
@@ -232,6 +279,29 @@ impl McpProcess {
|
||||
self.send_request("getUserAgent", None).await
|
||||
}
|
||||
|
||||
/// Send an `account/rateLimits/read` JSON-RPC request.
|
||||
pub async fn send_get_account_rate_limits_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("account/rateLimits/read", None).await
|
||||
}
|
||||
|
||||
/// Send an `account/read` JSON-RPC request.
|
||||
pub async fn send_get_account_request(
|
||||
&mut self,
|
||||
params: GetAccountParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("account/read", params).await
|
||||
}
|
||||
|
||||
/// Send a `feedback/upload` JSON-RPC request.
|
||||
pub async fn send_feedback_upload_request(
|
||||
&mut self,
|
||||
params: FeedbackUploadParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("feedback/upload", params).await
|
||||
}
|
||||
|
||||
/// Send a `userInfo` JSON-RPC request.
|
||||
pub async fn send_user_info_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("userInfo", None).await
|
||||
@@ -255,6 +325,87 @@ impl McpProcess {
|
||||
self.send_request("listConversations", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/start` JSON-RPC request.
|
||||
pub async fn send_thread_start_request(
|
||||
&mut self,
|
||||
params: ThreadStartParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/start", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/resume` JSON-RPC request.
|
||||
pub async fn send_thread_resume_request(
|
||||
&mut self,
|
||||
params: ThreadResumeParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/resume", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/fork` JSON-RPC request.
|
||||
pub async fn send_thread_fork_request(
|
||||
&mut self,
|
||||
params: ThreadForkParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/fork", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/archive` JSON-RPC request.
|
||||
pub async fn send_thread_archive_request(
|
||||
&mut self,
|
||||
params: ThreadArchiveParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/archive", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/rollback` JSON-RPC request.
|
||||
pub async fn send_thread_rollback_request(
|
||||
&mut self,
|
||||
params: ThreadRollbackParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/rollback", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/list` JSON-RPC request.
|
||||
pub async fn send_thread_list_request(
|
||||
&mut self,
|
||||
params: ThreadListParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/list", params).await
|
||||
}
|
||||
|
||||
/// Send a `thread/loaded/list` JSON-RPC request.
|
||||
pub async fn send_thread_loaded_list_request(
|
||||
&mut self,
|
||||
params: ThreadLoadedListParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("thread/loaded/list", params).await
|
||||
}
|
||||
|
||||
/// Send a `model/list` JSON-RPC request.
|
||||
pub async fn send_list_models_request(
|
||||
&mut self,
|
||||
params: ModelListParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("model/list", params).await
|
||||
}
|
||||
|
||||
/// Send a `collaborationMode/list` JSON-RPC request.
|
||||
pub async fn send_list_collaboration_modes_request(
|
||||
&mut self,
|
||||
params: CollaborationModeListParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("collaborationMode/list", params).await
|
||||
}
|
||||
|
||||
/// Send a `resumeConversation` JSON-RPC request.
|
||||
pub async fn send_resume_conversation_request(
|
||||
&mut self,
|
||||
@@ -264,6 +415,15 @@ impl McpProcess {
|
||||
self.send_request("resumeConversation", params).await
|
||||
}
|
||||
|
||||
/// Send a `forkConversation` JSON-RPC request.
|
||||
pub async fn send_fork_conversation_request(
|
||||
&mut self,
|
||||
params: ForkConversationParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("forkConversation", params).await
|
||||
}
|
||||
|
||||
/// Send a `loginApiKey` JSON-RPC request.
|
||||
pub async fn send_login_api_key_request(
|
||||
&mut self,
|
||||
@@ -278,6 +438,33 @@ impl McpProcess {
|
||||
self.send_request("loginChatGpt", None).await
|
||||
}
|
||||
|
||||
/// Send a `turn/start` JSON-RPC request (v2).
|
||||
pub async fn send_turn_start_request(
|
||||
&mut self,
|
||||
params: TurnStartParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("turn/start", params).await
|
||||
}
|
||||
|
||||
/// Send a `turn/interrupt` JSON-RPC request (v2).
|
||||
pub async fn send_turn_interrupt_request(
|
||||
&mut self,
|
||||
params: TurnInterruptParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("turn/interrupt", params).await
|
||||
}
|
||||
|
||||
/// Send a `review/start` JSON-RPC request (v2).
|
||||
pub async fn send_review_start_request(
|
||||
&mut self,
|
||||
params: ReviewStartParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("review/start", params).await
|
||||
}
|
||||
|
||||
/// Send a `cancelLoginChatGpt` JSON-RPC request.
|
||||
pub async fn send_cancel_login_chat_gpt_request(
|
||||
&mut self,
|
||||
@@ -292,6 +479,64 @@ impl McpProcess {
|
||||
self.send_request("logoutChatGpt", None).await
|
||||
}
|
||||
|
||||
pub async fn send_config_read_request(
|
||||
&mut self,
|
||||
params: ConfigReadParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("config/read", params).await
|
||||
}
|
||||
|
||||
pub async fn send_config_value_write_request(
|
||||
&mut self,
|
||||
params: ConfigValueWriteParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("config/value/write", params).await
|
||||
}
|
||||
|
||||
pub async fn send_config_batch_write_request(
|
||||
&mut self,
|
||||
params: ConfigBatchWriteParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("config/batchWrite", params).await
|
||||
}
|
||||
|
||||
/// Send an `account/logout` JSON-RPC request.
|
||||
pub async fn send_logout_account_request(&mut self) -> anyhow::Result<i64> {
|
||||
self.send_request("account/logout", None).await
|
||||
}
|
||||
|
||||
/// Send an `account/login/start` JSON-RPC request for API key login.
|
||||
pub async fn send_login_account_api_key_request(
|
||||
&mut self,
|
||||
api_key: &str,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = serde_json::json!({
|
||||
"type": "apiKey",
|
||||
"apiKey": api_key,
|
||||
});
|
||||
self.send_request("account/login/start", Some(params)).await
|
||||
}
|
||||
|
||||
/// Send an `account/login/start` JSON-RPC request for ChatGPT login.
|
||||
pub async fn send_login_account_chatgpt_request(&mut self) -> anyhow::Result<i64> {
|
||||
let params = serde_json::json!({
|
||||
"type": "chatgpt"
|
||||
});
|
||||
self.send_request("account/login/start", Some(params)).await
|
||||
}
|
||||
|
||||
/// Send an `account/login/cancel` JSON-RPC request.
|
||||
pub async fn send_cancel_login_account_request(
|
||||
&mut self,
|
||||
params: CancelLoginAccountParams,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = Some(serde_json::to_value(params)?);
|
||||
self.send_request("account/login/cancel", params).await
|
||||
}
|
||||
|
||||
    /// Send a `fuzzyFileSearch` JSON-RPC request.
    pub async fn send_fuzzy_file_search_request(
        &mut self,
@@ -317,7 +562,6 @@ impl McpProcess {
        let request_id = self.next_request_id.fetch_add(1, Ordering::Relaxed);

        let message = JSONRPCMessage::Request(JSONRPCRequest {
            jsonrpc: JSONRPC_VERSION.into(),
            id: RequestId::Integer(request_id),
            method: method.to_string(),
            params,
@@ -331,12 +575,8 @@ impl McpProcess {
        id: RequestId,
        result: serde_json::Value,
    ) -> anyhow::Result<()> {
        self.send_jsonrpc_message(JSONRPCMessage::Response(JSONRPCResponse {
            jsonrpc: JSONRPC_VERSION.into(),
            id,
            result,
        }))
        .await
        self.send_jsonrpc_message(JSONRPCMessage::Response(JSONRPCResponse { id, result }))
            .await
    }

    pub async fn send_notification(
@@ -345,7 +585,6 @@ impl McpProcess {
    ) -> anyhow::Result<()> {
        let value = serde_json::to_value(notification)?;
        self.send_jsonrpc_message(JSONRPCMessage::Notification(JSONRPCNotification {
            jsonrpc: JSONRPC_VERSION.into(),
            method: value
                .get("method")
                .and_then(|m| m.as_str())
@@ -373,27 +612,19 @@ impl McpProcess {
        Ok(message)
    }

    pub async fn read_stream_until_request_message(&mut self) -> anyhow::Result<JSONRPCRequest> {
    pub async fn read_stream_until_request_message(&mut self) -> anyhow::Result<ServerRequest> {
        eprintln!("in read_stream_until_request_message()");

        loop {
            let message = self.read_jsonrpc_message().await?;
        let message = self
            .read_stream_until_message(|message| matches!(message, JSONRPCMessage::Request(_)))
            .await?;

            match message {
                JSONRPCMessage::Notification(_) => {
                    eprintln!("notification: {message:?}");
                }
                JSONRPCMessage::Request(jsonrpc_request) => {
                    return Ok(jsonrpc_request);
                }
                JSONRPCMessage::Error(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
                }
                JSONRPCMessage::Response(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
                }
            }
        }
        let JSONRPCMessage::Request(jsonrpc_request) = message else {
            unreachable!("expected JSONRPCMessage::Request, got {message:?}");
        };
        jsonrpc_request
            .try_into()
            .with_context(|| "failed to deserialize ServerRequest from JSONRPCRequest")
    }

    pub async fn read_stream_until_response_message(
@@ -402,50 +633,32 @@ impl McpProcess {
    ) -> anyhow::Result<JSONRPCResponse> {
        eprintln!("in read_stream_until_response_message({request_id:?})");

        loop {
            let message = self.read_jsonrpc_message().await?;
            match message {
                JSONRPCMessage::Notification(_) => {
                    eprintln!("notification: {message:?}");
                }
                JSONRPCMessage::Request(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
                }
                JSONRPCMessage::Error(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
                }
                JSONRPCMessage::Response(jsonrpc_response) => {
                    if jsonrpc_response.id == request_id {
                        return Ok(jsonrpc_response);
                    }
                }
            }
        }
        let message = self
            .read_stream_until_message(|message| {
                Self::message_request_id(message) == Some(&request_id)
            })
            .await?;

        let JSONRPCMessage::Response(response) = message else {
            unreachable!("expected JSONRPCMessage::Response, got {message:?}");
        };
        Ok(response)
    }

    pub async fn read_stream_until_error_message(
        &mut self,
        request_id: RequestId,
    ) -> anyhow::Result<mcp_types::JSONRPCError> {
        loop {
            let message = self.read_jsonrpc_message().await?;
            match message {
                JSONRPCMessage::Notification(_) => {
                    eprintln!("notification: {message:?}");
                }
                JSONRPCMessage::Request(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
                }
                JSONRPCMessage::Response(_) => {
                    // Keep scanning; we're waiting for an error with matching id.
                }
                JSONRPCMessage::Error(err) => {
                    if err.id == request_id {
                        return Ok(err);
                    }
                }
            }
        }
    }
    ) -> anyhow::Result<JSONRPCError> {
        let message = self
            .read_stream_until_message(|message| {
                Self::message_request_id(message) == Some(&request_id)
            })
            .await?;

        let JSONRPCMessage::Error(err) = message else {
            unreachable!("expected JSONRPCMessage::Error, got {message:?}");
        };
        Ok(err)
    }

    pub async fn read_stream_until_notification_message(
@@ -454,24 +667,64 @@ impl McpProcess {
    ) -> anyhow::Result<JSONRPCNotification> {
        eprintln!("in read_stream_until_notification_message({method})");

        let message = self
            .read_stream_until_message(|message| {
                matches!(
                    message,
                    JSONRPCMessage::Notification(notification) if notification.method == method
                )
            })
            .await?;

        let JSONRPCMessage::Notification(notification) = message else {
            unreachable!("expected JSONRPCMessage::Notification, got {message:?}");
        };
        Ok(notification)
    }

    /// Clears any buffered messages so future reads only consider new stream items.
    ///
    /// We call this when e.g. we want to validate against the next turn and no longer care about
    /// messages buffered from the prior turn.
    pub fn clear_message_buffer(&mut self) {
        self.pending_messages.clear();
    }

    /// Reads the stream until a message matches `predicate`, buffering any non-matching messages
    /// for later reads.
    async fn read_stream_until_message<F>(&mut self, predicate: F) -> anyhow::Result<JSONRPCMessage>
    where
        F: Fn(&JSONRPCMessage) -> bool,
    {
        if let Some(message) = self.take_pending_message(&predicate) {
            return Ok(message);
        }

        loop {
            let message = self.read_jsonrpc_message().await?;
            match message {
                JSONRPCMessage::Notification(notification) => {
                    if notification.method == method {
                        return Ok(notification);
                    }
                }
                JSONRPCMessage::Request(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
                }
                JSONRPCMessage::Error(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
                }
                JSONRPCMessage::Response(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
                }
            if predicate(&message) {
                return Ok(message);
            }
            self.pending_messages.push_back(message);
        }
    }

    fn take_pending_message<F>(&mut self, predicate: &F) -> Option<JSONRPCMessage>
    where
        F: Fn(&JSONRPCMessage) -> bool,
    {
        if let Some(pos) = self.pending_messages.iter().position(predicate) {
            return self.pending_messages.remove(pos);
        }
        None
    }

    fn message_request_id(message: &JSONRPCMessage) -> Option<&RequestId> {
        match message {
            JSONRPCMessage::Request(request) => Some(&request.id),
            JSONRPCMessage::Response(response) => Some(&response.id),
            JSONRPCMessage::Error(err) => Some(&err.id),
            JSONRPCMessage::Notification(_) => None,
        }
    }
}

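Taken together, the send_* wrappers and the read_stream_until_* helpers give tests a simple request/response flow: a send_* call returns the integer request id, and read_stream_until_response_message waits for the matching response while read_stream_until_message parks any unrelated traffic in pending_messages for later reads. A minimal sketch of that flow, assuming an `McpProcess` value named `mcp` and a `codex/event` notification method (both illustrative, not taken from this diff):

    // Sketch: drive a login request, then wait for its response by id.
    let request_id = mcp.send_login_account_chatgpt_request().await?;
    let response = mcp
        .read_stream_until_response_message(RequestId::Integer(request_id))
        .await?;

    // Notifications read past while waiting above stay buffered, so this still finds them.
    let _notification = mcp
        .read_stream_until_notification_message("codex/event")
        .await?;

    // Once a turn has been fully validated, drop anything left over from it.
    mcp.clear_message_buffer();
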
@@ -1,17 +1,18 @@
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;

use core_test_support::responses;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::Respond;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
use wiremock::matchers::path_regex;

/// Create a mock server that will provide the responses, in order, for
/// requests to the `/v1/chat/completions` endpoint.
pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> MockServer {
    let server = MockServer::start().await;
/// requests to the `/v1/responses` endpoint.
pub async fn create_mock_responses_server_sequence(responses: Vec<String>) -> MockServer {
    let server = responses::start_mock_server().await;

    let num_calls = responses.len();
    let seq_responder = SeqResponder {
@@ -20,7 +21,7 @@ pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> Mock
    };

    Mock::given(method("POST"))
        .and(path("/v1/chat/completions"))
        .and(path_regex(".*/responses$"))
        .respond_with(seq_responder)
        .expect(num_calls as u64)
        .mount(&server)
@@ -29,6 +30,25 @@ pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> Mock
    server
}

/// Same as `create_mock_responses_server_sequence` but does not enforce an
/// expectation on the number of calls.
pub async fn create_mock_responses_server_sequence_unchecked(responses: Vec<String>) -> MockServer {
    let server = responses::start_mock_server().await;

    let seq_responder = SeqResponder {
        num_calls: AtomicUsize::new(0),
        responses,
    };

    Mock::given(method("POST"))
        .and(path_regex(".*/responses$"))
        .respond_with(seq_responder)
        .mount(&server)
        .await;

    server
}

struct SeqResponder {
    num_calls: AtomicUsize,
    responses: Vec<String>,
@@ -38,10 +58,24 @@ impl Respond for SeqResponder {
    fn respond(&self, _: &wiremock::Request) -> ResponseTemplate {
        let call_num = self.num_calls.fetch_add(1, Ordering::SeqCst);
        match self.responses.get(call_num) {
            Some(response) => ResponseTemplate::new(200)
                .insert_header("content-type", "text/event-stream")
                .set_body_raw(response.clone(), "text/event-stream"),
            Some(response) => responses::sse_response(response.clone()),
            None => panic!("no response for {call_num}"),
        }
    }
}

/// Create a mock responses API server that returns the same assistant message for every request.
pub async fn create_mock_responses_server_repeating_assistant(message: &str) -> MockServer {
    let server = responses::start_mock_server().await;
    let body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", message),
        responses::ev_completed("resp-1"),
    ]);
    Mock::given(method("POST"))
        .and(path_regex(".*/responses$"))
        .respond_with(responses::sse_response(body))
        .mount(&server)
        .await;
    server
}

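In a test, these helpers are used by queueing one SSE body per expected model turn; the checked variant also asserts, via `.expect(num_calls as u64)`, that exactly that many POSTs to `.../responses` arrive. A rough sketch, assuming the test builds the bodies with the `core_test_support::responses` helpers shown above and points the model provider's base URL at `server.uri()`:

    // Sketch: two turns, two queued SSE bodies, served in order.
    let bodies = vec![
        responses::sse(vec![
            responses::ev_response_created("resp-1"),
            responses::ev_assistant_message("msg-1", "first answer"),
            responses::ev_completed("resp-1"),
        ]),
        responses::sse(vec![
            responses::ev_response_created("resp-2"),
            responses::ev_assistant_message("msg-2", "second answer"),
            responses::ev_completed("resp-2"),
        ]),
    ];
    let server = create_mock_responses_server_sequence(bodies).await;
    // The provider under test is then configured with `server.uri()` as its base URL,
    // so its POSTs to `.../responses` consume the queued bodies one per request.
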
codex-rs/app-server/tests/common/models_cache.rs (new file, 83 lines)
@@ -0,0 +1,83 @@
use chrono::DateTime;
use chrono::Utc;
use codex_core::models_manager::model_presets::all_model_presets;
use codex_protocol::openai_models::ConfigShellToolType;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelVisibility;
use codex_protocol::openai_models::TruncationPolicyConfig;
use serde_json::json;
use std::path::Path;

/// Convert a ModelPreset to ModelInfo for cache storage.
fn preset_to_info(preset: &ModelPreset, priority: i32) -> ModelInfo {
    ModelInfo {
        slug: preset.id.clone(),
        display_name: preset.display_name.clone(),
        description: Some(preset.description.clone()),
        default_reasoning_level: Some(preset.default_reasoning_effort),
        supported_reasoning_levels: preset.supported_reasoning_efforts.clone(),
        shell_type: ConfigShellToolType::ShellCommand,
        visibility: if preset.show_in_picker {
            ModelVisibility::List
        } else {
            ModelVisibility::Hide
        },
        supported_in_api: true,
        priority,
        upgrade: preset.upgrade.as_ref().map(|u| u.into()),
        base_instructions: "base instructions".to_string(),
        supports_reasoning_summaries: false,
        support_verbosity: false,
        default_verbosity: None,
        apply_patch_tool_type: None,
        truncation_policy: TruncationPolicyConfig::bytes(10_000),
        supports_parallel_tool_calls: false,
        context_window: Some(272_000),
        auto_compact_token_limit: None,
        effective_context_window_percent: 95,
        experimental_supported_tools: Vec::new(),
    }
}

/// Write a models_cache.json file to the codex home directory.
/// This prevents ModelsManager from making network requests to refresh models.
/// The cache will be treated as fresh (within TTL) and used instead of fetching from the network.
/// Uses the built-in model presets from ModelsManager, converted to ModelInfo format.
pub fn write_models_cache(codex_home: &Path) -> std::io::Result<()> {
    // Get all presets and filter for show_in_picker (same as builtin_model_presets does)
    let presets: Vec<&ModelPreset> = all_model_presets()
        .iter()
        .filter(|preset| preset.show_in_picker)
        .collect();
    // Convert presets to ModelInfo, assigning priorities (lower = earlier in list).
    // Priority is used for sorting, so the first model gets the lowest priority.
    let models: Vec<ModelInfo> = presets
        .iter()
        .enumerate()
        .map(|(idx, preset)| {
            // Lower priority = earlier in list.
            let priority = idx as i32;
            preset_to_info(preset, priority)
        })
        .collect();

    write_models_cache_with_models(codex_home, models)
}

/// Write a models_cache.json file with specific models.
/// Useful when tests need specific models to be available.
pub fn write_models_cache_with_models(
    codex_home: &Path,
    models: Vec<ModelInfo>,
) -> std::io::Result<()> {
    let cache_path = codex_home.join("models_cache.json");
    // DateTime<Utc> serializes to RFC3339 format by default with serde
    let fetched_at: DateTime<Utc> = Utc::now();
    let cache = json!({
        "fetched_at": fetched_at,
        "etag": null,
        "models": models
    });
    std::fs::write(cache_path, serde_json::to_string_pretty(&cache)?)
}
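A test would typically call this once while setting up its temporary codex home, before launching the app server, so that ModelsManager finds a fresh cache and never goes to the network. A minimal sketch, assuming a `tempfile::TempDir` scratch directory (the env-var wiring is illustrative):

    // Sketch: seed the models cache in a scratch codex home before starting the server.
    let codex_home = tempfile::TempDir::new()?;
    write_models_cache(codex_home.path())?;
    // ...then spawn the app server / McpProcess with its codex home pointed at codex_home.path()...
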
@@ -1,95 +1,85 @@
use core_test_support::responses;
use serde_json::json;
use std::path::Path;

pub fn create_shell_sse_response(
pub fn create_shell_command_sse_response(
    command: Vec<String>,
    workdir: Option<&Path>,
    timeout_ms: Option<u64>,
    call_id: &str,
) -> anyhow::Result<String> {
    // The `arguments`` for the `shell` tool is a serialized JSON object.
    // The `arguments` for the `shell_command` tool is a serialized JSON object.
    let command_str = shlex::try_join(command.iter().map(String::as_str))?;
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": command,
        "command": command_str,
        "workdir": workdir.map(|w| w.to_string_lossy()),
        "timeout": timeout_ms
        "timeout_ms": timeout_ms
    }))?;
    let tool_call = json!({
        "choices": [
            {
                "delta": {
                    "tool_calls": [
                        {
                            "id": call_id,
                            "function": {
                                "name": "shell",
                                "arguments": tool_call_arguments
                            }
                        }
                    ]
                },
                "finish_reason": "tool_calls"
            }
        ]
    });

    let sse = format!(
        "data: {}\n\ndata: DONE\n\n",
        serde_json::to_string(&tool_call)?
    );
    Ok(sse)
    Ok(responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_function_call(call_id, "shell_command", &tool_call_arguments),
        responses::ev_completed("resp-1"),
    ]))
}

pub fn create_final_assistant_message_sse_response(message: &str) -> anyhow::Result<String> {
    let assistant_message = json!({
        "choices": [
            {
                "delta": {
                    "content": message
                },
                "finish_reason": "stop"
            }
        ]
    });

    let sse = format!(
        "data: {}\n\ndata: DONE\n\n",
        serde_json::to_string(&assistant_message)?
    );
    Ok(sse)
    Ok(responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", message),
        responses::ev_completed("resp-1"),
    ]))
}

pub fn create_apply_patch_sse_response(
    patch_content: &str,
    call_id: &str,
) -> anyhow::Result<String> {
    // Use shell command to call apply_patch with heredoc format
    let shell_command = format!("apply_patch <<'EOF'\n{patch_content}\nEOF");
    Ok(responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_apply_patch_shell_command_call_via_heredoc(call_id, patch_content),
        responses::ev_completed("resp-1"),
    ]))
}

pub fn create_exec_command_sse_response(call_id: &str) -> anyhow::Result<String> {
    let (cmd, args) = if cfg!(windows) {
        ("cmd.exe", vec!["/d", "/c", "echo hi"])
    } else {
        ("/bin/sh", vec!["-c", "echo hi"])
    };
    let command = std::iter::once(cmd.to_string())
        .chain(args.into_iter().map(str::to_string))
        .collect::<Vec<_>>();
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": ["bash", "-lc", shell_command]
        "cmd": command.join(" "),
        "yield_time_ms": 500
    }))?;
    Ok(responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_function_call(call_id, "exec_command", &tool_call_arguments),
        responses::ev_completed("resp-1"),
    ]))
}

pub fn create_request_user_input_sse_response(call_id: &str) -> anyhow::Result<String> {
    let tool_call_arguments = serde_json::to_string(&json!({
        "questions": [{
            "id": "confirm_path",
            "header": "Confirm",
            "question": "Proceed with the plan?",
            "options": [{
                "label": "Yes (Recommended)",
                "description": "Continue the current plan."
            }, {
                "label": "No",
                "description": "Stop and revisit the approach."
            }]
        }]
    }))?;

    let tool_call = json!({
        "choices": [
            {
                "delta": {
                    "tool_calls": [
                        {
                            "id": call_id,
                            "function": {
                                "name": "shell",
                                "arguments": tool_call_arguments
                            }
                        }
                    ]
                },
                "finish_reason": "tool_calls"
            }
        ]
    });

    let sse = format!(
        "data: {}\n\ndata: DONE\n\n",
        serde_json::to_string(&tool_call)?
    );
    Ok(sse)
    Ok(responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_function_call(call_id, "request_user_input", &tool_call_arguments),
        responses::ev_completed("resp-1"),
    ]))
}

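Each of these builders returns a complete SSE body as a String, so they compose directly with the sequence mock server above: one builder call per model turn. A sketch of a two-turn script, with the call id and command chosen purely for illustration:

    // Sketch: first turn asks to run a shell command, second turn ends with a final message.
    let turn_1 = create_shell_command_sse_response(
        vec!["echo".to_string(), "hello".to_string()],
        None,        // workdir
        Some(5_000), // timeout_ms
        "call-1",
    )?;
    let turn_2 = create_final_assistant_message_sse_response("done")?;
    let server = create_mock_responses_server_sequence(vec![turn_1, turn_2]).await;
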