Mirror of https://github.com/openai/codex.git, synced 2026-02-02 06:57:03 +00:00
Compare commits
1623 Commits
[Commit comparison table omitted: 1623 rows containing only bare SHA-1 hashes; the author, date, and message cells were empty in this mirror view.]
@@ -1 +1,3 @@
+iTerm
+iTerm2
 psuedo
.codespellrc
@@ -1,6 +1,6 @@
 [codespell]
 # Ref: https://github.com/codespell-project/codespell#using-a-config-file
-skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl
+skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
 check-hidden = true
 ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
-ignore-words-list = ratatui,ser
+ignore-words-list = ratatui,ser,iTerm,iterm2,iterm
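As a side note on how this config is consumed: codespell picks up `.codespellrc` automatically when invoked from the repository root (per the config-file docs linked in the file itself). A minimal local sketch, assuming codespell is installed from PyPI:

```bash
# Install codespell and run it from the repo root; the [codespell] section
# above supplies the skip globs and ignore words, so no flags are needed.
pip install codespell
codespell
```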
.github/ISSUE_TEMPLATE/2-bug-report.yml (vendored): 22 changes
@@ -20,6 +20,14 @@ body:
     attributes:
       label: What version of Codex is running?
       description: Copy the output of `codex --version`
+    validations:
+      required: true
+  - type: input
+    id: plan
+    attributes:
+      label: What subscription do you have?
+    validations:
+      required: true
   - type: input
     id: model
     attributes:
@@ -32,11 +40,18 @@ body:
       description: |
         For MacOS and Linux: copy the output of `uname -mprs`
         For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
+  - type: textarea
+    id: actual
+    attributes:
+      label: What issue are you seeing?
+      description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
+    validations:
+      required: true
   - type: textarea
     id: steps
     attributes:
       label: What steps can reproduce the bug?
-      description: Explain the bug and provide a code snippet that can reproduce it.
+      description: Explain the bug and provide a code snippet that can reproduce it. Please include session id, token limit usage, context window usage if applicable.
     validations:
       required: true
   - type: textarea
@@ -44,11 +59,6 @@
     attributes:
       label: What is the expected behavior?
       description: If possible, please provide text instead of a screenshot.
-  - type: textarea
-    id: actual
-    attributes:
-      label: What do you see instead?
-      description: If possible, please provide text instead of a screenshot.
   - type: textarea
     id: notes
     attributes:
.github/ISSUE_TEMPLATE/4-feature-request.yml (vendored): 6 changes
@@ -2,7 +2,6 @@ name: 🎁 Feature Request
 description: Propose a new feature for Codex
 labels:
   - enhancement
-  - needs triage
 body:
   - type: markdown
     attributes:
@@ -19,11 +18,6 @@ body:
       label: What feature would you like to see?
     validations:
       required: true
-  - type: textarea
-    id: author
-    attributes:
-      label: Are you interested in implementing this feature?
-      description: Please wait for acknowledgement before implementing or opening a PR.
   - type: textarea
     id: notes
     attributes:
.github/ISSUE_TEMPLATE/5-vs-code-extension.yml (vendored, new file): 62 lines
@@ -0,0 +1,62 @@
+name: 🧑‍💻 VS Code Extension
+description: Report an issue with the VS Code extension
+labels:
+  - extension
+  - needs triage
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Before submitting a new issue, please search for existing issues to see if your issue has already been reported.
+        If it has, please add a 👍 reaction (no need to leave a comment) to the existing issue instead of creating a new one.
+
+  - type: input
+    id: version
+    attributes:
+      label: What version of the VS Code extension are you using?
+    validations:
+      required: true
+  - type: input
+    id: plan
+    attributes:
+      label: What subscription do you have?
+    validations:
+      required: true
+  - type: input
+    id: ide
+    attributes:
+      label: Which IDE are you using?
+      description: Like `VS Code`, `Cursor`, `Windsurf`, etc.
+    validations:
+      required: true
+  - type: input
+    id: platform
+    attributes:
+      label: What platform is your computer?
+      description: |
+        For MacOS and Linux: copy the output of `uname -mprs`
+        For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
+  - type: textarea
+    id: actual
+    attributes:
+      label: What issue are you seeing?
+      description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
+    validations:
+      required: true
+  - type: textarea
+    id: steps
+    attributes:
+      label: What steps can reproduce the bug?
+      description: Explain the bug and provide a code snippet that can reproduce it. Please include session id, token limit usage, context window usage if applicable.
+    validations:
+      required: true
+  - type: textarea
+    id: expected
+    attributes:
+      label: What is the expected behavior?
+      description: If possible, please provide text instead of a screenshot.
+  - type: textarea
+    id: notes
+    attributes:
+      label: Additional information
+      description: Is there anything else you think we should know?
.github/actions/codex/.prettierrc.toml (vendored, deleted): 8 lines
@@ -1,8 +0,0 @@
-printWidth = 80
-quoteProps = "consistent"
-semi = true
-tabWidth = 2
-trailingComma = "all"
-
-# Preserve existing behavior for markdown/text wrapping.
-proseWrap = "preserve"
.github/actions/codex/README.md (vendored, deleted): 140 lines
@@ -1,140 +0,0 @@
-# openai/codex-action
-
-`openai/codex-action` is a GitHub Action that facilitates the use of [Codex](https://github.com/openai/codex) on GitHub issues and pull requests. Using the action, associate **labels** to run Codex with the appropriate prompt for the given context. Codex will respond by posting comments or creating PRs, whichever you specify!
-
-Here is a sample workflow that uses `openai/codex-action`:
-
-```yaml
-name: Codex
-
-on:
-  issues:
-    types: [opened, labeled]
-  pull_request:
-    branches: [main]
-    types: [labeled]
-
-jobs:
-  codex:
-    if: ... # optional, but can be effective in conserving CI resources
-    runs-on: ubuntu-latest
-    # TODO(mbolin): Need to verify if/when `write` is necessary.
-    permissions:
-      contents: write
-      issues: write
-      pull-requests: write
-    steps:
-      # By default, Codex runs network disabled using --full-auto, so perform
-      # any setup that requires network (such as installing dependencies)
-      # before openai/codex-action.
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Run Codex
-        uses: openai/codex-action@latest
-        with:
-          openai_api_key: ${{ secrets.CODEX_OPENAI_API_KEY }}
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-```
-
-See sample usage in [`codex.yml`](../../workflows/codex.yml).
-
-## Triggering the Action
-
-Using the sample workflow above, we have:
-
-```yaml
-on:
-  issues:
-    types: [opened, labeled]
-  pull_request:
-    branches: [main]
-    types: [labeled]
-```
-
-which means our workflow will be triggered when any of the following events occur:
-
-- a label is added to an issue
-- a label is added to a pull request against the `main` branch
-
-### Label-Based Triggers
-
-To define a GitHub label that should trigger Codex, create a file named `.github/codex/labels/LABEL-NAME.md` in your repository where `LABEL-NAME` is the name of the label. The content of the file is the prompt template to use when the label is added (see more on [Prompt Template Variables](#prompt-template-variables) below).
-
-For example, if the file `.github/codex/labels/codex-review.md` exists, then:
-
-- Adding the `codex-review` label will trigger the workflow containing the `openai/codex-action` GitHub Action.
-- When `openai/codex-action` starts, it will replace the `codex-review` label with `codex-review-in-progress`.
-- When `openai/codex-action` is finished, it will replace the `codex-review-in-progress` label with `codex-review-completed`.
-
-If Codex sees that either `codex-review-in-progress` or `codex-review-completed` is already present, it will not perform the action.
-
-As determined by the [default config](./src/default-label-config.ts), Codex will act on the following labels by default:
-
-- Adding the `codex-review` label to a pull request will have Codex review the PR and add it to the PR as a comment.
-- Adding the `codex-triage` label to an issue will have Codex investigate the issue and report its findings as a comment.
-- Adding the `codex-issue-fix` label to an issue will have Codex attempt to fix the issue and create a PR with the fix, if any.
-
-## Action Inputs
-
-The `openai/codex-action` GitHub Action takes the following inputs:
-
-### `openai_api_key` (required)
-
-Set your `OPENAI_API_KEY` as a [repository secret](https://docs.github.com/en/actions/security-for-github-actions/security-guides/using-secrets-in-github-actions). See **Secrets and variables** then **Actions** in the settings for your GitHub repo.
-
-Note that the secret name does not have to be `OPENAI_API_KEY`. For example, you might want to name it `CODEX_OPENAI_API_KEY` and then configure it on `openai/codex-action` as follows:
-
-```yaml
-openai_api_key: ${{ secrets.CODEX_OPENAI_API_KEY }}
-```
-
-### `github_token` (required)
-
-This is required so that Codex can post a comment or create a PR. Set this value on the action as follows:
-
-```yaml
-github_token: ${{ secrets.GITHUB_TOKEN }}
-```
-
-### `codex_args`
-
-A whitespace-delimited list of arguments to pass to Codex. Defaults to `--full-auto`, but if you want to override the default model to use `o3`:
-
-```yaml
-codex_args: "--full-auto --model o3"
-```
-
-For more complex configurations, use the `codex_home` input.
-
-### `codex_home`
-
-If set, the value to use for the `$CODEX_HOME` environment variable when running Codex. As explained [in the docs](https://github.com/openai/codex/tree/main/codex-rs#readme), this folder can contain the `config.toml` to configure Codex, custom instructions, and log files.
-
-This should be a relative path within your repo.
-
-## Prompt Template Variables
-
-As shown above, `"prompt"` and `"promptPath"` are used to define prompt templates that will be populated and passed to Codex in response to certain events. All template variables are of the form `{CODEX_ACTION_...}` and the supported values are defined below.
-
-### `CODEX_ACTION_ISSUE_TITLE`
-
-If the action was triggered on a GitHub issue, this is the issue title.
-
-Specifically it is read as the `.issue.title` from the `$GITHUB_EVENT_PATH`.
-
-### `CODEX_ACTION_ISSUE_BODY`
-
-If the action was triggered on a GitHub issue, this is the issue body.
-
-Specifically it is read as the `.issue.body` from the `$GITHUB_EVENT_PATH`.
-
-### `CODEX_ACTION_GITHUB_EVENT_PATH`
-
-The value of the `$GITHUB_EVENT_PATH` environment variable, which is the path to the file that contains the JSON payload for the event that triggered the workflow. Codex can use `jq` to read only the fields of interest from this file.
-
-### `CODEX_ACTION_PR_DIFF`
-
-If the action was triggered on a pull request, this is the diff between the base and head commits of the PR. It is the output from `git diff`.
-
-Note that the content of the diff could be quite large, so it is generally safer to point Codex at `CODEX_ACTION_GITHUB_EVENT_PATH` and let it decide how it wants to explore the change.
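To make the deleted README's "Label-Based Triggers" section concrete: a hypothetical prompt template for the `codex-review` label could be created as shown below. This is an illustrative sketch only; the file path and the `{CODEX_ACTION_...}` variable syntax come from the README, but the prompt wording and the choice to interpolate `{CODEX_ACTION_PR_DIFF}` are assumptions, not content from the deleted file.

```bash
# Create a hypothetical label prompt template for the codex-review label.
mkdir -p .github/codex/labels
cat > .github/codex/labels/codex-review.md <<'EOF'
Review the following pull request and post your findings as a comment.
Focus on correctness and potential regressions.

{CODEX_ACTION_PR_DIFF}
EOF
```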
.github/actions/codex/action.yml (vendored, deleted): 125 lines
@@ -1,125 +0,0 @@
-name: "Codex [reusable action]"
-description: "A reusable action that runs a Codex model."
-
-inputs:
-  openai_api_key:
-    description: "The value to use as the OPENAI_API_KEY environment variable when running Codex."
-    required: true
-  trigger_phrase:
-    description: "Text to trigger Codex from a PR/issue body or comment."
-    required: false
-    default: ""
-  github_token:
-    description: "Token so Codex can comment on the PR or issue."
-    required: true
-  codex_args:
-    description: "A whitespace-delimited list of arguments to pass to Codex. Due to limitations in YAML, arguments with spaces are not supported. For more complex configurations, use the `codex_home` input."
-    required: false
-    default: "--config hide_agent_reasoning=true --full-auto"
-  codex_home:
-    description: "Value to use as the CODEX_HOME environment variable when running Codex."
-    required: false
-  codex_release_tag:
-    description: "The release tag of the Codex model to run, e.g., 'rust-v0.3.0'. Defaults to the latest release."
-    required: false
-    default: ""
-
-runs:
-  using: "composite"
-  steps:
-    # Do this in Bash so we do not even bother to install Bun if the sender does
-    # not have write access to the repo.
-    - name: Verify user has write access to the repo.
-      env:
-        GH_TOKEN: ${{ github.token }}
-      shell: bash
-      run: |
-        set -euo pipefail
-
-        PERMISSION=$(gh api \
-          "/repos/${GITHUB_REPOSITORY}/collaborators/${{ github.event.sender.login }}/permission" \
-          | jq -r '.permission')
-
-        if [[ "$PERMISSION" != "admin" && "$PERMISSION" != "write" ]]; then
-          exit 1
-        fi
-
-    - name: Download Codex
-      env:
-        GH_TOKEN: ${{ github.token }}
-      shell: bash
-      run: |
-        set -euo pipefail
-
-        # Determine OS/arch and corresponding Codex artifact name.
-        uname_s=$(uname -s)
-        uname_m=$(uname -m)
-
-        case "$uname_s" in
-          Linux*) os="linux" ;;
-          Darwin*) os="apple-darwin" ;;
-          *) echo "Unsupported operating system: $uname_s"; exit 1 ;;
-        esac
-
-        case "$uname_m" in
-          x86_64*) arch="x86_64" ;;
-          arm64*|aarch64*) arch="aarch64" ;;
-          *) echo "Unsupported architecture: $uname_m"; exit 1 ;;
-        esac
-
-        # linux builds differentiate between musl and gnu.
-        if [[ "$os" == "linux" ]]; then
-          if [[ "$arch" == "x86_64" ]]; then
-            triple="${arch}-unknown-linux-musl"
-          else
-            # Only other supported linux build is aarch64 gnu.
-            triple="${arch}-unknown-linux-gnu"
-          fi
-        else
-          # macOS
-          triple="${arch}-apple-darwin"
-        fi
-
-        # Note that if we start baking version numbers into the artifact name,
-        # we will need to update this action.yml file to match.
-        artifact="codex-${triple}.tar.gz"
-
-        TAG_ARG="${{ inputs.codex_release_tag }}"
-        # The usage is `gh release download [<tag>] [flags]`, so if TAG_ARG
-        # is empty, we do not pass it so we can default to the latest release.
-        gh release download ${TAG_ARG:+$TAG_ARG} --repo openai/codex \
-          --pattern "$artifact" --output - \
-          | tar xzO > /usr/local/bin/codex
-        chmod +x /usr/local/bin/codex
-
-        # Display Codex version to confirm binary integrity.
-        codex --version
-
-    - name: Install Bun
-      uses: oven-sh/setup-bun@v2
-      with:
-        bun-version: 1.2.11
-
-    - name: Install dependencies
-      shell: bash
-      run: |
-        cd ${{ github.action_path }}
-        bun install --production
-
-    - name: Run Codex
-      shell: bash
-      run: bun run ${{ github.action_path }}/src/main.ts
-      # Process args plus environment variables often have a max of 128 KiB,
-      # so we should fit within that limit?
-      env:
-        INPUT_CODEX_ARGS: ${{ inputs.codex_args || '' }}
-        INPUT_CODEX_HOME: ${{ inputs.codex_home || ''}}
-        INPUT_TRIGGER_PHRASE: ${{ inputs.trigger_phrase || '' }}
-        OPENAI_API_KEY: ${{ inputs.openai_api_key }}
-        GITHUB_TOKEN: ${{ inputs.github_token }}
-        GITHUB_EVENT_ACTION: ${{ github.event.action || '' }}
-        GITHUB_EVENT_LABEL_NAME: ${{ github.event.label.name || '' }}
-        GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number || '' }}
-        GITHUB_EVENT_ISSUE_BODY: ${{ github.event.issue.body || '' }}
-        GITHUB_EVENT_REVIEW_BODY: ${{ github.event.review.body || '' }}
-        GITHUB_EVENT_COMMENT_BODY: ${{ github.event.comment.body || '' }}
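The "Download Codex" step above can also be reproduced outside the action. A rough sketch for macOS on Apple silicon (the release triple follows the `uname` mapping in the step; other platforms substitute their own triple):

```bash
# Mirrors the action's download logic: fetch the release artifact, extract
# the single binary from the tarball via stdout, and verify it runs.
gh release download --repo openai/codex \
  --pattern "codex-aarch64-apple-darwin.tar.gz" --output - \
  | tar xzO > codex
chmod +x codex
./codex --version
```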
.github/actions/codex/bun.lock (vendored, deleted): 91 lines
@@ -1,91 +0,0 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "codex-action",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.11.1",
|
||||
"@actions/github": "^6.0.1",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "^1.2.20",
|
||||
"@types/node": "^24.3.0",
|
||||
"prettier": "^3.6.2",
|
||||
"typescript": "^5.9.2",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@actions/core": ["@actions/core@1.11.1", "", { "dependencies": { "@actions/exec": "^1.1.1", "@actions/http-client": "^2.0.1" } }, "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A=="],
|
||||
|
||||
"@actions/exec": ["@actions/exec@1.1.1", "", { "dependencies": { "@actions/io": "^1.0.1" } }, "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w=="],
|
||||
|
||||
"@actions/github": ["@actions/github@6.0.1", "", { "dependencies": { "@actions/http-client": "^2.2.0", "@octokit/core": "^5.0.1", "@octokit/plugin-paginate-rest": "^9.2.2", "@octokit/plugin-rest-endpoint-methods": "^10.4.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "undici": "^5.28.5" } }, "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw=="],
|
||||
|
||||
"@actions/http-client": ["@actions/http-client@2.2.3", "", { "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" } }, "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA=="],
|
||||
|
||||
"@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="],
|
||||
|
||||
"@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="],
|
||||
|
||||
"@octokit/auth-token": ["@octokit/auth-token@4.0.0", "", {}, "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA=="],
|
||||
|
||||
"@octokit/core": ["@octokit/core@5.2.1", "", { "dependencies": { "@octokit/auth-token": "^4.0.0", "@octokit/graphql": "^7.1.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" } }, "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ=="],
|
||||
|
||||
"@octokit/endpoint": ["@octokit/endpoint@9.0.6", "", { "dependencies": { "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw=="],
|
||||
|
||||
"@octokit/graphql": ["@octokit/graphql@7.1.1", "", { "dependencies": { "@octokit/request": "^8.4.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g=="],
|
||||
|
||||
"@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],
|
||||
|
||||
"@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@9.2.2", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ=="],
|
||||
|
||||
"@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@10.4.1", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg=="],
|
||||
|
||||
"@octokit/request": ["@octokit/request@8.4.1", "", { "dependencies": { "@octokit/endpoint": "^9.0.6", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw=="],
|
||||
|
||||
"@octokit/request-error": ["@octokit/request-error@5.1.1", "", { "dependencies": { "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" } }, "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g=="],
|
||||
|
||||
"@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
|
||||
|
||||
"@types/bun": ["@types/bun@1.2.20", "", { "dependencies": { "bun-types": "1.2.20" } }, "sha512-dX3RGzQ8+KgmMw7CsW4xT5ITBSCrSbfHc36SNT31EOUg/LA9JWq0VDdEXDRSe1InVWpd2yLUM1FUF/kEOyTzYA=="],
|
||||
|
||||
"@types/node": ["@types/node@24.3.0", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow=="],
|
||||
|
||||
"@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],
|
||||
|
||||
"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],
|
||||
|
||||
"bun-types": ["bun-types@1.2.20", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-pxTnQYOrKvdOwyiyd/7sMt9yFOenN004Y6O4lCcCUoKVej48FS5cvTw9geRaEcB9TsDZaJKAxPTVvi8tFsVuXA=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="],
|
||||
|
||||
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
|
||||
|
||||
"prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],
|
||||
|
||||
"tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="],
|
||||
|
||||
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
|
||||
|
||||
"undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="],
|
||||
|
||||
"undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
|
||||
|
||||
"universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="],
|
||||
|
||||
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
|
||||
|
||||
"@octokit/plugin-paginate-rest/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"bun-types/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@octokit/plugin-paginate-rest/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
}
|
||||
}
21 .github/actions/codex/package.json vendored
@@ -1,21 +0,0 @@
{
  "name": "codex-action",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "format": "prettier --check src",
    "format:fix": "prettier --write src",
    "test": "bun test",
    "typecheck": "tsc"
  },
  "dependencies": {
    "@actions/core": "^1.11.1",
    "@actions/github": "^6.0.1"
  },
  "devDependencies": {
    "@types/bun": "^1.2.20",
    "@types/node": "^24.3.0",
    "prettier": "^3.6.2",
    "typescript": "^5.9.2"
  }
}
85 .github/actions/codex/src/add-reaction.ts vendored
@@ -1,85 +0,0 @@
import * as github from "@actions/github";
import type { EnvContext } from "./env-context";

/**
 * Add an "eyes" reaction to the entity (issue, issue comment, or pull request
 * review comment) that triggered the current Codex invocation.
 *
 * The purpose is to provide immediate feedback to the user – similar to the
 * *-in-progress label flow – indicating that the bot has acknowledged the
 * request and is working on it.
 *
 * We attempt to add the reaction best suited for the current GitHub event:
 *
 *   • issues                      → POST /repos/{owner}/{repo}/issues/{issue_number}/reactions
 *   • issue_comment               → POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions
 *   • pull_request_review_comment → POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions
 *
 * If the specific target is unavailable (e.g. unexpected payload shape) we
 * silently skip instead of failing the whole action because the reaction is
 * merely cosmetic.
 */
export async function addEyesReaction(ctx: EnvContext): Promise<void> {
  const octokit = ctx.getOctokit();
  const { owner, repo } = github.context.repo;
  const eventName = github.context.eventName;

  try {
    switch (eventName) {
      case "issue_comment": {
        const commentId = (github.context.payload as any)?.comment?.id;
        if (commentId) {
          await octokit.rest.reactions.createForIssueComment({
            owner,
            repo,
            comment_id: commentId,
            content: "eyes",
          });
          return;
        }
        break;
      }
      case "pull_request_review_comment": {
        const commentId = (github.context.payload as any)?.comment?.id;
        if (commentId) {
          await octokit.rest.reactions.createForPullRequestReviewComment({
            owner,
            repo,
            comment_id: commentId,
            content: "eyes",
          });
          return;
        }
        break;
      }
      case "issues": {
        const issueNumber = github.context.issue.number;
        if (issueNumber) {
          await octokit.rest.reactions.createForIssue({
            owner,
            repo,
            issue_number: issueNumber,
            content: "eyes",
          });
          return;
        }
        break;
      }
      default: {
        // Fallback: try to react to the issue/PR if we have a number.
        const issueNumber = github.context.issue.number;
        if (issueNumber) {
          await octokit.rest.reactions.createForIssue({
            owner,
            repo,
            issue_number: issueNumber,
            content: "eyes",
          });
        }
      }
    }
  } catch (error) {
    // Do not fail the action if reaction creation fails – log and continue.
    console.warn(`Failed to add "eyes" reaction: ${error}`);
  }
}
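Aside (not part of this diff): a minimal sketch of how addEyesReaction is typically invoked, using the shared context exported by the env-context module below; the call site itself is hypothetical.

// Sketch only: acknowledge the triggering event with an "eyes" reaction.
import { defaultContext } from "./env-context";
import { addEyesReaction } from "./add-reaction";

await addEyesReaction(defaultContext);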
53 .github/actions/codex/src/comment.ts vendored
@@ -1,53 +0,0 @@
import type { EnvContext } from "./env-context";
import { runCodex } from "./run-codex";
import { postComment } from "./post-comment";
import { addEyesReaction } from "./add-reaction";

/**
 * Handle `issue_comment` and `pull_request_review_comment` events once we know
 * the action is supported.
 */
export async function onComment(ctx: EnvContext): Promise<void> {
  const triggerPhrase = ctx.tryGet("INPUT_TRIGGER_PHRASE");
  if (!triggerPhrase) {
    console.warn("Empty trigger phrase: skipping.");
    return;
  }

  // Attempt to get the body of the comment from the environment. Depending on
  // the event type, either `GITHUB_EVENT_COMMENT_BODY` (issue & PR comments) or
  // `GITHUB_EVENT_REVIEW_BODY` (PR reviews) is set.
  const commentBody =
    ctx.tryGetNonEmpty("GITHUB_EVENT_COMMENT_BODY") ??
    ctx.tryGetNonEmpty("GITHUB_EVENT_REVIEW_BODY") ??
    ctx.tryGetNonEmpty("GITHUB_EVENT_ISSUE_BODY");

  if (!commentBody) {
    console.warn("Comment body not found in environment: skipping.");
    return;
  }

  // Check if the trigger phrase is present.
  if (!commentBody.includes(triggerPhrase)) {
    console.log(
      `Trigger phrase '${triggerPhrase}' not found: nothing to do for this comment.`,
    );
    return;
  }

  // Derive the prompt by removing the trigger phrase. Remove only the first
  // occurrence to keep any additional occurrences that might be meaningful.
  const prompt = commentBody.replace(triggerPhrase, "").trim();

  if (prompt.length === 0) {
    console.warn("Prompt is empty after removing trigger phrase: skipping.");
    return;
  }

  // Provide immediate feedback that we are working on the request.
  await addEyesReaction(ctx);

  // Run Codex and post the response as a new comment.
  const lastMessage = await runCodex(prompt, ctx);
  await postComment(lastMessage, ctx);
}
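A concrete trace of the prompt derivation above (the trigger phrase "@codex" and the comment text are illustrative):

// Only the first occurrence of the phrase is stripped, then the remainder
// is trimmed, yielding "please investigate the flaky test".
const prompt = "@codex please investigate the flaky test"
  .replace("@codex", "")
  .trim();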
11 .github/actions/codex/src/config.ts vendored
@@ -1,11 +0,0 @@
import { readdirSync, statSync } from "fs";
import * as path from "path";

export interface Config {
  labels: Record<string, LabelConfig>;
}

export interface LabelConfig {
  /** Returns the prompt template. */
  getPromptTemplate(): string;
}
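Any object with a getPromptTemplate method satisfies LabelConfig; a minimal in-memory sketch (label name and template text are made up for illustration):

const exampleLabel: LabelConfig = {
  getPromptTemplate: () =>
    "Investigate this issue:\n\n{CODEX_ACTION_ISSUE_BODY}",
};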
44 .github/actions/codex/src/default-label-config.ts vendored
@@ -1,44 +0,0 @@
import type { Config } from "./config";

export function getDefaultConfig(): Config {
  return {
    labels: {
      "codex-investigate-issue": {
        getPromptTemplate: () =>
          `
Troubleshoot whether the reported issue is valid.

Provide a concise and respectful comment summarizing the findings.

### {CODEX_ACTION_ISSUE_TITLE}

{CODEX_ACTION_ISSUE_BODY}
`.trim(),
      },
      "codex-code-review": {
        getPromptTemplate: () =>
          `
Review this PR and respond with a very concise final message, formatted in Markdown.

There should be a summary of the changes (1-2 sentences) and a few bullet points if necessary.

Then provide the **review** (1-2 sentences plus bullet points, friendly tone).

{CODEX_ACTION_GITHUB_EVENT_PATH} contains the JSON that triggered this GitHub workflow. It contains the \`base\` and \`head\` refs that define this PR. Both refs are available locally.
`.trim(),
      },
      "codex-attempt-fix": {
        getPromptTemplate: () =>
          `
Attempt to solve the reported issue.

If a code change is required, create a new branch, commit the fix, and open a pull-request that resolves the problem.

### {CODEX_ACTION_ISSUE_TITLE}

{CODEX_ACTION_ISSUE_BODY}
`.trim(),
      },
    },
  };
}
116 .github/actions/codex/src/env-context.ts vendored
@@ -1,116 +0,0 @@
/*
 * Centralised access to environment variables used by the Codex GitHub
 * Action.
 *
 * To enable proper unit-testing we avoid reading from `process.env` at module
 * initialisation time. Instead an `EnvContext` object is created (usually from
 * the real `process.env`) and passed around explicitly or – where that is not
 * yet practical – imported as the shared `defaultContext` singleton. Tests can
 * create their own context backed by a stubbed map of variables without having
 * to mutate global state.
 */

import { fail } from "./fail";
import * as github from "@actions/github";

export interface EnvContext {
  /**
   * Return the value for a given environment variable or terminate the action
   * via `fail` if it is missing / empty.
   */
  get(name: string): string;

  /**
   * Attempt to read an environment variable. Returns the value when present;
   * otherwise returns undefined (does not call `fail`).
   */
  tryGet(name: string): string | undefined;

  /**
   * Attempt to read an environment variable. Returns a non-empty string value,
   * or null if the variable is unset or the empty string.
   */
  tryGetNonEmpty(name: string): string | null;

  /**
   * Return a memoised Octokit instance authenticated via the token resolved
   * from the provided argument (when defined) or the environment variables
   * `GITHUB_TOKEN`/`GH_TOKEN`.
   *
   * Subsequent calls return the same cached instance to avoid spawning
   * multiple REST clients within a single action run.
   */
  getOctokit(token?: string): ReturnType<typeof github.getOctokit>;
}

/** Internal helper – *not* exported. */
function _getRequiredEnv(
  name: string,
  env: Record<string, string | undefined>,
): string | undefined {
  const value = env[name];

  // Avoid leaking secrets into logs while still logging non-secret variables.
  if (name.endsWith("KEY") || name.endsWith("TOKEN")) {
    if (value) {
      console.log(`value for ${name} was found`);
    }
  } else {
    console.log(`${name}=${value}`);
  }

  return value;
}

/** Create a context backed by the supplied environment map (defaults to `process.env`). */
export function createEnvContext(
  env: Record<string, string | undefined> = process.env,
): EnvContext {
  // Lazily instantiated Octokit client – shared across this context.
  let cachedOctokit: ReturnType<typeof github.getOctokit> | null = null;

  return {
    get(name: string): string {
      const value = _getRequiredEnv(name, env);
      if (value == null) {
        fail(`Missing required environment variable: ${name}`);
      }
      return value;
    },

    tryGet(name: string): string | undefined {
      return _getRequiredEnv(name, env);
    },

    tryGetNonEmpty(name: string): string | null {
      const value = _getRequiredEnv(name, env);
      return value == null || value === "" ? null : value;
    },

    getOctokit(token?: string) {
      if (cachedOctokit) {
        return cachedOctokit;
      }

      // Determine the token to authenticate with.
      const githubToken = token ?? env["GITHUB_TOKEN"] ?? env["GH_TOKEN"];

      if (!githubToken) {
        fail(
          "Unable to locate a GitHub token. `github_token` should have been set on the action.",
        );
      }

      cachedOctokit = github.getOctokit(githubToken!);
      return cachedOctokit;
    },
  };
}

/**
 * Shared context built from the actual `process.env`. Production code that is
 * not yet refactored to receive a context explicitly may import and use this
 * singleton. Tests should avoid the singleton and instead pass their own
 * context to the functions they exercise.
 */
export const defaultContext: EnvContext = createEnvContext();
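A sketch of the test-friendly usage the module comment describes, backed by a stubbed env map (the variable values are illustrative):

// No global state is touched: the context reads only from the supplied map.
const testCtx = createEnvContext({
  GITHUB_WORKSPACE: "/tmp/workspace",
  GITHUB_TOKEN: "test-token",
});
testCtx.tryGetNonEmpty("GITHUB_WORKSPACE"); // "/tmp/workspace"
testCtx.tryGetNonEmpty("MISSING_VAR");      // null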
4 .github/actions/codex/src/fail.ts vendored
@@ -1,4 +0,0 @@
export function fail(message: string): never {
  console.error(message);
  process.exit(1);
}
149 .github/actions/codex/src/git-helpers.ts vendored
@@ -1,149 +0,0 @@
import { spawnSync } from "child_process";
import * as github from "@actions/github";
import { EnvContext } from "./env-context";

function runGit(args: string[], silent = true): string {
  console.info(`Running git ${args.join(" ")}`);
  const res = spawnSync("git", args, {
    encoding: "utf8",
    stdio: silent ? ["ignore", "pipe", "pipe"] : "inherit",
  });
  if (res.error) {
    throw res.error;
  }
  if (res.status !== 0) {
    // Include stderr in the error so callers can surface the failure reason.
    throw new Error(
      `git ${args.join(" ")} failed with code ${res.status}: ${res.stderr}`,
    );
  }
  return res.stdout.trim();
}

function stageAllChanges() {
  runGit(["add", "-A"]);
}

function hasStagedChanges(): boolean {
  const res = spawnSync("git", ["diff", "--cached", "--quiet", "--exit-code"]);
  return res.status !== 0;
}

function ensureOnBranch(
  issueNumber: number,
  protectedBranches: string[],
  suggestedSlug?: string,
): string {
  let branch = "";
  try {
    branch = runGit(["symbolic-ref", "--short", "-q", "HEAD"]);
  } catch {
    branch = "";
  }

  // If detached HEAD or on a protected branch, create a new branch.
  if (!branch || protectedBranches.includes(branch)) {
    if (suggestedSlug) {
      const safeSlug = suggestedSlug
        .toLowerCase()
        .replace(/[^\w\s-]/g, "")
        .trim()
        .replace(/\s+/g, "-");
      branch = `codex-fix-${issueNumber}-${safeSlug}`;
    } else {
      branch = `codex-fix-${issueNumber}-${Date.now()}`;
    }
    runGit(["switch", "-c", branch]);
  }
  return branch;
}

function commitIfNeeded(issueNumber: number) {
  if (hasStagedChanges()) {
    runGit([
      "commit",
      "-m",
      `fix: automated fix for #${issueNumber} via Codex`,
    ]);
  }
}

function pushBranch(branch: string, githubToken: string, ctx: EnvContext) {
  const repoSlug = ctx.get("GITHUB_REPOSITORY"); // owner/repo
  const remoteUrl = `https://x-access-token:${githubToken}@github.com/${repoSlug}.git`;

  runGit(["push", "--force-with-lease", "-u", remoteUrl, `HEAD:${branch}`]);
}

/**
 * If this returns a string, it is the URL of the created PR.
 */
export async function maybePublishPRForIssue(
  issueNumber: number,
  lastMessage: string,
  ctx: EnvContext,
): Promise<string | undefined> {
  // Only proceed if a GITHUB_TOKEN is available.
  const githubToken =
    ctx.tryGetNonEmpty("GITHUB_TOKEN") ?? ctx.tryGetNonEmpty("GH_TOKEN");
  if (!githubToken) {
    console.warn("No GitHub token - skipping PR creation.");
    return undefined;
  }

  // Print `git status` for debugging.
  runGit(["status"]);

  // Stage any remaining changes so they can be committed and pushed.
  stageAllChanges();

  const octokit = ctx.getOctokit(githubToken);

  const { owner, repo } = github.context.repo;

  // Determine the default branch to treat as protected.
  let defaultBranch = "main";
  try {
    const repoInfo = await octokit.rest.repos.get({ owner, repo });
    defaultBranch = repoInfo.data.default_branch ?? "main";
  } catch (e) {
    console.warn(`Failed to get default branch, assuming 'main': ${e}`);
  }

  const sanitizedMessage = lastMessage.replace(/\u2022/g, "-");
  const [summaryLine] = sanitizedMessage.split(/\r?\n/);
  const branch = ensureOnBranch(issueNumber, [defaultBranch, "master"], summaryLine);
  commitIfNeeded(issueNumber);
  pushBranch(branch, githubToken, ctx);

  // Try to find an existing PR for this branch.
  const headParam = `${owner}:${branch}`;
  const existing = await octokit.rest.pulls.list({
    owner,
    repo,
    head: headParam,
    state: "open",
  });
  if (existing.data.length > 0) {
    return existing.data[0].html_url;
  }

  // Determine the base branch (defaults to main).
  let baseBranch = "main";
  try {
    const repoInfo = await octokit.rest.repos.get({ owner, repo });
    baseBranch = repoInfo.data.default_branch ?? "main";
  } catch (e) {
    console.warn(`Failed to get default branch, assuming 'main': ${e}`);
  }

  const pr = await octokit.rest.pulls.create({
    owner,
    repo,
    title: summaryLine,
    head: branch,
    base: baseBranch,
    body: sanitizedMessage,
  });
  return pr.data.html_url;
}
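The branch-name slug logic in ensureOnBranch is easiest to see with a concrete trace (issue number and summary line are made up):

// "Fix: crash on empty input!" for issue #123 yields the branch
// "codex-fix-123-fix-crash-on-empty-input".
const safeSlug = "Fix: crash on empty input!"
  .toLowerCase()             // "fix: crash on empty input!"
  .replace(/[^\w\s-]/g, "")  // "fix crash on empty input"
  .trim()
  .replace(/\s+/g, "-");     // "fix-crash-on-empty-input"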
16 .github/actions/codex/src/git-user.ts vendored
@@ -1,16 +0,0 @@
export function setGitHubActionsUser(): void {
  const commands = [
    ["git", "config", "--global", "user.name", "github-actions[bot]"],
    [
      "git",
      "config",
      "--global",
      "user.email",
      "41898282+github-actions[bot]@users.noreply.github.com",
    ],
  ];

  for (const command of commands) {
    Bun.spawnSync(command);
  }
}
11 .github/actions/codex/src/github-workspace.ts vendored
@@ -1,11 +0,0 @@
import * as pathMod from "path";
import { EnvContext } from "./env-context";

export function resolveWorkspacePath(path: string, ctx: EnvContext): string {
  if (pathMod.isAbsolute(path)) {
    return path;
  } else {
    const workspace = ctx.get("GITHUB_WORKSPACE");
    return pathMod.join(workspace, path);
  }
}
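Behaviour sketch, assuming GITHUB_WORKSPACE is /home/runner/work/codex (a typical runner path, not taken from this diff):

resolveWorkspacePath(".codex", ctx);     // "/home/runner/work/codex/.codex"
resolveWorkspacePath("/etc/codex", ctx); // "/etc/codex" (absolute paths pass through)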
56 .github/actions/codex/src/load-config.ts vendored
@@ -1,56 +0,0 @@
import type { Config, LabelConfig } from "./config";

import { getDefaultConfig } from "./default-label-config";
import { readFileSync, readdirSync, statSync } from "fs";
import * as path from "path";

/**
 * Build an in-memory configuration object by scanning the repository for
 * Markdown templates located in `.github/codex/labels`.
 *
 * Each `*.md` file in that directory represents a label that can trigger the
 * Codex GitHub Action. The filename **without** the extension is interpreted
 * as the label name, e.g. `codex-review.md` ➜ `codex-review`.
 *
 * For every such label we derive the corresponding `doneLabel` by appending
 * the suffix `-completed`.
 */
export function loadConfig(workspace: string): Config {
  const labelsDir = path.join(workspace, ".github", "codex", "labels");

  let entries: string[];
  try {
    entries = readdirSync(labelsDir);
  } catch {
    // If the directory is missing, return the default configuration.
    return getDefaultConfig();
  }

  const labels: Record<string, LabelConfig> = {};

  for (const entry of entries) {
    if (!entry.endsWith(".md")) {
      continue;
    }

    const fullPath = path.join(labelsDir, entry);

    if (!statSync(fullPath).isFile()) {
      continue;
    }

    const labelName = entry.slice(0, -3); // trim ".md"

    labels[labelName] = new FileLabelConfig(fullPath);
  }

  return { labels };
}

class FileLabelConfig implements LabelConfig {
  constructor(private readonly promptPath: string) {}

  getPromptTemplate(): string {
    return readFileSync(this.promptPath, "utf8");
  }
}
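So, for example, a repository that ships .github/codex/labels/codex-review.md gets a "codex-review" label whose prompt is read from that file (the workspace path below is illustrative):

const config = loadConfig("/home/runner/work/codex");
// The file is read lazily, on first call to getPromptTemplate().
const template = config.labels["codex-review"]?.getPromptTemplate();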
80 .github/actions/codex/src/main.ts vendored
@@ -1,80 +0,0 @@
#!/usr/bin/env bun

import type { Config } from "./config";

import { defaultContext, EnvContext } from "./env-context";
import { loadConfig } from "./load-config";
import { setGitHubActionsUser } from "./git-user";
import { onLabeled } from "./process-label";
import { ensureBaseAndHeadCommitsForPRAreAvailable } from "./prompt-template";
import { performAdditionalValidation } from "./verify-inputs";
import { onComment } from "./comment";
import { onReview } from "./review";

async function main(): Promise<void> {
  const ctx: EnvContext = defaultContext;

  // Build the configuration dynamically by scanning `.github/codex/labels`.
  const GITHUB_WORKSPACE = ctx.get("GITHUB_WORKSPACE");
  const config: Config = loadConfig(GITHUB_WORKSPACE);

  // Optionally perform additional validation of prompt template files.
  performAdditionalValidation(config, GITHUB_WORKSPACE);

  const GITHUB_EVENT_NAME = ctx.get("GITHUB_EVENT_NAME");
  const GITHUB_EVENT_ACTION = ctx.get("GITHUB_EVENT_ACTION");

  // Set user.name and user.email to a bot before Codex runs, just in case it
  // creates a commit.
  setGitHubActionsUser();

  switch (GITHUB_EVENT_NAME) {
    case "issues": {
      if (GITHUB_EVENT_ACTION === "labeled") {
        await onLabeled(config, ctx);
        return;
      } else if (GITHUB_EVENT_ACTION === "opened") {
        await onComment(ctx);
        return;
      }
      break;
    }
    case "issue_comment": {
      if (GITHUB_EVENT_ACTION === "created") {
        await onComment(ctx);
        return;
      }
      break;
    }
    case "pull_request": {
      if (GITHUB_EVENT_ACTION === "labeled") {
        await ensureBaseAndHeadCommitsForPRAreAvailable(ctx);
        await onLabeled(config, ctx);
        return;
      }
      break;
    }
    case "pull_request_review": {
      await ensureBaseAndHeadCommitsForPRAreAvailable(ctx);
      if (GITHUB_EVENT_ACTION === "submitted") {
        await onReview(ctx);
        return;
      }
      break;
    }
    case "pull_request_review_comment": {
      await ensureBaseAndHeadCommitsForPRAreAvailable(ctx);
      if (GITHUB_EVENT_ACTION === "created") {
        await onComment(ctx);
        return;
      }
      break;
    }
  }

  console.warn(
    `Unsupported action '${GITHUB_EVENT_ACTION}' for event '${GITHUB_EVENT_NAME}'.`,
  );
}

main();
62 .github/actions/codex/src/post-comment.ts vendored
@@ -1,62 +0,0 @@
import { fail } from "./fail";
import * as github from "@actions/github";
import { EnvContext } from "./env-context";

/**
 * Post a comment to the issue / pull request currently in scope.
 *
 * Provide the environment context so that token lookup (inside getOctokit)
 * does not rely on global state.
 */
export async function postComment(
  commentBody: string,
  ctx: EnvContext,
): Promise<void> {
  // Append a footer with a link back to the workflow run, if available.
  const footer = buildWorkflowRunFooter(ctx);
  const bodyWithFooter = footer ? `${commentBody}${footer}` : commentBody;

  const octokit = ctx.getOctokit();
  console.info("Got Octokit instance for posting comment");
  const { owner, repo } = github.context.repo;
  const issueNumber = github.context.issue.number;

  if (!issueNumber) {
    console.warn(
      "No issue or pull_request number found in GitHub context; skipping comment creation.",
    );
    return;
  }

  try {
    console.info("Calling octokit.rest.issues.createComment()");
    await octokit.rest.issues.createComment({
      owner,
      repo,
      issue_number: issueNumber,
      body: bodyWithFooter,
    });
  } catch (error) {
    fail(`Failed to create comment via GitHub API: ${error}`);
  }
}

/**
 * Helper to build a Markdown fragment linking back to the workflow run that
 * generated the current comment. Returns `undefined` if required environment
 * variables are missing – e.g. when running outside of GitHub Actions – so we
 * can gracefully skip the footer in those cases.
 */
function buildWorkflowRunFooter(ctx: EnvContext): string | undefined {
  const serverUrl =
    ctx.tryGetNonEmpty("GITHUB_SERVER_URL") ?? "https://github.com";
  const repository = ctx.tryGetNonEmpty("GITHUB_REPOSITORY");
  const runId = ctx.tryGetNonEmpty("GITHUB_RUN_ID");

  if (!repository || !runId) {
    return undefined;
  }

  const url = `${serverUrl}/${repository}/actions/runs/${runId}`;
  return `\n\n---\n*[_View workflow run_](${url})*`;
}
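For instance, with GITHUB_REPOSITORY=octo-org/octo-repo and GITHUB_RUN_ID=42 (hypothetical values), the appended footer would be:

const url = "https://github.com/octo-org/octo-repo/actions/runs/42";
const footer = `\n\n---\n*[_View workflow run_](${url})*`;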
226 .github/actions/codex/src/process-label.ts vendored
@@ -1,226 +0,0 @@
import { fail } from "./fail";
import { EnvContext } from "./env-context";
import { renderPromptTemplate } from "./prompt-template";

import { postComment } from "./post-comment";
import { runCodex } from "./run-codex";

import * as github from "@actions/github";
import { Config, LabelConfig } from "./config";
import { maybePublishPRForIssue } from "./git-helpers";

export async function onLabeled(
  config: Config,
  ctx: EnvContext,
): Promise<void> {
  const GITHUB_EVENT_LABEL_NAME = ctx.get("GITHUB_EVENT_LABEL_NAME");
  const labelConfig = config.labels[GITHUB_EVENT_LABEL_NAME] as
    | LabelConfig
    | undefined;
  if (!labelConfig) {
    fail(
      `Label \`${GITHUB_EVENT_LABEL_NAME}\` not found in config: ${JSON.stringify(config)}`,
    );
  }

  await processLabelConfig(ctx, GITHUB_EVENT_LABEL_NAME, labelConfig);
}

/**
 * Wrapper that handles `-in-progress` and `-completed` semantics around the
 * core lint/fix/review processing. It will:
 *
 * - Skip execution if the `-in-progress` or `-completed` label is already present.
 * - Mark the PR/issue as `-in-progress`.
 * - After successful execution, mark the PR/issue as `-completed`.
 */
async function processLabelConfig(
  ctx: EnvContext,
  label: string,
  labelConfig: LabelConfig,
): Promise<void> {
  const octokit = ctx.getOctokit();
  const { owner, repo, issueNumber, labelNames } =
    await getCurrentLabels(octokit);

  const inProgressLabel = `${label}-in-progress`;
  const completedLabel = `${label}-completed`;
  for (const markerLabel of [inProgressLabel, completedLabel]) {
    if (labelNames.includes(markerLabel)) {
      console.log(
        `Label '${markerLabel}' already present on issue/PR #${issueNumber}. Skipping Codex action.`,
      );

      // Clean up: remove the triggering label to avoid confusion and re-runs.
      await addAndRemoveLabels(octokit, {
        owner,
        repo,
        issueNumber,
        remove: markerLabel,
      });

      return;
    }
  }

  // Mark the PR/issue as in progress.
  await addAndRemoveLabels(octokit, {
    owner,
    repo,
    issueNumber,
    add: inProgressLabel,
    remove: label,
  });

  // Run the core Codex processing.
  await processLabel(ctx, label, labelConfig);

  // Mark the PR/issue as completed.
  await addAndRemoveLabels(octokit, {
    owner,
    repo,
    issueNumber,
    add: completedLabel,
    remove: inProgressLabel,
  });
}

async function processLabel(
  ctx: EnvContext,
  label: string,
  labelConfig: LabelConfig,
): Promise<void> {
  const template = labelConfig.getPromptTemplate();

  // If this is a review label, prepend explicit PR-diff scoping guidance to
  // reduce out-of-scope feedback. Do this before rendering so placeholders in
  // the guidance (e.g., {CODEX_ACTION_GITHUB_EVENT_PATH}) are substituted.
  const isReview = label.toLowerCase().includes("review");
  const reviewScopeGuidance = `
PR Diff Scope
- Only review changes between the PR's merge-base and head; do not comment on commits or files outside this range.
- Derive the base/head SHAs from the event JSON at {CODEX_ACTION_GITHUB_EVENT_PATH}, then compute and use the PR diff for all analysis and comments.

Commands to determine scope
- Resolve SHAs:
  - BASE_SHA=$(jq -r '.pull_request.base.sha // .pull_request.base.ref' "{CODEX_ACTION_GITHUB_EVENT_PATH}")
  - HEAD_SHA=$(jq -r '.pull_request.head.sha // .pull_request.head.ref' "{CODEX_ACTION_GITHUB_EVENT_PATH}")
  - BASE_SHA=$(git rev-parse "$BASE_SHA")
  - HEAD_SHA=$(git rev-parse "$HEAD_SHA")
- Prefer triple-dot (merge-base) semantics for PR diffs:
  - Changed commits: git log --oneline "$BASE_SHA...$HEAD_SHA"
  - Changed files: git diff --name-status "$BASE_SHA...$HEAD_SHA"
  - Review hunks: git diff -U0 "$BASE_SHA...$HEAD_SHA"

Review rules
- Anchor every comment to a file and hunk present in git diff "$BASE_SHA...$HEAD_SHA".
- If you mention context outside the diff, label it as "Follow-up (outside this PR scope)" and keep it brief (<=2 bullets).
- Do not critique commits or files not reachable in the PR range (merge-base(base, head) → head).
`.trim();

  const effectiveTemplate = isReview
    ? `${reviewScopeGuidance}\n\n${template}`
    : template;

  const populatedTemplate = await renderPromptTemplate(effectiveTemplate, ctx);

  // Always run Codex and post the resulting message as a comment.
  let commentBody = await runCodex(populatedTemplate, ctx);

  // Current heuristic: only try to create a PR if "attempt" or "fix" is in the
  // label name. (Yes, we plan to evolve this.)
  if (label.indexOf("fix") !== -1 || label.indexOf("attempt") !== -1) {
    console.info(`label ${label} indicates we should attempt to create a PR`);
    const prUrl = await maybeFixIssue(ctx, commentBody);
    if (prUrl) {
      commentBody += `\n\n---\nOpened pull request: ${prUrl}`;
    }
  } else {
    console.info(
      `label ${label} does not indicate we should attempt to create a PR`,
    );
  }

  await postComment(commentBody, ctx);
}

async function maybeFixIssue(
  ctx: EnvContext,
  lastMessage: string,
): Promise<string | undefined> {
  // Attempt to create a PR out of any changes Codex produced.
  const issueNumber = github.context.issue.number!; // exists for issues triggering this path
  try {
    return await maybePublishPRForIssue(issueNumber, lastMessage, ctx);
  } catch (e) {
    console.warn(`Failed to publish PR: ${e}`);
  }
}

async function getCurrentLabels(
  octokit: ReturnType<typeof github.getOctokit>,
): Promise<{
  owner: string;
  repo: string;
  issueNumber: number;
  labelNames: Array<string>;
}> {
  const { owner, repo } = github.context.repo;
  const issueNumber = github.context.issue.number;

  if (!issueNumber) {
    fail("No issue or pull_request number found in GitHub context.");
  }

  const { data: issueData } = await octokit.rest.issues.get({
    owner,
    repo,
    issue_number: issueNumber,
  });

  const labelNames =
    issueData.labels?.map((label: any) =>
      typeof label === "string" ? label : label.name,
    ) ?? [];

  return { owner, repo, issueNumber, labelNames };
}

async function addAndRemoveLabels(
  octokit: ReturnType<typeof github.getOctokit>,
  opts: {
    owner: string;
    repo: string;
    issueNumber: number;
    add?: string;
    remove?: string;
  },
): Promise<void> {
  const { owner, repo, issueNumber, add, remove } = opts;

  if (add) {
    try {
      await octokit.rest.issues.addLabels({
        owner,
        repo,
        issue_number: issueNumber,
        labels: [add],
      });
    } catch (error) {
      console.warn(`Failed to add label '${add}': ${error}`);
    }
  }

  if (remove) {
    try {
      await octokit.rest.issues.removeLabel({
        owner,
        repo,
        issue_number: issueNumber,
        name: remove,
      });
    } catch (error) {
      console.warn(`Failed to remove label '${remove}': ${error}`);
    }
  }
}
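The resulting label lifecycle, traced for a hypothetical "codex-attempt-fix" label:

const label = "codex-attempt-fix";
// 1. Trigger:  a user applies "codex-attempt-fix"; the action removes it
//    and adds `${label}-in-progress`.
// 2. Running:  "codex-attempt-fix-in-progress" guards against re-entry.
// 3. Finished: "-in-progress" is swapped for `${label}-completed`.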
284 .github/actions/codex/src/prompt-template.ts vendored
@@ -1,284 +0,0 @@
/*
 * Utilities to render Codex prompt templates.
 *
 * A template is a Markdown (or plain-text) file that may contain one or more
 * placeholders of the form `{CODEX_ACTION_<NAME>}`. At runtime these
 * placeholders are substituted with dynamically generated content. Each
 * placeholder is resolved **exactly once** even if it appears multiple times
 * in the same template.
 */

import { readFile } from "fs/promises";

import { EnvContext } from "./env-context";

// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------

/**
 * Lazily caches parsed `$GITHUB_EVENT_PATH` contents keyed by the file path so
 * we only hit the filesystem once per unique event payload.
 */
const githubEventDataCache: Map<string, Promise<any>> = new Map();

function getGitHubEventData(ctx: EnvContext): Promise<any> {
  const eventPath = ctx.get("GITHUB_EVENT_PATH");
  let cached = githubEventDataCache.get(eventPath);
  if (!cached) {
    cached = readFile(eventPath, "utf8").then((raw) => JSON.parse(raw));
    githubEventDataCache.set(eventPath, cached);
  }
  return cached;
}

async function runCommand(args: Array<string>): Promise<string> {
  const result = Bun.spawnSync(args, {
    stdout: "pipe",
    stderr: "pipe",
  });

  if (result.success) {
    return result.stdout.toString();
  }

  console.error(`Error running ${JSON.stringify(args)}: ${result.stderr}`);
  return "";
}

// ---------------------------------------------------------------------------
// Public API
// ---------------------------------------------------------------------------

// Regex that captures the variable name without the surrounding { } braces.
const VAR_REGEX = /\{(CODEX_ACTION_[A-Z0-9_]+)\}/g;

// Cache individual placeholder values so each one is resolved at most once per
// process even if many templates reference it.
const placeholderCache: Map<string, Promise<string>> = new Map();

/**
 * Parse a template string, resolve all placeholders and return the rendered
 * result.
 */
export async function renderPromptTemplate(
  template: string,
  ctx: EnvContext,
): Promise<string> {
  // ---------------------------------------------------------------------
  // 1) Gather all *unique* placeholders present in the template.
  // ---------------------------------------------------------------------
  const variables = new Set<string>();
  for (const match of template.matchAll(VAR_REGEX)) {
    variables.add(match[1]);
  }

  // ---------------------------------------------------------------------
  // 2) Kick off (or reuse) async resolution for each variable.
  // ---------------------------------------------------------------------
  for (const variable of variables) {
    if (!placeholderCache.has(variable)) {
      placeholderCache.set(variable, resolveVariable(variable, ctx));
    }
  }

  // ---------------------------------------------------------------------
  // 3) Await completion so we can perform a simple synchronous replace below.
  // ---------------------------------------------------------------------
  const resolvedEntries: [string, string][] = [];
  for (const [key, promise] of placeholderCache.entries()) {
    resolvedEntries.push([key, await promise]);
  }
  const resolvedMap = new Map<string, string>(resolvedEntries);

  // ---------------------------------------------------------------------
  // 4) Replace each occurrence. We use replace with a callback to ensure
  //    correct substitution even if variable names overlap (they shouldn't,
  //    but better safe than sorry).
  // ---------------------------------------------------------------------
  return template.replace(VAR_REGEX, (_, varName: string) => {
    return resolvedMap.get(varName) ?? "";
  });
}

export async function ensureBaseAndHeadCommitsForPRAreAvailable(
  ctx: EnvContext,
): Promise<{ baseSha: string; headSha: string } | null> {
  const prShas = await getPrShas(ctx);
  if (prShas == null) {
    console.warn("Unable to resolve PR branches");
    return null;
  }

  const event = await getGitHubEventData(ctx);
  const pr = event.pull_request;
  if (!pr) {
    console.warn("event.pull_request is not defined - unexpected");
    return null;
  }

  const workspace = ctx.get("GITHUB_WORKSPACE");

  // Refs (branch names)
  const baseRef: string | undefined = pr.base?.ref;
  const headRef: string | undefined = pr.head?.ref;

  // Clone URLs
  const baseRemoteUrl: string | undefined = pr.base?.repo?.clone_url;
  const headRemoteUrl: string | undefined = pr.head?.repo?.clone_url;

  if (!baseRef || !headRef || !baseRemoteUrl || !headRemoteUrl) {
    console.warn(
      "Missing PR ref or remote URL information - cannot fetch commits",
    );
    return null;
  }

  // Ensure we have the base branch.
  await runCommand([
    "git",
    "-C",
    workspace,
    "fetch",
    "--no-tags",
    "origin",
    baseRef,
  ]);

  // Ensure we have the head branch.
  if (headRemoteUrl === baseRemoteUrl) {
    // Same repository – the commit is available from `origin`.
    await runCommand([
      "git",
      "-C",
      workspace,
      "fetch",
      "--no-tags",
      "origin",
      headRef,
    ]);
  } else {
    // Fork – make sure a `pr` remote exists that points at the fork. Attempting
    // to add a remote that already exists causes git to error, so we swallow
    // any non-zero exit codes from that specific command.
    await runCommand([
      "git",
      "-C",
      workspace,
      "remote",
      "add",
      "pr",
      headRemoteUrl,
    ]);

    // Whether adding succeeded or the remote already existed, attempt to fetch
    // the head ref from the `pr` remote.
    await runCommand([
      "git",
      "-C",
      workspace,
      "fetch",
      "--no-tags",
      "pr",
      headRef,
    ]);
  }

  return prShas;
}

// ---------------------------------------------------------------------------
// Internal helpers – still exported for use by other modules.
// ---------------------------------------------------------------------------

export async function resolvePrDiff(ctx: EnvContext): Promise<string> {
  const prShas = await ensureBaseAndHeadCommitsForPRAreAvailable(ctx);
  if (prShas == null) {
    console.warn("Unable to resolve PR branches");
    return "";
  }

  const workspace = ctx.get("GITHUB_WORKSPACE");
  const { baseSha, headSha } = prShas;
  return runCommand([
    "git",
    "-C",
    workspace,
    "diff",
    "--color=never",
    `${baseSha}..${headSha}`,
  ]);
}

// ---------------------------------------------------------------------------
// Placeholder resolution
// ---------------------------------------------------------------------------

async function resolveVariable(name: string, ctx: EnvContext): Promise<string> {
  switch (name) {
    case "CODEX_ACTION_ISSUE_TITLE": {
      const event = await getGitHubEventData(ctx);
      const issue = event.issue ?? event.pull_request;
      return issue?.title ?? "";
    }

    case "CODEX_ACTION_ISSUE_BODY": {
      const event = await getGitHubEventData(ctx);
      const issue = event.issue ?? event.pull_request;
      return issue?.body ?? "";
    }

    case "CODEX_ACTION_GITHUB_EVENT_PATH": {
      return ctx.get("GITHUB_EVENT_PATH");
    }

    case "CODEX_ACTION_BASE_REF": {
      const event = await getGitHubEventData(ctx);
      return event?.pull_request?.base?.ref ?? "";
    }

    case "CODEX_ACTION_HEAD_REF": {
      const event = await getGitHubEventData(ctx);
      return event?.pull_request?.head?.ref ?? "";
    }

    case "CODEX_ACTION_PR_DIFF": {
      return resolvePrDiff(ctx);
    }

    // -------------------------------------------------------------------
    // Add new template variables here.
    // -------------------------------------------------------------------

    default: {
      // Unknown variable – leave it blank to avoid leaking placeholders to the
      // final prompt. The alternative would be to `fail()` here, but silently
      // ignoring unknown placeholders is more forgiving and better matches the
      // behaviour of typical template engines.
      console.warn(`Unknown template variable: ${name}`);
      return "";
    }
  }
}

async function getPrShas(
  ctx: EnvContext,
): Promise<{ baseSha: string; headSha: string } | null> {
  const event = await getGitHubEventData(ctx);
  const pr = event.pull_request;
  if (!pr) {
    console.warn("event.pull_request is not defined");
    return null;
  }

  // Prefer explicit SHAs if available to avoid relying on local branch names.
  const baseSha: string | undefined = pr.base?.sha;
  const headSha: string | undefined = pr.head?.sha;

  if (!baseSha || !headSha) {
    console.warn("one of base or head is not defined on event.pull_request");
    return null;
  }

  return { baseSha, headSha };
}
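A rendering sketch (the template text is made up; ctx is any EnvContext):

// Known placeholders are substituted; unknown ones render as "" with a
// warning, so stray braces never leak into the final prompt.
const rendered = await renderPromptTemplate(
  "### {CODEX_ACTION_ISSUE_TITLE}\n\n{CODEX_ACTION_ISSUE_BODY}",
  ctx,
);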
42 .github/actions/codex/src/review.ts vendored
@@ -1,42 +0,0 @@
import type { EnvContext } from "./env-context";
import { runCodex } from "./run-codex";
import { postComment } from "./post-comment";
import { addEyesReaction } from "./add-reaction";

/**
 * Handle `pull_request_review` events. We treat the review body the same way
 * as a normal comment.
 */
export async function onReview(ctx: EnvContext): Promise<void> {
  const triggerPhrase = ctx.tryGet("INPUT_TRIGGER_PHRASE");
  if (!triggerPhrase) {
    console.warn("Empty trigger phrase: skipping.");
    return;
  }

  const reviewBody = ctx.tryGet("GITHUB_EVENT_REVIEW_BODY");

  if (!reviewBody) {
    console.warn("Review body not found in environment: skipping.");
    return;
  }

  if (!reviewBody.includes(triggerPhrase)) {
    console.log(
      `Trigger phrase '${triggerPhrase}' not found: nothing to do for this review.`,
    );
    return;
  }

  const prompt = reviewBody.replace(triggerPhrase, "").trim();

  if (prompt.length === 0) {
    console.warn("Prompt is empty after removing trigger phrase: skipping.");
    return;
  }

  await addEyesReaction(ctx);

  const lastMessage = await runCodex(prompt, ctx);
  await postComment(lastMessage, ctx);
}
58 .github/actions/codex/src/run-codex.ts vendored
@@ -1,58 +0,0 @@
import { fail } from "./fail";
import { EnvContext } from "./env-context";
import { tmpdir } from "os";
import { join } from "node:path";
import { readFile, mkdtemp } from "fs/promises";
import { resolveWorkspacePath } from "./github-workspace";

/**
 * Runs the Codex CLI with the provided prompt and returns the output written
 * to the "last message" file.
 */
export async function runCodex(
  prompt: string,
  ctx: EnvContext,
): Promise<string> {
  const OPENAI_API_KEY = ctx.get("OPENAI_API_KEY");

  const tempDirPath = await mkdtemp(join(tmpdir(), "codex-"));
  const lastMessageOutput = join(tempDirPath, "codex-prompt.md");

  // Use the unified CLI and its `exec` subcommand instead of the old
  // standalone `codex-exec` binary.
  const args = ["/usr/local/bin/codex", "exec"];

  const inputCodexArgs = ctx.tryGet("INPUT_CODEX_ARGS")?.trim();
  if (inputCodexArgs) {
    args.push(...inputCodexArgs.split(/\s+/));
  }

  args.push("--output-last-message", lastMessageOutput, prompt);

  const env: Record<string, string> = { ...process.env, OPENAI_API_KEY };
  const INPUT_CODEX_HOME = ctx.tryGet("INPUT_CODEX_HOME");
  if (INPUT_CODEX_HOME) {
    env.CODEX_HOME = resolveWorkspacePath(INPUT_CODEX_HOME, ctx);
  }

  console.log(`Running Codex: ${JSON.stringify(args)}`);
  const result = Bun.spawnSync(args, {
    stdout: "inherit",
    stderr: "inherit",
    env,
  });

  if (!result.success) {
    fail(`Codex failed: see above for details.`);
  }

  // Read the output generated by Codex.
  let lastMessage: string;
  try {
    lastMessage = await readFile(lastMessageOutput, "utf8");
  } catch (err) {
    fail(`Failed to read Codex output at '${lastMessageOutput}': ${err}`);
  }

  return lastMessage;
}
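One caveat worth noting: INPUT_CODEX_ARGS is split on whitespace, so quoted arguments containing spaces are not supported. For example (the flag values are illustrative):

"--model o3 --full-auto".split(/\s+/);
// => ["--model", "o3", "--full-auto"] — each token becomes its own argv entry.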
33 .github/actions/codex/src/verify-inputs.ts vendored
@@ -1,33 +0,0 @@
// Validate the inputs passed to the composite action.
// The script currently ensures that the provided configuration file exists and
// matches the expected schema.

import type { Config } from "./config";

import { existsSync } from "fs";
import * as path from "path";
import { fail } from "./fail";

export function performAdditionalValidation(config: Config, workspace: string) {
  // Additional validation: ensure referenced prompt files exist and are Markdown.
  for (const [label, details] of Object.entries(config.labels)) {
    // Determine which prompt key is present (the schema guarantees exactly one).
    const promptPathStr =
      (details as any).prompt ?? (details as any).promptPath;

    if (promptPathStr) {
      const promptPath = path.isAbsolute(promptPathStr)
        ? promptPathStr
        : path.join(workspace, promptPathStr);

      if (!existsSync(promptPath)) {
        fail(`Prompt file for label '${label}' not found: ${promptPath}`);
      }
      if (!promptPath.endsWith(".md")) {
        fail(
          `Prompt file for label '${label}' must be a .md file (got ${promptPathStr}).`,
        );
      }
    }
  }
}
15 .github/actions/codex/tsconfig.json vendored
@@ -1,15 +0,0 @@
{
  "compilerOptions": {
    "lib": ["ESNext"],
    "target": "ESNext",
    "module": "ESNext",
    "moduleDetection": "force",
    "moduleResolution": "bundler",

    "noEmit": true,
    "strict": true,
    "skipLibCheck": true
  },

  "include": ["src"]
}
44 .github/actions/linux-code-sign/action.yml vendored Normal file
@@ -0,0 +1,44 @@
name: linux-code-sign
description: Sign Linux artifacts with cosign.
inputs:
  target:
    description: Target triple for the artifacts to sign.
    required: true
  artifacts-dir:
    description: Absolute path to the directory containing built binaries to sign.
    required: true

runs:
  using: composite
  steps:
    - name: Install cosign
      uses: sigstore/cosign-installer@v3.7.0

    - name: Cosign Linux artifacts
      shell: bash
      env:
        COSIGN_EXPERIMENTAL: "1"
        COSIGN_YES: "true"
        COSIGN_OIDC_CLIENT_ID: "sigstore"
        COSIGN_OIDC_ISSUER: "https://oauth2.sigstore.dev/auth"
      run: |
        set -euo pipefail

        dest="${{ inputs.artifacts-dir }}"
        if [[ ! -d "$dest" ]]; then
          echo "Destination $dest does not exist"
          exit 1
        fi

        for binary in codex codex-responses-api-proxy; do
          artifact="${dest}/${binary}"
          if [[ ! -f "$artifact" ]]; then
            echo "Binary $artifact not found"
            exit 1
          fi

          cosign sign-blob \
            --yes \
            --bundle "${artifact}.sigstore" \
            "$artifact"
        done
|
||||
246
.github/actions/macos-code-sign/action.yml
vendored
Normal file
246
.github/actions/macos-code-sign/action.yml
vendored
Normal file
@@ -0,0 +1,246 @@
|
||||
name: macos-code-sign
|
||||
description: Configure, sign, notarize, and clean up macOS code signing artifacts.
|
||||
inputs:
|
||||
target:
|
||||
description: Rust compilation target triple (e.g. aarch64-apple-darwin).
|
||||
required: true
|
||||
sign-binaries:
|
||||
description: Whether to sign and notarize the macOS binaries.
|
||||
required: false
|
||||
default: "true"
|
||||
sign-dmg:
|
||||
description: Whether to sign and notarize the macOS dmg.
|
||||
required: false
|
||||
default: "true"
|
||||
apple-certificate:
|
||||
description: Base64-encoded Apple signing certificate (P12).
|
||||
required: true
|
||||
apple-certificate-password:
|
||||
description: Password for the signing certificate.
|
||||
required: true
|
||||
apple-notarization-key-p8:
|
||||
description: Base64-encoded Apple notarization key (P8).
|
||||
required: true
|
||||
apple-notarization-key-id:
|
||||
description: Apple notarization key ID.
|
||||
required: true
|
||||
apple-notarization-issuer-id:
|
||||
description: Apple notarization issuer ID.
|
||||
required: true
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Configure Apple code signing
|
||||
shell: bash
|
||||
env:
|
||||
KEYCHAIN_PASSWORD: actions
|
||||
APPLE_CERTIFICATE: ${{ inputs.apple-certificate }}
|
||||
APPLE_CERTIFICATE_PASSWORD: ${{ inputs.apple-certificate-password }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
if [[ -z "${APPLE_CERTIFICATE:-}" ]]; then
|
||||
echo "APPLE_CERTIFICATE is required for macOS signing"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ -z "${APPLE_CERTIFICATE_PASSWORD:-}" ]]; then
|
||||
echo "APPLE_CERTIFICATE_PASSWORD is required for macOS signing"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cert_path="${RUNNER_TEMP}/apple_signing_certificate.p12"
|
||||
echo "$APPLE_CERTIFICATE" | base64 -d > "$cert_path"
|
||||
|
||||
keychain_path="${RUNNER_TEMP}/codex-signing.keychain-db"
|
||||
security create-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
|
||||
security set-keychain-settings -lut 21600 "$keychain_path"
|
||||
security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
|
||||
|
||||
keychain_args=()
|
||||
cleanup_keychain() {
|
||||
if ((${#keychain_args[@]} > 0)); then
|
||||
security list-keychains -s "${keychain_args[@]}" || true
|
||||
security default-keychain -s "${keychain_args[0]}" || true
|
||||
else
|
||||
security list-keychains -s || true
|
||||
fi
|
||||
if [[ -f "$keychain_path" ]]; then
|
||||
security delete-keychain "$keychain_path" || true
|
||||
fi
|
||||
}
|
||||
|
||||
while IFS= read -r keychain; do
|
||||
[[ -n "$keychain" ]] && keychain_args+=("$keychain")
|
||||
done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
|
||||
|
||||
if ((${#keychain_args[@]} > 0)); then
|
||||
security list-keychains -s "$keychain_path" "${keychain_args[@]}"
|
||||
else
|
||||
security list-keychains -s "$keychain_path"
|
||||
fi
|
||||
|
||||
security default-keychain -s "$keychain_path"
|
||||
        security import "$cert_path" -k "$keychain_path" -P "$APPLE_CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
        security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" "$keychain_path" > /dev/null

        codesign_hashes=()
        while IFS= read -r hash; do
          [[ -n "$hash" ]] && codesign_hashes+=("$hash")
        done < <(security find-identity -v -p codesigning "$keychain_path" \
          | sed -n 's/.*\([0-9A-F]\{40\}\).*/\1/p' \
          | sort -u)

        if ((${#codesign_hashes[@]} == 0)); then
          echo "No signing identities found in $keychain_path"
          cleanup_keychain
          rm -f "$cert_path"
          exit 1
        fi

        if ((${#codesign_hashes[@]} > 1)); then
          echo "Multiple signing identities found in $keychain_path:"
          printf '  %s\n' "${codesign_hashes[@]}"
          cleanup_keychain
          rm -f "$cert_path"
          exit 1
        fi

        APPLE_CODESIGN_IDENTITY="${codesign_hashes[0]}"

        rm -f "$cert_path"

        echo "APPLE_CODESIGN_IDENTITY=$APPLE_CODESIGN_IDENTITY" >> "$GITHUB_ENV"
        echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
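        # ::add-mask:: keeps the identity hash out of any later workflow log output.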
echo "::add-mask::$APPLE_CODESIGN_IDENTITY"
|
||||
|
||||
- name: Sign macOS binaries
|
||||
if: ${{ inputs.sign-binaries == 'true' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
if [[ -z "${APPLE_CODESIGN_IDENTITY:-}" ]]; then
|
||||
echo "APPLE_CODESIGN_IDENTITY is required for macOS signing"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
keychain_args=()
|
||||
if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
|
||||
keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
|
||||
fi
|
||||
|
||||
for binary in codex codex-responses-api-proxy; do
|
||||
path="codex-rs/target/${{ inputs.target }}/release/${binary}"
|
||||
codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
|
||||
done
|
||||
|
||||
- name: Notarize macOS binaries
|
||||
if: ${{ inputs.sign-binaries == 'true' }}
|
||||
shell: bash
|
||||
env:
|
||||
APPLE_NOTARIZATION_KEY_P8: ${{ inputs.apple-notarization-key-p8 }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ inputs.apple-notarization-key-id }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ inputs.apple-notarization-issuer-id }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
for var in APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
|
||||
if [[ -z "${!var:-}" ]]; then
|
||||
echo "$var is required for notarization"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
|
||||
echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
|
||||
cleanup_notary() {
|
||||
rm -f "$notary_key_path"
|
||||
}
|
||||
trap cleanup_notary EXIT
|
||||
|
||||
source "$GITHUB_ACTION_PATH/notary_helpers.sh"
|
||||
|
||||
notarize_binary() {
|
||||
local binary="$1"
|
||||
local source_path="codex-rs/target/${{ inputs.target }}/release/${binary}"
|
||||
local archive_path="${RUNNER_TEMP}/${binary}.zip"
|
||||
|
||||
if [[ ! -f "$source_path" ]]; then
|
||||
echo "Binary $source_path not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
rm -f "$archive_path"
|
||||
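          # notarytool accepts only zip, dmg, or pkg payloads, so wrap the bare binary in a zip.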
          ditto -c -k --keepParent "$source_path" "$archive_path"

          notarize_submission "$binary" "$archive_path" "$notary_key_path"
        }

        notarize_binary "codex"
        notarize_binary "codex-responses-api-proxy"

    - name: Sign and notarize macOS dmg
      if: ${{ inputs.sign-dmg == 'true' }}
      shell: bash
      env:
        APPLE_NOTARIZATION_KEY_P8: ${{ inputs.apple-notarization-key-p8 }}
        APPLE_NOTARIZATION_KEY_ID: ${{ inputs.apple-notarization-key-id }}
        APPLE_NOTARIZATION_ISSUER_ID: ${{ inputs.apple-notarization-issuer-id }}
      run: |
        set -euo pipefail

        for var in APPLE_CODESIGN_IDENTITY APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
          if [[ -z "${!var:-}" ]]; then
            echo "$var is required"
            exit 1
          fi
        done

        notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
        echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
        cleanup_notary() {
          rm -f "$notary_key_path"
        }
        trap cleanup_notary EXIT

        source "$GITHUB_ACTION_PATH/notary_helpers.sh"

        dmg_path="codex-rs/target/${{ inputs.target }}/release/codex-${{ inputs.target }}.dmg"

        if [[ ! -f "$dmg_path" ]]; then
          echo "dmg $dmg_path not found"
          exit 1
        fi

        keychain_args=()
        if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
          keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
        fi

        codesign --force --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$dmg_path"
        notarize_submission "codex-${{ inputs.target }}.dmg" "$dmg_path" "$notary_key_path"
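        # Stapling attaches the notarization ticket to the dmg so Gatekeeper can verify it offline.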
        xcrun stapler staple "$dmg_path"

    - name: Remove signing keychain
      if: ${{ always() }}
      shell: bash
      env:
        APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
      run: |
        set -euo pipefail
        if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" ]]; then
          keychain_args=()
          while IFS= read -r keychain; do
            [[ "$keychain" == "$APPLE_CODESIGN_KEYCHAIN" ]] && continue
            [[ -n "$keychain" ]] && keychain_args+=("$keychain")
          done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
          if ((${#keychain_args[@]} > 0)); then
            security list-keychains -s "${keychain_args[@]}"
            security default-keychain -s "${keychain_args[0]}"
          fi

          if [[ -f "$APPLE_CODESIGN_KEYCHAIN" ]]; then
            security delete-keychain "$APPLE_CODESIGN_KEYCHAIN"
          fi
        fi
.github/actions/macos-code-sign/notary_helpers.sh (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
#!/usr/bin/env bash

notarize_submission() {
  local label="$1"
  local path="$2"
  local notary_key_path="$3"

  if [[ -z "${APPLE_NOTARIZATION_KEY_ID:-}" || -z "${APPLE_NOTARIZATION_ISSUER_ID:-}" ]]; then
    echo "APPLE_NOTARIZATION_KEY_ID and APPLE_NOTARIZATION_ISSUER_ID are required for notarization"
    exit 1
  fi

  if [[ -z "$notary_key_path" || ! -f "$notary_key_path" ]]; then
    echo "Notary key file $notary_key_path not found"
    exit 1
  fi

  if [[ ! -f "$path" ]]; then
    echo "Notarization payload $path not found"
    exit 1
  fi

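  # --wait blocks until Apple finishes processing and returns the final verdict as JSON.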
  local submission_json
  submission_json=$(xcrun notarytool submit "$path" \
    --key "$notary_key_path" \
    --key-id "$APPLE_NOTARIZATION_KEY_ID" \
    --issuer "$APPLE_NOTARIZATION_ISSUER_ID" \
    --output-format json \
    --wait)

  local status submission_id
  status=$(printf '%s\n' "$submission_json" | jq -r '.status // "Unknown"')
  submission_id=$(printf '%s\n' "$submission_json" | jq -r '.id // ""')

  if [[ -z "$submission_id" ]]; then
    echo "Failed to retrieve submission ID for $label"
    exit 1
  fi

  echo "::notice title=Notarization::$label submission ${submission_id} completed with status ${status}"

  if [[ "$status" != "Accepted" ]]; then
    echo "Notarization failed for ${label} (submission ${submission_id}, status ${status})"
    exit 1
  fi
}
.github/actions/windows-code-sign/action.yml (vendored, new file, 57 lines)
@@ -0,0 +1,57 @@
name: windows-code-sign
description: Sign Windows binaries with Azure Trusted Signing.
inputs:
  target:
    description: Target triple for the artifacts to sign.
    required: true
  client-id:
    description: Azure Trusted Signing client ID.
    required: true
  tenant-id:
    description: Azure tenant ID for Trusted Signing.
    required: true
  subscription-id:
    description: Azure subscription ID for Trusted Signing.
    required: true
  endpoint:
    description: Azure Trusted Signing endpoint.
    required: true
  account-name:
    description: Azure Trusted Signing account name.
    required: true
  certificate-profile-name:
    description: Certificate profile name for signing.
    required: true

runs:
  using: composite
  steps:
    - name: Azure login for Trusted Signing (OIDC)
      uses: azure/login@v2
      with:
        client-id: ${{ inputs.client-id }}
        tenant-id: ${{ inputs.tenant-id }}
        subscription-id: ${{ inputs.subscription-id }}

    - name: Sign Windows binaries with Azure Trusted Signing
      uses: azure/trusted-signing-action@v0
      with:
        endpoint: ${{ inputs.endpoint }}
        trusted-signing-account-name: ${{ inputs.account-name }}
        certificate-profile-name: ${{ inputs.certificate-profile-name }}
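        # Only the Azure CLI credential (populated by the azure/login step above) is
        # left enabled; every other credential source is excluded.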
        exclude-environment-credential: true
        exclude-workload-identity-credential: true
        exclude-managed-identity-credential: true
        exclude-shared-token-cache-credential: true
        exclude-visual-studio-credential: true
        exclude-visual-studio-code-credential: true
        exclude-azure-cli-credential: false
        exclude-azure-powershell-credential: true
        exclude-azure-developer-cli-credential: true
        exclude-interactive-browser-credential: true
        cache-dependencies: false
        files: |
          ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex.exe
          ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-responses-api-proxy.exe
          ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-windows-sandbox-setup.exe
          ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-command-runner.exe
BIN .github/codex-cli-login.png (vendored; binary file not shown, 410 KiB -> 2.9 MiB)
BIN .github/codex-cli-splash.png (vendored; binary file not shown, 412 KiB -> 3.1 MiB)
.github/codex/home/config.toml (vendored, 2 changed lines)
@@ -1,3 +1,3 @@
-model = "gpt-5"
+model = "gpt-5.1"

# Consider setting [mcp_servers] here!
.github/dotslash-config.json (vendored, 56 changed lines)
@@ -21,6 +21,62 @@
"windows-x86_64": {
|
||||
"regex": "^codex-x86_64-pc-windows-msvc\\.exe\\.zst$",
|
||||
"path": "codex.exe"
|
||||
},
|
||||
"windows-aarch64": {
|
||||
"regex": "^codex-aarch64-pc-windows-msvc\\.exe\\.zst$",
|
||||
"path": "codex.exe"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codex-responses-api-proxy": {
|
||||
"platforms": {
|
||||
"macos-aarch64": {
|
||||
"regex": "^codex-responses-api-proxy-aarch64-apple-darwin\\.zst$",
|
||||
"path": "codex-responses-api-proxy"
|
||||
},
|
||||
"macos-x86_64": {
|
||||
"regex": "^codex-responses-api-proxy-x86_64-apple-darwin\\.zst$",
|
||||
"path": "codex-responses-api-proxy"
|
||||
},
|
||||
"linux-x86_64": {
|
||||
"regex": "^codex-responses-api-proxy-x86_64-unknown-linux-musl\\.zst$",
|
||||
"path": "codex-responses-api-proxy"
|
||||
},
|
||||
"linux-aarch64": {
|
||||
"regex": "^codex-responses-api-proxy-aarch64-unknown-linux-musl\\.zst$",
|
||||
"path": "codex-responses-api-proxy"
|
||||
},
|
||||
"windows-x86_64": {
|
||||
"regex": "^codex-responses-api-proxy-x86_64-pc-windows-msvc\\.exe\\.zst$",
|
||||
"path": "codex-responses-api-proxy.exe"
|
||||
},
|
||||
"windows-aarch64": {
|
||||
"regex": "^codex-responses-api-proxy-aarch64-pc-windows-msvc\\.exe\\.zst$",
|
||||
"path": "codex-responses-api-proxy.exe"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codex-command-runner": {
|
||||
"platforms": {
|
||||
"windows-x86_64": {
|
||||
"regex": "^codex-command-runner-x86_64-pc-windows-msvc\\.exe\\.zst$",
|
||||
"path": "codex-command-runner.exe"
|
||||
},
|
||||
"windows-aarch64": {
|
||||
"regex": "^codex-command-runner-aarch64-pc-windows-msvc\\.exe\\.zst$",
|
||||
"path": "codex-command-runner.exe"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codex-windows-sandbox-setup": {
|
||||
"platforms": {
|
||||
"windows-x86_64": {
|
||||
"regex": "^codex-windows-sandbox-setup-x86_64-pc-windows-msvc\\.exe\\.zst$",
|
||||
"path": "codex-windows-sandbox-setup.exe"
|
||||
},
|
||||
"windows-aarch64": {
|
||||
"regex": "^codex-windows-sandbox-setup-aarch64-pc-windows-msvc\\.exe\\.zst$",
|
||||
"path": "codex-windows-sandbox-setup.exe"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.github/prompts/issue-deduplicator.txt (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
You are an assistant that triages new GitHub issues by identifying potential duplicates.

You will receive the following JSON files located in the current working directory:
- `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body).
- `codex-existing-issues.json`: JSON array of recent issues (each element includes number, title, body, createdAt).

Instructions:
- Load both files as JSON and review their contents carefully. The codex-existing-issues.json file is large; ensure you explore all of it.
- Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request.
- Only consider an issue a potential duplicate if there is a clear overlap in symptoms, feature requests, reproduction steps, or error messages.
- Prioritize newer issues when similarity is comparable.
- Ignore pull requests and issues whose similarity is tenuous.
- When unsure, prefer returning fewer matches.

Output requirements:
- Respond with a JSON array of issue numbers (integers), ordered from most likely duplicate to least.
- Include at most five numbers.
- If you find no plausible duplicates, respond with `[]`.
.github/prompts/issue-labeler.txt (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
You are an assistant that reviews GitHub issues for the repository.

Your job is to choose the most appropriate existing labels for the issue described later in this prompt.
Follow these rules:
- Only pick labels out of the list below.
- Prefer a small set of precise labels over many broad ones.
- If none of the labels fit, respond with an empty JSON array: []
- Output must be a JSON array of label names (strings) with no additional commentary.

Labels to apply:
1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth).
2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks.
3. extension — VS Code (or other IDE) extension-specific issues.
4. windows-os — Bugs or friction specific to Windows environments (PowerShell behavior, path handling, copy/paste, OS-specific auth or tooling failures).
5. mcp — Topics involving Model Context Protocol servers/clients.
6. codex-web — Issues targeting the Codex web UI/Cloud experience.
7. azure — Problems or requests tied to Azure OpenAI deployments.
8. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).
9. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.

Issue information is available in environment variables:

ISSUE_NUMBER
ISSUE_TITLE
ISSUE_BODY
REPO_FULL_NAME
.github/pull_request_template.md (vendored, 6 changed lines)
@@ -1,6 +1,8 @@
# External (non-OpenAI) Pull Request Requirements

-Before opening this Pull Request, please read the "Contributing" section of the README or your PR may be closed:
-https://github.com/openai/codex#contributing
+Before opening this Pull Request, please read the dedicated "Contributing" markdown file or your PR may be closed:
+https://github.com/openai/codex/blob/main/docs/contributing.md

If your PR conforms to our contribution guidelines, replace this text with a detailed and high quality description of your changes.

+Include a link to a bug report or enhancement request.
.github/workflows/cargo-deny.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
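# cargo-deny audits the dependency graph for license, security-advisory, and ban violations.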
name: cargo-deny

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  cargo-deny:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./codex-rs
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable

      - name: Run cargo-deny
        uses: EmbarkStudios/cargo-deny-action@v1
        with:
          rust-version: stable
          manifest-path: ./codex-rs/Cargo.toml
.github/workflows/ci.yml (vendored, 54 changed lines)
@@ -1,7 +1,7 @@
name: ci

on:
-  pull_request: { branches: [main] }
+  pull_request: {}
  push: { branches: [main] }

jobs:
@@ -12,42 +12,45 @@ jobs:
      NODE_OPTIONS: --max-old-space-size=4096
    steps:
      - name: Checkout repository
-        uses: actions/checkout@v5
-
-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: 22
+        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10.8.1
          run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "store_path=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
-
-      - name: Setup pnpm cache
-        uses: actions/cache@v4
+      - name: Setup Node.js
+        uses: actions/setup-node@v6
        with:
-          path: ${{ steps.pnpm-cache.outputs.store_path }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+          node-version: 22

      - name: Install dependencies
-        run: pnpm install
+        run: pnpm install --frozen-lockfile

-      # Run all tasks using workspace filters
+      # stage_npm_packages.py requires DotSlash when staging releases.
+      - uses: facebook/install-dotslash@v2

-      - name: Ensure staging a release works.
+      - name: Stage npm package
+        id: stage_npm_package
        env:
          GH_TOKEN: ${{ github.token }}
-        run: ./codex-cli/scripts/stage_release.sh
+        run: |
+          set -euo pipefail
+          # Use a rust-release version that includes all native binaries.
+          CODEX_VERSION=0.74.0
+          OUTPUT_DIR="${RUNNER_TEMP}"
+          python3 ./scripts/stage_npm_packages.py \
+            --release-version "$CODEX_VERSION" \
+            --package codex \
+            --output-dir "$OUTPUT_DIR"
+          PACK_OUTPUT="${OUTPUT_DIR}/codex-npm-${CODEX_VERSION}.tgz"
+          echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
+
+      - name: Upload staged npm package artifact
+        uses: actions/upload-artifact@v6
+        with:
+          name: codex-npm-staging
+          path: ${{ steps.stage_npm_package.outputs.pack_output }}

      - name: Ensure root README.md contains only ASCII and certain Unicode code points
        run: ./scripts/asciicheck.py README.md
@@ -58,3 +61,6 @@
        run: ./scripts/asciicheck.py codex-cli/README.md
      - name: Check codex-cli/README ToC
        run: python3 scripts/readme_toc.py codex-cli/README.md
+
+      - name: Prettier (run `pnpm run format:fix` to fix)
+        run: pnpm run format
.github/workflows/cla.yml (vendored, 28 changed lines)
@@ -13,17 +13,37 @@ permissions:

jobs:
  cla:
+    # Only run the CLA assistant for the canonical openai repo so forks are not blocked
+    # and contributors who signed previously do not receive duplicate CLA notifications.
+    if: ${{ github.repository_owner == 'openai' }}
    runs-on: ubuntu-latest
    steps:
      - uses: contributor-assistant/github-action@v2.6.1
+        # Run on close only if the PR was merged. This will lock the PR to preserve
+        # the CLA agreement. We don't want to lock PRs that have been closed without
+        # merging because the contributor may want to respond with additional comments.
+        # This action has a "lock-pullrequest-aftermerge" option that can be set to false,
+        # but that would unconditionally skip locking even in cases where the PR was merged.
        if: |
-          github.event_name == 'pull_request_target' ||
-          github.event.comment.body == 'recheck' ||
-          github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
+          (
+            github.event_name == 'pull_request_target' &&
+            (
+              github.event.action == 'opened' ||
+              github.event.action == 'synchronize' ||
+              (github.event.action == 'closed' && github.event.pull_request.merged == true)
+            )
+          ) ||
+          (
+            github.event_name == 'issue_comment' &&
+            (
+              github.event.comment.body == 'recheck' ||
+              github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA'
+            )
+          )
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          path-to-document: https://github.com/openai/codex/blob/main/docs/CLA.md
          path-to-signatures: signatures/cla.json
          branch: cla-signatures
-          allowlist: dependabot[bot]
+          allowlist: codex,dependabot,dependabot[bot],github-actions[bot]
.github/workflows/close-stale-contributor-prs.yml (vendored, new file, 105 lines)
@@ -0,0 +1,105 @@
name: Close stale contributor PRs

on:
  workflow_dispatch:
  schedule:
    - cron: "0 6 * * *"

permissions:
  contents: read
  issues: write
  pull-requests: write

jobs:
  close-stale-contributor-prs:
    runs-on: ubuntu-latest
    steps:
      - name: Close inactive PRs from contributors
        uses: actions/github-script@v8
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const DAYS_INACTIVE = 14;
            const cutoff = new Date(Date.now() - DAYS_INACTIVE * 24 * 60 * 60 * 1000);
            const { owner, repo } = context.repo;
            const dryRun = false;
            const stalePrs = [];

            core.info(`Dry run mode: ${dryRun}`);

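            // List open PRs least-recently-updated first so stale candidates surface early.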
            const prs = await github.paginate(github.rest.pulls.list, {
              owner,
              repo,
              state: "open",
              per_page: 100,
              sort: "updated",
              direction: "asc",
            });

            for (const pr of prs) {
              const lastUpdated = new Date(pr.updated_at);
              if (lastUpdated > cutoff) {
                core.info(`PR ${pr.number} is fresh`);
                continue;
              }

              if (!pr.user || pr.user.type !== "User") {
                core.info(`PR ${pr.number} wasn't created by a user`);
                continue;
              }

              let permission;
              try {
                const permissionResponse = await github.rest.repos.getCollaboratorPermissionLevel({
                  owner,
                  repo,
                  username: pr.user.login,
                });
                permission = permissionResponse.data.permission;
              } catch (error) {
                if (error.status === 404) {
                  core.info(`Author ${pr.user.login} is not a collaborator; skipping #${pr.number}`);
                  continue;
                }
                throw error;
              }

              const hasContributorAccess = ["admin", "maintain", "write"].includes(permission);
              if (!hasContributorAccess) {
                core.info(`Author ${pr.user.login} has ${permission} access; skipping #${pr.number}`);
                continue;
              }

              stalePrs.push(pr);
            }

            if (!stalePrs.length) {
              core.info("No stale contributor pull requests found.");
              return;
            }

            for (const pr of stalePrs) {
              const issue_number = pr.number;
              const closeComment = `Closing this pull request because it has had no updates for more than ${DAYS_INACTIVE} days. If you plan to continue working on it, feel free to reopen or open a new PR.`;

              if (dryRun) {
                core.info(`[dry-run] Would close contributor PR #${issue_number} from ${pr.user.login}`);
                continue;
              }

              await github.rest.issues.createComment({
                owner,
                repo,
                issue_number,
                body: closeComment,
              });

              await github.rest.pulls.update({
                owner,
                repo,
                pull_number: issue_number,
                state: "closed",
              });

              core.info(`Closed contributor PR #${issue_number} from ${pr.user.login}`);
            }
.github/workflows/codespell.yml (vendored, 4 changed lines)
@@ -18,10 +18,10 @@ jobs:

    steps:
      - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
      - name: Annotate locations with typos
        uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
      - name: Codespell
-        uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2
+        uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 # v2.2
        with:
          ignore_words_file: .codespellignore
.github/workflows/codex.yml (vendored, deleted file, 64 lines)
@@ -1,64 +0,0 @@
name: Codex

on:
  issues:
    types: [opened, labeled]
  pull_request:
    branches: [main]
    types: [labeled]

jobs:
  codex:
    # This `if` check provides complex filtering logic to avoid running Codex
    # on every PR. Admittedly, one thing this does not verify is whether the
    # sender has write access to the repo: that must be done as part of a
    # runtime step.
    #
    # Note the label values should match the ones in the .github/codex/labels
    # folder.
    if: |
      (github.event_name == 'issues' && (
        (github.event.action == 'labeled' && (github.event.label.name == 'codex-attempt' || github.event.label.name == 'codex-triage'))
      )) ||
      (github.event_name == 'pull_request' && github.event.action == 'labeled' && (github.event.label.name == 'codex-review' || github.event.label.name == 'codex-rust-review'))
    runs-on: ubuntu-latest
    permissions:
      contents: write # can push or create branches
      issues: write # for comments + labels on issues/PRs
      pull-requests: write # for PR comments/labels
    steps:
      # TODO: Consider adding an optional mode (--dry-run?) to actions/codex
      # that verifies whether Codex should actually be run for this event.
      # (For example, it may be rejected because the sender does not have
      # write access to the repo.) The benefit would be two-fold:
      # 1. As the first step of this job, it gives us a chance to add a reaction
      #    or comment to the PR/issue ASAP to "ack" the request.
      # 2. It saves resources by skipping the clone and setup steps below if
      #    Codex is not going to run.

      - name: Checkout repository
        uses: actions/checkout@v5

      - uses: dtolnay/rust-toolchain@1.89
        with:
          targets: x86_64-unknown-linux-gnu
          components: clippy

      - uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            ${{ github.workspace }}/codex-rs/target/
          key: cargo-ubuntu-24.04-x86_64-unknown-linux-gnu-dev-${{ hashFiles('**/Cargo.lock') }}

      # Note it is possible that the `verify` step internal to Run Codex will
      # fail, in which case the work to setup the repo was worthless :(
      - name: Run Codex
        uses: ./.github/actions/codex
        with:
          openai_api_key: ${{ secrets.CODEX_OPENAI_API_KEY }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
          codex_home: ./.github/codex/home
.github/workflows/issue-deduplicator.yml (vendored, new file, 139 lines)
@@ -0,0 +1,139 @@
name: Issue Deduplicator

on:
  issues:
    types:
      - opened
      - labeled

jobs:
  gather-duplicates:
    name: Identify potential duplicates
    if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate') }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
    outputs:
      codex_output: ${{ steps.codex.outputs.final-message }}
    steps:
      - uses: actions/checkout@v6

      - name: Prepare Codex inputs
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -eo pipefail

          CURRENT_ISSUE_FILE=codex-current-issue.json
          EXISTING_ISSUES_FILE=codex-existing-issues.json

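          # Snapshot the newest 1000 issues (open and closed) for comparison.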
          gh issue list --repo "${{ github.repository }}" \
            --json number,title,body,createdAt \
            --limit 1000 \
            --state all \
            --search "sort:created-desc" \
            | jq '.' \
            > "$EXISTING_ISSUES_FILE"

          gh issue view "${{ github.event.issue.number }}" \
            --repo "${{ github.repository }}" \
            --json number,title,body \
            | jq '.' \
            > "$CURRENT_ISSUE_FILE"

      - id: codex
        uses: openai/codex-action@main
        with:
          openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
          allow-users: "*"
          prompt: |
            You are an assistant that triages new GitHub issues by identifying potential duplicates.

            You will receive the following JSON files located in the current working directory:
            - `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body).
            - `codex-existing-issues.json`: JSON array of recent issues (each element includes number, title, body, createdAt).

            Instructions:
            - Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request.
            - Focus on the underlying intent and context of each issue—such as reported symptoms, feature requests, reproduction steps, or error messages—rather than relying solely on string similarity or synthetic metrics.
            - After your analysis, validate your results in 1-2 lines explaining your decision to return the selected matches.
            - When unsure, prefer returning fewer matches.
            - Include at most five numbers.

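          # The schema below forces the action's final message into JSON that the downstream job can parse.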
          output-schema: |
            {
              "type": "object",
              "properties": {
                "issues": {
                  "type": "array",
                  "items": {
                    "type": "string"
                  }
                },
                "reason": { "type": "string" }
              },
              "required": ["issues", "reason"],
              "additionalProperties": false
            }

  comment-on-issue:
    name: Comment with potential duplicates
    needs: gather-duplicates
    if: ${{ needs.gather-duplicates.result != 'skipped' }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
      issues: write
    steps:
      - name: Comment on issue
        uses: actions/github-script@v8
        env:
          CODEX_OUTPUT: ${{ needs.gather-duplicates.outputs.codex_output }}
        with:
          github-token: ${{ github.token }}
          script: |
            const raw = process.env.CODEX_OUTPUT ?? '';
            let parsed;
            try {
              parsed = JSON.parse(raw);
            } catch (error) {
              core.info(`Codex output was not valid JSON. Raw output: ${raw}`);
              core.info(`Parse error: ${error.message}`);
              return;
            }

            const issues = Array.isArray(parsed?.issues) ? parsed.issues : [];
            const currentIssueNumber = String(context.payload.issue.number);

            console.log(`Current issue number: ${currentIssueNumber}`);
            console.log(issues);

            const filteredIssues = issues.filter((value) => String(value) !== currentIssueNumber);

            if (filteredIssues.length === 0) {
              core.info('Codex reported no potential duplicates.');
              return;
            }

            const lines = [
              'Potential duplicates detected. Please review them and close your issue if it is a duplicate.',
              '',
              ...filteredIssues.map((value) => `- #${String(value)}`),
              '',
              '*Powered by [Codex Action](https://github.com/openai/codex-action)*',
            ];

            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.payload.issue.number,
              body: lines.join("\n"),
            });

      - name: Remove codex-deduplicate label
        if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate' }}
        env:
          GH_TOKEN: ${{ github.token }}
          GH_REPO: ${{ github.repository }}
        run: |
          gh issue edit "${{ github.event.issue.number }}" --remove-label codex-deduplicate || true
          echo "Attempted to remove label: codex-deduplicate"
.github/workflows/issue-labeler.yml (vendored, new file, 130 lines)
@@ -0,0 +1,130 @@
name: Issue Labeler

on:
  issues:
    types:
      - opened
      - labeled

jobs:
  gather-labels:
    name: Generate label suggestions
    if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label') }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
    outputs:
      codex_output: ${{ steps.codex.outputs.final-message }}
    steps:
      - uses: actions/checkout@v6

      - id: codex
        uses: openai/codex-action@main
        with:
          openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
          allow-users: "*"
          prompt: |
            You are an assistant that reviews GitHub issues for the repository.

            Your job is to choose the most appropriate labels for the issue described later in this prompt.
            Follow these rules:

            - Add one (and only one) of the following three labels to distinguish the type of issue. Default to "bug" if unsure.
              1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth).
              2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks.
              3. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests).

            - If applicable, add one of the following labels to specify which sub-product or product surface the issue relates to.
              1. CLI — the Codex command line interface.
              2. extension — VS Code (or other IDE) extension-specific issues.
              3. codex-web — Issues targeting the Codex web UI/Cloud experience.
              4. github-action — Issues with the Codex GitHub action.
              5. iOS — Issues with the Codex iOS app.

            - Additionally add zero or more of the following labels that are relevant to the issue content. Prefer a small set of precise labels over many broad ones.
              1. windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures).
              2. mcp — Topics involving Model Context Protocol servers/clients.
              3. mcp-server — Problems related to the codex mcp-server command, where codex runs as an MCP server.
              4. azure — Problems or requests tied to Azure OpenAI deployments.
              5. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies.
              6. code-review — Issues related to the code review feature or functionality.
              7. auth - Problems related to authentication, login, or access tokens.
              8. codex-exec - Problems related to the "codex exec" command or functionality.
              9. context-management - Problems related to compaction, context windows, or available context reporting.
              10. custom-model - Problems that involve using custom model providers, local models, or OSS models.
              11. rate-limits - Problems related to token limits, rate limits, or token usage reporting.
              12. sandbox - Issues related to local sandbox environments or tool call approvals to override sandbox restrictions.
              13. tool-calls - Problems related to specific tool call invocations including unexpected errors, failures, or hangs.
              14. TUI - Problems with the terminal user interface (TUI) including keyboard shortcuts, copy & pasting, menus, or screen update issues.

            Issue number: ${{ github.event.issue.number }}

            Issue title:
            ${{ github.event.issue.title }}

            Issue body:
            ${{ github.event.issue.body }}

            Repository full name:
            ${{ github.repository }}

          output-schema: |
            {
              "type": "object",
              "properties": {
                "labels": {
                  "type": "array",
                  "items": {
                    "type": "string"
                  }
                }
              },
              "required": ["labels"],
              "additionalProperties": false
            }

  apply-labels:
    name: Apply labels from Codex output
    needs: gather-labels
    if: ${{ needs.gather-labels.result != 'skipped' }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
      issues: write
    env:
      GH_TOKEN: ${{ github.token }}
      GH_REPO: ${{ github.repository }}
      ISSUE_NUMBER: ${{ github.event.issue.number }}
      CODEX_OUTPUT: ${{ needs.gather-labels.outputs.codex_output }}
    steps:
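      # Codex output is untrusted model output: validate it is JSON with a labels array before calling gh.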
      - name: Apply labels
        run: |
          json=${CODEX_OUTPUT//$'\r'/}
          if [ -z "$json" ]; then
            echo "Codex produced no output. Skipping label application."
            exit 0
          fi

          if ! printf '%s' "$json" | jq -e 'type == "object" and (.labels | type == "array")' >/dev/null 2>&1; then
            echo "Codex output did not include a labels array. Raw output: $json"
            exit 0
          fi

          labels=$(printf '%s' "$json" | jq -r '.labels[] | tostring')
          if [ -z "$labels" ]; then
            echo "Codex returned an empty array. Nothing to do."
            exit 0
          fi

          cmd=(gh issue edit "$ISSUE_NUMBER")
          while IFS= read -r label; do
            cmd+=(--add-label "$label")
          done <<< "$labels"

          "${cmd[@]}" || true

      - name: Remove codex-label trigger
        if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-label' }}
        run: |
          gh issue edit "$ISSUE_NUMBER" --remove-label codex-label || true
          echo "Attempted to remove label: codex-label"
.github/workflows/rust-ci.yml (vendored, 407 changed lines)
@@ -9,7 +9,7 @@ on:
# CI builds in debug (dev) for faster signal.

jobs:
-  # --- Detect what changed (always runs) -------------------------------------
+  # --- Detect what changed to detect which tests to run (always runs) -------------------------------------
  changed:
    name: Detect changed areas
    runs-on: ubuntu-24.04
@@ -17,7 +17,7 @@
      codex: ${{ steps.detect.outputs.codex }}
      workflows: ${{ steps.detect.outputs.workflows }}
    steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Detect changed paths (no external action)
@@ -28,9 +28,11 @@

          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
            BASE_SHA='${{ github.event.pull_request.base.sha }}'
+            HEAD_SHA='${{ github.event.pull_request.head.sha }}'
            echo "Base SHA: $BASE_SHA"
-            # List files changed between base and current HEAD (merge-base aware)
-            mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA"...HEAD)
+            echo "Head SHA: $HEAD_SHA"
+            # List files changed between base and PR head
+            mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA" "$HEAD_SHA")
          else
            # On push / manual runs, default to running everything
            files=("codex-rs/force" ".github/force")
@@ -56,16 +58,36 @@
      run:
        working-directory: codex-rs
    steps:
-      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: actions/checkout@v6
+      - uses: dtolnay/rust-toolchain@1.90
        with:
          components: rustfmt
      - name: cargo fmt
        run: cargo fmt -- --config imports_granularity=Item --check
      - name: Verify codegen for mcp-types
        run: ./mcp-types/check_lib_rs.py

+  cargo_shear:
+    name: cargo shear
+    runs-on: ubuntu-24.04
+    needs: changed
+    if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
+    defaults:
+      run:
+        working-directory: codex-rs
+    steps:
+      - uses: actions/checkout@v6
+      - uses: dtolnay/rust-toolchain@1.90
+      - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
+        with:
+          tool: cargo-shear
+          version: 1.5.1
+      - name: cargo shear
+        run: cargo shear
+
  # --- CI to validate on different os/targets --------------------------------
-  lint_build_test:
-    name: ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
+  lint_build:
+    name: Lint/Build — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    needs: changed
@@ -74,6 +96,11 @@
    defaults:
      run:
        working-directory: codex-rs
+    env:
+      # Speed up repeated builds across CI runs by caching compiled objects (non-Windows).
+      USE_SCCACHE: ${{ startsWith(matrix.runner, 'windows') && 'false' || 'true' }}
+      CARGO_INCREMENTAL: "0"
+      SCCACHE_CACHE_SIZE: 10G

    strategy:
      fail-fast: false
@@ -100,41 +127,142 @@
          - runner: windows-latest
            target: x86_64-pc-windows-msvc
            profile: dev
+          - runner: windows-11-arm
+            target: aarch64-pc-windows-msvc
+            profile: dev

          # Also run representative release builds on Mac and Linux because
          # there could be release-only build errors we want to catch.
          # Hopefully this also pre-populates the build cache to speed up
          # releases.
          - runner: macos-14
            target: aarch64-apple-darwin
            profile: release
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            profile: release
+          - runner: windows-latest
+            target: x86_64-pc-windows-msvc
+            profile: release
+          - runner: windows-11-arm
+            target: aarch64-pc-windows-msvc
+            profile: release

    steps:
-      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: actions/checkout@v6
+      - uses: dtolnay/rust-toolchain@1.90
        with:
          targets: ${{ matrix.target }}
          components: clippy

-      - uses: actions/cache@v4
+      - name: Compute lockfile hash
+        id: lockhash
+        working-directory: codex-rs
+        shell: bash
+        run: |
+          set -euo pipefail
+          echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
+          echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
+
+      # Explicit cache restore: split cargo home vs target, so we can
+      # avoid caching the large target dir on the gnu-dev job.
+      - name: Restore cargo home cache
+        id: cache_cargo_home_restore
+        uses: actions/cache/restore@v5
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
-            ${{ github.workspace }}/codex-rs/target/
-          key: cargo-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
+          restore-keys: |
+            cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
+
+      # Install and restore sccache cache
+      - name: Install sccache
+        if: ${{ env.USE_SCCACHE == 'true' }}
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
+        with:
+          tool: sccache
+          version: 0.7.5
+
+      - name: Configure sccache backend
+        if: ${{ env.USE_SCCACHE == 'true' }}
+        shell: bash
+        run: |
+          set -euo pipefail
+          if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then
+            echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV"
+            echo "Using sccache GitHub backend"
+          else
+            echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV"
+            echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV"
+            echo "Using sccache local disk + actions/cache fallback"
+          fi
+
+      - name: Enable sccache wrapper
+        if: ${{ env.USE_SCCACHE == 'true' }}
+        shell: bash
+        run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV"
+
+      - name: Restore sccache cache (fallback)
+        if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
+        id: cache_sccache_restore
+        uses: actions/cache/restore@v5
+        with:
+          path: ${{ github.workspace }}/.sccache/
+          key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
+          restore-keys: |
+            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
+            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
+
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Prepare APT cache directories (musl)
+        shell: bash
+        run: |
+          set -euo pipefail
+          sudo mkdir -p /var/cache/apt/archives /var/lib/apt/lists
+          sudo chown -R "$USER:$USER" /var/cache/apt /var/lib/apt/lists
+
+      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
+        name: Restore APT cache (musl)
+        id: cache_apt_restore
+        uses: actions/cache/restore@v5
+        with:
+          path: |
+            /var/cache/apt
+          key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Install musl build tools
+        env:
+          DEBIAN_FRONTEND: noninteractive
        shell: bash
        run: |
-          sudo apt install -y musl-tools pkg-config && sudo rm -rf /var/lib/apt/lists/*
+          set -euo pipefail
+          sudo apt-get -y update -o Acquire::Retries=3
+          sudo apt-get -y install --no-install-recommends musl-tools pkg-config
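      # (cargo-chef cooks just the dependency graph from the recipe, so third-party
      # crates land in the cache before the real build runs)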
+      - name: Install cargo-chef
+        if: ${{ matrix.profile == 'release' }}
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
+        with:
+          tool: cargo-chef
+          version: 0.1.71
+
+      - name: Pre-warm dependency cache (cargo-chef)
+        if: ${{ matrix.profile == 'release' }}
+        shell: bash
+        run: |
+          set -euo pipefail
+          RECIPE="${RUNNER_TEMP}/chef-recipe.json"
+          cargo chef prepare --recipe-path "$RECIPE"
+          cargo chef cook --recipe-path "$RECIPE" --target ${{ matrix.target }} --release --all-features

      - name: cargo clippy
        id: clippy
-        run: cargo clippy --target ${{ matrix.target }} --all-features --tests -- -D warnings
+        run: cargo clippy --target ${{ matrix.target }} --all-features --tests --profile ${{ matrix.profile }} -- -D warnings

      # Running `cargo build` from the workspace root builds the workspace using
      # the union of all features from third-party crates. This can mask errors
@@ -149,29 +277,243 @@
          find . -name Cargo.toml -mindepth 2 -maxdepth 2 -print0 \
            | xargs -0 -n1 -I{} bash -c 'cd "$(dirname "{}")" && cargo check --profile ${{ matrix.profile }}'

-      - name: cargo test
-        id: test
-        # `cargo test` takes too long for release builds to run them on every PR
-        if: ${{ matrix.profile != 'release' }}
-        run: cargo test --all-features --target ${{ matrix.target }} --profile ${{ matrix.profile }}
-        env:
-          RUST_BACKTRACE: 1
+      # Save caches explicitly; make non-fatal so cache packaging
+      # never fails the overall job. Only save when key wasn't hit.
+      - name: Save cargo home cache
+        if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
+        continue-on-error: true
+        uses: actions/cache/save@v5
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
+
+      - name: Save sccache cache (fallback)
+        if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
+        continue-on-error: true
+        uses: actions/cache/save@v5
+        with:
+          path: ${{ github.workspace }}/.sccache/
+          key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
+
+      - name: sccache stats
+        if: always() && env.USE_SCCACHE == 'true'
+        continue-on-error: true
+        run: sccache --show-stats || true
+
+      - name: sccache summary
+        if: always() && env.USE_SCCACHE == 'true'
+        shell: bash
+        run: |
+          {
+            echo "### sccache stats — ${{ matrix.target }} (${{ matrix.profile }})";
+            echo;
+            echo '```';
+            sccache --show-stats || true;
+            echo '```';
+          } >> "$GITHUB_STEP_SUMMARY"
+
+      - name: Save APT cache (musl)
+        if: always() && !cancelled() && (matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl') && steps.cache_apt_restore.outputs.cache-hit != 'true'
+        continue-on-error: true
+        uses: actions/cache/save@v5
+        with:
+          path: |
+            /var/cache/apt
+          key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1

      # Fail the job if any of the previous steps failed.
      - name: verify all steps passed
        if: |
          steps.clippy.outcome == 'failure' ||
-          steps.cargo_check_all_crates.outcome == 'failure' ||
-          steps.test.outcome == 'failure'
+          steps.cargo_check_all_crates.outcome == 'failure'
        run: |
-          echo "One or more checks failed (clippy, cargo_check_all_crates, or test). See logs for details."
+          echo "One or more checks failed (clippy or cargo_check_all_crates). See logs for details."
          exit 1

+  tests:
+    name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}
+    runs-on: ${{ matrix.runner }}
+    timeout-minutes: 30
+    needs: changed
+    if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
+    defaults:
+      run:
+        working-directory: codex-rs
+    env:
+      # Speed up repeated builds across CI runs by caching compiled objects (non-Windows).
+      USE_SCCACHE: ${{ startsWith(matrix.runner, 'windows') && 'false' || 'true' }}
+      CARGO_INCREMENTAL: "0"
+      SCCACHE_CACHE_SIZE: 10G
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - runner: macos-14
+            target: aarch64-apple-darwin
+            profile: dev
+          - runner: ubuntu-24.04
+            target: x86_64-unknown-linux-gnu
+            profile: dev
+          - runner: ubuntu-24.04-arm
+            target: aarch64-unknown-linux-gnu
+            profile: dev
+          - runner: windows-latest
+            target: x86_64-pc-windows-msvc
+            profile: dev
+          - runner: windows-11-arm
+            target: aarch64-pc-windows-msvc
+            profile: dev
+
+    steps:
+      - uses: actions/checkout@v6
+
+      # We have been running out of space when running this job on Linux for
+      # x86_64-unknown-linux-gnu, so remove some unnecessary dependencies.
+      - name: Remove unnecessary dependencies to save space
+        if: ${{ startsWith(matrix.runner, 'ubuntu') }}
+        shell: bash
+        run: |
+          set -euo pipefail
+          sudo rm -rf \
+            /usr/local/lib/android \
+            /usr/share/dotnet \
+            /usr/local/share/boost \
+            /usr/local/lib/node_modules \
+            /opt/ghc
+          sudo apt-get remove -y docker.io docker-compose podman buildah
+
+      # Some integration tests rely on DotSlash being installed.
+      # See https://github.com/openai/codex/pull/7617.
+      - name: Install DotSlash
+        uses: facebook/install-dotslash@v2
+
+      - uses: dtolnay/rust-toolchain@1.90
+        with:
+          targets: ${{ matrix.target }}
+
+      - name: Compute lockfile hash
+        id: lockhash
+        working-directory: codex-rs
+        shell: bash
+        run: |
+          set -euo pipefail
+          echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
+          echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
+
+      - name: Restore cargo home cache
+        id: cache_cargo_home_restore
+        uses: actions/cache/restore@v5
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
+          restore-keys: |
+            cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
+
+      - name: Install sccache
+        if: ${{ env.USE_SCCACHE == 'true' }}
+        uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
+        with:
+          tool: sccache
+          version: 0.7.5
+
+      - name: Configure sccache backend
+        if: ${{ env.USE_SCCACHE == 'true' }}
+        shell: bash
+        run: |
+          set -euo pipefail
+          if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then
+            echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV"
+            echo "Using sccache GitHub backend"
+          else
+            echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV"
+            echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV"
+            echo "Using sccache local disk + actions/cache fallback"
+          fi
+
+      - name: Enable sccache wrapper
+        if: ${{ env.USE_SCCACHE == 'true' }}
+        shell: bash
+        run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV"
+
+      - name: Restore sccache cache (fallback)
+        if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
+        id: cache_sccache_restore
+        uses: actions/cache/restore@v5
+        with:
+          path: ${{ github.workspace }}/.sccache/
+          key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
+          restore-keys: |
+            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
+            sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
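      # (nextest runs each test in its own process; the "leak" status level flags
      # tests that leave child processes or handles behind)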
+      - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
+        with:
+          tool: nextest
+          version: 0.9.103
+
+      - name: tests
+        id: test
+        run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test
+        env:
+          RUST_BACKTRACE: 1
+          NEXTEST_STATUS_LEVEL: leak
+
+      - name: Save cargo home cache
+        if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
+        continue-on-error: true
+        uses: actions/cache/save@v5
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
+
+      - name: Save sccache cache (fallback)
+        if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
+        continue-on-error: true
+        uses: actions/cache/save@v5
+        with:
+          path: ${{ github.workspace }}/.sccache/
+          key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
+
+      - name: sccache stats
+        if: always() && env.USE_SCCACHE == 'true'
+        continue-on-error: true
+        run: sccache --show-stats || true
+
+      - name: sccache summary
+        if: always() && env.USE_SCCACHE == 'true'
+        shell: bash
+        run: |
+          {
+            echo "### sccache stats — ${{ matrix.target }} (tests)";
+            echo;
+            echo '```';
+            sccache --show-stats || true;
+            echo '```';
+          } >> "$GITHUB_STEP_SUMMARY"
+
+      - name: verify tests passed
+        if: steps.test.outcome == 'failure'
+        run: |
+          echo "Tests failed. See logs for details."
+          exit 1
+
  # --- Gatherer job that you mark as the ONLY required status -----------------
  results:
    name: CI results (required)
-    needs: [changed, general, lint_build_test]
+    needs: [changed, general, cargo_shear, lint_build, tests]
    if: always()
    runs-on: ubuntu-24.04
    steps:
@@ -179,7 +521,9 @@
        shell: bash
        run: |
          echo "general: ${{ needs.general.result }}"
-          echo "matrix : ${{ needs.lint_build_test.result }}"
+          echo "shear : ${{ needs.cargo_shear.result }}"
+          echo "lint : ${{ needs.lint_build.result }}"
+          echo "tests : ${{ needs.tests.result }}"

          # If nothing relevant changed (PR touching only root README, etc.),
          # declare success regardless of other jobs.
@@ -190,4 +534,11 @@

          # Otherwise require the jobs to have succeeded
          [[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; }
-          [[ '${{ needs.lint_build_test.result }}' == 'success' ]] || { echo 'matrix failed'; exit 1; }
+          [[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; }
+          [[ '${{ needs.lint_build.result }}' == 'success' ]] || { echo 'lint_build failed'; exit 1; }
+          [[ '${{ needs.tests.result }}' == 'success' ]] || { echo 'tests failed'; exit 1; }
+
+      - name: sccache summary note
+        if: always()
+        run: |
+          echo "Per-job sccache stats are attached to each matrix job's Step Summary."
.github/workflows/rust-release-prepare.yml (new file, 51 lines, vendored)
@@ -0,0 +1,51 @@
name: rust-release-prepare
on:
  workflow_dispatch:
  schedule:
    - cron: "0 */4 * * *"

concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false

permissions:
  contents: write
  pull-requests: write

jobs:
  prepare:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
        with:
          ref: main
          fetch-depth: 0

      - name: Update models.json
        env:
          OPENAI_API_KEY: ${{ secrets.CODEX_OPENAI_API_KEY }}
        run: |
          set -euo pipefail

          client_version="99.99.99"
          terminal_info="github-actions"
          user_agent="codex_cli_rs/99.99.99 (Linux $(uname -r); $(uname -m)) ${terminal_info}"
          base_url="${OPENAI_BASE_URL:-https://chatgpt.com/backend-api/codex}"

          headers=(
            -H "Authorization: Bearer ${OPENAI_API_KEY}"
            -H "User-Agent: ${user_agent}"
          )

          url="${base_url%/}/models?client_version=${client_version}"
          curl --http1.1 --fail --show-error --location "${headers[@]}" "${url}" | jq '.' > codex-rs/core/models.json

      - name: Open pull request (if changed)
        uses: peter-evans/create-pull-request@v7
        with:
          commit-message: "Update models.json"
          title: "Update models.json"
          body: "Automated update of models.json."
          branch: "bot/update-models-json"
          reviewers: "pakrym-oai,aibrahim-oai"
          delete-branch: true
.github/workflows/rust-release.yml (335 lines changed, vendored)
@@ -19,7 +19,7 @@ jobs:
  tag-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/checkout@v6

      - name: Validate tag matches Cargo.toml version
        shell: bash
@@ -47,9 +47,12 @@ jobs:

  build:
    needs: tag-check
    name: ${{ matrix.runner }} - ${{ matrix.target }}
    name: Build - ${{ matrix.runner }} - ${{ matrix.target }}
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    permissions:
      contents: read
      id-token: write
    defaults:
      run:
        working-directory: codex-rs
@@ -58,9 +61,9 @@ jobs:
      fail-fast: false
      matrix:
        include:
          - runner: macos-14
          - runner: macos-15-xlarge
            target: aarch64-apple-darwin
          - runner: macos-14
          - runner: macos-15-xlarge
            target: x86_64-apple-darwin
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
@@ -72,14 +75,16 @@ jobs:
            target: aarch64-unknown-linux-gnu
          - runner: windows-latest
            target: x86_64-pc-windows-msvc
          - runner: windows-11-arm
            target: aarch64-pc-windows-msvc

    steps:
      - uses: actions/checkout@v5
      - uses: dtolnay/rust-toolchain@1.89
      - uses: actions/checkout@v6
      - uses: dtolnay/rust-toolchain@1.90
        with:
          targets: ${{ matrix.target }}

      - uses: actions/cache@v4
      - uses: actions/cache@v5
        with:
          path: |
            ~/.cargo/bin/
@@ -87,15 +92,113 @@ jobs:
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            ${{ github.workspace }}/codex-rs/target/
          key: cargo-release-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}
          key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Install musl build tools
        run: |
          sudo apt install -y musl-tools pkg-config
          sudo apt-get update
          sudo apt-get install -y musl-tools pkg-config

      - name: Cargo build
        run: cargo build --target ${{ matrix.target }} --release --bin codex
        shell: bash
        run: |
          if [[ "${{ contains(matrix.target, 'windows') }}" == 'true' ]]; then
            cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy --bin codex-windows-sandbox-setup --bin codex-command-runner
          else
            cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy
          fi

      - if: ${{ contains(matrix.target, 'linux') }}
        name: Cosign Linux artifacts
        uses: ./.github/actions/linux-code-sign
        with:
          target: ${{ matrix.target }}
          artifacts-dir: ${{ github.workspace }}/codex-rs/target/${{ matrix.target }}/release

      - if: ${{ contains(matrix.target, 'windows') }}
        name: Sign Windows binaries with Azure Trusted Signing
        uses: ./.github/actions/windows-code-sign
        with:
          target: ${{ matrix.target }}
          client-id: ${{ secrets.AZURE_TRUSTED_SIGNING_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TRUSTED_SIGNING_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_TRUSTED_SIGNING_SUBSCRIPTION_ID }}
          endpoint: ${{ secrets.AZURE_TRUSTED_SIGNING_ENDPOINT }}
          account-name: ${{ secrets.AZURE_TRUSTED_SIGNING_ACCOUNT_NAME }}
          certificate-profile-name: ${{ secrets.AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE_NAME }}

      - if: ${{ runner.os == 'macOS' }}
        name: MacOS code signing (binaries)
        uses: ./.github/actions/macos-code-sign
        with:
          target: ${{ matrix.target }}
          sign-binaries: "true"
          sign-dmg: "false"
          apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }}
          apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
          apple-notarization-key-p8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
          apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
          apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}

      - if: ${{ runner.os == 'macOS' }}
        name: Build macOS dmg
        shell: bash
        run: |
          set -euo pipefail

          target="${{ matrix.target }}"
          release_dir="target/${target}/release"
          dmg_root="${RUNNER_TEMP}/codex-dmg-root"
          volname="Codex (${target})"
          dmg_path="${release_dir}/codex-${target}.dmg"

          # The previous "MacOS code signing (binaries)" step signs + notarizes the
          # built artifacts in `${release_dir}`. This step packages *those same*
          # signed binaries into a dmg.
          codex_binary_path="${release_dir}/codex"
          proxy_binary_path="${release_dir}/codex-responses-api-proxy"

          rm -rf "$dmg_root"
          mkdir -p "$dmg_root"

          if [[ ! -f "$codex_binary_path" ]]; then
            echo "Binary $codex_binary_path not found"
            exit 1
          fi
          if [[ ! -f "$proxy_binary_path" ]]; then
            echo "Binary $proxy_binary_path not found"
            exit 1
          fi

          ditto "$codex_binary_path" "${dmg_root}/codex"
          ditto "$proxy_binary_path" "${dmg_root}/codex-responses-api-proxy"

          rm -f "$dmg_path"
          hdiutil create \
            -volname "$volname" \
            -srcfolder "$dmg_root" \
            -format UDZO \
            -ov \
            "$dmg_path"

          if [[ ! -f "$dmg_path" ]]; then
            echo "dmg $dmg_path not found after build"
            exit 1
          fi

      - if: ${{ runner.os == 'macOS' }}
        name: MacOS code signing (dmg)
        uses: ./.github/actions/macos-code-sign
        with:
          target: ${{ matrix.target }}
          sign-binaries: "false"
          sign-dmg: "true"
          apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }}
          apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
          apple-notarization-key-p8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
          apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
          apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}

      - name: Stage artifacts
        shell: bash
@@ -105,10 +208,28 @@ jobs:

          if [[ "${{ matrix.runner }}" == windows* ]]; then
            cp target/${{ matrix.target }}/release/codex.exe "$dest/codex-${{ matrix.target }}.exe"
            cp target/${{ matrix.target }}/release/codex-responses-api-proxy.exe "$dest/codex-responses-api-proxy-${{ matrix.target }}.exe"
            cp target/${{ matrix.target }}/release/codex-windows-sandbox-setup.exe "$dest/codex-windows-sandbox-setup-${{ matrix.target }}.exe"
            cp target/${{ matrix.target }}/release/codex-command-runner.exe "$dest/codex-command-runner-${{ matrix.target }}.exe"
          else
            cp target/${{ matrix.target }}/release/codex "$dest/codex-${{ matrix.target }}"
            cp target/${{ matrix.target }}/release/codex-responses-api-proxy "$dest/codex-responses-api-proxy-${{ matrix.target }}"
          fi

          if [[ "${{ matrix.target }}" == *linux* ]]; then
            cp target/${{ matrix.target }}/release/codex.sigstore "$dest/codex-${{ matrix.target }}.sigstore"
            cp target/${{ matrix.target }}/release/codex-responses-api-proxy.sigstore "$dest/codex-responses-api-proxy-${{ matrix.target }}.sigstore"
          fi

          if [[ "${{ matrix.target }}" == *apple-darwin ]]; then
            cp target/${{ matrix.target }}/release/codex-${{ matrix.target }}.dmg "$dest/codex-${{ matrix.target }}.dmg"
          fi

      - if: ${{ matrix.runner == 'windows-11-arm' }}
        name: Install zstd
        shell: powershell
        run: choco install -y zstandard

      - name: Compress artifacts
        shell: bash
        run: |
@@ -116,6 +237,15 @@ jobs:
          # ${{ matrix.target }}
          dest="dist/${{ matrix.target }}"

          # We want to ship the raw Windows executables in the GitHub Release
          # in addition to the compressed archives. Keep the originals for
          # Windows targets; remove them elsewhere to limit the number of
          # artifacts that end up in the GitHub Release.
          keep_originals=false
          if [[ "${{ matrix.runner }}" == windows* ]]; then
            keep_originals=true
          fi

          # For compatibility with environments that lack the `zstd` tool we
          # additionally create a `.tar.gz` for all platforms and `.zip` for
          # Windows alongside every single binary that we publish. The end result is:
@@ -129,7 +259,12 @@ jobs:
            base="$(basename "$f")"
            # Skip files that are already archives (shouldn't happen, but be
            # safe).
            if [[ "$base" == *.tar.gz || "$base" == *.zip ]]; then
            if [[ "$base" == *.tar.gz || "$base" == *.zip || "$base" == *.dmg ]]; then
              continue
            fi

            # Don't try to compress signature bundles.
            if [[ "$base" == *.sigstore ]]; then
              continue
            fi

@@ -145,10 +280,14 @@ jobs:

            # Also create .zst (existing behaviour) *and* remove the original
            # uncompressed binary to keep the directory small.
            zstd -T0 -19 --rm "$dest/$base"
            zstd_args=(-T0 -19)
            if [[ "${keep_originals}" == false ]]; then
              zstd_args+=(--rm)
            fi
            zstd "${zstd_args[@]}" "$dest/$base"
          done

      - uses: actions/upload-artifact@v4
      - uses: actions/upload-artifact@v6
        with:
          name: ${{ matrix.target }}
          # Upload the per-binary .zst files as well as the new .tar.gz
@@ -156,22 +295,49 @@ jobs:
          path: |
            codex-rs/dist/${{ matrix.target }}/*

  shell-tool-mcp:
    name: shell-tool-mcp
    needs: tag-check
    uses: ./.github/workflows/shell-tool-mcp.yml
    with:
      release-tag: ${{ github.ref_name }}
      publish: true
    secrets: inherit

  release:
    needs: build
    needs:
      - build
      - shell-tool-mcp
    name: release
    runs-on: ubuntu-latest
    permissions:
      contents: write
      actions: read
    outputs:
      version: ${{ steps.release_name.outputs.name }}
      tag: ${{ github.ref_name }}
      should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }}
      npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        uses: actions/checkout@v6

      - uses: actions/download-artifact@v4
      - uses: actions/download-artifact@v7
        with:
          path: dist

      - name: List
        run: ls -R dist/

      # This is a temporary fix: we should modify shell-tool-mcp.yml so these
      # files do not end up in dist/ in the first place.
      - name: Delete entries from dist/ that should not go in the release
        run: |
          rm -rf dist/shell-tool-mcp*

          ls -R dist/

      - name: Define release name
        id: release_name
        run: |
@@ -180,21 +346,49 @@ jobs:
          version="${GITHUB_REF_NAME#rust-v}"
          echo "name=${version}" >> $GITHUB_OUTPUT

      - name: Stage npm package
      - name: Determine npm publish settings
        id: npm_publish_settings
        env:
          VERSION: ${{ steps.release_name.outputs.name }}
        run: |
          set -euo pipefail
          version="${VERSION}"

          if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            echo "should_publish=true" >> "$GITHUB_OUTPUT"
            echo "npm_tag=" >> "$GITHUB_OUTPUT"
          elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
            echo "should_publish=true" >> "$GITHUB_OUTPUT"
            echo "npm_tag=alpha" >> "$GITHUB_OUTPUT"
          else
            echo "should_publish=false" >> "$GITHUB_OUTPUT"
            echo "npm_tag=" >> "$GITHUB_OUTPUT"
          fi

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          run_install: false

      - name: Setup Node.js for npm packaging
        uses: actions/setup-node@v6
        with:
          node-version: 22

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      # stage_npm_packages.py requires DotSlash when staging releases.
      - uses: facebook/install-dotslash@v2
      - name: Stage npm packages
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -euo pipefail
          TMP_DIR="${RUNNER_TEMP}/npm-stage"
          python3 codex-cli/scripts/stage_rust_release.py \
          ./scripts/stage_npm_packages.py \
            --release-version "${{ steps.release_name.outputs.name }}" \
            --tmp "${TMP_DIR}"
          mkdir -p dist/npm
          # Produce an npm-ready tarball using `npm pack` and store it in dist/npm.
          # We then rename it to a stable name used by our publishing script.
          (cd "$TMP_DIR" && npm pack --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
          mv "${GITHUB_WORKSPACE}"/dist/npm/*.tgz \
            "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
            --package codex \
            --package codex-responses-api-proxy \
            --package codex-sdk

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v2
@@ -212,3 +406,90 @@ jobs:
        with:
          tag: ${{ github.ref_name }}
          config: .github/dotslash-config.json

  # Publish to npm using OIDC authentication.
  # July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/
  # npm docs: https://docs.npmjs.com/trusted-publishers
  publish-npm:
    # Publish to npm for stable releases and alpha pre-releases with numeric suffixes.
    if: ${{ needs.release.outputs.should_publish_npm == 'true' }}
    name: publish-npm
    needs: release
    runs-on: ubuntu-latest
    permissions:
      id-token: write # Required for OIDC
      contents: read

    steps:
      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: 22
          registry-url: "https://registry.npmjs.org"
          scope: "@openai"

      # Trusted publishing requires npm CLI version 11.5.1 or later.
      - name: Update npm
        run: npm install -g npm@latest

      - name: Download npm tarballs from release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -euo pipefail
          version="${{ needs.release.outputs.version }}"
          tag="${{ needs.release.outputs.tag }}"
          mkdir -p dist/npm
          gh release download "$tag" \
            --repo "${GITHUB_REPOSITORY}" \
            --pattern "codex-npm-${version}.tgz" \
            --dir dist/npm
          gh release download "$tag" \
            --repo "${GITHUB_REPOSITORY}" \
            --pattern "codex-responses-api-proxy-npm-${version}.tgz" \
            --dir dist/npm
          gh release download "$tag" \
            --repo "${GITHUB_REPOSITORY}" \
            --pattern "codex-sdk-npm-${version}.tgz" \
            --dir dist/npm

      # No NODE_AUTH_TOKEN needed because we use OIDC.
      - name: Publish to npm
        env:
          VERSION: ${{ needs.release.outputs.version }}
          NPM_TAG: ${{ needs.release.outputs.npm_tag }}
        run: |
          set -euo pipefail
          tag_args=()
          if [[ -n "${NPM_TAG}" ]]; then
            tag_args+=(--tag "${NPM_TAG}")
          fi

          tarballs=(
            "codex-npm-${VERSION}.tgz"
            "codex-responses-api-proxy-npm-${VERSION}.tgz"
            "codex-sdk-npm-${VERSION}.tgz"
          )

          for tarball in "${tarballs[@]}"; do
            npm publish "${GITHUB_WORKSPACE}/dist/npm/${tarball}" "${tag_args[@]}"
          done

  update-branch:
    name: Update latest-alpha-cli branch
    permissions:
      contents: write
    needs: release
    runs-on: ubuntu-latest

    steps:
      - name: Update latest-alpha-cli branch
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -euo pipefail
          gh api \
            repos/${GITHUB_REPOSITORY}/git/refs/heads/latest-alpha-cli \
            -X PATCH \
            -f sha="${GITHUB_SHA}" \
            -F force=true

.github/workflows/sdk.yml (new file, 43 lines, vendored)
@@ -0,0 +1,43 @@
name: sdk

on:
  push:
    branches: [main]
  pull_request: {}

jobs:
  sdks:
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: 22
          cache: pnpm

      - uses: dtolnay/rust-toolchain@1.90

      - name: build codex
        run: cargo build --bin codex
        working-directory: codex-rs

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Build SDK packages
        run: pnpm -r --filter ./sdk/typescript run build

      - name: Lint SDK packages
        run: pnpm -r --filter ./sdk/typescript run lint

      - name: Test SDK packages
        run: pnpm -r --filter ./sdk/typescript run test
.github/workflows/shell-tool-mcp-ci.yml (new file, 48 lines, vendored)
@@ -0,0 +1,48 @@
name: shell-tool-mcp CI

on:
  push:
    paths:
      - "shell-tool-mcp/**"
      - ".github/workflows/shell-tool-mcp-ci.yml"
      - "pnpm-lock.yaml"
      - "pnpm-workspace.yaml"
  pull_request:
    paths:
      - "shell-tool-mcp/**"
      - ".github/workflows/shell-tool-mcp-ci.yml"
      - "pnpm-lock.yaml"
      - "pnpm-workspace.yaml"

env:
  NODE_VERSION: 22

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: "pnpm"

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Format check
        run: pnpm --filter @openai/codex-shell-tool-mcp run format

      - name: Run tests
        run: pnpm --filter @openai/codex-shell-tool-mcp test

      - name: Build
        run: pnpm --filter @openai/codex-shell-tool-mcp run build
.github/workflows/shell-tool-mcp.yml (new file, 405 lines, vendored)
@@ -0,0 +1,405 @@
name: shell-tool-mcp

on:
  workflow_call:
    inputs:
      release-version:
        description: Version to publish (x.y.z or x.y.z-alpha.N). Defaults to GITHUB_REF_NAME when it starts with rust-v.
        required: false
        type: string
      release-tag:
        description: Tag name to use when downloading release artifacts (defaults to rust-v<version>).
        required: false
        type: string
      publish:
        description: Whether to publish to npm when the version is releasable.
        required: false
        default: true
        type: boolean

env:
  NODE_VERSION: 22

jobs:
  metadata:
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.compute.outputs.version }}
      release_tag: ${{ steps.compute.outputs.release_tag }}
      should_publish: ${{ steps.compute.outputs.should_publish }}
      npm_tag: ${{ steps.compute.outputs.npm_tag }}
    steps:
      - name: Compute version and tags
        id: compute
        run: |
          set -euo pipefail

          version="${{ inputs.release-version }}"
          release_tag="${{ inputs.release-tag }}"

          if [[ -z "$version" ]]; then
            if [[ -n "$release_tag" && "$release_tag" =~ ^rust-v.+ ]]; then
              version="${release_tag#rust-v}"
            elif [[ "${GITHUB_REF_NAME:-}" =~ ^rust-v.+ ]]; then
              version="${GITHUB_REF_NAME#rust-v}"
              release_tag="${GITHUB_REF_NAME}"
            else
              echo "release-version is required when GITHUB_REF_NAME is not a rust-v tag."
              exit 1
            fi
          fi

          if [[ -z "$release_tag" ]]; then
            release_tag="rust-v${version}"
          fi

          npm_tag=""
          should_publish="false"
          if [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            should_publish="true"
          elif [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
            should_publish="true"
            npm_tag="alpha"
          fi

          echo "version=${version}" >> "$GITHUB_OUTPUT"
          echo "release_tag=${release_tag}" >> "$GITHUB_OUTPUT"
          echo "npm_tag=${npm_tag}" >> "$GITHUB_OUTPUT"
          echo "should_publish=${should_publish}" >> "$GITHUB_OUTPUT"

  rust-binaries:
    name: Build Rust - ${{ matrix.target }}
    needs: metadata
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    defaults:
      run:
        working-directory: codex-rs
    strategy:
      fail-fast: false
      matrix:
        include:
          - runner: macos-15-xlarge
            target: aarch64-apple-darwin
          - runner: macos-15-xlarge
            target: x86_64-apple-darwin
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            install_musl: true
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            install_musl: true
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - uses: dtolnay/rust-toolchain@1.90
        with:
          targets: ${{ matrix.target }}

      - if: ${{ matrix.install_musl }}
        name: Install musl build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y musl-tools pkg-config

      - name: Build exec server binaries
        run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper

      - name: Stage exec server binaries
        run: |
          dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}"
          mkdir -p "$dest"
          cp "target/${{ matrix.target }}/release/codex-exec-mcp-server" "$dest/"
          cp "target/${{ matrix.target }}/release/codex-execve-wrapper" "$dest/"

      - uses: actions/upload-artifact@v6
        with:
          name: shell-tool-mcp-rust-${{ matrix.target }}
          path: artifacts/**
          if-no-files-found: error

  bash-linux:
    name: Build Bash (Linux) - ${{ matrix.variant }} - ${{ matrix.target }}
    needs: metadata
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    container:
      image: ${{ matrix.image }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: ubuntu-24.04
            image: ubuntu:24.04
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: ubuntu-22.04
            image: ubuntu:22.04
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: debian-12
            image: debian:12
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: debian-11
            image: debian:11
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: centos-9
            image: quay.io/centos/centos:stream9
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: ubuntu-24.04
            image: arm64v8/ubuntu:24.04
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: ubuntu-22.04
            image: arm64v8/ubuntu:22.04
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: ubuntu-20.04
            image: arm64v8/ubuntu:20.04
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: debian-12
            image: arm64v8/debian:12
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: debian-11
            image: arm64v8/debian:11
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: centos-9
            image: quay.io/centos/centos:stream9
    steps:
      - name: Install build prerequisites
        shell: bash
        run: |
          set -euo pipefail
          if command -v apt-get >/dev/null 2>&1; then
            apt-get update
            DEBIAN_FRONTEND=noninteractive apt-get install -y git build-essential bison autoconf gettext
          elif command -v dnf >/dev/null 2>&1; then
            dnf install -y git gcc gcc-c++ make bison autoconf gettext
          elif command -v yum >/dev/null 2>&1; then
            yum install -y git gcc gcc-c++ make bison autoconf gettext
          else
            echo "Unsupported package manager in container"
            exit 1
          fi

      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Build patched Bash
        shell: bash
        run: |
          set -euo pipefail
          git clone --depth 1 https://github.com/bminor/bash /tmp/bash
          cd /tmp/bash
          git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
          git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
          git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
          ./configure --without-bash-malloc
          cores="$(command -v nproc >/dev/null 2>&1 && nproc || getconf _NPROCESSORS_ONLN)"
          make -j"${cores}"

          dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
          mkdir -p "$dest"
          cp bash "$dest/bash"

      - uses: actions/upload-artifact@v6
        with:
          name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
          path: artifacts/**
          if-no-files-found: error

  bash-darwin:
    name: Build Bash (macOS) - ${{ matrix.variant }} - ${{ matrix.target }}
    needs: metadata
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    strategy:
      fail-fast: false
      matrix:
        include:
          - runner: macos-15-xlarge
            target: aarch64-apple-darwin
            variant: macos-15
          - runner: macos-14
            target: aarch64-apple-darwin
            variant: macos-14
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Build patched Bash
        shell: bash
        run: |
          set -euo pipefail
          git clone --depth 1 https://github.com/bminor/bash /tmp/bash
          cd /tmp/bash
          git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
          git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
          git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
          ./configure --without-bash-malloc
          cores="$(getconf _NPROCESSORS_ONLN)"
          make -j"${cores}"

          dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
          mkdir -p "$dest"
          cp bash "$dest/bash"

      - uses: actions/upload-artifact@v6
        with:
          name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
          path: artifacts/**
          if-no-files-found: error

  package:
    name: Package npm module
    needs:
      - metadata
      - rust-binaries
      - bash-linux
      - bash-darwin
    runs-on: ubuntu-latest
    env:
      PACKAGE_VERSION: ${{ needs.metadata.outputs.version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10.8.1
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: ${{ env.NODE_VERSION }}

      - name: Install JavaScript dependencies
        run: pnpm install --frozen-lockfile

      - name: Build (shell-tool-mcp)
        run: pnpm --filter @openai/codex-shell-tool-mcp run build

      - name: Download build artifacts
        uses: actions/download-artifact@v7
        with:
          path: artifacts

      - name: Assemble staging directory
        id: staging
        shell: bash
        run: |
          set -euo pipefail
          staging="${STAGING_DIR}"
          mkdir -p "$staging" "$staging/vendor"
          cp shell-tool-mcp/README.md "$staging/"
          cp shell-tool-mcp/package.json "$staging/"
          cp -R shell-tool-mcp/bin "$staging/"

          found_vendor="false"
          shopt -s nullglob
          for vendor_dir in artifacts/*/vendor; do
            rsync -av "$vendor_dir/" "$staging/vendor/"
            found_vendor="true"
          done
          if [[ "$found_vendor" == "false" ]]; then
            echo "No vendor payloads were downloaded."
            exit 1
          fi

          node - <<'NODE'
          import fs from "node:fs";
          import path from "node:path";

          const stagingDir = process.env.STAGING_DIR;
          const version = process.env.PACKAGE_VERSION;
          const pkgPath = path.join(stagingDir, "package.json");
          const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
          pkg.version = version;
          fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n");
          NODE

          echo "dir=$staging" >> "$GITHUB_OUTPUT"
        env:
          STAGING_DIR: ${{ runner.temp }}/shell-tool-mcp

      - name: Ensure binaries are executable
        run: |
          set -euo pipefail
          staging="${{ steps.staging.outputs.dir }}"
          chmod +x \
            "$staging"/vendor/*/codex-exec-mcp-server \
            "$staging"/vendor/*/codex-execve-wrapper \
            "$staging"/vendor/*/bash/*/bash

      - name: Create npm tarball
        shell: bash
        run: |
          set -euo pipefail
          mkdir -p dist/npm
          staging="${{ steps.staging.outputs.dir }}"
          pack_info=$(cd "$staging" && npm pack --ignore-scripts --json --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
          filename=$(PACK_INFO="$pack_info" node -e 'const data = JSON.parse(process.env.PACK_INFO); console.log(data[0].filename);')
          mv "dist/npm/${filename}" "dist/npm/codex-shell-tool-mcp-npm-${PACKAGE_VERSION}.tgz"

      - uses: actions/upload-artifact@v6
        with:
          name: codex-shell-tool-mcp-npm
          path: dist/npm/codex-shell-tool-mcp-npm-${{ env.PACKAGE_VERSION }}.tgz
          if-no-files-found: error

  publish:
    name: Publish npm package
    needs:
      - metadata
      - package
    if: ${{ inputs.publish && needs.metadata.outputs.should_publish == 'true' }}
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: read
    steps:
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10.8.1
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: ${{ env.NODE_VERSION }}
          registry-url: https://registry.npmjs.org
          scope: "@openai"

      - name: Update npm
        run: npm install -g npm@latest

      - name: Download npm tarball
        uses: actions/download-artifact@v7
        with:
          name: codex-shell-tool-mcp-npm
          path: dist/npm

      - name: Publish to npm
        env:
          NPM_TAG: ${{ needs.metadata.outputs.npm_tag }}
          VERSION: ${{ needs.metadata.outputs.version }}
        shell: bash
        run: |
          set -euo pipefail
          tag_args=()
          if [[ -n "${NPM_TAG}" ]]; then
            tag_args+=(--tag "${NPM_TAG}")
          fi
          npm publish "dist/npm/codex-shell-tool-mcp-npm-${VERSION}.tgz" "${tag_args[@]}"
.gitignore (9 lines changed, vendored)
@@ -30,6 +30,7 @@ result
# cli tools
CLAUDE.md
.claude/
AGENTS.override.md

# caches
.cache/
@@ -63,6 +64,9 @@ apply_patch/
# coverage
coverage/

# personal files
personal/

# os
.DS_Store
Thumbs.db
@@ -81,3 +85,8 @@ CHANGELOG.ignore.md
# nix related
.direnv
.envrc

# Python bytecode files
__pycache__/
*.pyc

.vscode/extensions.json (6 lines changed, vendored)
@@ -1,5 +1,11 @@
{
  "recommendations": [
    "rust-lang.rust-analyzer",
    "tamasfe.even-better-toml",
    "vadimcn.vscode-lldb",

    // Useful if touching files in .github/workflows, though most
    // contributors will not be doing that?
    // "github.vscode-github-actions",
  ]
}

.vscode/settings.json (1 line changed, vendored)
@@ -3,6 +3,7 @@
  "rust-analyzer.check.command": "clippy",
  "rust-analyzer.check.extraArgs": ["--all-features", "--tests"],
  "rust-analyzer.rustfmt.extraArgs": ["--config", "imports_granularity=Item"],
  "rust-analyzer.cargo.targetDir": "${workspaceFolder}/codex-rs/target/rust-analyzer",
  "[rust]": {
    "editor.defaultFormatter": "rust-lang.rust-analyzer",
    "editor.formatOnSave": true,

AGENTS.md (69 lines changed)
@@ -2,15 +2,23 @@

In the codex-rs folder where the rust code lives:

- Crate names are prefixed with `codex-`. For examole, the `core` folder's crate is named `codex-core`
- Crate names are prefixed with `codex-`. For example, the `core` folder's crate is named `codex-core`
- When using format! and you can inline variables into {}, always do that.
- Install any commands the repo relies on (for example `just`, `rg`, or `cargo-insta`) if they aren't already available before running instructions here.
- Never add or modify any code related to `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` or `CODEX_SANDBOX_ENV_VAR`.
- You operate in a sandbox where `CODEX_SANDBOX_NETWORK_DISABLED=1` will be set whenever you use the `shell` tool. Any existing code that uses `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` was authored with this fact in mind. It is often used to early exit out of tests that the author knew you would not be able to run given your sandbox limitations.
- Similarly, when you spawn a process using Seatbelt (`/usr/bin/sandbox-exec`), `CODEX_SANDBOX=seatbelt` will be set on the child process. Integration tests that want to run Seatbelt themselves cannot be run under Seatbelt, so checks for `CODEX_SANDBOX=seatbelt` are also often used to early exit out of tests, as appropriate.
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.

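To make the conventions above concrete, here is a small self-contained sketch (the struct and values are invented for illustration, not taken from this repo):

```rust
#[derive(Debug, PartialEq)]
struct BuildConfig {
    enabled: bool,
    verbose: bool,
}

fn main() {
    // Inline format! args rather than writing format!("building {}", name).
    let name = "codex-core";
    let msg = format!("building {name}");

    // Collapse nested ifs into a single condition.
    let cfg = BuildConfig { enabled: true, verbose: true };
    if cfg.enabled && cfg.verbose {
        println!("{msg}");
    }

    // Prefer a method reference over a closure like |s| s.to_string().
    let crates = ["codex-core", "codex-tui"];
    let names: Vec<String> = crates.iter().map(ToString::to_string).collect();
    assert_eq!(names.len(), 2);

    // In tests, compare whole objects instead of asserting field by field.
    let actual = BuildConfig { enabled: true, verbose: true };
    let expected = BuildConfig { enabled: true, verbose: true };
    assert_eq!(actual, expected);
}
```
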
Run `just fmt` (in `codex-rs` directory) automatically after making Rust code changes; do not ask for approval to run it. Before finalizing a change to `codex-rs`, run `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates. Additionally, run the tests:

Before finalizing a change to `codex-rs`, run `just fmt` (in `codex-rs` directory) to format the code and `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Additionally, run the tests:
1. Run the test for the specific project that was changed. For example, if changes were made in `codex-rs/tui`, run `cargo test -p codex-tui`.
2. Once those pass, if any changes were made in common, core, or protocol, run the complete test suite with `cargo test --all-features`.
When running interactively, ask the user before running `just fix` to finalize. `just fmt` does not require approval. Project-specific or individual tests can be run without asking the user, but do ask the user before running the complete test suite.

## TUI style conventions

@@ -25,7 +33,28 @@ See `codex-rs/tui/styles.md`.
- Example: patch summary file lines
- Desired: vec![" └ ".into(), "M".red(), " ".dim(), "tui/src/app.rs".dim()]

## Snapshot tests
### TUI Styling (ratatui)

- Prefer Stylize helpers: use "text".dim(), .bold(), .cyan(), .italic(), .underlined() instead of manual Style where possible.
- Prefer simple conversions: use "text".into() for spans and vec![…].into() for lines; when inference is ambiguous (e.g., Paragraph::new/Cell::from), use Line::from(spans) or Span::from(text).
- Computed styles: if the Style is computed at runtime, using `Span::styled` is OK (`Span::from(text).set_style(style)` is also acceptable).
- Avoid hardcoded white: do not use `.white()`; prefer the default foreground (no color).
- Chaining: combine helpers by chaining for readability (e.g., url.cyan().underlined()).
- Single items: prefer "text".into(); use Line::from(text) or Span::from(text) only when the target type isn’t obvious from context, or when using .into() would require extra type annotations.
- Building lines: use vec![…].into() to construct a Line when the target type is obvious and no extra type annotations are needed; otherwise use Line::from(vec![…]).
- Avoid churn: don’t refactor between equivalent forms (Span::styled ↔ set_style, Line::from ↔ .into()) without a clear readability or functional gain; follow file‑local conventions and do not introduce type annotations solely to satisfy .into().
- Compactness: prefer the form that stays on one line after rustfmt; if only one of Line::from(vec![…]) or vec![…].into() avoids wrapping, choose that. If both wrap, pick the one with fewer wrapped lines.

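For illustration, a minimal sketch of a line built this way, assuming ratatui's `Stylize` trait is in scope (the content strings are made up):

```rust
use ratatui::style::Stylize;
use ratatui::text::Line;

fn sample_line(url: &str) -> Line<'_> {
    // Stylize helpers plus chaining; default foreground instead of .white().
    vec![
        " └ ".into(),
        "M".red(),
        " ".dim(),
        url.cyan().underlined(),
    ]
    .into()
}
```
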
### Text wrapping

- Always use textwrap::wrap to wrap plain strings.
- If you have a ratatui Line and you want to wrap it, use the helpers in tui/src/wrapping.rs, e.g. word_wrap_lines / word_wrap_line.
- If you need to indent wrapped lines, use the initial_indent / subsequent_indent options from RtOptions if you can, rather than writing custom logic.
- If you have a list of lines and you need to prefix them all with some prefix (optionally different on the first vs subsequent lines), use the `prefix_lines` helper from line_utils.

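For plain strings, a rough sketch of what this looks like with the `textwrap` crate's `wrap` and `Options` APIs (widths and text here are arbitrary; the repo-local TUI helpers mentioned above are not shown):

```rust
use textwrap::{wrap, Options};

fn main() {
    // Simple wrapping of a plain string.
    for line in wrap("a long paragraph that needs to be wrapped", 20) {
        println!("{line}");
    }

    // Indent continuation lines via options rather than custom logic.
    let opts = Options::new(20)
        .initial_indent("- ")
        .subsequent_indent("  ");
    for line in wrap("a long bullet item that wraps onto later lines", opts) {
        println!("{line}");
    }
}
```
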
## Tests

### Snapshot tests

This repo uses snapshot tests (via `insta`), especially in `codex-rs/tui`, to validate rendered output. When UI or text output changes intentionally, update the snapshots as follows:

@@ -39,4 +68,38 @@ This repo uses snapshot tests (via `insta`), especially in `codex-rs/tui`, to va
- `cargo insta accept -p codex-tui`

If you don’t have the tool:

- `cargo install cargo-insta`

### Test assertions

- Tests should use pretty_assertions::assert_eq for clearer diffs. Import this at the top of the test module if it isn't already.
- Prefer deep equals comparisons whenever possible. Perform `assert_eq!()` on entire objects, rather than individual fields.
- Avoid mutating process environment in tests; prefer passing environment-derived flags or dependencies from above.

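A short sketch of this pattern (the struct and values are hypothetical):

```rust
#[derive(Debug, PartialEq)]
struct SessionSummary {
    id: String,
    message_count: usize,
}

#[cfg(test)]
mod tests {
    use super::SessionSummary;
    // Shadow std's assert_eq! with the pretty-diff version.
    use pretty_assertions::assert_eq;

    #[test]
    fn compares_whole_objects() {
        let actual = SessionSummary { id: "s-1".into(), message_count: 2 };
        let expected = SessionSummary { id: "s-1".into(), message_count: 2 };
        // One deep comparison, not an assert_eq! per field.
        assert_eq!(actual, expected);
    }
}
```
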
### Integration tests (core)

- Prefer the utilities in `core_test_support::responses` when writing end-to-end Codex tests.

- All `mount_sse*` helpers return a `ResponseMock`; hold onto it so you can assert against outbound `/responses` POST bodies.
- Use `ResponseMock::single_request()` when a test should only issue one POST, or `ResponseMock::requests()` to inspect every captured `ResponsesRequest`.
- `ResponsesRequest` exposes helpers (`body_json`, `input`, `function_call_output`, `custom_tool_call_output`, `call_output`, `header`, `path`, `query_param`) so assertions can target structured payloads instead of manual JSON digging.
- Build SSE payloads with the provided `ev_*` constructors and the `sse(...)` helper.
- Prefer `wait_for_event` over `wait_for_event_with_timeout`.
- Prefer `mount_sse_once` over `mount_sse_once_match` or `mount_sse_sequence`

- Typical pattern:

```rust
let mock = responses::mount_sse_once(&server, responses::sse(vec![
    responses::ev_response_created("resp-1"),
    responses::ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
    responses::ev_completed("resp-1"),
])).await;

codex.submit(Op::UserTurn { ... }).await?;

// Assert request body if needed.
let request = mock.single_request();
// assert using request.function_call_output(call_id) or request.json_body() or other helpers.
```

CHANGELOG.md (212 lines changed)
@@ -1,211 +1 @@
# Changelog

You can install any of these versions: `npm install -g codex@version`

## `0.1.2505172129`

### 🪲 Bug Fixes

- Add node version check (#1007)
- Persist token after refresh (#1006)

## `0.1.2505171619`

- `codex --login` + `codex --free` (#998)

## `0.1.2505161800`

- Sign in with chatgpt credits (#974)
- Add support for OpenAI tool type, local_shell (#961)

## `0.1.2505161243`

- Sign in with chatgpt (#963)
- Session history viewer (#912)
- Apply patch issue when using different cwd (#942)
- Diff command for filenames with special characters (#954)

## `0.1.2505160811`

- `codex-mini-latest` (#951)

## `0.1.2505140839`

### 🪲 Bug Fixes

- Gpt-4.1 apply_patch handling (#930)
- Add support for fileOpener in config.json (#911)
- Patch in #366 and #367 for marked-terminal (#916)
- Remember to set lastIndex = 0 on shared RegExp (#918)
- Always load version from package.json at runtime (#909)
- Tweak the label for citations for better rendering (#919)
- Tighten up some logic around session timestamps and ids (#922)
- Change EventMsg enum so every variant takes a single struct (#925)
- Reasoning default to medium, show workdir when supplied (#931)
- Test_dev_null_write() was not using echo as intended (#923)

## `0.1.2504301751`

### 🚀 Features

- User config api key (#569)
- `@mention` files in codex (#701)
- Add `--reasoning` CLI flag (#314)
- Lower default retry wait time and increase number of tries (#720)
- Add common package registries domains to allowed-domains list (#414)

### 🪲 Bug Fixes

- Insufficient quota message (#758)
- Input keyboard shortcut opt+delete (#685)
- `/diff` should include untracked files (#686)
- Only allow running without sandbox if explicitly marked in safe container (#699)
- Tighten up check for /usr/bin/sandbox-exec (#710)
- Check if sandbox-exec is available (#696)
- Duplicate messages in quiet mode (#680)

## `0.1.2504251709`

### 🚀 Features

- Add openai model info configuration (#551)
- Added provider to run quiet mode function (#571)
- Create parent directories when creating new files (#552)
- Print bug report URL in terminal instead of opening browser (#510) (#528)
- Add support for custom provider configuration in the user config (#537)
- Add support for OpenAI-Organization and OpenAI-Project headers (#626)
- Add specific instructions for creating API keys in error msg (#581)
- Enhance toCodePoints to prevent potential unicode 14 errors (#615)
- More native keyboard navigation in multiline editor (#655)
- Display error on selection of invalid model (#594)

### 🪲 Bug Fixes

- Model selection (#643)
- Nits in apply patch (#640)
- Input keyboard shortcuts (#676)
- `apply_patch` unicode characters (#625)
- Don't clear turn input before retries (#611)
- More loosely match context for apply_patch (#610)
- Update bug report template - there is no --revision flag (#614)
- Remove outdated copy of text input and external editor feature (#670)
- Remove unreachable "disableResponseStorage" logic flow introduced in #543 (#573)
- Non-openai mode - fix for gemini content: null, fix 429 to throw before stream (#563)
- Only allow going up in history when not already in history if input is empty (#654)
- Do not grant "node" user sudo access when using run_in_container.sh (#627)
- Update scripts/build_container.sh to use pnpm instead of npm (#631)
- Update lint-staged config to use pnpm --filter (#582)
- Non-openai mode - don't default temp and top_p (#572)
- Fix error catching when checking for updates (#597)
- Close stdin when running an exec tool call (#636)

## `0.1.2504221401`

### 🚀 Features

- Show actionable errors when api keys are missing (#523)
- Add CLI `--version` flag (#492)

### 🪲 Bug Fixes

- Agent loop for ZDR (`disableResponseStorage`) (#543)
- Fix relative `workdir` check for `apply_patch` (#556)
- Minimal mid-stream #429 retry loop using existing back-off (#506)
- Inconsistent usage of base URL and API key (#507)
- Remove requirement for api key for ollama (#546)
- Support `[provider]_BASE_URL` (#542)

## `0.1.2504220136`

### 🚀 Features

- Add support for ZDR orgs (#481)
- Include fractional portion of chunk that exceeds stdout/stderr limit (#497)

## `0.1.2504211509`

### 🚀 Features

- Support multiple providers via Responses-Completion transformation (#247)
- Add user-defined safe commands configuration and approval logic #380 (#386)
- Allow switching approval modes when prompted to approve an edit/command (#400)
- Add support for `/diff` command autocomplete in TerminalChatInput (#431)
- Auto-open model selector if user selects deprecated model (#427)
- Read approvalMode from config file (#298)
- `/diff` command to view git diff (#426)
- Tab completions for file paths (#279)
- Add /command autocomplete (#317)
- Allow multi-line input (#438)

### 🪲 Bug Fixes

- `full-auto` support in quiet mode (#374)
- Enable shell option for child process execution (#391)
- Configure husky and lint-staged for pnpm monorepo (#384)
- Command pipe execution by improving shell detection (#437)
- Name of the file not matching the name of the component (#354)
- Allow proper exit from new Switch approval mode dialog (#453)
- Ensure /clear resets context and exclude system messages from approximateTokenUsed count (#443)
- `/clear` now clears terminal screen and resets context left indicator (#425)
- Correct fish completion function name in CLI script (#485)
- Auto-open model-selector when model is not found (#448)
- Remove unnecessary isLoggingEnabled() checks (#420)
- Improve test reliability for `raw-exec` (#434)
- Unintended tear down of agent loop (#483)
- Remove extraneous type casts (#462)

## `0.1.2504181820`

### 🚀 Features

- Add `/bug` report command (#312)
- Notify when a newer version is available (#333)

### 🪲 Bug Fixes

- Update context left display logic in TerminalChatInput component (#307)
- Improper spawn of sh on Windows Powershell (#318)
- `/bug` report command, thinking indicator (#381)
- Include pnpm lock file (#377)

## `0.1.2504172351`

### 🚀 Features

- Add Nix flake for reproducible development environments (#225)

### 🪲 Bug Fixes

- Handle invalid commands (#304)
- Raw-exec-process-group.test improve reliability and error handling (#280)
- Canonicalize the writeable paths used in seatbelt policy (#275)

## `0.1.2504172304`

### 🚀 Features

- Add shell completion subcommand (#138)
- Add command history persistence (#152)
- Shell command explanation option (#173)
- Support bun fallback runtime for codex CLI (#282)
- Add notifications for MacOS using Applescript (#160)
- Enhance image path detection in input processing (#189)
- `--config`/`-c` flag to open global instructions in nvim (#158)
- Update position of cursor when navigating input history with arrow keys to the end of the text (#255)

### 🪲 Bug Fixes

- Correct word deletion logic for trailing spaces (Ctrl+Backspace) (#131)
- Improve Windows compatibility for CLI commands and sandbox (#261)
- Correct typos in thinking texts (transcendent & parroting) (#108)
- Add empty vite config file to prevent resolving to parent (#273)
- Update regex to better match the retry error messages (#266)
- Add missing "as" in prompt prefix in agent loop (#186)
- Allow continuing after interrupting assistant (#178)
- Standardize filename to kebab-case 🐍➡️🥙 (#302)
- Small update to bug report template (#288)
- Duplicated message on model change (#276)
- Typos in prompts and comments (#195)
- Check workdir before spawn (#221)

<!-- generated - do not edit -->
The changelog can be found on the [releases page](https://github.com/openai/codex/releases).

README.md (721 lines changed)
@@ -1,77 +1,30 @@
|
||||
<h1 align="center">OpenAI Codex CLI</h1>
|
||||
<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install --cask codex</code></p>
|
||||
|
||||
<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install codex</code></p>
|
||||
|
||||
<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, see <a href="https://chatgpt.com/codex">chatgpt.com/codex</a>.</p>
|
||||
<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.
|
||||
</br>
|
||||
</br>If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE</a>
|
||||
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a></p>
|
||||
|
||||
<p align="center">
|
||||
<img src="./.github/codex-cli-splash.png" alt="Codex CLI splash" width="50%" />
|
||||
<img src="./.github/codex-cli-splash.png" alt="Codex CLI splash" width="80%" />
|
||||
</p>
|
||||
|
||||

---

<details>
<summary><strong>Table of contents</strong></summary>

<!-- Begin ToC -->

- [Quickstart](#quickstart)
- [Installing and running Codex CLI](#installing-and-running-codex-cli)
- [Using Codex with your ChatGPT plan](#using-codex-with-your-chatgpt-plan)
- [Connecting on a "Headless" Machine](#connecting-on-a-headless-machine)
- [Authenticate locally and copy your credentials to the "headless" machine](#authenticate-locally-and-copy-your-credentials-to-the-headless-machine)
- [Connecting through VPS or remote](#connecting-through-vps-or-remote)
- [Usage-based billing alternative: Use an OpenAI API key](#usage-based-billing-alternative-use-an-openai-api-key)
- [Forcing a specific auth method (advanced)](#forcing-a-specific-auth-method-advanced)
- [Choosing Codex's level of autonomy](#choosing-codexs-level-of-autonomy)
- [**1. Read/write**](#1-readwrite)
- [**2. Read-only**](#2-read-only)
- [**3. Advanced configuration**](#3-advanced-configuration)
- [Can I run without ANY approvals?](#can-i-run-without-any-approvals)
- [Fine-tuning in `config.toml`](#fine-tuning-in-configtoml)
- [Example prompts](#example-prompts)
- [Running with a prompt as input](#running-with-a-prompt-as-input)
- [Using Open Source Models](#using-open-source-models)
- [Platform sandboxing details](#platform-sandboxing-details)
- [Experimental technology disclaimer](#experimental-technology-disclaimer)
- [System requirements](#system-requirements)
- [CLI reference](#cli-reference)
- [Memory & project docs](#memory--project-docs)
- [Non-interactive / CI mode](#non-interactive--ci-mode)
- [Model Context Protocol (MCP)](#model-context-protocol-mcp)
- [Tracing / verbose logging](#tracing--verbose-logging)
- [DotSlash](#dotslash)
- [Configuration](#configuration)
- [FAQ](#faq)
- [Zero data retention (ZDR) usage](#zero-data-retention-zdr-usage)
- [Codex open source fund](#codex-open-source-fund)
- [Contributing](#contributing)
- [Development workflow](#development-workflow)
- [Writing high-impact code changes](#writing-high-impact-code-changes)
- [Opening a pull request](#opening-a-pull-request)
- [Review process](#review-process)
- [Community values](#community-values)
- [Getting help](#getting-help)
- [Contributor license agreement (CLA)](#contributor-license-agreement-cla)
- [Quick fixes](#quick-fixes)
- [Releasing `codex`](#releasing-codex)
- [Security & responsible AI](#security--responsible-ai)
- [License](#license)

<!-- End ToC -->

</details>

---

## Quickstart

### Installing and running Codex CLI

Install globally with your preferred package manager:

Install globally with your preferred package manager. If you use npm:

```shell
npm install -g @openai/codex # Alternatively: `brew install codex`
npm install -g @openai/codex
```

Alternatively, if you use Homebrew:

```shell
brew install --cask codex
```

Then simply run `codex` to get started:

@@ -80,6 +33,8 @@

```shell
codex
```

If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-upgrade-codex-isnt-upgrading-me).

<details>
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>

@@ -99,604 +54,56 @@ Each archive contains a single entry with the platform baked into the name (e.g. …

### Using Codex with your ChatGPT plan

<p align="center">
  <img src="./.github/codex-cli-login.png" alt="Codex CLI login" width="50%" />
  <img src="./.github/codex-cli-login.png" alt="Codex CLI login" width="80%" />
</p>

Run `codex` and select **Sign in with ChatGPT**. You'll need a Plus, Pro, or Team ChatGPT account, and will get access to our latest models, including `gpt-5`, at no extra cost to your plan. (Enterprise is coming soon.)

> Important: If you've used the Codex CLI before, follow these steps to migrate from usage-based billing with your API key (a sketch of the commands follows):
>
> 1. Update the CLI and ensure `codex --version` is `0.20.0` or later
> 2. Delete `~/.codex/auth.json` (this should be `C:\Users\USERNAME\.codex\auth.json` on Windows)
> 3. Run `codex login` again
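
In practice the migration is just a few commands; a minimal sketch for macOS/Linux (the `auth.json` path differs on Windows, as noted above):

```shell
codex --version        # confirm the CLI reports 0.20.0 or later
rm ~/.codex/auth.json  # drop the old API-key credential
codex login            # re-authenticate with ChatGPT
```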

If you encounter problems with the login flow, please comment on [this issue](https://github.com/openai/codex/issues/1243).

### Connecting on a "Headless" Machine

Today, the login process entails running a server on `localhost:1455`. If you are on a "headless" server, such as a Docker container, or are `ssh`'d into a remote machine, loading `localhost:1455` in the browser on your local machine will not automatically connect to the webserver running on the _headless_ machine, so you must use one of the following workarounds:

#### Authenticate locally and copy your credentials to the "headless" machine

The easiest solution is likely to run through the `codex login` process on your local machine such that `localhost:1455` _is_ accessible in your web browser. When you complete the authentication process, an `auth.json` file should be available at `$CODEX_HOME/auth.json` (on Mac/Linux, `$CODEX_HOME` defaults to `~/.codex`, whereas on Windows it defaults to `%USERPROFILE%\.codex`).

Because the `auth.json` file is not tied to a specific host, once you complete the authentication flow locally, you can copy the `$CODEX_HOME/auth.json` file to the headless machine, and then `codex` should "just work" on that machine. Note that to copy a file to a Docker container, you can do:

```shell
# substitute MY_CONTAINER with the name or id of your Docker container:
CONTAINER_HOME=$(docker exec MY_CONTAINER printenv HOME)
docker exec MY_CONTAINER mkdir -p "$CONTAINER_HOME/.codex"
docker cp auth.json MY_CONTAINER:"$CONTAINER_HOME/.codex/auth.json"
```

whereas if you are `ssh`'d into a remote machine, you likely want to use [`scp`](https://en.wikipedia.org/wiki/Secure_copy_protocol):

```shell
ssh user@remote 'mkdir -p ~/.codex'
scp ~/.codex/auth.json user@remote:~/.codex/auth.json
```

or try this one-liner:

```shell
ssh user@remote 'mkdir -p ~/.codex && cat > ~/.codex/auth.json' < ~/.codex/auth.json
```

#### Connecting through VPS or remote

If you run Codex on a remote machine (VPS/server) without a local browser, the login helper starts a server on `localhost:1455` on the remote host. To complete login in your local browser, forward that port to your machine before starting the login flow:

```bash
# From your local machine
ssh -L 1455:localhost:1455 <user>@<remote-host>
```

Then, in that SSH session, run `codex` and select "Sign in with ChatGPT". When prompted, open the printed URL (it will be `http://localhost:1455/...`) in your local browser. The traffic will be tunneled to the remote server.

### Usage-based billing alternative: Use an OpenAI API key

If you prefer to pay as you go, you can still authenticate with your OpenAI API key by setting it as an environment variable:

```shell
export OPENAI_API_KEY="your-api-key-here"
```

Notes:

- This command only sets the key for your current terminal session, which we recommend. To set it for all future sessions, you can also add the `export` line to your shell's configuration file (e.g., `~/.zshrc`).
- If you have signed in with ChatGPT, Codex will default to using your ChatGPT credits. If you wish to use your API key, use the `/logout` command to clear your ChatGPT authentication.

#### Forcing a specific auth method (advanced)

You can explicitly choose which authentication method Codex should prefer when both are available.

- To always use your API key (even when ChatGPT auth exists), set:

```toml
# ~/.codex/config.toml
preferred_auth_method = "apikey"
```

Or override ad-hoc via the CLI:

```bash
codex --config preferred_auth_method="apikey"
```

- To prefer ChatGPT auth (default), set:

```toml
# ~/.codex/config.toml
preferred_auth_method = "chatgpt"
```

Notes:

- When `preferred_auth_method = "apikey"` and an API key is available, the login screen is skipped.
- When `preferred_auth_method = "chatgpt"` (default), Codex prefers ChatGPT auth if present; if only an API key is present, it will use the API key. Certain account types may also require API-key mode.

### Choosing Codex's level of autonomy

We always recommend running Codex in its default sandbox, which gives you strong guardrails around what the agent can do. The default sandbox prevents it from editing files outside its workspace or from accessing the network.

When you launch Codex in a new folder, it detects whether the folder is version controlled and recommends one of two levels of autonomy:

#### **1. Read/write**

- Codex can run commands and write files in the workspace without approval.
- To write files in other folders, access the network, update git, or perform other actions protected by the sandbox, Codex will need your permission.
- By default, the workspace includes the current directory, as well as temporary directories like `/tmp`. You can see what directories are in the workspace with the `/status` command. See the docs for how to customize this behavior.
- Advanced: You can manually specify this configuration by running `codex --sandbox workspace-write --ask-for-approval on-request`
- This is the recommended default for version-controlled folders.

#### **2. Read-only**

- Codex can run read-only commands without approval.
- To edit files, access the network, or perform other actions protected by the sandbox, Codex will need your permission.
- Advanced: You can manually specify this configuration by running `codex --sandbox read-only --ask-for-approval on-request`
- This is the recommended default for non-version-controlled folders.

#### **3. Advanced configuration**

Codex gives you fine-grained control over the sandbox with the `--sandbox` option, and over when it requests approval with the `--ask-for-approval` option. Run `codex help` for more on these options; a sketch of some combinations follows.
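
For instance, the two options compose freely (a sketch; the sandbox and approval values are the ones documented in this section):

```shell
# answer questions without touching anything, and never prompt
codex --sandbox read-only --ask-for-approval never

# edit the workspace freely, but ask before anything riskier
codex --sandbox workspace-write --ask-for-approval on-request
```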

#### Can I run without ANY approvals?

Yes, run `codex` non-interactively with `--ask-for-approval never`. This option works with all `--sandbox` options, so you still have full control over Codex's level of autonomy. It will make its best attempt with whatever constraints you provide. For example:

- Use `codex --ask-for-approval never --sandbox read-only` when you are running many agents to answer questions in parallel in the same workspace.
- Use `codex --ask-for-approval never --sandbox workspace-write` when you want the agent to non-interactively take time to produce the best outcome, with strong guardrails around its behavior.
- Use `codex --ask-for-approval never --sandbox danger-full-access` to dangerously give the agent full autonomy. Because this disables important safety mechanisms, we recommend against using this unless you are running Codex in an isolated environment.

#### Fine-tuning in `config.toml`

```toml
# approval mode
approval_policy = "untrusted"
sandbox_mode = "read-only"

# full-auto mode
approval_policy = "on-request"
sandbox_mode = "workspace-write"

# Optional: allow network in workspace-write mode
[sandbox_workspace_write]
network_access = true
```

You can also save presets as **profiles**, selected at launch as shown below:

```toml
[profiles.full_auto]
approval_policy = "on-request"
sandbox_mode = "workspace-write"

[profiles.readonly_quiet]
approval_policy = "never"
sandbox_mode = "read-only"
```
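
A profile is then picked with a single flag (the profile names here are the ones defined above):

```shell
codex --profile full_auto "fix the failing tests"
codex --profile readonly_quiet "summarize recent changes"
```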

### Example prompts

Below are a few bite-size examples you can copy-paste. Replace the text in quotes with your own task. See the [prompting guide](https://github.com/openai/codex/blob/main/codex-cli/examples/prompting_guide.md) for more tips and usage patterns.

| ✨  | What you type                                                                    | What happens                                                                |
| --- | -------------------------------------------------------------------------------- | --------------------------------------------------------------------------- |
| 1   | `codex "Refactor the Dashboard component to React Hooks"`                        | Codex rewrites the class component, runs `npm test`, and shows the diff.   |
| 2   | `codex "Generate SQL migrations for adding a users table"`                       | Infers your ORM, creates migration files, and runs them in a sandboxed DB. |
| 3   | `codex "Write unit tests for utils/date.ts"`                                     | Generates tests, executes them, and iterates until they pass.              |
| 4   | `codex "Bulk-rename *.jpeg -> *.jpg with git mv"`                                | Safely renames files and updates imports/usages.                           |
| 5   | `codex "Explain what this regex does: ^(?=.*[A-Z]).{8,}$"`                       | Outputs a step-by-step human explanation.                                  |
| 6   | `codex "Carefully review this repo, and propose 3 high impact well-scoped PRs"`  | Suggests impactful PRs in the current codebase.                            |
| 7   | `codex "Look for vulnerabilities and create a security review report"`           | Finds and explains security bugs.                                          |

## Running with a prompt as input

You can also run Codex CLI with a prompt as input:

```shell
codex "explain this codebase to me"
```

```shell
codex --full-auto "create the fanciest todo-list app"
```

That's it - Codex will scaffold a file, run it inside a sandbox, install any missing dependencies, and show you the live result. Approve the changes and they'll be committed to your working directory.

## Using Open Source Models

<details>
<summary><strong>Use <code>--profile</code> to use other models</strong></summary>

Codex also allows you to use other providers that support the OpenAI Chat Completions (or Responses) API.

To do so, you must first define custom [providers](./config.md#model_providers) in `~/.codex/config.toml`. For example, the provider for a standard Ollama setup would be defined as follows:

```toml
[model_providers.ollama]
name = "Ollama"
base_url = "http://localhost:11434/v1"
```

The `base_url` will have `/chat/completions` appended to it to build the full URL for the request.
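
For the Ollama provider above, that means requests land on `http://localhost:11434/v1/chat/completions`; you can sanity-check the endpoint directly (a sketch, with an illustrative model name):

```shell
curl -s http://localhost:11434/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "mistral", "messages": [{"role": "user", "content": "hello"}]}'
```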

For providers that also require an `Authorization` header of the form `Bearer SECRET`, an `env_key` can be specified, which indicates the environment variable to read to use as the value of `SECRET` when making a request:

```toml
[model_providers.openrouter]
name = "OpenRouter"
base_url = "https://openrouter.ai/api/v1"
env_key = "OPENROUTER_API_KEY"
```
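
With that provider defined, the secret is read from the named environment variable at launch; a minimal sketch:

```shell
export OPENROUTER_API_KEY="your-key-here"
codex --config model_provider="openrouter"
```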

Providers that speak the Responses API are also supported by adding `wire_api = "responses"` as part of the definition. Accessing OpenAI models via Azure is an example of such a provider, though it also requires specifying additional `query_params` that need to be appended to the request URL:

```toml
[model_providers.azure]
name = "Azure"
# Make sure you set the appropriate subdomain for this URL.
base_url = "https://YOUR_PROJECT_NAME.openai.azure.com/openai"
env_key = "AZURE_OPENAI_API_KEY" # Or "OPENAI_API_KEY", whichever you use.
# Newer versions appear to support the responses API, see https://github.com/openai/codex/pull/1321
query_params = { api-version = "2025-04-01-preview" }
wire_api = "responses"
```

Once you have defined a provider you wish to use, you can configure it as your default provider as follows:

```toml
model_provider = "azure"
```

> [!TIP]
> If you find yourself experimenting with a variety of models and providers, then you likely want to invest in defining a _profile_ for each configuration like so:

```toml
[profiles.o3]
model_provider = "azure"
model = "o3"

[profiles.mistral]
model_provider = "ollama"
model = "mistral"
```

This way, you can specify one command-line argument (e.g., `--profile o3`, `--profile mistral`) to override multiple settings together.

</details>

Codex can run fully locally against an OpenAI-compatible OSS host (like Ollama) using the `--oss` flag:

- Interactive UI: `codex --oss`
- Non-interactive (programmatic) mode: `echo "Refactor utils" | codex exec --oss`

Model selection when using `--oss`:

- If you omit `-m`/`--model`, Codex defaults to `-m gpt-oss:20b` and will verify it exists locally (downloading it if needed).
- To pick a different size, pass one of:
  - `-m "gpt-oss:20b"`
  - `-m "gpt-oss:120b"`

Point Codex at your own OSS host (a combined example follows this list):

- By default, `--oss` talks to `http://localhost:11434/v1`.
- To use a different host, set one of these environment variables before running Codex:
  - `CODEX_OSS_BASE_URL`, for example: `CODEX_OSS_BASE_URL="http://my-ollama.example.com:11434/v1" codex --oss -m gpt-oss:20b`
  - or `CODEX_OSS_PORT` (when the host is localhost): `CODEX_OSS_PORT=11434 codex --oss`
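
Putting those pieces together, a non-interactive run against a remote OSS host might look like this (host name illustrative; the flags and variable are the ones listed above):

```shell
CODEX_OSS_BASE_URL="http://my-ollama.example.com:11434/v1" \
  codex exec --oss -m gpt-oss:120b "Refactor utils"
```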

Advanced: you can persist this in your config instead of environment variables by overriding the built-in `oss` provider in `~/.codex/config.toml`:

```toml
[model_providers.oss]
name = "Open Source"
base_url = "http://my-ollama.example.com:11434/v1"
```

---

### Platform sandboxing details

By default, Codex CLI runs code and shell commands inside a restricted sandbox to protect your system.

> [!IMPORTANT]
> Not all tool calls are sandboxed. Specifically, **trusted Model Context Protocol (MCP) tool calls** are executed outside of the sandbox.
> This is intentional: MCP tools are explicitly configured and trusted by you, and they often need to connect to **external applications or services** (e.g. issue trackers, databases, messaging systems).
> Running them outside the sandbox allows Codex to integrate with these external systems without being blocked by sandbox restrictions.

The mechanism Codex uses to implement the sandbox policy depends on your OS:

- **macOS 12+** uses **Apple Seatbelt** and runs commands using `sandbox-exec` with a profile (`-p`) that corresponds to the `--sandbox` level that was specified.
- **Linux** uses a combination of Landlock/seccomp APIs to enforce the `sandbox` configuration.

Note that when running Linux in a containerized environment such as Docker, sandboxing may not work if the host/container configuration does not support the necessary Landlock/seccomp APIs. In such cases, we recommend configuring your Docker container so that it provides the sandbox guarantees you are looking for, and then running `codex` with `--sandbox danger-full-access` (or, more simply, the `--dangerously-bypass-approvals-and-sandbox` flag) within your container, as sketched below.
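
For instance, inside a container that already provides its own isolation (the flag is the one named in the paragraph above):

```shell
# rely on the container for isolation instead of Landlock/seccomp
codex --dangerously-bypass-approvals-and-sandbox "run the full test suite and fix failures"
```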

---

## Experimental technology disclaimer

Codex CLI is an experimental project under active development. It is not yet stable: it may contain bugs or incomplete features, and it may undergo breaking changes. We're building it in the open with the community and welcome:

- Bug reports
- Feature requests
- Pull requests
- Good vibes

Help us improve by filing issues or submitting PRs (see the section below for how to contribute)!

---

## System requirements

| Requirement                 | Details                                                         |
| --------------------------- | --------------------------------------------------------------- |
| Operating systems           | macOS 12+, Ubuntu 20.04+/Debian 10+, or Windows 11 **via WSL2** |
| Git (optional, recommended) | 2.23+ for built-in PR helpers                                   |
| RAM                         | 4 GB minimum (8 GB recommended)                                 |

---

## CLI reference

| Command            | Purpose                            | Example                         |
| ------------------ | ---------------------------------- | ------------------------------- |
| `codex`            | Interactive TUI                    | `codex`                         |
| `codex "..."`      | Initial prompt for interactive TUI | `codex "fix lint errors"`       |
| `codex exec "..."` | Non-interactive "automation mode"  | `codex exec "explain utils.ts"` |

Key flags: `--model/-m`, `--ask-for-approval/-a`.

---

## Memory & project docs

You can give Codex extra instructions and guidance using `AGENTS.md` files. Codex looks for `AGENTS.md` files in the following places, and merges them top-down (a sketch follows the list):

1. `~/.codex/AGENTS.md` - personal global guidance
2. `AGENTS.md` at repo root - shared project notes
3. `AGENTS.md` in the current working directory - sub-folder/feature specifics
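
Concretely, the merged guidance comes from up to three files (paths from the list above; the contents shown are illustrative):

```shell
cat ~/.codex/AGENTS.md       # e.g. "Prefer ripgrep over grep"
cat ./AGENTS.md              # e.g. "Run the test suite before proposing a diff"
cat ./my-feature/AGENTS.md   # e.g. notes specific to this sub-folder
```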

---

## Non-interactive / CI mode

Run Codex headless in pipelines. Example GitHub Action step:

```yaml
- name: Update changelog via Codex
  run: |
    npm install -g @openai/codex
    export OPENAI_API_KEY="${{ secrets.OPENAI_KEY }}"
    codex exec --full-auto "update CHANGELOG for next release"
```

## Model Context Protocol (MCP)

The Codex CLI can be configured to leverage MCP servers by defining an [`mcp_servers`](./codex-rs/config.md#mcp_servers) section in `~/.codex/config.toml`. It is intended to mirror how tools such as Claude and Cursor define `mcpServers` in their respective JSON config files, though the Codex format is slightly different since it uses TOML rather than JSON, e.g.:

```toml
# IMPORTANT: the top-level key is `mcp_servers` rather than `mcpServers`.
[mcp_servers.server-name]
command = "npx"
args = ["-y", "mcp-server"]
env = { "API_KEY" = "value" }
```

> [!TIP]
> It is somewhat experimental, but the Codex CLI can also be run as an MCP _server_ via `codex mcp`. If you launch it with an MCP client such as `npx @modelcontextprotocol/inspector codex mcp` and send it a `tools/list` request, you will see that there is only one tool, `codex`, that accepts a grab-bag of inputs, including a catch-all `config` map for anything you might want to override. Feel free to play around with it and provide feedback via GitHub issues.

## Tracing / verbose logging

Because Codex is written in Rust, it honors the `RUST_LOG` environment variable to configure its logging behavior.

The TUI defaults to `RUST_LOG=codex_core=info,codex_tui=info` and log messages are written to `~/.codex/log/codex-tui.log`, so you can leave the following running in a separate terminal to monitor log messages as they are written:

```
tail -F ~/.codex/log/codex-tui.log
```

By comparison, the non-interactive mode (`codex exec`) defaults to `RUST_LOG=error`, but messages are printed inline, so there is no need to monitor a separate file.
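
Either default can be overridden inline for a single run; a minimal sketch (the `RUST_LOG` filter syntax is described in the env_logger docs linked below):

```shell
RUST_LOG=codex_core=debug codex exec "explain utils.ts"
```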

See the Rust documentation on [`RUST_LOG`](https://docs.rs/env_logger/latest/env_logger/#enabling-logging) for more information on the configuration options.

---

### DotSlash

The GitHub Release also contains a [DotSlash](https://dotslash-cli.com/) file for the Codex CLI named `codex`. A DotSlash file is a lightweight artifact you can commit to source control to ensure all contributors use the same version of an executable, regardless of what platform they use for development.

</details>

<details>
<summary><strong>Build from source</strong></summary>

```bash
# Clone the repository and navigate to the root of the Cargo workspace.
git clone https://github.com/openai/codex.git
cd codex/codex-rs

# Install the Rust toolchain, if necessary.
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
source "$HOME/.cargo/env"
rustup component add rustfmt
rustup component add clippy

# Build Codex.
cargo build

# Launch the TUI with a sample prompt.
cargo run --bin codex -- "explain this codebase to me"

# After making changes, ensure the code is clean.
cargo fmt -- --config imports_granularity=Item
cargo clippy --tests

# Run the tests.
cargo test
```

</details>

---

## Configuration

Codex supports a rich set of configuration options documented in [`codex-rs/config.md`](./codex-rs/config.md).

By default, Codex loads its configuration from `~/.codex/config.toml`.

You can also use `--config` to set or override ad-hoc config values for individual invocations of `codex`, as shown below.
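
For example, a one-off override that never touches `config.toml` (the key is one documented in the configuration guide above):

```shell
codex --config sandbox_mode="workspace-write" "add a regression test"
```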

---

## FAQ

<details>
<summary>OpenAI released a model called Codex in 2021 - is this related?</summary>

In 2021, OpenAI released Codex, an AI system designed to generate code from natural language prompts. That original Codex model was deprecated as of March 2023 and is separate from the CLI tool.

</details>

<details>
<summary>Which models are supported?</summary>

Any model available with [Responses API](https://platform.openai.com/docs/api-reference/responses). The default is `o4-mini`, but pass `--model gpt-4.1` or set `model: gpt-4.1` in your config file to override.

</details>

<details>
<summary>Why does <code>o3</code> or <code>o4-mini</code> not work for me?</summary>

It's possible that your [API account needs to be verified](https://help.openai.com/en/articles/10910291-api-organization-verification) in order to start streaming responses and seeing chain of thought summaries from the API. If you're still running into issues, please let us know!

</details>

<details>
<summary>How do I stop Codex from editing my files?</summary>

Codex runs model-generated commands in a sandbox. If a proposed command or file change doesn't look right, you can simply type **n** to deny the command or give the model feedback.

</details>

<details>
<summary>Does it work on Windows?</summary>

Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex has been tested on macOS and Linux with Node 22.

</details>

---

## Zero data retention (ZDR) usage

Codex CLI **does** support OpenAI organizations with [Zero Data Retention (ZDR)](https://platform.openai.com/docs/guides/your-data#zero-data-retention) enabled. If your OpenAI organization has Zero Data Retention enabled and you still encounter errors such as:

```
OpenAI rejected the request. Error details: Status: 400, Code: unsupported_parameter, Type: invalid_request_error, Message: 400 Previous response cannot be used for this organization due to Zero Data Retention.
```

ensure you are running `codex` with `--config disable_response_storage=true`, or add this line to `~/.codex/config.toml` to avoid specifying the command-line option each time:

```toml
disable_response_storage = true
```

See [the configuration documentation on `disable_response_storage`](./codex-rs/config.md#disable_response_storage) for details.

---

## Codex open source fund

We're excited to launch a **$1 million initiative** supporting open source projects that use Codex CLI and other OpenAI models.

- Grants are awarded up to **$25,000** in API credits.
- Applications are reviewed **on a rolling basis**.

**Interested? [Apply here](https://openai.com/form/codex-open-source-fund/).**

---

## Contributing

This project is under active development and the code will likely change pretty significantly.

**At the moment, we only plan to prioritize reviewing external contributions for bugs or security fixes.**

If you want to add a new feature or change the behavior of an existing one, please open an issue proposing the feature and get approval from an OpenAI team member before spending time building it.

**New contributions that don't go through this process may be closed** if they aren't aligned with our current roadmap or conflict with other priorities/upcoming features.

### Development workflow

- Create a _topic branch_ from `main` - e.g. `feat/interactive-prompt`.
- Keep your changes focused. Multiple unrelated fixes should be opened as separate PRs.
- Following the [development setup](#development-workflow) instructions above, ensure your change is free of lint warnings and test failures.

### Writing high-impact code changes

1. **Start with an issue.** Open a new one or comment on an existing discussion so we can agree on the solution before code is written.
2. **Add or update tests.** Every new feature or bug-fix should come with test coverage that fails before your change and passes afterwards. 100% coverage is not required, but aim for meaningful assertions.
3. **Document behaviour.** If your change affects user-facing behaviour, update the README, inline help (`codex --help`), or relevant example projects.
4. **Keep commits atomic.** Each commit should compile and the tests should pass. This makes reviews and potential rollbacks easier.

### Opening a pull request

- Fill in the PR template (or include similar information) - **What? Why? How?**
- Run **all** checks locally (`cargo test && cargo clippy --tests && cargo fmt -- --config imports_granularity=Item`). CI failures that could have been caught locally slow down the process.
- Make sure your branch is up-to-date with `main` and that you have resolved merge conflicts.
- Mark the PR as **Ready for review** only when you believe it is in a merge-able state.

### Review process

1. One maintainer will be assigned as a primary reviewer.
2. If your PR adds a new feature that was not previously discussed and approved, we may choose to close your PR (see [Contributing](#contributing)).
3. We may ask for changes - please do not take this personally. We value the work, but we also value consistency and long-term maintainability.
4. When there is consensus that the PR meets the bar, a maintainer will squash-and-merge.

### Community values

- **Be kind and inclusive.** Treat others with respect; we follow the [Contributor Covenant](https://www.contributor-covenant.org/).
- **Assume good intent.** Written communication is hard - err on the side of generosity.
- **Teach & learn.** If you spot something confusing, open an issue or PR with improvements.

### Getting help

If you run into problems setting up the project, would like feedback on an idea, or just want to say _hi_ - please open a Discussion or jump into the relevant issue. We are happy to help.

Together we can make Codex CLI an incredible tool. **Happy hacking!** :rocket:

### Contributor license agreement (CLA)

All contributors **must** accept the CLA. The process is lightweight:

1. Open your pull request.
2. Paste the following comment (or reply `recheck` if you've signed before):

   ```text
   I have read the CLA Document and I hereby sign the CLA
   ```

3. The CLA-Assistant bot records your signature in the repo and marks the status check as passed.

No special Git commands, email attachments, or commit footers are required.

#### Quick fixes

| Scenario          | Command                                          |
| ----------------- | ------------------------------------------------ |
| Amend last commit | `git commit --amend -s --no-edit && git push -f` |

The **DCO check** blocks merges until every commit in the PR carries the footer (with a squash merge, that's just the one).

### Releasing `codex`

_For admins only._

Make sure you are on `main` and have no local changes. Then run:

```shell
VERSION=0.2.0 # Can also be 0.2.0-alpha.1 or any valid Rust version.
./codex-rs/scripts/create_github_release.sh "$VERSION"
```

This makes a local commit on top of `main` with `version` set to `$VERSION` in `codex-rs/Cargo.toml` (note that on `main`, we leave the version as `version = "0.0.0"`).

It then pushes the commit using the tag `rust-v${VERSION}`, which in turn kicks off [the release workflow](.github/workflows/rust-release.yml). This creates a new GitHub Release named `$VERSION`.

If everything looks good in the generated GitHub Release, uncheck the **pre-release** box so it becomes the latest release.

Create a PR to update [`Formula/c/codex.rb`](https://github.com/Homebrew/homebrew-core/blob/main/Formula/c/codex.rb) on Homebrew.

---

## Security & responsible AI

Have you discovered a vulnerability or have concerns about model output? Please e-mail **security@openai.com** and we will respond promptly.

Run `codex` and select **Sign in with ChatGPT**. We recommend signing into your ChatGPT account to use Codex as part of your Plus, Pro, Team, Edu, or Enterprise plan. [Learn more about what's included in your ChatGPT plan](https://help.openai.com/en/articles/11369540-codex-in-chatgpt).

You can also use Codex with an API key, but this requires [additional setup](./docs/authentication.md#usage-based-billing-alternative-use-an-openai-api-key). If you previously used an API key for usage-based billing, see the [migration steps](./docs/authentication.md#migrating-from-usage-based-billing-api-key). If you're having trouble with login, please comment on [this issue](https://github.com/openai/codex/issues/1243).

### Model Context Protocol (MCP)

Codex can access MCP servers. To configure them, refer to the [config docs](./docs/config.md#mcp_servers).

### Configuration

Codex CLI supports a rich set of configuration options, with preferences stored in `~/.codex/config.toml`. For full configuration options, see [Configuration](./docs/config.md).

### Execpolicy

See the [Execpolicy quickstart](./docs/execpolicy.md) to set up rules that govern what commands Codex can execute.

### Docs & FAQ

- [**Getting started**](./docs/getting-started.md)
  - [CLI usage](./docs/getting-started.md#cli-usage)
  - [Slash Commands](./docs/slash_commands.md)
  - [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
  - [Example prompts](./docs/getting-started.md#example-prompts)
  - [Custom prompts](./docs/prompts.md)
  - [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
- [**Configuration**](./docs/config.md)
  - [Example config](./docs/example-config.md)
- [**Sandbox & approvals**](./docs/sandbox.md)
- [**Execpolicy quickstart**](./docs/execpolicy.md)
- [**Authentication**](./docs/authentication.md)
  - [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
  - [Login on a "Headless" machine](./docs/authentication.md#connecting-on-a-headless-machine)
- **Automating Codex**
  - [GitHub Action](https://github.com/openai/codex-action)
  - [TypeScript SDK](./sdk/typescript/README.md)
  - [Non-interactive mode (`codex exec`)](./docs/exec.md)
- [**Advanced**](./docs/advanced.md)
  - [Tracing / verbose logging](./docs/advanced.md#tracing--verbose-logging)
  - [Model Context Protocol (MCP)](./docs/advanced.md#model-context-protocol-mcp)
- [**Zero data retention (ZDR)**](./docs/zdr.md)
- [**Contributing**](./docs/contributing.md)
- [**Install & build**](./docs/install.md)
  - [System Requirements](./docs/install.md#system-requirements)
  - [DotSlash](./docs/install.md#dotslash)
  - [Build from source](./docs/install.md#build-from-source)
- [**FAQ**](./docs/faq.md)
- [**Open source fund**](./docs/open-source-fund.md)

---

@@ -4,7 +4,7 @@

header = """
# Changelog

You can install any of these versions: `npm install -g codex@version`
You can install any of these versions: `npm install -g @openai/codex@<version>`
"""

body = """

codex-cli/.gitignore (8 lines changed, vendored)

@@ -1,7 +1 @@

# Added by ./scripts/install_native_deps.sh
/bin/codex-aarch64-apple-darwin
/bin/codex-aarch64-unknown-linux-musl
/bin/codex-linux-sandbox-arm64
/bin/codex-linux-sandbox-x64
/bin/codex-x86_64-apple-darwin
/bin/codex-x86_64-unknown-linux-musl
/vendor/

@@ -208,7 +208,7 @@ The hardening mechanism Codex uses depends on your OS:

| Requirement                 | Details                                                         |
| --------------------------- | --------------------------------------------------------------- |
| Operating systems           | macOS 12+, Ubuntu 20.04+/Debian 10+, or Windows 11 **via WSL2** |
| Node.js                     | **22 or newer** (LTS recommended)                               |
| Node.js                     | **16 or newer** (Node 20 LTS recommended)                       |
| Git (optional, recommended) | 2.23+ for built-in PR helpers                                   |
| RAM                         | 4 GB minimum (8 GB recommended)                                 |

@@ -513,7 +513,7 @@ Codex runs model-generated commands in a sandbox. If a proposed command or file

<details>
<summary>Does it work on Windows?</summary>

Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex has been tested on macOS and Linux with Node 22.
Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex is regularly tested on macOS and Linux with Node 20+, and also supports Node 16.

</details>

codex-cli/bin/codex.js (71 lines changed; executable file → normal file)

@@ -1,6 +1,8 @@

#!/usr/bin/env node
// Unified entry point for the Codex CLI.

import { spawn } from "node:child_process";
import { existsSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";

@@ -40,10 +42,11 @@ switch (platform) {

  case "win32":
    switch (arch) {
      case "x64":
        targetTriple = "x86_64-pc-windows-msvc.exe";
        targetTriple = "x86_64-pc-windows-msvc";
        break;
      case "arm64":
        // We do not build this today, fall through...
        targetTriple = "aarch64-pc-windows-msvc";
        break;
      default:
        break;
    }

@@ -56,31 +59,16 @@ if (!targetTriple) {

  throw new Error(`Unsupported platform: ${platform} (${arch})`);
}

const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`);
// The native binary now lives under vendor/<target-triple>/codex/ inside the package.
const vendorRoot = path.join(__dirname, "..", "vendor");
const archRoot = path.join(vendorRoot, targetTriple);
const codexBinaryName = process.platform === "win32" ? "codex.exe" : "codex";
const binaryPath = path.join(archRoot, "codex", codexBinaryName);

// Use an asynchronous spawn instead of spawnSync so that Node is able to
// respond to signals (e.g. Ctrl-C / SIGINT) while the native binary is
// executing. This allows us to forward those signals to the child process
// and guarantees that when either the child terminates or the parent
// receives a fatal signal, both processes exit in a predictable manner.
const { spawn } = await import("child_process");

async function tryImport(moduleName) {
  try {
    // eslint-disable-next-line node/no-unsupported-features/es-syntax
    return await import(moduleName);
  } catch (err) {
    return null;
  }
}

async function resolveRgDir() {
  const ripgrep = await tryImport("@vscode/ripgrep");
  if (!ripgrep?.rgPath) {
    return null;
  }
  return path.dirname(ripgrep.rgPath);
}

function getUpdatedPath(newDirs) {
  const pathSep = process.platform === "win32" ? ";" : ":";

@@ -92,16 +80,49 @@ function getUpdatedPath(newDirs) {

  return updatedPath;
}

/**
 * Use heuristics to detect the package manager that was used to install Codex
 * in order to give the user a hint about how to update it.
 */
function detectPackageManager() {
  const userAgent = process.env.npm_config_user_agent || "";
  if (/\bbun\//.test(userAgent)) {
    return "bun";
  }

  const execPath = process.env.npm_execpath || "";
  if (execPath.includes("bun")) {
    return "bun";
  }

  if (
    __dirname.includes(".bun/install/global") ||
    __dirname.includes(".bun\\install\\global")
  ) {
    return "bun";
  }

  return userAgent ? "npm" : null;
}

const additionalDirs = [];
const rgDir = await resolveRgDir();
if (rgDir) {
  additionalDirs.push(rgDir);
// vendor/<target-triple>/path holds bundled helper tools (e.g. ripgrep) to prepend to PATH.
const pathDir = path.join(archRoot, "path");
if (existsSync(pathDir)) {
  additionalDirs.push(pathDir);
}
const updatedPath = getUpdatedPath(additionalDirs);

const env = { ...process.env, PATH: updatedPath };
const packageManagerEnvVar =
  detectPackageManager() === "bun"
    ? "CODEX_MANAGED_BY_BUN"
    : "CODEX_MANAGED_BY_NPM";
env[packageManagerEnvVar] = "1";

const child = spawn(binaryPath, process.argv.slice(2), {
  stdio: "inherit",
  env: { ...process.env, PATH: updatedPath, CODEX_MANAGED_BY_NPM: "1" },
  env,
});

child.on("error", (err) => {

codex-cli/bin/rg (79 lines, new executable file)

@@ -0,0 +1,79 @@

#!/usr/bin/env dotslash

{
  "name": "rg",
  "platforms": {
    "macos-aarch64": {
      "size": 1787248,
      "hash": "blake3",
      "digest": "8d9942032585ea8ee805937634238d9aee7b210069f4703c88fbe568e26fb78a",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-aarch64-apple-darwin/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-apple-darwin.tar.gz"
        }
      ]
    },
    "linux-aarch64": {
      "size": 2047405,
      "hash": "blake3",
      "digest": "0b670b8fa0a3df2762af2fc82cc4932f684ca4c02dbd1260d4f3133fd4b2a515",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-aarch64-unknown-linux-gnu/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-unknown-linux-gnu.tar.gz"
        }
      ]
    },
    "macos-x86_64": {
      "size": 2082672,
      "hash": "blake3",
      "digest": "e9b862fc8da3127f92791f0ff6a799504154ca9d36c98bf3e60a81c6b1f7289e",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-x86_64-apple-darwin/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-apple-darwin.tar.gz"
        }
      ]
    },
    "linux-x86_64": {
      "size": 2566310,
      "hash": "blake3",
      "digest": "f73cca4e54d78c31f832c7f6e2c0b4db8b04fa3eaa747915727d570893dbee76",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-x86_64-unknown-linux-musl/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-unknown-linux-musl.tar.gz"
        }
      ]
    },
    "windows-x86_64": {
      "size": 2058893,
      "hash": "blake3",
      "digest": "a8ce1a6fed4f8093ee997e57f33254e94b2cd18e26358b09db599c89882eadbd",
      "format": "zip",
      "path": "ripgrep-14.1.1-x86_64-pc-windows-msvc/rg.exe",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-pc-windows-msvc.zip"
        }
      ]
    },
    "windows-aarch64": {
      "size": 1667740,
      "hash": "blake3",
      "digest": "47b971a8c4fca1d23a4e7c19bd4d88465ebc395598458133139406d3bf85f3fa",
      "format": "zip",
      "path": "rg.exe",
      "providers": [
        {
          "url": "https://github.com/microsoft/ripgrep-prebuilt/releases/download/v13.0.0-13/ripgrep-v13.0.0-13-aarch64-pc-windows-msvc.zip"
        }
      ]
    }
  }
}

codex-cli/package-lock.json (103 lines changed, generated)

@@ -2,117 +2,16 @@

  "name": "@openai/codex",
  "version": "0.0.0-dev",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "@openai/codex",
      "version": "0.0.0-dev",
      "license": "Apache-2.0",
      "dependencies": {
        "@vscode/ripgrep": "^1.15.14"
      },
      "bin": {
        "codex": "bin/codex.js"
      },
      "engines": {
        "node": ">=20"
      }
    },
    "node_modules/@vscode/ripgrep": {
      "version": "1.15.14",
      "resolved": "https://registry.npmjs.org/@vscode/ripgrep/-/ripgrep-1.15.14.tgz",
      "integrity": "sha512-/G1UJPYlm+trBWQ6cMO3sv6b8D1+G16WaJH1/DSqw32JOVlzgZbLkDxRyzIpTpv30AcYGMkCf5tUqGlW6HbDWw==",
      "hasInstallScript": true,
      "license": "MIT",
      "dependencies": {
        "https-proxy-agent": "^7.0.2",
        "proxy-from-env": "^1.1.0",
        "yauzl": "^2.9.2"
      }
    },
    "node_modules/agent-base": {
      "version": "7.1.4",
      "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
      "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
      "license": "MIT",
      "engines": {
        "node": ">= 14"
      }
    },
    "node_modules/buffer-crc32": {
      "version": "0.2.13",
      "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
      "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
      "license": "MIT",
      "engines": {
        "node": "*"
      }
    },
    "node_modules/debug": {
      "version": "4.4.1",
      "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
      "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
      "license": "MIT",
      "dependencies": {
        "ms": "^2.1.3"
      },
      "engines": {
        "node": ">=6.0"
      },
      "peerDependenciesMeta": {
        "supports-color": {
          "optional": true
        }
      }
    },
    "node_modules/fd-slicer": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
      "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==",
      "license": "MIT",
      "dependencies": {
        "pend": "~1.2.0"
      }
    },
    "node_modules/https-proxy-agent": {
      "version": "7.0.6",
      "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
      "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
      "license": "MIT",
      "dependencies": {
        "agent-base": "^7.1.2",
        "debug": "4"
      },
      "engines": {
        "node": ">= 14"
      }
    },
    "node_modules/ms": {
      "version": "2.1.3",
      "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
      "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
      "license": "MIT"
    },
    "node_modules/pend": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
      "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
      "license": "MIT"
    },
    "node_modules/yauzl": {
      "version": "2.10.0",
      "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
      "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==",
      "license": "MIT",
      "dependencies": {
        "buffer-crc32": "~0.2.3",
        "fd-slicer": "~1.1.0"
        "node": ">=16"
      }
    }
  }

@@ -7,20 +7,15 @@

  },
  "type": "module",
  "engines": {
    "node": ">=20"
    "node": ">=16"
  },
  "files": [
    "bin",
    "dist"
    "vendor"
  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/openai/codex.git"
  },
  "dependencies": {
    "@vscode/ripgrep": "^1.15.14"
  },
  "devDependencies": {
    "prettier": "^3.3.3"
    "url": "git+https://github.com/openai/codex.git",
    "directory": "codex-cli"
  }
}

@@ -1,9 +1,19 @@

# npm releases

Run the following:

To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, build it as follows:
Use the staging helper in the repo root to generate npm tarballs for a release. For example, to stage the CLI, responses proxy, and SDK packages for version `0.6.0`:

```bash
./codex-cli/scripts/stage_rust_release.py --release-version 0.6.0
./scripts/stage_npm_packages.py \
  --release-version 0.6.0 \
  --package codex \
  --package codex-responses-api-proxy \
  --package codex-sdk
```

This downloads the native artifacts once, hydrates `vendor/` for each package, and writes the tarballs to `dist/npm/`.

If you need to invoke `build_npm_package.py` directly, run `codex-cli/scripts/install_native_deps.py` first and pass `--vendor-src` pointing to the directory that contains the populated `vendor/` tree.
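
A direct invocation might then look something like this (a sketch; the flags are taken from the argparse definition in the script below, and the paths are illustrative):

```shell
./codex-cli/scripts/install_native_deps.py
./codex-cli/scripts/build_npm_package.py \
  --package codex \
  --version 0.6.0 \
  --vendor-src ./vendor \
  --pack-output dist/npm/codex-0.6.0.tgz
```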

codex-cli/scripts/build_npm_package.py (321 lines, new executable file)

@@ -0,0 +1,321 @@

#!/usr/bin/env python3
"""Stage and optionally package the @openai/codex npm module."""

import argparse
import json
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path

SCRIPT_DIR = Path(__file__).resolve().parent
CODEX_CLI_ROOT = SCRIPT_DIR.parent
REPO_ROOT = CODEX_CLI_ROOT.parent
RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm"
CODEX_SDK_ROOT = REPO_ROOT / "sdk" / "typescript"

PACKAGE_NATIVE_COMPONENTS: dict[str, list[str]] = {
    "codex": ["codex", "rg"],
    "codex-responses-api-proxy": ["codex-responses-api-proxy"],
    "codex-sdk": ["codex"],
}
WINDOWS_ONLY_COMPONENTS: dict[str, list[str]] = {
    "codex": ["codex-windows-sandbox-setup", "codex-command-runner"],
}
COMPONENT_DEST_DIR: dict[str, str] = {
    "codex": "codex",
    "codex-responses-api-proxy": "codex-responses-api-proxy",
    "codex-windows-sandbox-setup": "codex",
    "codex-command-runner": "codex",
    "rg": "path",
}


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
    parser.add_argument(
        "--package",
        choices=("codex", "codex-responses-api-proxy", "codex-sdk"),
        default="codex",
        help="Which npm package to stage (default: codex).",
    )
    parser.add_argument(
        "--version",
        help="Version number to write to package.json inside the staged package.",
    )
    parser.add_argument(
        "--release-version",
        help=("Version to stage for npm release."),
    )
    parser.add_argument(
        "--staging-dir",
        type=Path,
        help=(
            "Directory to stage the package contents. Defaults to a new temporary directory "
            "if omitted. The directory must be empty when provided."
        ),
    )
    parser.add_argument(
        "--tmp",
        dest="staging_dir",
        type=Path,
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--pack-output",
        type=Path,
        help="Path where the generated npm tarball should be written.",
    )
    parser.add_argument(
        "--vendor-src",
        type=Path,
        help="Directory containing pre-installed native binaries to bundle (vendor root).",
    )
    return parser.parse_args()


def main() -> int:
    args = parse_args()

    package = args.package
    version = args.version
    release_version = args.release_version
    if release_version:
        if version and version != release_version:
            raise RuntimeError("--version and --release-version must match when both are provided.")
        version = release_version

    if not version:
        raise RuntimeError("Must specify --version or --release-version.")

    staging_dir, created_temp = prepare_staging_dir(args.staging_dir)

    try:
        stage_sources(staging_dir, version, package)

        vendor_src = args.vendor_src.resolve() if args.vendor_src else None
        native_components = PACKAGE_NATIVE_COMPONENTS.get(package, [])

        if native_components:
            if vendor_src is None:
                components_str = ", ".join(native_components)
                raise RuntimeError(
                    "Native components "
                    f"({components_str}) required for package '{package}'. Provide --vendor-src "
                    "pointing to a directory containing pre-installed binaries."
                )

            copy_native_binaries(vendor_src, staging_dir, package, native_components)

        if release_version:
            staging_dir_str = str(staging_dir)
            if package == "codex":
                print(
                    f"Staged version {version} for release in {staging_dir_str}\n\n"
                    "Verify the CLI:\n"
                    f"  node {staging_dir_str}/bin/codex.js --version\n"
                    f"  node {staging_dir_str}/bin/codex.js --help\n\n"
                )
            elif package == "codex-responses-api-proxy":
                print(
                    f"Staged version {version} for release in {staging_dir_str}\n\n"
                    "Verify the responses API proxy:\n"
                    f"  node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n"
                )
            else:
                print(
                    f"Staged version {version} for release in {staging_dir_str}\n\n"
                    "Verify the SDK contents:\n"
                    f"  ls {staging_dir_str}/dist\n"
                    f"  ls {staging_dir_str}/vendor\n"
                    "  node -e \"import('./dist/index.js').then(() => console.log('ok'))\"\n\n"
                )
        else:
            print(f"Staged package in {staging_dir}")

        if args.pack_output is not None:
            output_path = run_npm_pack(staging_dir, args.pack_output)
            print(f"npm pack output written to {output_path}")
    finally:
        if created_temp:
            # Preserve the staging directory for further inspection.
            pass

    return 0


def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
    if staging_dir is not None:
        staging_dir = staging_dir.resolve()
        staging_dir.mkdir(parents=True, exist_ok=True)
        if any(staging_dir.iterdir()):
            raise RuntimeError(f"Staging directory {staging_dir} is not empty.")
        return staging_dir, False

    temp_dir = Path(tempfile.mkdtemp(prefix="codex-npm-stage-"))
    return temp_dir, True


def stage_sources(staging_dir: Path, version: str, package: str) -> None:
    if package == "codex":
        bin_dir = staging_dir / "bin"
        bin_dir.mkdir(parents=True, exist_ok=True)
        shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
        rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
        if rg_manifest.exists():
            shutil.copy2(rg_manifest, bin_dir / "rg")

        readme_src = REPO_ROOT / "README.md"
        if readme_src.exists():
            shutil.copy2(readme_src, staging_dir / "README.md")

        package_json_path = CODEX_CLI_ROOT / "package.json"
    elif package == "codex-responses-api-proxy":
        bin_dir = staging_dir / "bin"
        bin_dir.mkdir(parents=True, exist_ok=True)
        launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js"
        shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js")

        readme_src = RESPONSES_API_PROXY_NPM_ROOT / "README.md"
        if readme_src.exists():
            shutil.copy2(readme_src, staging_dir / "README.md")

        package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json"
    elif package == "codex-sdk":
        package_json_path = CODEX_SDK_ROOT / "package.json"
        stage_codex_sdk_sources(staging_dir)
    else:
        raise RuntimeError(f"Unknown package '{package}'.")

    with open(package_json_path, "r", encoding="utf-8") as fh:
        package_json = json.load(fh)
    package_json["version"] = version

    if package == "codex-sdk":
        scripts = package_json.get("scripts")
        if isinstance(scripts, dict):
            scripts.pop("prepare", None)

        files = package_json.get("files")
        if isinstance(files, list):
            if "vendor" not in files:
                files.append("vendor")
        else:
            package_json["files"] = ["dist", "vendor"]

    with open(staging_dir / "package.json", "w", encoding="utf-8") as out:
        json.dump(package_json, out, indent=2)
        out.write("\n")


def run_command(cmd: list[str], cwd: Path | None = None) -> None:
    print("+", " ".join(cmd))
    subprocess.run(cmd, cwd=cwd, check=True)


def stage_codex_sdk_sources(staging_dir: Path) -> None:
    package_root = CODEX_SDK_ROOT

    run_command(["pnpm", "install", "--frozen-lockfile"], cwd=package_root)
    run_command(["pnpm", "run", "build"], cwd=package_root)

    dist_src = package_root / "dist"
    if not dist_src.exists():
        raise RuntimeError("codex-sdk build did not produce a dist directory.")
|
||||
|
||||
shutil.copytree(dist_src, staging_dir / "dist")
|
||||
|
||||
readme_src = package_root / "README.md"
|
||||
if readme_src.exists():
|
||||
shutil.copy2(readme_src, staging_dir / "README.md")
|
||||
|
||||
license_src = REPO_ROOT / "LICENSE"
|
||||
if license_src.exists():
|
||||
shutil.copy2(license_src, staging_dir / "LICENSE")
|
||||
|
||||
|
||||
def copy_native_binaries(
|
||||
vendor_src: Path,
|
||||
staging_dir: Path,
|
||||
package: str,
|
||||
components: list[str],
|
||||
) -> None:
|
||||
vendor_src = vendor_src.resolve()
|
||||
if not vendor_src.exists():
|
||||
raise RuntimeError(f"Vendor source directory not found: {vendor_src}")
|
||||
|
||||
components_set = {component for component in components if component in COMPONENT_DEST_DIR}
|
||||
if not components_set:
|
||||
return
|
||||
|
||||
vendor_dest = staging_dir / "vendor"
|
||||
if vendor_dest.exists():
|
||||
shutil.rmtree(vendor_dest)
|
||||
vendor_dest.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for target_dir in vendor_src.iterdir():
|
||||
if not target_dir.is_dir():
|
||||
continue
|
||||
|
||||
if "windows" in target_dir.name:
|
||||
components_set.update(WINDOWS_ONLY_COMPONENTS.get(package, []))
|
||||
|
||||
dest_target_dir = vendor_dest / target_dir.name
|
||||
dest_target_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for component in components_set:
|
||||
dest_dir_name = COMPONENT_DEST_DIR.get(component)
|
||||
if dest_dir_name is None:
|
||||
continue
|
||||
|
||||
src_component_dir = target_dir / dest_dir_name
|
||||
if not src_component_dir.exists():
|
||||
raise RuntimeError(
|
||||
f"Missing native component '{component}' in vendor source: {src_component_dir}"
|
||||
)
|
||||
|
||||
dest_component_dir = dest_target_dir / dest_dir_name
|
||||
if dest_component_dir.exists():
|
||||
shutil.rmtree(dest_component_dir)
|
||||
shutil.copytree(src_component_dir, dest_component_dir)
|
||||
|
||||
|
||||
def run_npm_pack(staging_dir: Path, output_path: Path) -> Path:
|
||||
output_path = output_path.resolve()
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="codex-npm-pack-") as pack_dir_str:
|
||||
pack_dir = Path(pack_dir_str)
|
||||
stdout = subprocess.check_output(
|
||||
["npm", "pack", "--json", "--pack-destination", str(pack_dir)],
|
||||
cwd=staging_dir,
|
||||
text=True,
|
||||
)
|
||||
try:
|
||||
pack_output = json.loads(stdout)
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RuntimeError("Failed to parse npm pack output.") from exc
|
||||
|
||||
if not pack_output:
|
||||
raise RuntimeError("npm pack did not produce an output tarball.")
|
||||
|
||||
tarball_name = pack_output[0].get("filename") or pack_output[0].get("name")
|
||||
if not tarball_name:
|
||||
raise RuntimeError("Unable to determine npm pack output filename.")
|
||||
|
||||
tarball_path = pack_dir / tarball_name
|
||||
if not tarball_path.exists():
|
||||
raise RuntimeError(f"Expected npm pack output not found: {tarball_path}")
|
||||
|
||||
shutil.move(str(tarball_path), output_path)
|
||||
|
||||
return output_path
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
sys.exit(main())
|
||||
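
To see how these pieces fit together end to end, here is a minimal sketch of driving the staging entry point above from another Python process. The script path and version number are illustrative assumptions, not values taken from this diff:

```python
# Hypothetical invocation of the staging script defined above.
# The script path and version are assumptions for illustration only.
import subprocess

subprocess.run(
    [
        "python3",
        "codex-cli/scripts/build_npm_package.py",  # assumed script location
        "--package", "codex",
        "--release-version", "0.40.0",  # example version
        "--vendor-src", "codex-cli/vendor",  # produced by install_native_deps.py
        "--pack-output", "dist/codex.tgz",
    ],
    check=True,
)
```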
codex-cli/scripts/install_native_deps.py (new executable file, 399 lines)
@@ -0,0 +1,399 @@
#!/usr/bin/env python3
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""

import argparse
import json
import os
import shutil
import subprocess
import tarfile
import tempfile
import zipfile
from dataclasses import dataclass
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from typing import Iterable, Sequence
from urllib.parse import urlparse
from urllib.request import urlopen

SCRIPT_DIR = Path(__file__).resolve().parent
CODEX_CLI_ROOT = SCRIPT_DIR.parent
DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351"  # rust-v0.40.0
VENDOR_DIR_NAME = "vendor"
RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
BINARY_TARGETS = (
    "x86_64-unknown-linux-musl",
    "aarch64-unknown-linux-musl",
    "x86_64-apple-darwin",
    "aarch64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "aarch64-pc-windows-msvc",
)


@dataclass(frozen=True)
class BinaryComponent:
    artifact_prefix: str  # matches the artifact filename prefix (e.g. codex-<target>.zst)
    dest_dir: str  # directory under vendor/<target>/ where the binary is installed
    binary_basename: str  # executable name inside dest_dir (before optional .exe)
    targets: tuple[str, ...] | None = None  # limit installation to specific targets


WINDOWS_TARGETS = tuple(target for target in BINARY_TARGETS if "windows" in target)

BINARY_COMPONENTS = {
    "codex": BinaryComponent(
        artifact_prefix="codex",
        dest_dir="codex",
        binary_basename="codex",
    ),
    "codex-responses-api-proxy": BinaryComponent(
        artifact_prefix="codex-responses-api-proxy",
        dest_dir="codex-responses-api-proxy",
        binary_basename="codex-responses-api-proxy",
    ),
    "codex-windows-sandbox-setup": BinaryComponent(
        artifact_prefix="codex-windows-sandbox-setup",
        dest_dir="codex",
        binary_basename="codex-windows-sandbox-setup",
        targets=WINDOWS_TARGETS,
    ),
    "codex-command-runner": BinaryComponent(
        artifact_prefix="codex-command-runner",
        dest_dir="codex",
        binary_basename="codex-command-runner",
        targets=WINDOWS_TARGETS,
    ),
}

RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
    ("x86_64-unknown-linux-musl", "linux-x86_64"),
    ("aarch64-unknown-linux-musl", "linux-aarch64"),
    ("x86_64-apple-darwin", "macos-x86_64"),
    ("aarch64-apple-darwin", "macos-aarch64"),
    ("x86_64-pc-windows-msvc", "windows-x86_64"),
    ("aarch64-pc-windows-msvc", "windows-aarch64"),
]
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Install native Codex binaries.")
    parser.add_argument(
        "--workflow-url",
        help=(
            "GitHub Actions workflow URL that produced the artifacts. Defaults to a "
            "known good run when omitted."
        ),
    )
    parser.add_argument(
        "--component",
        dest="components",
        action="append",
        choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
        help=(
            "Limit installation to the specified components."
            " May be repeated. Defaults to codex, codex-windows-sandbox-setup,"
            " codex-command-runner, and rg."
        ),
    )
    parser.add_argument(
        "root",
        nargs="?",
        type=Path,
        help=(
            "Directory containing package.json for the staged package. If omitted, the "
            "repository checkout is used."
        ),
    )
    return parser.parse_args()


def main() -> int:
    args = parse_args()

    codex_cli_root = (args.root or CODEX_CLI_ROOT).resolve()
    vendor_dir = codex_cli_root / VENDOR_DIR_NAME
    vendor_dir.mkdir(parents=True, exist_ok=True)

    components = args.components or [
        "codex",
        "codex-windows-sandbox-setup",
        "codex-command-runner",
        "rg",
    ]

    workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
    if not workflow_url:
        workflow_url = DEFAULT_WORKFLOW_URL

    workflow_id = workflow_url.rstrip("/").split("/")[-1]
    print(f"Downloading native artifacts from workflow {workflow_id}...")

    with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
        artifacts_dir = Path(artifacts_dir_str)
        _download_artifacts(workflow_id, artifacts_dir)
        install_binary_components(
            artifacts_dir,
            vendor_dir,
            [BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS],
        )

    if "rg" in components:
        print("Fetching ripgrep binaries...")
        fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)

    print(f"Installed native dependencies into {vendor_dir}")
    return 0


def fetch_rg(
    vendor_dir: Path,
    targets: Sequence[str] | None = None,
    *,
    manifest_path: Path,
) -> list[Path]:
    """Download ripgrep binaries described by the DotSlash manifest."""

    if targets is None:
        targets = DEFAULT_RG_TARGETS

    if not manifest_path.exists():
        raise FileNotFoundError(f"DotSlash manifest not found: {manifest_path}")

    manifest = _load_manifest(manifest_path)
    platforms = manifest.get("platforms", {})

    vendor_dir.mkdir(parents=True, exist_ok=True)

    targets = list(targets)
    if not targets:
        return []

    task_configs: list[tuple[str, str, dict]] = []
    for target in targets:
        platform_key = RG_TARGET_TO_PLATFORM.get(target)
        if platform_key is None:
            raise ValueError(f"Unsupported ripgrep target '{target}'.")

        platform_info = platforms.get(platform_key)
        if platform_info is None:
            raise RuntimeError(f"Platform '{platform_key}' not found in manifest {manifest_path}.")

        task_configs.append((target, platform_key, platform_info))

    results: dict[str, Path] = {}
    max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1)))

    print("Installing ripgrep binaries for targets: " + ", ".join(targets))

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_map = {
            executor.submit(
                _fetch_single_rg,
                vendor_dir,
                target,
                platform_key,
                platform_info,
                manifest_path,
            ): target
            for target, platform_key, platform_info in task_configs
        }

        for future in as_completed(future_map):
            target = future_map[future]
            results[target] = future.result()
            print(f" installed ripgrep for {target}")

    return [results[target] for target in targets]


def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
    cmd = [
        "gh",
        "run",
        "download",
        "--dir",
        str(dest_dir),
        "--repo",
        "openai/codex",
        workflow_id,
    ]
    subprocess.check_call(cmd)


def install_binary_components(
    artifacts_dir: Path,
    vendor_dir: Path,
    selected_components: Sequence[BinaryComponent],
) -> None:
    if not selected_components:
        return

    for component in selected_components:
        component_targets = list(component.targets or BINARY_TARGETS)

        print(
            f"Installing {component.binary_basename} binaries for targets: "
            + ", ".join(component_targets)
        )
        max_workers = min(len(component_targets), max(1, (os.cpu_count() or 1)))
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = {
                executor.submit(
                    _install_single_binary,
                    artifacts_dir,
                    vendor_dir,
                    target,
                    component,
                ): target
                for target in component_targets
            }
            for future in as_completed(futures):
                installed_path = future.result()
                print(f" installed {installed_path}")


def _install_single_binary(
    artifacts_dir: Path,
    vendor_dir: Path,
    target: str,
    component: BinaryComponent,
) -> Path:
    artifact_subdir = artifacts_dir / target
    archive_name = _archive_name_for_target(component.artifact_prefix, target)
    archive_path = artifact_subdir / archive_name
    if not archive_path.exists():
        raise FileNotFoundError(f"Expected artifact not found: {archive_path}")

    dest_dir = vendor_dir / target / component.dest_dir
    dest_dir.mkdir(parents=True, exist_ok=True)

    binary_name = (
        f"{component.binary_basename}.exe" if "windows" in target else component.binary_basename
    )
    dest = dest_dir / binary_name
    dest.unlink(missing_ok=True)
    extract_archive(archive_path, "zst", None, dest)
    if "windows" not in target:
        dest.chmod(0o755)
    return dest


def _archive_name_for_target(artifact_prefix: str, target: str) -> str:
    if "windows" in target:
        return f"{artifact_prefix}-{target}.exe.zst"
    return f"{artifact_prefix}-{target}.zst"


def _fetch_single_rg(
    vendor_dir: Path,
    target: str,
    platform_key: str,
    platform_info: dict,
    manifest_path: Path,
) -> Path:
    providers = platform_info.get("providers", [])
    if not providers:
        raise RuntimeError(f"No providers listed for platform '{platform_key}' in {manifest_path}.")

    url = providers[0]["url"]
    archive_format = platform_info.get("format", "zst")
    archive_member = platform_info.get("path")

    dest_dir = vendor_dir / target / "path"
    dest_dir.mkdir(parents=True, exist_ok=True)

    is_windows = platform_key.startswith("win")
    binary_name = "rg.exe" if is_windows else "rg"
    dest = dest_dir / binary_name

    with tempfile.TemporaryDirectory() as tmp_dir_str:
        tmp_dir = Path(tmp_dir_str)
        archive_filename = os.path.basename(urlparse(url).path)
        download_path = tmp_dir / archive_filename
        _download_file(url, download_path)

        dest.unlink(missing_ok=True)
        extract_archive(download_path, archive_format, archive_member, dest)

    if not is_windows:
        dest.chmod(0o755)

    return dest


def _download_file(url: str, dest: Path) -> None:
    dest.parent.mkdir(parents=True, exist_ok=True)
    with urlopen(url) as response, open(dest, "wb") as out:
        shutil.copyfileobj(response, out)


def extract_archive(
    archive_path: Path,
    archive_format: str,
    archive_member: str | None,
    dest: Path,
) -> None:
    dest.parent.mkdir(parents=True, exist_ok=True)

    if archive_format == "zst":
        output_path = archive_path.parent / dest.name
        subprocess.check_call(
            ["zstd", "-f", "-d", str(archive_path), "-o", str(output_path)]
        )
        shutil.move(str(output_path), dest)
        return

    if archive_format == "tar.gz":
        if not archive_member:
            raise RuntimeError("Missing 'path' for tar.gz archive in DotSlash manifest.")
        with tarfile.open(archive_path, "r:gz") as tar:
            try:
                member = tar.getmember(archive_member)
            except KeyError as exc:
                raise RuntimeError(
                    f"Entry '{archive_member}' not found in archive {archive_path}."
                ) from exc
            tar.extract(member, path=archive_path.parent, filter="data")
        extracted = archive_path.parent / archive_member
        shutil.move(str(extracted), dest)
        return

    if archive_format == "zip":
        if not archive_member:
            raise RuntimeError("Missing 'path' for zip archive in DotSlash manifest.")
        with zipfile.ZipFile(archive_path) as archive:
            try:
                with archive.open(archive_member) as src, open(dest, "wb") as out:
                    shutil.copyfileobj(src, out)
            except KeyError as exc:
                raise RuntimeError(
                    f"Entry '{archive_member}' not found in archive {archive_path}."
                ) from exc
        return

    raise RuntimeError(f"Unsupported archive format '{archive_format}'.")


def _load_manifest(manifest_path: Path) -> dict:
    cmd = ["dotslash", "--", "parse", str(manifest_path)]
    stdout = subprocess.check_output(cmd, text=True)
    try:
        manifest = json.loads(stdout)
    except json.JSONDecodeError as exc:
        raise RuntimeError(f"Invalid DotSlash manifest output from {manifest_path}.") from exc

    if not isinstance(manifest, dict):
        raise RuntimeError(
            f"Unexpected DotSlash manifest structure for {manifest_path}: {type(manifest)!r}"
        )

    return manifest


if __name__ == "__main__":
    import sys

    sys.exit(main())
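
Given the dest computation in `_install_single_binary` and `_fetch_single_rg`, the installer above leaves each binary at `vendor/<target>/<dest_dir>/<basename>[.exe]`. A minimal sketch for sanity-checking that layout; the vendor root below is an assumption about where the script was pointed:

```python
# Sketch: verify the vendor layout produced by the installer above.
# Mirrors the dest paths computed in _install_single_binary ("codex")
# and _fetch_single_rg ("path"); the vendor root is an assumed location.
from pathlib import Path

vendor_dir = Path("codex-cli/vendor")  # assumed install root
for target in (
    "x86_64-unknown-linux-musl",
    "aarch64-apple-darwin",
    "x86_64-pc-windows-msvc",
):
    suffix = ".exe" if "windows" in target else ""
    codex_bin = vendor_dir / target / "codex" / f"codex{suffix}"
    rg_bin = vendor_dir / target / "path" / f"rg{suffix}"
    for binary in (codex_bin, rg_bin):
        status = "ok" if binary.exists() else "MISSING"
        print(f"{status:7} {binary}")
```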
codex-cli/scripts/install_native_deps.sh (deleted, 91 lines)
@@ -1,91 +0,0 @@
#!/usr/bin/env bash

# Install native runtime dependencies for codex-cli.
#
# Usage
#   install_native_deps.sh [--workflow-url URL] [CODEX_CLI_ROOT]
#
# The optional CODEX_CLI_ROOT is the path that contains package.json. Omitting
# it installs the binaries into the repository's own bin/ folder to support
# local development.

set -euo pipefail

# ------------------
# Parse arguments
# ------------------

CODEX_CLI_ROOT=""

# Until we start publishing stable GitHub releases, we have to grab the binaries
# from the GitHub Action that created them. Update the URL below to point to the
# appropriate workflow run:
WORKFLOW_URL="https://github.com/openai/codex/actions/runs/16840150768" # rust-v0.20.0-alpha.2

while [[ $# -gt 0 ]]; do
  case "$1" in
    --workflow-url)
      shift || { echo "--workflow-url requires an argument"; exit 1; }
      if [ -n "$1" ]; then
        WORKFLOW_URL="$1"
      fi
      ;;
    *)
      if [[ -z "$CODEX_CLI_ROOT" ]]; then
        CODEX_CLI_ROOT="$1"
      else
        echo "Unexpected argument: $1" >&2
        exit 1
      fi
      ;;
  esac
  shift
done

# ----------------------------------------------------------------------------
# Determine where the binaries should be installed.
# ----------------------------------------------------------------------------

if [ -n "$CODEX_CLI_ROOT" ]; then
  # The caller supplied a release root directory.
  BIN_DIR="$CODEX_CLI_ROOT/bin"
else
  # No argument; fall back to the repo's own bin directory.
  # Resolve the path of this script, then walk up to the repo root.
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
  BIN_DIR="$CODEX_CLI_ROOT/bin"
fi

# Make sure the destination directory exists.
mkdir -p "$BIN_DIR"

# ----------------------------------------------------------------------------
# Download and decompress the artifacts from the GitHub Actions workflow.
# ----------------------------------------------------------------------------

WORKFLOW_ID="${WORKFLOW_URL##*/}"

ARTIFACTS_DIR="$(mktemp -d)"
trap 'rm -rf "$ARTIFACTS_DIR"' EXIT

# NB: The GitHub CLI `gh` must be installed and authenticated.
gh run download --dir "$ARTIFACTS_DIR" --repo openai/codex "$WORKFLOW_ID"

# x64 Linux
zstd -d "$ARTIFACTS_DIR/x86_64-unknown-linux-musl/codex-x86_64-unknown-linux-musl.zst" \
  -o "$BIN_DIR/codex-x86_64-unknown-linux-musl"
# ARM64 Linux
zstd -d "$ARTIFACTS_DIR/aarch64-unknown-linux-musl/codex-aarch64-unknown-linux-musl.zst" \
  -o "$BIN_DIR/codex-aarch64-unknown-linux-musl"
# x64 macOS
zstd -d "$ARTIFACTS_DIR/x86_64-apple-darwin/codex-x86_64-apple-darwin.zst" \
  -o "$BIN_DIR/codex-x86_64-apple-darwin"
# ARM64 macOS
zstd -d "$ARTIFACTS_DIR/aarch64-apple-darwin/codex-aarch64-apple-darwin.zst" \
  -o "$BIN_DIR/codex-aarch64-apple-darwin"
# x64 Windows
zstd -d "$ARTIFACTS_DIR/x86_64-pc-windows-msvc/codex-x86_64-pc-windows-msvc.exe.zst" \
  -o "$BIN_DIR/codex-x86_64-pc-windows-msvc.exe"

echo "Installed native dependencies into $BIN_DIR"
codex-cli/scripts/stage_release.sh (deleted, 120 lines)
@@ -1,120 +0,0 @@
#!/usr/bin/env bash
# -----------------------------------------------------------------------------
# stage_release.sh
# -----------------------------------------------------------------------------
# Stages an npm release for @openai/codex.
#
# Usage:
#
#   --tmp <dir> : Use <dir> instead of a freshly created temp directory.
#   -h|--help   : Print usage.
#
# -----------------------------------------------------------------------------

set -euo pipefail

# Helper - usage / flag parsing

usage() {
  cat <<EOF
Usage: $(basename "$0") [--tmp DIR] [--version VERSION]

Options
  --tmp DIR    Use DIR to stage the release (defaults to a fresh mktemp dir)
  --version    Specify the version to release (defaults to a timestamp-based version)
  -h, --help   Show this help

Legacy positional argument: the first non-flag argument is still interpreted
as the temporary directory (for backwards compatibility) but is deprecated.
EOF
  exit "${1:-0}"
}

TMPDIR=""
# Default to a timestamp-based version (keep same scheme as before)
VERSION="$(printf '0.1.%d' "$(date +%y%m%d%H%M)")"
WORKFLOW_URL=""

# Manual flag parser - Bash getopts does not handle GNU long options well.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --tmp)
      shift || { echo "--tmp requires an argument"; usage 1; }
      TMPDIR="$1"
      ;;
    --tmp=*)
      TMPDIR="${1#*=}"
      ;;
    --version)
      shift || { echo "--version requires an argument"; usage 1; }
      VERSION="$1"
      ;;
    --workflow-url)
      shift || { echo "--workflow-url requires an argument"; exit 1; }
      WORKFLOW_URL="$1"
      ;;
    -h|--help)
      usage 0
      ;;
    --*)
      echo "Unknown option: $1" >&2
      usage 1
      ;;
    *)
      echo "Unexpected extra argument: $1" >&2
      usage 1
      ;;
  esac
  shift
done

# Fallback when the caller did not specify a directory.
# If no directory was specified create a fresh temporary one.
if [[ -z "$TMPDIR" ]]; then
  TMPDIR="$(mktemp -d)"
fi

# Ensure the directory exists, then resolve to an absolute path.
mkdir -p "$TMPDIR"
TMPDIR="$(cd "$TMPDIR" && pwd)"

# Main build logic

echo "Staging release in $TMPDIR"

# The script lives in codex-cli/scripts/ - change into codex-cli root so that
# relative paths keep working.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

pushd "$CODEX_CLI_ROOT" >/dev/null

# 1. Build the JS artifacts ---------------------------------------------------

# Paths inside the staged package
mkdir -p "$TMPDIR/bin"

cp -r bin/codex.js "$TMPDIR/bin/codex.js"
cp ../README.md "$TMPDIR" || true # README is one level up - ignore if missing

# Modify package.json - bump version and optionally add the native directory to
# the files array so that the binaries are published to npm.

jq --arg version "$VERSION" \
  '.version = $version' \
  package.json > "$TMPDIR/package.json"

# 2. Native runtime deps (sandbox plus optional Rust binaries)

./scripts/install_native_deps.sh --workflow-url "$WORKFLOW_URL" "$TMPDIR"

popd >/dev/null

echo "Staged version $VERSION for release in $TMPDIR"

echo "Verify the CLI:"
echo " node ${TMPDIR}/bin/codex.js --version"
echo " node ${TMPDIR}/bin/codex.js --help"

# Print final hint for convenience
echo "Next: cd \"$TMPDIR\" && npm publish"
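
For reference, the deleted script's default `VERSION` line maps onto Python as follows. This is only a sketch of the legacy timestamp scheme shown above, not part of the new tooling:

```python
# Sketch of the legacy default version scheme from the deleted
# stage_release.sh: 0.1.<YYMMDDhhmm>, i.e. a timestamp packed into the
# patch component.
from datetime import datetime

version = f"0.1.{datetime.now():%y%m%d%H%M}"
print(version)  # e.g. 0.1.2509301542
```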
@@ -1,70 +0,0 @@
#!/usr/bin/env python3

import json
import subprocess
import sys
import argparse
from pathlib import Path


def main() -> int:
    parser = argparse.ArgumentParser(
        description="""Stage a release for the npm module.

Run this after the GitHub Release has been created and use
`--release-version` to specify the version to release.

Optionally pass `--tmp` to control the temporary staging directory that will be
forwarded to stage_release.sh.
"""
    )
    parser.add_argument(
        "--release-version", required=True, help="Version to release, e.g., 0.3.0"
    )
    parser.add_argument(
        "--tmp",
        help="Optional path to stage the npm package; forwarded to stage_release.sh",
    )
    args = parser.parse_args()
    version = args.release_version

    gh_run = subprocess.run(
        [
            "gh",
            "run",
            "list",
            "--branch",
            f"rust-v{version}",
            "--json",
            "workflowName,url,headSha",
            "--jq",
            'first(.[] | select(.workflowName == "rust-release"))',
        ],
        stdout=subprocess.PIPE,
        check=True,
    )
    gh_run.check_returncode()
    workflow = json.loads(gh_run.stdout)
    sha = workflow["headSha"]

    print(f"should `git checkout {sha}`")

    current_dir = Path(__file__).parent.resolve()
    cmd = [
        str(current_dir / "stage_release.sh"),
        "--version",
        version,
        "--workflow-url",
        workflow["url"],
    ]
    if args.tmp:
        cmd.extend(["--tmp", args.tmp])

    stage_release = subprocess.run(cmd)
    stage_release.check_returncode()

    return 0


if __name__ == "__main__":
    sys.exit(main())
codex-rs/.cargo/audit.toml (new file, 6 lines)
@@ -0,0 +1,6 @@
[advisories]
ignore = [
    "RUSTSEC-2024-0388", # derivative 2.2.0 via starlark; upstream crate is unmaintained
    "RUSTSEC-2025-0057", # fxhash 0.2.1 via starlark_map; upstream crate is unmaintained
    "RUSTSEC-2024-0436", # paste 1.0.15 via starlark/ratatui; upstream crate is unmaintained
]
codex-rs/.cargo/config.toml (new file, 5 lines)
@@ -0,0 +1,5 @@
[target.'cfg(all(windows, target_env = "msvc"))']
rustflags = ["-C", "link-arg=/STACK:8388608"]

[target.'cfg(all(windows, target_env = "gnu"))']
rustflags = ["-C", "link-arg=-Wl,--stack,8388608"]
codex-rs/.config/nextest.toml (new file, 13 lines)
@@ -0,0 +1,13 @@
[profile.default]
# Do not increase, fix your test instead
slow-timeout = { period = "15s", terminate-after = 2 }


[[profile.default.overrides]]
# Do not add new tests here
filter = 'test(rmcp_client) | test(humanlike_typing_1000_chars_appears_live_no_placeholder)'
slow-timeout = { period = "1m", terminate-after = 4 }

[[profile.default.overrides]]
filter = 'test(approval_matrix_covers_all_modes)'
slow-timeout = { period = "30s", terminate-after = 2 }
codex-rs/.github/workflows/cargo-audit.yml (new vendored file, 26 lines)
@@ -0,0 +1,26 @@
name: Cargo audit

on:
  pull_request:
  push:
    branches:
      - main

permissions:
  contents: read

jobs:
  audit:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: codex-rs
    steps:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@stable
      - name: Install cargo-audit
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-audit
      - name: Run cargo audit
        run: cargo audit --deny warnings
codex-rs/.gitignore (vendored, 1 line changed)
@@ -1,4 +1,5 @@
/target/
/target-*/

# Recommended value of CARGO_TARGET_DIR when using Docker as explained in .devcontainer/README.md.
/target-amd64/

codex-rs/Cargo.lock (generated, 4111 lines changed): diff suppressed because it is too large.
@@ -1,23 +1,50 @@
[workspace]
members = [
    "backend-client",
    "ansi-escape",
    "async-utils",
    "app-server",
    "app-server-protocol",
    "app-server-test-client",
    "apply-patch",
    "arg0",
    "feedback",
    "codex-backend-openapi-models",
    "cloud-tasks",
    "cloud-tasks-client",
    "cli",
    "common",
    "core",
    "exec",
    "exec-server",
    "execpolicy",
    "execpolicy-legacy",
    "keyring-store",
    "file-search",
    "linux-sandbox",
    "lmstudio",
    "login",
    "mcp-client",
    "mcp-server",
    "mcp-types",
    "ollama",
    "process-hardening",
    "protocol",
    "protocol-ts",
    "rmcp-client",
    "responses-api-proxy",
    "stdio-to-uds",
    "otel",
    "tui",
    "tui2",
    "utils/absolute-path",
    "utils/git",
    "utils/cache",
    "utils/image",
    "utils/json-to-toml",
    "utils/pty",
    "utils/readiness",
    "utils/string",
    "codex-client",
    "codex-api",
]
resolver = "2"

@@ -28,14 +55,227 @@ version = "0.0.0"
# crates created with `cargo new -w ...` automatically inherit the 2024
# edition.
edition = "2024"
license = "Apache-2.0"

[workspace.dependencies]
# Internal
app_test_support = { path = "app-server/tests/common" }
codex-ansi-escape = { path = "ansi-escape" }
codex-api = { path = "codex-api" }
codex-app-server = { path = "app-server" }
codex-app-server-protocol = { path = "app-server-protocol" }
codex-apply-patch = { path = "apply-patch" }
codex-arg0 = { path = "arg0" }
codex-async-utils = { path = "async-utils" }
codex-backend-client = { path = "backend-client" }
codex-chatgpt = { path = "chatgpt" }
codex-client = { path = "codex-client" }
codex-common = { path = "common" }
codex-core = { path = "core" }
codex-exec = { path = "exec" }
codex-execpolicy = { path = "execpolicy" }
codex-feedback = { path = "feedback" }
codex-file-search = { path = "file-search" }
codex-git = { path = "utils/git" }
codex-keyring-store = { path = "keyring-store" }
codex-linux-sandbox = { path = "linux-sandbox" }
codex-lmstudio = { path = "lmstudio" }
codex-login = { path = "login" }
codex-mcp-server = { path = "mcp-server" }
codex-ollama = { path = "ollama" }
codex-otel = { path = "otel" }
codex-process-hardening = { path = "process-hardening" }
codex-protocol = { path = "protocol" }
codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-rmcp-client = { path = "rmcp-client" }
codex-stdio-to-uds = { path = "stdio-to-uds" }
codex-tui = { path = "tui" }
codex-tui2 = { path = "tui2" }
codex-utils-absolute-path = { path = "utils/absolute-path" }
codex-utils-cache = { path = "utils/cache" }
codex-utils-image = { path = "utils/image" }
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
codex-utils-pty = { path = "utils/pty" }
codex-utils-readiness = { path = "utils/readiness" }
codex-utils-string = { path = "utils/string" }
codex-windows-sandbox = { path = "windows-sandbox-rs" }
core_test_support = { path = "core/tests/common" }
exec_server_test_support = { path = "exec-server/tests/common" }
mcp-types = { path = "mcp-types" }
mcp_test_support = { path = "mcp-server/tests/common" }

# External
allocative = "0.3.3"
ansi-to-tui = "7.0.0"
anyhow = "1"
arboard = { version = "3", features = ["wayland-data-control"] }
assert_cmd = "2"
assert_matches = "1.5.0"
async-channel = "2.3.1"
async-stream = "0.3.6"
async-trait = "0.1.89"
axum = { version = "0.8", default-features = false }
base64 = "0.22.1"
bytes = "1.10.1"
chardetng = "0.1.17"
chrono = "0.4.42"
clap = "4"
clap_complete = "4"
color-eyre = "0.6.3"
crossterm = "0.28.1"
ctor = "0.5.0"
derive_more = "2"
diffy = "0.4.2"
dirs = "6"
dotenvy = "0.15.7"
dunce = "1.0.4"
encoding_rs = "0.8.35"
env-flags = "0.1.1"
env_logger = "0.11.5"
escargot = "0.5"
eventsource-stream = "0.2.3"
futures = { version = "0.3", default-features = false }
http = "1.3.1"
icu_decimal = "2.1"
icu_locale_core = "2.1"
icu_provider = { version = "2.1", features = ["sync"] }
ignore = "0.4.23"
image = { version = "^0.25.9", default-features = false }
include_dir = "0.7.4"
indexmap = "2.12.0"
insta = "1.44.3"
itertools = "0.14.0"
keyring = { version = "3.6", default-features = false }
landlock = "0.4.1"
lazy_static = "1"
libc = "0.2.177"
log = "0.4"
lru = "0.16.2"
maplit = "1.0.2"
mime_guess = "2.0.5"
multimap = "0.10.0"
notify = "8.2.0"
nucleo-matcher = "0.3.1"
once_cell = "1.20.2"
openssl-sys = "*"
opentelemetry = "0.30.0"
opentelemetry-appender-tracing = "0.30.0"
opentelemetry-otlp = "0.30.0"
opentelemetry-semantic-conventions = "0.30.0"
opentelemetry_sdk = "0.30.0"
tracing-opentelemetry = "0.31.0"
os_info = "3.12.0"
owo-colors = "4.2.0"
path-absolutize = "3.1.1"
pathdiff = "0.2"
portable-pty = "0.9.0"
predicates = "3"
pretty_assertions = "1.4.1"
pulldown-cmark = "0.10"
rand = "0.9"
ratatui = "0.29.0"
ratatui-macros = "0.6.0"
regex = "1.12.2"
regex-lite = "0.1.7"
reqwest = "0.12"
rmcp = { version = "0.12.0", default-features = false }
schemars = "0.8.22"
seccompiler = "0.5.0"
sentry = "0.46.0"
serde = "1"
serde_json = "1"
serde_with = "3.16"
serde_yaml = "0.9"
serial_test = "3.2.0"
sha1 = "0.10.6"
sha2 = "0.10"
shlex = "1.3.0"
similar = "2.7.0"
socket2 = "0.6.1"
starlark = "0.13.0"
strum = "0.27.2"
strum_macros = "0.27.2"
supports-color = "3.0.2"
sys-locale = "0.3.2"
tempfile = "3.23.0"
test-log = "0.2.18"
textwrap = "0.16.2"
thiserror = "2.0.17"
time = "0.3"
tiny_http = "0.12"
tokio = "1"
tokio-stream = "0.1.17"
tokio-test = "0.4"
tokio-util = "0.7.16"
toml = "0.9.5"
toml_edit = "0.23.5"
tonic = "0.13.1"
tracing = "0.1.43"
tracing-appender = "0.2.3"
tracing-subscriber = "0.3.20"
tracing-test = "0.2.5"
tree-sitter = "0.25.10"
tree-sitter-bash = "0.25"
tree-sitter-highlight = "0.25.10"
ts-rs = "11"
uds_windows = "1.1.0"
unicode-segmentation = "1.12.0"
unicode-width = "0.2"
url = "2"
urlencoding = "2.1"
uuid = "1"
vt100 = "0.16.2"
walkdir = "2.5.0"
webbrowser = "1.0"
which = "6"
wildmatch = "2.6.1"

wiremock = "0.6"
zeroize = "1.8.2"

[workspace.lints]
rust = {}

[workspace.lints.clippy]
expect_used = "deny"
identity_op = "deny"
manual_clamp = "deny"
manual_filter = "deny"
manual_find = "deny"
manual_flatten = "deny"
manual_map = "deny"
manual_memcpy = "deny"
manual_non_exhaustive = "deny"
manual_ok_or = "deny"
manual_range_contains = "deny"
manual_retain = "deny"
manual_strip = "deny"
manual_try_fold = "deny"
manual_unwrap_or = "deny"
needless_borrow = "deny"
needless_borrowed_reference = "deny"
needless_collect = "deny"
needless_late_init = "deny"
needless_option_as_deref = "deny"
needless_question_mark = "deny"
needless_update = "deny"
redundant_clone = "deny"
redundant_closure = "deny"
redundant_closure_for_method_calls = "deny"
redundant_static_lifetimes = "deny"
trivially_copy_pass_by_ref = "deny"
uninlined_format_args = "deny"
unnecessary_filter_map = "deny"
unnecessary_lazy_evaluations = "deny"
unnecessary_sort_by = "deny"
unnecessary_to_owned = "deny"
unwrap_used = "deny"

# cargo-shear cannot see the platform-specific openssl-sys usage, so we
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
ignored = ["icu_provider", "openssl-sys", "codex-utils-readiness"]

[profile.release]
lto = "fat"
# Because we bundle some of these executables with the TypeScript CLI, we
@@ -45,6 +285,16 @@ strip = "symbols"
# See https://github.com/openai/codex/issues/1411 for details.
codegen-units = 1

[profile.ci-test]
debug = 1 # Reduce debug symbol size
inherits = "test"
opt-level = 0

[patch.crates-io]
# Uncomment to debug local changes.
# ratatui = { path = "../../ratatui" }
crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }

# Uncomment to debug local changes.
# rmcp = { path = "../../rust-sdk/crates/rmcp" }

@@ -4,68 +4,70 @@ We provide Codex CLI as a standalone, native executable to ensure a zero-depende

## Installing Codex

Today, the easiest way to install Codex is via `npm`, though we plan to publish Codex to other package managers soon.
Today, the easiest way to install Codex is via `npm`:

```shell
npm i -g @openai/codex@native
npm i -g @openai/codex
codex
```

You can also download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).
You can also install via Homebrew (`brew install --cask codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).

## Documentation quickstart

- First run with Codex? Follow the walkthrough in [`docs/getting-started.md`](../docs/getting-started.md) for prompts, keyboard shortcuts, and session management.
- Already shipping with Codex and want deeper control? Jump to [`docs/advanced.md`](../docs/advanced.md) and the configuration reference at [`docs/config.md`](../docs/config.md).

## What's new in the Rust CLI

While we are [working to close the gap between the TypeScript and Rust implementations of Codex CLI](https://github.com/openai/codex/issues/1262), note that the Rust CLI has a number of features that the TypeScript CLI does not!
The Rust implementation is now the maintained Codex CLI and serves as the default experience. It includes a number of features that the legacy TypeScript CLI never supported.

### Config

Codex supports a rich set of configuration options. Note that the Rust CLI uses `config.toml` instead of `config.json`. See [`config.md`](./config.md) for details.
Codex supports a rich set of configuration options. Note that the Rust CLI uses `config.toml` instead of `config.json`. See [`docs/config.md`](../docs/config.md) for details.

### Model Context Protocol Support

Codex CLI functions as an MCP client that can connect to MCP servers on startup. See the [`mcp_servers`](./config.md#mcp_servers) section in the configuration documentation for details.
#### MCP client

It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp`. Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:
Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#mcp_servers) for details.

#### MCP server (experimental)

Codex can be launched as an MCP _server_ by running `codex mcp-server`. This allows _other_ MCP clients to use Codex as a tool for another agent.

Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:

```shell
npx @modelcontextprotocol/inspector codex mcp
npx @modelcontextprotocol/inspector codex mcp-server
```

Use `codex mcp` to add/list/get/remove MCP server launchers defined in `config.toml`, and `codex mcp-server` to run the MCP server directly.

### Notifications

You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](./config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS.
You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS. When Codex detects that it is running under WSL 2 inside Windows Terminal (`WT_SESSION` is set), the TUI automatically falls back to native Windows toast notifications so approval prompts and completed turns surface even though Windows Terminal does not implement OSC 9.

### `codex exec` to run Codex programmatially/non-interactively
### `codex exec` to run Codex programmatically/non-interactively

To run Codex non-interactively, run `codex exec PROMPT` (you can also pass the prompt via `stdin`) and Codex will work on your task until it decides that it is done and exits. Output is printed to the terminal directly. You can set the `RUST_LOG` environment variable to see more about what's going on.

### Use `@` for file search

Typing `@` triggers a fuzzy-filename search over the workspace root. Use up/down to select among the results and Tab or Enter to replace the `@` with the selected path. You can use Esc to cancel the search.

### `--cd`/`-C` flag

Sometimes it is not convenient to `cd` to the directory you want Codex to use as the "working root" before running Codex. Fortunately, `codex` supports a `--cd` option so you can specify whatever folder you want. You can confirm that Codex is honoring `--cd` by double-checking the **workdir** it reports in the TUI at the start of a new session.

### Shell completions

Generate shell completion scripts via:

```shell
codex completion bash
codex completion zsh
codex completion fish
```

### Experimenting with the Codex Sandbox

To test to see what happens when a command is run under the sandbox provided by Codex, we provide the following subcommands in Codex CLI:

```
# macOS
codex debug seatbelt [--full-auto] [COMMAND]...
codex sandbox macos [--full-auto] [--log-denials] [COMMAND]...

# Linux
codex sandbox linux [--full-auto] [COMMAND]...

# Windows
codex sandbox windows [--full-auto] [COMMAND]...

# Legacy aliases
codex debug seatbelt [--full-auto] [--log-denials] [COMMAND]...
codex debug landlock [--full-auto] [COMMAND]...
```

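The `codex exec` section above describes the non-interactive mode. As a sketch (the prompt and `RUST_LOG` value are illustrative assumptions), it can be scripted from Python like this:

```python
# Sketch: drive `codex exec` non-interactively, per the README section above.
# The prompt text and RUST_LOG value are illustrative assumptions.
import os
import subprocess

env = {**os.environ, "RUST_LOG": "info"}  # optional: surface more logging
result = subprocess.run(
    ["codex", "exec", "explain what this repository does"],
    env=env,
    capture_output=True,
    text=True,
)
print(result.stdout)
```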
@@ -1,16 +1,17 @@
[package]
edition = "2024"
name = "codex-ansi-escape"
version = { workspace = true }
version.workspace = true
edition.workspace = true
license.workspace = true

[lib]
name = "codex_ansi_escape"
path = "src/lib.rs"

[dependencies]
ansi-to-tui = "7.0.0"
ratatui = { version = "0.29.0", features = [
ansi-to-tui = { workspace = true }
ratatui = { workspace = true, features = [
    "unstable-rendered-line-info",
    "unstable-widget-ref",
] }
tracing = { version = "0.1.41", features = ["log"] }
tracing = { workspace = true, features = ["log"] }
@@ -3,13 +3,32 @@ use ansi_to_tui::IntoText;
use ratatui::text::Line;
use ratatui::text::Text;

// Expand tabs in a best-effort way for transcript rendering.
// Tabs can interact poorly with left-gutter prefixes in our TUI and CLI
// transcript views (e.g., `nl` separates line numbers from content with a tab).
// Replacing tabs with spaces avoids odd visual artifacts without changing
// semantics for our use cases.
fn expand_tabs(s: &str) -> std::borrow::Cow<'_, str> {
    if s.contains('\t') {
        // Keep it simple: replace each tab with 4 spaces.
        // We do not try to align to tab stops since most usages (like `nl`)
        // look acceptable with a fixed substitution and this avoids stateful math
        // across spans.
        std::borrow::Cow::Owned(s.replace('\t', "    "))
    } else {
        std::borrow::Cow::Borrowed(s)
    }
}

/// This function should be used when the contents of `s` are expected to match
/// a single line. If multiple lines are found, a warning is logged and only the
/// first line is returned.
pub fn ansi_escape_line(s: &str) -> Line<'static> {
    let text = ansi_escape(s);
    // Normalize tabs to spaces to avoid odd gutter collisions in transcript mode.
    let s = expand_tabs(s);
    let text = ansi_escape(&s);
    match text.lines.as_slice() {
        [] => Line::from(""),
        [] => "".into(),
        [only] => only.clone(),
        [first, rest @ ..] => {
            tracing::warn!("ansi_escape_line: expected a single line, got {first:?} and {rest:?}");
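
The `expand_tabs` helper above deliberately uses a fixed four-space substitution rather than true tab stops. A quick Python illustration of the difference the comment is describing (`str.expandtabs` is the stop-aware variant):

```python
# Sketch: fixed substitution vs. tab-stop-aware expansion.
# The Rust code above deliberately uses the first, simpler behavior.
s = "ab\tcd"
print(s.replace("\t", "    "))  # fixed: "ab    cd" (always 4 spaces)
print(s.expandtabs(4))          # tab stops: "ab  cd" (advance to column 4)
```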
codex-rs/app-server-protocol/Cargo.toml (new file, 30 lines)
@@ -0,0 +1,30 @@
[package]
name = "codex-app-server-protocol"
version.workspace = true
edition.workspace = true
license.workspace = true

[lib]
name = "codex_app_server_protocol"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }
mcp-types = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
strum_macros = { workspace = true }
thiserror = { workspace = true }
ts-rs = { workspace = true }
uuid = { workspace = true, features = ["serde", "v7"] }

[dev-dependencies]
anyhow = { workspace = true }
pretty_assertions = { workspace = true }
@@ -3,18 +3,20 @@ use clap::Parser;
use std::path::PathBuf;

#[derive(Parser, Debug)]
#[command(about = "Generate TypeScript bindings for the Codex protocol")]
#[command(
    about = "Generate TypeScript bindings and JSON Schemas for the Codex app-server protocol"
)]
struct Args {
    /// Output directory where .ts files will be written
    /// Output directory where generated files will be written
    #[arg(short = 'o', long = "out", value_name = "DIR")]
    out_dir: PathBuf,

    /// Optional path to the Prettier executable to format generated files
    /// Optional Prettier executable path to format generated TypeScript files
    #[arg(short = 'p', long = "prettier", value_name = "PRETTIER_BIN")]
    prettier: Option<PathBuf>,
}

fn main() -> Result<()> {
    let args = Args::parse();
    codex_protocol_ts::generate_ts(&args.out_dir, args.prettier.as_deref())
    codex_app_server_protocol::generate_types(&args.out_dir, args.prettier.as_deref())
}
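
A sketch of running the generator above during a build: the `--out` and `--prettier` flags come straight from the `Args` struct, while the crate name passed to `-p` is an assumption for illustration only.

```python
# Sketch: invoke the bindings generator shown above via cargo.
# The crate name passed to -p and the paths are assumptions for illustration.
import subprocess

subprocess.run(
    [
        "cargo", "run", "-p", "codex-protocol-ts", "--",  # assumed crate name
        "--out", "bindings/",  # required output directory
        "--prettier", "node_modules/.bin/prettier",  # optional formatter
    ],
    check=True,
)
```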
946
codex-rs/app-server-protocol/src/export.rs
Normal file
946
codex-rs/app-server-protocol/src/export.rs
Normal file
@@ -0,0 +1,946 @@
|
||||
use crate::ClientNotification;
|
||||
use crate::ClientRequest;
|
||||
use crate::ServerNotification;
|
||||
use crate::ServerRequest;
|
||||
use crate::export_client_notification_schemas;
|
||||
use crate::export_client_param_schemas;
|
||||
use crate::export_client_response_schemas;
|
||||
use crate::export_client_responses;
|
||||
use crate::export_server_notification_schemas;
|
||||
use crate::export_server_param_schemas;
|
||||
use crate::export_server_response_schemas;
|
||||
use crate::export_server_responses;
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use schemars::JsonSchema;
|
||||
use schemars::schema_for;
|
||||
use serde::Serialize;
|
||||
use serde_json::Map;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::io::Read;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use ts_rs::TS;
|
||||
|
||||
const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
|
||||
const IGNORED_DEFINITIONS: &[&str] = &["Option<()>"];
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct GeneratedSchema {
|
||||
namespace: Option<String>,
|
||||
logical_name: String,
|
||||
value: Value,
|
||||
in_v1_dir: bool,
|
||||
}
|
||||
|
||||
impl GeneratedSchema {
|
||||
fn namespace(&self) -> Option<&str> {
|
||||
self.namespace.as_deref()
|
||||
}
|
||||
|
||||
fn logical_name(&self) -> &str {
|
||||
&self.logical_name
|
||||
}
|
||||
|
||||
fn value(&self) -> &Value {
|
||||
&self.value
|
||||
}
|
||||
}
|
||||
|
||||
type JsonSchemaEmitter = fn(&Path) -> Result<GeneratedSchema>;
|
||||
pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
||||
generate_ts(out_dir, prettier)?;
|
||||
generate_json(out_dir)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
pub struct GenerateTsOptions {
    pub generate_indices: bool,
    pub ensure_headers: bool,
    pub run_prettier: bool,
}

impl Default for GenerateTsOptions {
    fn default() -> Self {
        Self {
            generate_indices: true,
            ensure_headers: true,
            run_prettier: true,
        }
    }
}

pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
    generate_ts_with_options(out_dir, prettier, GenerateTsOptions::default())
}

pub fn generate_ts_with_options(
    out_dir: &Path,
    prettier: Option<&Path>,
    options: GenerateTsOptions,
) -> Result<()> {
    let v2_out_dir = out_dir.join("v2");
    ensure_dir(out_dir)?;
    ensure_dir(&v2_out_dir)?;

    ClientRequest::export_all_to(out_dir)?;
    export_client_responses(out_dir)?;
    ClientNotification::export_all_to(out_dir)?;

    ServerRequest::export_all_to(out_dir)?;
    export_server_responses(out_dir)?;
    ServerNotification::export_all_to(out_dir)?;

    if options.generate_indices {
        generate_index_ts(out_dir)?;
        generate_index_ts(&v2_out_dir)?;
    }

    // Ensure our header is present on all TS files (root + subdirs like v2/).
    let mut ts_files = Vec::new();
    let should_collect_ts_files =
        options.ensure_headers || (options.run_prettier && prettier.is_some());
    if should_collect_ts_files {
        ts_files = ts_files_in_recursive(out_dir)?;
    }

    if options.ensure_headers {
        for file in &ts_files {
            prepend_header_if_missing(file)?;
        }
    }

    // Optionally run Prettier on all generated TS files.
    if options.run_prettier
        && let Some(prettier_bin) = prettier
        && !ts_files.is_empty()
    {
        let status = Command::new(prettier_bin)
            .arg("--write")
            .arg("--log-level")
            .arg("warn")
            .args(ts_files.iter().map(|p| p.as_os_str()))
            .status()
            .with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
        if !status.success() {
            return Err(anyhow!("Prettier failed with status {status}"));
        }
    }

    Ok(())
}

pub fn generate_json(out_dir: &Path) -> Result<()> {
    ensure_dir(out_dir)?;
    let envelope_emitters: &[JsonSchemaEmitter] = &[
        |d| write_json_schema_with_return::<crate::RequestId>(d, "RequestId"),
        |d| write_json_schema_with_return::<crate::JSONRPCMessage>(d, "JSONRPCMessage"),
        |d| write_json_schema_with_return::<crate::JSONRPCRequest>(d, "JSONRPCRequest"),
        |d| write_json_schema_with_return::<crate::JSONRPCNotification>(d, "JSONRPCNotification"),
        |d| write_json_schema_with_return::<crate::JSONRPCResponse>(d, "JSONRPCResponse"),
        |d| write_json_schema_with_return::<crate::JSONRPCError>(d, "JSONRPCError"),
        |d| write_json_schema_with_return::<crate::JSONRPCErrorError>(d, "JSONRPCErrorError"),
        |d| write_json_schema_with_return::<crate::ClientRequest>(d, "ClientRequest"),
        |d| write_json_schema_with_return::<crate::ServerRequest>(d, "ServerRequest"),
        |d| write_json_schema_with_return::<crate::ClientNotification>(d, "ClientNotification"),
        |d| write_json_schema_with_return::<crate::ServerNotification>(d, "ServerNotification"),
        |d| write_json_schema_with_return::<EventMsg>(d, "EventMsg"),
    ];

    let mut schemas: Vec<GeneratedSchema> = Vec::new();
    for emit in envelope_emitters {
        schemas.push(emit(out_dir)?);
    }

    schemas.extend(export_client_param_schemas(out_dir)?);
    schemas.extend(export_client_response_schemas(out_dir)?);
    schemas.extend(export_server_param_schemas(out_dir)?);
    schemas.extend(export_server_response_schemas(out_dir)?);
    schemas.extend(export_client_notification_schemas(out_dir)?);
    schemas.extend(export_server_notification_schemas(out_dir)?);

    let bundle = build_schema_bundle(schemas)?;
    write_pretty_json(
        out_dir.join("codex_app_server_protocol.schemas.json"),
        &bundle,
    )?;

    Ok(())
}

fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
    const SPECIAL_DEFINITIONS: &[&str] = &[
        "ClientNotification",
        "ClientRequest",
        "EventMsg",
        "ServerNotification",
        "ServerRequest",
    ];

    let namespaced_types = collect_namespaced_types(&schemas);
    let mut definitions = Map::new();

    for schema in schemas {
        let GeneratedSchema {
            namespace,
            logical_name,
            mut value,
            in_v1_dir,
        } = schema;

        if IGNORED_DEFINITIONS.contains(&logical_name.as_str()) {
            continue;
        }

        if let Some(ref ns) = namespace {
            rewrite_refs_to_namespace(&mut value, ns);
        }

        let mut forced_namespace_refs: Vec<(String, String)> = Vec::new();
        if let Value::Object(ref mut obj) = value
            && let Some(defs) = obj.remove("definitions")
            && let Value::Object(defs_obj) = defs
        {
            for (def_name, mut def_schema) in defs_obj {
                if IGNORED_DEFINITIONS.contains(&def_name.as_str()) {
                    continue;
                }
                if SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
                    continue;
                }
                annotate_schema(&mut def_schema, Some(def_name.as_str()));
                let target_namespace = match namespace {
                    Some(ref ns) => Some(ns.clone()),
                    None => namespace_for_definition(&def_name, &namespaced_types)
                        .cloned()
                        .filter(|_| !in_v1_dir),
                };
                if let Some(ref ns) = target_namespace {
                    if namespace.as_deref() == Some(ns.as_str()) {
                        rewrite_refs_to_namespace(&mut def_schema, ns);
                        insert_into_namespace(&mut definitions, ns, def_name.clone(), def_schema)?;
                    } else if !forced_namespace_refs
                        .iter()
                        .any(|(name, existing_ns)| name == &def_name && existing_ns == ns)
                    {
                        forced_namespace_refs.push((def_name.clone(), ns.clone()));
                    }
                } else {
                    definitions.insert(def_name, def_schema);
                }
            }
        }

        for (name, ns) in forced_namespace_refs {
            rewrite_named_ref_to_namespace(&mut value, &ns, &name);
        }

        if let Some(ref ns) = namespace {
            insert_into_namespace(&mut definitions, ns, logical_name.clone(), value)?;
        } else {
            definitions.insert(logical_name, value);
        }
    }

    let mut root = Map::new();
    root.insert(
        "$schema".to_string(),
        Value::String("http://json-schema.org/draft-07/schema#".into()),
    );
    root.insert(
        "title".to_string(),
        Value::String("CodexAppServerProtocol".into()),
    );
    root.insert("type".to_string(), Value::String("object".into()));
    root.insert("definitions".to_string(), Value::Object(definitions));

    Ok(Value::Object(root))
}

fn insert_into_namespace(
    definitions: &mut Map<String, Value>,
    namespace: &str,
    name: String,
    schema: Value,
) -> Result<()> {
    let entry = definitions
        .entry(namespace.to_string())
        .or_insert_with(|| Value::Object(Map::new()));
    match entry {
        Value::Object(map) => {
            map.insert(name, schema);
            Ok(())
        }
        _ => Err(anyhow!("expected namespace {namespace} to be an object")),
    }
}

fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
where
    T: JsonSchema,
{
    let file_stem = name.trim();
    let schema = schema_for!(T);
    let mut schema_value = serde_json::to_value(schema)?;
    annotate_schema(&mut schema_value, Some(file_stem));
    // If the name looks like a namespaced path (e.g., "v2::Type"), mirror
    // the TypeScript layout and write to out_dir/v2/Type.json. Otherwise
    // write alongside the legacy files.
    let (raw_namespace, logical_name) = split_namespace(file_stem);
    let out_path = if let Some(ns) = raw_namespace {
        let dir = out_dir.join(ns);
        ensure_dir(&dir)?;
        dir.join(format!("{logical_name}.json"))
    } else {
        out_dir.join(format!("{file_stem}.json"))
    };

    if !IGNORED_DEFINITIONS.contains(&logical_name) {
        write_pretty_json(out_path, &schema_value)
            .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
    }

    let namespace = match raw_namespace {
        Some("v1") | None => None,
        Some(ns) => Some(ns.to_string()),
    };
    Ok(GeneratedSchema {
        in_v1_dir: raw_namespace == Some("v1"),
        namespace,
        logical_name: logical_name.to_string(),
        value: schema_value,
    })
}

pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
where
    T: JsonSchema,
{
    write_json_schema_with_return::<T>(out_dir, name)
}

fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
    let json = serde_json::to_vec_pretty(value)
        .with_context(|| format!("Failed to serialize JSON schema to {}", path.display()))?;
    fs::write(&path, json).with_context(|| format!("Failed to write {}", path.display()))?;
    Ok(())
}

/// Split a fully-qualified type name like "v2::Type" into its namespace and logical name.
fn split_namespace(name: &str) -> (Option<&str>, &str) {
    name.split_once("::")
        .map_or((None, name), |(ns, rest)| (Some(ns), rest))
}
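
// Examples (follow directly from the `split_once` call above):
//   split_namespace("v2::ThreadStartParams") == (Some("v2"), "ThreadStartParams")
//   split_namespace("RequestId") == (None, "RequestId")
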
/// Recursively rewrite $ref values that point at "#/definitions/..." so that
/// they point to a namespaced location under the bundle.
fn rewrite_refs_to_namespace(value: &mut Value, ns: &str) {
    match value {
        Value::Object(obj) => {
            if let Some(Value::String(r)) = obj.get_mut("$ref")
                && let Some(suffix) = r.strip_prefix("#/definitions/")
            {
                let prefix = format!("{ns}/");
                if !suffix.starts_with(&prefix) {
                    *r = format!("#/definitions/{ns}/{suffix}");
                }
            }
            for v in obj.values_mut() {
                rewrite_refs_to_namespace(v, ns);
            }
        }
        Value::Array(items) => {
            for v in items.iter_mut() {
                rewrite_refs_to_namespace(v, ns);
            }
        }
        _ => {}
    }
}
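
// Example: with ns = "v2", a schema fragment
//   { "$ref": "#/definitions/ThreadItem" }
// is rewritten to
//   { "$ref": "#/definitions/v2/ThreadItem" }
// while a ref that already reads "#/definitions/v2/ThreadItem" is left alone.
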
fn collect_namespaced_types(schemas: &[GeneratedSchema]) -> HashMap<String, String> {
    let mut types = HashMap::new();
    for schema in schemas {
        if let Some(ns) = schema.namespace() {
            types
                .entry(schema.logical_name().to_string())
                .or_insert_with(|| ns.to_string());
            if let Some(Value::Object(defs)) = schema.value().get("definitions") {
                for key in defs.keys() {
                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
                }
            }
            if let Some(Value::Object(defs)) = schema.value().get("$defs") {
                for key in defs.keys() {
                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
                }
            }
        }
    }
    types
}

fn namespace_for_definition<'a>(
    name: &str,
    types: &'a HashMap<String, String>,
) -> Option<&'a String> {
    if let Some(ns) = types.get(name) {
        return Some(ns);
    }
    let trimmed = name.trim_end_matches(|c: char| c.is_ascii_digit());
    if trimmed != name {
        return types.get(trimmed);
    }
    None
}

fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
    if let Some(props) = variant.get("properties").and_then(Value::as_object) {
        if let Some(method_literal) = literal_from_property(props, "method") {
            let pascal = to_pascal_case(method_literal);
            return Some(match base {
                "ClientRequest" | "ServerRequest" => format!("{pascal}Request"),
                "ClientNotification" | "ServerNotification" => format!("{pascal}Notification"),
                _ => format!("{pascal}{base}"),
            });
        }

        if let Some(type_literal) = literal_from_property(props, "type") {
            let pascal = to_pascal_case(type_literal);
            return Some(match base {
                "EventMsg" => format!("{pascal}EventMsg"),
                _ => format!("{pascal}{base}"),
            });
        }

        if props.len() == 1
            && let Some(key) = props.keys().next()
        {
            let pascal = to_pascal_case(key);
            return Some(format!("{pascal}{base}"));
        }
    }

    if let Some(required) = variant.get("required").and_then(Value::as_array)
        && required.len() == 1
        && let Some(key) = required[0].as_str()
    {
        let pascal = to_pascal_case(key);
        return Some(format!("{pascal}{base}"));
    }

    None
}

fn literal_from_property<'a>(props: &'a Map<String, Value>, key: &str) -> Option<&'a str> {
    props.get(key).and_then(string_literal)
}

fn string_literal(value: &Value) -> Option<&str> {
    value.get("const").and_then(Value::as_str).or_else(|| {
        value
            .get("enum")
            .and_then(Value::as_array)
            .and_then(|arr| arr.first())
            .and_then(Value::as_str)
    })
}

fn annotate_schema(value: &mut Value, base: Option<&str>) {
    match value {
        Value::Object(map) => annotate_object(map, base),
        Value::Array(items) => {
            for item in items {
                annotate_schema(item, base);
            }
        }
        _ => {}
    }
}

fn annotate_object(map: &mut Map<String, Value>, base: Option<&str>) {
    let owner = map.get("title").and_then(Value::as_str).map(str::to_owned);
    if let Some(owner) = owner.as_deref()
        && let Some(Value::Object(props)) = map.get_mut("properties")
    {
        set_discriminator_titles(props, owner);
    }

    if let Some(Value::Array(variants)) = map.get_mut("oneOf") {
        annotate_variant_list(variants, base);
    }
    if let Some(Value::Array(variants)) = map.get_mut("anyOf") {
        annotate_variant_list(variants, base);
    }

    if let Some(Value::Object(defs)) = map.get_mut("definitions") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }

    if let Some(Value::Object(defs)) = map.get_mut("$defs") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }

    if let Some(Value::Object(props)) = map.get_mut("properties") {
        for value in props.values_mut() {
            annotate_schema(value, base);
        }
    }

    if let Some(items) = map.get_mut("items") {
        annotate_schema(items, base);
    }

    if let Some(additional) = map.get_mut("additionalProperties") {
        annotate_schema(additional, base);
    }

    for (key, child) in map.iter_mut() {
        match key.as_str() {
            "oneOf"
            | "anyOf"
            | "definitions"
            | "$defs"
            | "properties"
            | "items"
            | "additionalProperties" => {}
            _ => annotate_schema(child, base),
        }
    }
}

fn annotate_variant_list(variants: &mut [Value], base: Option<&str>) {
    let mut seen = HashSet::new();

    for variant in variants.iter() {
        if let Some(name) = variant_title(variant) {
            seen.insert(name.to_owned());
        }
    }

    for variant in variants.iter_mut() {
        let mut variant_name = variant_title(variant).map(str::to_owned);

        if variant_name.is_none()
            && let Some(base_name) = base
            && let Some(name) = variant_definition_name(base_name, variant)
        {
            let mut candidate = name.clone();
            let mut index = 2;
            while seen.contains(&candidate) {
                candidate = format!("{name}{index}");
                index += 1;
            }
            if let Some(obj) = variant.as_object_mut() {
                obj.insert("title".into(), Value::String(candidate.clone()));
            }
            seen.insert(candidate.clone());
            variant_name = Some(candidate);
        }

        if let Some(name) = variant_name.as_deref()
            && let Some(obj) = variant.as_object_mut()
            && let Some(Value::Object(props)) = obj.get_mut("properties")
        {
            set_discriminator_titles(props, name);
        }

        annotate_schema(variant, base);
    }
}

const DISCRIMINATOR_KEYS: &[&str] = &["type", "method", "mode", "status", "role", "reason"];

fn set_discriminator_titles(props: &mut Map<String, Value>, owner: &str) {
    for key in DISCRIMINATOR_KEYS {
        if let Some(prop_schema) = props.get_mut(*key)
            && string_literal(prop_schema).is_some()
            && let Value::Object(prop_obj) = prop_schema
        {
            if prop_obj.contains_key("title") {
                continue;
            }
            let suffix = to_pascal_case(key);
            prop_obj.insert("title".into(), Value::String(format!("{owner}{suffix}")));
        }
    }
}
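
// Example: for owner "Account", a literal-valued discriminator property
//   "type": { "const": "chatgpt" }
// becomes
//   "type": { "const": "chatgpt", "title": "AccountType" }
// Properties that already carry a "title", or whose value is not a string
// literal, are left untouched.
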
fn variant_title(value: &Value) -> Option<&str> {
    value
        .as_object()
        .and_then(|obj| obj.get("title"))
        .and_then(Value::as_str)
}

fn to_pascal_case(input: &str) -> String {
    let mut result = String::new();
    let mut capitalize_next = true;

    for c in input.chars() {
        if c == '_' || c == '-' {
            capitalize_next = true;
            continue;
        }

        if capitalize_next {
            result.extend(c.to_uppercase());
            capitalize_next = false;
        } else {
            result.push(c);
        }
    }

    result
}
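
// Illustrative mappings (consistent with the loop above):
//   to_pascal_case("summary_part_added") == "SummaryPartAdded"
//   to_pascal_case("on-request") == "OnRequest"
//   to_pascal_case("type") == "Type"
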
fn ensure_dir(dir: &Path) -> Result<()> {
    fs::create_dir_all(dir)
        .with_context(|| format!("Failed to create output directory {}", dir.display()))
}

fn rewrite_named_ref_to_namespace(value: &mut Value, ns: &str, name: &str) {
    let direct = format!("#/definitions/{name}");
    let prefixed = format!("{direct}/");
    let replacement = format!("#/definitions/{ns}/{name}");
    let replacement_prefixed = format!("{replacement}/");
    match value {
        Value::Object(obj) => {
            if let Some(Value::String(reference)) = obj.get_mut("$ref") {
                if reference == &direct {
                    *reference = replacement;
                } else if let Some(rest) = reference.strip_prefix(&prefixed) {
                    *reference = format!("{replacement_prefixed}{rest}");
                }
            }
            for child in obj.values_mut() {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        Value::Array(items) => {
            for child in items {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        _ => {}
    }
}

fn prepend_header_if_missing(path: &Path) -> Result<()> {
    let mut content = String::new();
    {
        let mut f = fs::File::open(path)
            .with_context(|| format!("Failed to open {} for reading", path.display()))?;
        f.read_to_string(&mut content)
            .with_context(|| format!("Failed to read {}", path.display()))?;
    }

    if content.starts_with(HEADER) {
        return Ok(());
    }

    let mut f = fs::File::create(path)
        .with_context(|| format!("Failed to open {} for writing", path.display()))?;
    f.write_all(HEADER.as_bytes())
        .with_context(|| format!("Failed to write header to {}", path.display()))?;
    f.write_all(content.as_bytes())
        .with_context(|| format!("Failed to write content to {}", path.display()))?;
    Ok(())
}

fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
    let mut files = Vec::new();
    for entry in
        fs::read_dir(dir).with_context(|| format!("Failed to read dir {}", dir.display()))?
    {
        let entry = entry?;
        let path = entry.path();
        if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
            files.push(path);
        }
    }
    files.sort();
    Ok(files)
}

fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
    let mut files = Vec::new();
    let mut stack = vec![dir.to_path_buf()];
    while let Some(d) = stack.pop() {
        for entry in
            fs::read_dir(&d).with_context(|| format!("Failed to read dir {}", d.display()))?
        {
            let entry = entry?;
            let path = entry.path();
            if path.is_dir() {
                stack.push(path);
            } else if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
                files.push(path);
            }
        }
    }
    files.sort();
    Ok(files)
}

/// Generate an index.ts file that re-exports all generated types.
/// This allows consumers to import all types from a single file.
fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
    let mut entries: Vec<String> = Vec::new();
    let mut stems: Vec<String> = ts_files_in(out_dir)?
        .into_iter()
        .filter_map(|p| {
            let stem = p.file_stem()?.to_string_lossy().into_owned();
            if stem == "index" { None } else { Some(stem) }
        })
        .collect();
    stems.sort();
    stems.dedup();

    for name in stems {
        entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
    }

    // If this is the root out_dir and a ./v2 folder exists with TS files,
    // expose it as a namespace to avoid symbol collisions at the root.
    let v2_dir = out_dir.join("v2");
    let has_v2_ts = ts_files_in(&v2_dir).map(|v| !v.is_empty()).unwrap_or(false);
    if has_v2_ts {
        entries.push("export * as v2 from \"./v2\";\n".to_string());
    }

    let mut content =
        String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
    content.push_str(HEADER);
    for line in &entries {
        content.push_str(line);
    }

    let index_path = out_dir.join("index.ts");
    let mut f = fs::File::create(&index_path)
        .with_context(|| format!("Failed to create {}", index_path.display()))?;
    f.write_all(content.as_bytes())
        .with_context(|| format!("Failed to write {}", index_path.display()))?;
    Ok(index_path)
}
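
// Sketch of the emitted index.ts for a root out_dir containing Foo.ts and
// Bar.ts plus a populated ./v2 subdirectory (the file names are hypothetical):
//
//     // GENERATED CODE! DO NOT MODIFY BY HAND!
//
//     export type { Bar } from "./Bar";
//     export type { Foo } from "./Foo";
//     export * as v2 from "./v2";
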
#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;
    use std::collections::BTreeSet;
    use std::fs;
    use std::path::PathBuf;
    use uuid::Uuid;

    #[test]
    fn generated_ts_has_no_optional_nullable_fields() -> Result<()> {
        // Assert that there are no types of the form "?: T | null" in the generated TS files.
        let output_dir = std::env::temp_dir().join(format!("codex_ts_types_{}", Uuid::now_v7()));
        fs::create_dir(&output_dir)?;

        struct TempDirGuard(PathBuf);

        impl Drop for TempDirGuard {
            fn drop(&mut self) {
                let _ = fs::remove_dir_all(&self.0);
            }
        }

        let _guard = TempDirGuard(output_dir.clone());

        // Avoid doing more work than necessary to keep the test from timing out.
        let options = GenerateTsOptions {
            generate_indices: false,
            ensure_headers: false,
            run_prettier: false,
        };
        generate_ts_with_options(&output_dir, None, options)?;

        let mut undefined_offenders = Vec::new();
        let mut optional_nullable_offenders = BTreeSet::new();
        let mut stack = vec![output_dir];
        while let Some(dir) = stack.pop() {
            for entry in fs::read_dir(&dir)? {
                let entry = entry?;
                let path = entry.path();
                if path.is_dir() {
                    stack.push(path);
                    continue;
                }

                if matches!(path.extension().and_then(|ext| ext.to_str()), Some("ts")) {
                    let contents = fs::read_to_string(&path)?;
                    if contents.contains("| undefined") {
                        undefined_offenders.push(path.clone());
                    }

                    const SKIP_PREFIXES: &[&str] = &[
                        "const ",
                        "let ",
                        "var ",
                        "export const ",
                        "export let ",
                        "export var ",
                    ];

                    let mut search_start = 0;
                    while let Some(idx) = contents[search_start..].find("| null") {
                        let abs_idx = search_start + idx;
                        // Find the property-colon for this field by scanning forward
                        // from the start of the segment and ignoring nested braces,
                        // brackets, and parens. This avoids colons inside nested
                        // type literals like `{ [k in string]?: string }`.

                        let line_start_idx =
                            contents[..abs_idx].rfind('\n').map(|i| i + 1).unwrap_or(0);

                        let mut segment_start_idx = line_start_idx;
                        if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind(',') {
                            segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
                        }
                        if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('{') {
                            segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
                        }
                        if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('}') {
                            segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
                        }

                        // Scan forward for the colon that separates the field name from its type.
                        let mut level_brace = 0_i32;
                        let mut level_brack = 0_i32;
                        let mut level_paren = 0_i32;
                        let mut in_single = false;
                        let mut in_double = false;
                        let mut escape = false;
                        let mut prop_colon_idx = None;
                        for (i, ch) in contents[segment_start_idx..abs_idx].char_indices() {
                            let idx_abs = segment_start_idx + i;
                            if escape {
                                escape = false;
                                continue;
                            }
                            match ch {
                                '\\' => {
                                    // Only treat as escape when inside a string.
                                    if in_single || in_double {
                                        escape = true;
                                    }
                                }
                                '\'' => {
                                    if !in_double {
                                        in_single = !in_single;
                                    }
                                }
                                '"' => {
                                    if !in_single {
                                        in_double = !in_double;
                                    }
                                }
                                '{' if !in_single && !in_double => level_brace += 1,
                                '}' if !in_single && !in_double => level_brace -= 1,
                                '[' if !in_single && !in_double => level_brack += 1,
                                ']' if !in_single && !in_double => level_brack -= 1,
                                '(' if !in_single && !in_double => level_paren += 1,
                                ')' if !in_single && !in_double => level_paren -= 1,
                                ':' if !in_single
                                    && !in_double
                                    && level_brace == 0
                                    && level_brack == 0
                                    && level_paren == 0 =>
                                {
                                    prop_colon_idx = Some(idx_abs);
                                    break;
                                }
                                _ => {}
                            }
                        }

                        let Some(colon_idx) = prop_colon_idx else {
                            search_start = abs_idx + 5;
                            continue;
                        };

                        let mut field_prefix = contents[segment_start_idx..colon_idx].trim();
                        if field_prefix.is_empty() {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        if let Some(comment_idx) = field_prefix.rfind("*/") {
                            field_prefix = field_prefix[comment_idx + 2..].trim_start();
                        }

                        if field_prefix.is_empty() {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        if SKIP_PREFIXES
                            .iter()
                            .any(|prefix| field_prefix.starts_with(prefix))
                        {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        if field_prefix.contains('(') {
                            search_start = abs_idx + 5;
                            continue;
                        }

                        // If the last non-whitespace before ':' is '?', then this is an
                        // optional field with a nullable type (i.e., "?: T | null"),
                        // which we explicitly disallow.
                        if field_prefix.chars().rev().find(|c| !c.is_whitespace()) == Some('?') {
                            let line_number =
                                contents[..abs_idx].chars().filter(|c| *c == '\n').count() + 1;
                            let offending_line_end = contents[line_start_idx..]
                                .find('\n')
                                .map(|i| line_start_idx + i)
                                .unwrap_or(contents.len());
                            let offending_snippet =
                                contents[line_start_idx..offending_line_end].trim();

                            optional_nullable_offenders.insert(format!(
                                "{}:{}: {offending_snippet}",
                                path.display(),
                                line_number
                            ));
                        }

                        search_start = abs_idx + 5;
                    }
                }
            }
        }

        assert!(
            undefined_offenders.is_empty(),
            "Generated TypeScript still includes unions with `undefined` in {undefined_offenders:?}"
        );

        // If this assertion fails, it means a field was generated as
        // "?: T | null", i.e. both optional (undefined) and nullable (null).
        // We only want either "?: T" or ": T | null".
        assert!(
            optional_nullable_offenders.is_empty(),
            "Generated TypeScript has optional fields with nullable types (disallowed '?: T | null'), add #[ts(optional)] to fix:\n{optional_nullable_offenders:?}"
        );

        Ok(())
    }
}

codex-rs/app-server-protocol/src/jsonrpc_lite.rs (new file, 71 lines)
@@ -0,0 +1,71 @@
//! We do not do true JSON-RPC 2.0, as we neither send nor expect the
//! "jsonrpc": "2.0" field.

use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;

pub const JSONRPC_VERSION: &str = "2.0";

#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Hash, Eq, JsonSchema, TS)]
#[serde(untagged)]
pub enum RequestId {
    String(String),
    #[ts(type = "number")]
    Integer(i64),
}

pub type Result = serde_json::Value;

/// Refers to any valid JSON-RPC object that can be decoded off the wire, or encoded to be sent.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
#[serde(untagged)]
pub enum JSONRPCMessage {
    Request(JSONRPCRequest),
    Notification(JSONRPCNotification),
    Response(JSONRPCResponse),
    Error(JSONRPCError),
}

/// A request that expects a response.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCRequest {
    pub id: RequestId,
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}

/// A notification which does not expect a response.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCNotification {
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}

/// A successful (non-error) response to a request.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCResponse {
    pub id: RequestId,
    pub result: Result,
}

/// A response to a request that indicates an error occurred.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCError {
    pub error: JSONRPCErrorError,
    pub id: RequestId,
}

#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCErrorError {
    pub code: i64,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub data: Option<serde_json::Value>,
    pub message: String,
}
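
// Illustrative wire shapes (implied by the serde derives above; the method
// name is hypothetical). `RequestId` is untagged, so an integer id serializes
// as a bare number, and a `None` params field is omitted entirely:
//
//     { "id": 7, "method": "example/method" }
//     { "id": 7, "result": {} }
//
// Note that, per the module doc comment, no "jsonrpc": "2.0" field is sent.
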
codex-rs/app-server-protocol/src/lib.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
mod export;
mod jsonrpc_lite;
mod protocol;

pub use export::generate_json;
pub use export::generate_ts;
pub use export::generate_types;
pub use jsonrpc_lite::*;
pub use protocol::common::*;
pub use protocol::thread_history::*;
pub use protocol::v1::*;
pub use protocol::v2::*;
codex-rs/app-server-protocol/src/protocol/common.rs (new file, 834 lines)
@@ -0,0 +1,834 @@
use std::path::Path;

use crate::JSONRPCNotification;
use crate::JSONRPCRequest;
use crate::RequestId;
use crate::export::GeneratedSchema;
use crate::export::write_json_schema;
use crate::protocol::v1;
use crate::protocol::v2;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use strum_macros::Display;
use ts_rs::TS;

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
#[ts(type = "string")]
pub struct GitSha(pub String);

impl GitSha {
    pub fn new(sha: &str) -> Self {
        Self(sha.to_string())
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
pub enum AuthMode {
    ApiKey,
    ChatGPT,
}

/// Generates an `enum ClientRequest` where each variant is a request that the
/// client can send to the server. Each variant has associated `params` and
/// `response` types. Also generates an `export_client_responses()` function to
/// export all response types to TypeScript.
macro_rules! client_request_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident $(=> $wire:literal)? {
                params: $(#[$params_meta:meta])* $params:ty,
                response: $response:ty,
            }
        ),* $(,)?
    ) => {
        /// Request from the client to the server.
        #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
        #[serde(tag = "method", rename_all = "camelCase")]
        pub enum ClientRequest {
            $(
                $(#[$variant_meta])*
                $(#[serde(rename = $wire)] #[ts(rename = $wire)])?
                $variant {
                    #[serde(rename = "id")]
                    request_id: RequestId,
                    $(#[$params_meta])*
                    params: $params,
                },
            )*
        }

        pub fn export_client_responses(
            out_dir: &::std::path::Path,
        ) -> ::std::result::Result<(), ::ts_rs::ExportError> {
            $(
                <$response as ::ts_rs::TS>::export_all_to(out_dir)?;
            )*
            Ok(())
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_client_response_schemas(
            out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(write_json_schema::<$response>(out_dir, stringify!($response))?);
            )*
            Ok(schemas)
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_client_param_schemas(
            out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(write_json_schema::<$params>(out_dir, stringify!($params))?);
            )*
            Ok(schemas)
        }
    };
}
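
// Expansion sketch (assumed from the macro body above): an entry such as
//   ThreadStart => "thread/start" { params: v2::ThreadStartParams, response: v2::ThreadStartResponse, }
// yields an enum variant
//   ThreadStart { request_id: RequestId, params: v2::ThreadStartParams }
// which serializes as { "method": "thread/start", "id": ..., "params": ... },
// plus exporters that emit the params/response types for TypeScript and JSON Schema.
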
client_request_definitions! {
    Initialize {
        params: v1::InitializeParams,
        response: v1::InitializeResponse,
    },

    /// NEW APIs
    // Thread lifecycle
    ThreadStart => "thread/start" {
        params: v2::ThreadStartParams,
        response: v2::ThreadStartResponse,
    },
    ThreadResume => "thread/resume" {
        params: v2::ThreadResumeParams,
        response: v2::ThreadResumeResponse,
    },
    ThreadArchive => "thread/archive" {
        params: v2::ThreadArchiveParams,
        response: v2::ThreadArchiveResponse,
    },
    ThreadList => "thread/list" {
        params: v2::ThreadListParams,
        response: v2::ThreadListResponse,
    },
    SkillsList => "skills/list" {
        params: v2::SkillsListParams,
        response: v2::SkillsListResponse,
    },
    TurnStart => "turn/start" {
        params: v2::TurnStartParams,
        response: v2::TurnStartResponse,
    },
    TurnInterrupt => "turn/interrupt" {
        params: v2::TurnInterruptParams,
        response: v2::TurnInterruptResponse,
    },
    ReviewStart => "review/start" {
        params: v2::ReviewStartParams,
        response: v2::ReviewStartResponse,
    },

    ModelList => "model/list" {
        params: v2::ModelListParams,
        response: v2::ModelListResponse,
    },

    McpServerOauthLogin => "mcpServer/oauth/login" {
        params: v2::McpServerOauthLoginParams,
        response: v2::McpServerOauthLoginResponse,
    },

    McpServerStatusList => "mcpServerStatus/list" {
        params: v2::ListMcpServerStatusParams,
        response: v2::ListMcpServerStatusResponse,
    },

    LoginAccount => "account/login/start" {
        params: v2::LoginAccountParams,
        response: v2::LoginAccountResponse,
    },

    CancelLoginAccount => "account/login/cancel" {
        params: v2::CancelLoginAccountParams,
        response: v2::CancelLoginAccountResponse,
    },

    LogoutAccount => "account/logout" {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v2::LogoutAccountResponse,
    },

    GetAccountRateLimits => "account/rateLimits/read" {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v2::GetAccountRateLimitsResponse,
    },

    FeedbackUpload => "feedback/upload" {
        params: v2::FeedbackUploadParams,
        response: v2::FeedbackUploadResponse,
    },

    /// Execute a command (argv vector) under the server's sandbox.
    OneOffCommandExec => "command/exec" {
        params: v2::CommandExecParams,
        response: v2::CommandExecResponse,
    },

    ConfigRead => "config/read" {
        params: v2::ConfigReadParams,
        response: v2::ConfigReadResponse,
    },
    ConfigValueWrite => "config/value/write" {
        params: v2::ConfigValueWriteParams,
        response: v2::ConfigWriteResponse,
    },
    ConfigBatchWrite => "config/batchWrite" {
        params: v2::ConfigBatchWriteParams,
        response: v2::ConfigWriteResponse,
    },

    GetAccount => "account/read" {
        params: v2::GetAccountParams,
        response: v2::GetAccountResponse,
    },

    /// DEPRECATED APIs below
    NewConversation {
        params: v1::NewConversationParams,
        response: v1::NewConversationResponse,
    },
    GetConversationSummary {
        params: v1::GetConversationSummaryParams,
        response: v1::GetConversationSummaryResponse,
    },
    /// List recorded Codex conversations (rollouts) with optional pagination and search.
    ListConversations {
        params: v1::ListConversationsParams,
        response: v1::ListConversationsResponse,
    },
    /// Resume a recorded Codex conversation from a rollout file.
    ResumeConversation {
        params: v1::ResumeConversationParams,
        response: v1::ResumeConversationResponse,
    },
    ArchiveConversation {
        params: v1::ArchiveConversationParams,
        response: v1::ArchiveConversationResponse,
    },
    SendUserMessage {
        params: v1::SendUserMessageParams,
        response: v1::SendUserMessageResponse,
    },
    SendUserTurn {
        params: v1::SendUserTurnParams,
        response: v1::SendUserTurnResponse,
    },
    InterruptConversation {
        params: v1::InterruptConversationParams,
        response: v1::InterruptConversationResponse,
    },
    AddConversationListener {
        params: v1::AddConversationListenerParams,
        response: v1::AddConversationSubscriptionResponse,
    },
    RemoveConversationListener {
        params: v1::RemoveConversationListenerParams,
        response: v1::RemoveConversationSubscriptionResponse,
    },
    GitDiffToRemote {
        params: v1::GitDiffToRemoteParams,
        response: v1::GitDiffToRemoteResponse,
    },
    LoginApiKey {
        params: v1::LoginApiKeyParams,
        response: v1::LoginApiKeyResponse,
    },
    LoginChatGpt {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::LoginChatGptResponse,
    },
    // DEPRECATED in favor of CancelLoginAccount
    CancelLoginChatGpt {
        params: v1::CancelLoginChatGptParams,
        response: v1::CancelLoginChatGptResponse,
    },
    LogoutChatGpt {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::LogoutChatGptResponse,
    },
    /// DEPRECATED in favor of GetAccount
    GetAuthStatus {
        params: v1::GetAuthStatusParams,
        response: v1::GetAuthStatusResponse,
    },
    GetUserSavedConfig {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::GetUserSavedConfigResponse,
    },
    SetDefaultModel {
        params: v1::SetDefaultModelParams,
        response: v1::SetDefaultModelResponse,
    },
    GetUserAgent {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::GetUserAgentResponse,
    },
    UserInfo {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::UserInfoResponse,
    },
    FuzzyFileSearch {
        params: FuzzyFileSearchParams,
        response: FuzzyFileSearchResponse,
    },
    /// Execute a command (argv vector) under the server's sandbox.
    ExecOneOffCommand {
        params: v1::ExecOneOffCommandParams,
        response: v1::ExecOneOffCommandResponse,
    },
}

/// Generates an `enum ServerRequest` where each variant is a request that the
/// server can send to the client along with the corresponding params and
/// response types. It also generates helper types used by the app/server
/// infrastructure (payload enum, request constructor, and export helpers).
macro_rules! server_request_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident $(=> $wire:literal)? {
                params: $params:ty,
                response: $response:ty,
            }
        ),* $(,)?
    ) => {
        /// Request initiated from the server and sent to the client.
        #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
        #[serde(tag = "method", rename_all = "camelCase")]
        pub enum ServerRequest {
            $(
                $(#[$variant_meta])*
                $(#[serde(rename = $wire)] #[ts(rename = $wire)])?
                $variant {
                    #[serde(rename = "id")]
                    request_id: RequestId,
                    params: $params,
                },
            )*
        }

        #[derive(Debug, Clone, PartialEq, JsonSchema)]
        pub enum ServerRequestPayload {
            $( $variant($params), )*
        }

        impl ServerRequestPayload {
            pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
                match self {
                    $(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
                }
            }
        }

        pub fn export_server_responses(
            out_dir: &::std::path::Path,
        ) -> ::std::result::Result<(), ::ts_rs::ExportError> {
            $(
                <$response as ::ts_rs::TS>::export_all_to(out_dir)?;
            )*
            Ok(())
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_server_response_schemas(
            out_dir: &Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(crate::export::write_json_schema::<$response>(
                    out_dir,
                    concat!(stringify!($variant), "Response"),
                )?);
            )*
            Ok(schemas)
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_server_param_schemas(
            out_dir: &Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(crate::export::write_json_schema::<$params>(
                    out_dir,
                    concat!(stringify!($variant), "Params"),
                )?);
            )*
            Ok(schemas)
        }
    };
}

/// Generates `ServerNotification` enum and helpers, including a JSON Schema
/// exporter for each notification.
macro_rules! server_notification_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident $(=> $wire:literal)? ( $payload:ty )
        ),* $(,)?
    ) => {
        /// Notification sent from the server to the client.
        #[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
        #[serde(tag = "method", content = "params", rename_all = "camelCase")]
        #[strum(serialize_all = "camelCase")]
        pub enum ServerNotification {
            $(
                $(#[$variant_meta])*
                $(#[serde(rename = $wire)] #[ts(rename = $wire)] #[strum(serialize = $wire)])?
                $variant($payload),
            )*
        }

        impl ServerNotification {
            pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
                match self {
                    $(Self::$variant(params) => serde_json::to_value(params),)*
                }
            }
        }

        impl TryFrom<JSONRPCNotification> for ServerNotification {
            type Error = serde_json::Error;

            fn try_from(value: JSONRPCNotification) -> Result<Self, serde_json::Error> {
                serde_json::from_value(serde_json::to_value(value)?)
            }
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_server_notification_schemas(
            out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(schemas.push(crate::export::write_json_schema::<$payload>(out_dir, stringify!($payload))?);)*
            Ok(schemas)
        }
    };
}

/// Notifications sent from the client to the server.
macro_rules! client_notification_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            $variant:ident $( ( $payload:ty ) )?
        ),* $(,)?
    ) => {
        #[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
        #[serde(tag = "method", content = "params", rename_all = "camelCase")]
        #[strum(serialize_all = "camelCase")]
        pub enum ClientNotification {
            $(
                $(#[$variant_meta])*
                $variant $( ( $payload ) )?,
            )*
        }

        pub fn export_client_notification_schemas(
            _out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            // `mut` (with allow) is needed here: payload-less variants expand
            // to no pushes, but any variant with a payload must be able to push.
            #[allow(unused_mut)]
            let mut schemas = Vec::new();
            $( $(schemas.push(crate::export::write_json_schema::<$payload>(_out_dir, stringify!($payload))?);)? )*
            Ok(schemas)
        }
    };
}

impl TryFrom<JSONRPCRequest> for ServerRequest {
    type Error = serde_json::Error;

    fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> {
        serde_json::from_value(serde_json::to_value(value)?)
    }
}

server_request_definitions! {
    /// NEW APIs
    /// Sent when approval is requested for a specific command execution.
    /// This request is used for Turns started via turn/start.
    CommandExecutionRequestApproval => "item/commandExecution/requestApproval" {
        params: v2::CommandExecutionRequestApprovalParams,
        response: v2::CommandExecutionRequestApprovalResponse,
    },

    /// Sent when approval is requested for a specific file change.
    /// This request is used for Turns started via turn/start.
    FileChangeRequestApproval => "item/fileChange/requestApproval" {
        params: v2::FileChangeRequestApprovalParams,
        response: v2::FileChangeRequestApprovalResponse,
    },

    /// DEPRECATED APIs below
    /// Request to approve a patch.
    /// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
    ApplyPatchApproval {
        params: v1::ApplyPatchApprovalParams,
        response: v1::ApplyPatchApprovalResponse,
    },
    /// Request to exec a command.
    /// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
    ExecCommandApproval {
        params: v1::ExecCommandApprovalParams,
        response: v1::ExecCommandApprovalResponse,
    },
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchParams {
    pub query: String,
    pub roots: Vec<String>,
    // if provided, will cancel any previous request that used the same value
    pub cancellation_token: Option<String>,
}

/// Superset of [`codex_file_search::FileMatch`]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct FuzzyFileSearchResult {
    pub root: String,
    pub path: String,
    pub file_name: String,
    pub score: u32,
    pub indices: Option<Vec<u32>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct FuzzyFileSearchResponse {
    pub files: Vec<FuzzyFileSearchResult>,
}

server_notification_definitions! {
    /// NEW NOTIFICATIONS
    Error => "error" (v2::ErrorNotification),
    ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
    ThreadTokenUsageUpdated => "thread/tokenUsage/updated" (v2::ThreadTokenUsageUpdatedNotification),
    TurnStarted => "turn/started" (v2::TurnStartedNotification),
    TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
    TurnDiffUpdated => "turn/diff/updated" (v2::TurnDiffUpdatedNotification),
    TurnPlanUpdated => "turn/plan/updated" (v2::TurnPlanUpdatedNotification),
    ItemStarted => "item/started" (v2::ItemStartedNotification),
    ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
    /// This event is internal-only. Used by Codex Cloud.
    RawResponseItemCompleted => "rawResponseItem/completed" (v2::RawResponseItemCompletedNotification),
    AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
    CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
    TerminalInteraction => "item/commandExecution/terminalInteraction" (v2::TerminalInteractionNotification),
    FileChangeOutputDelta => "item/fileChange/outputDelta" (v2::FileChangeOutputDeltaNotification),
    McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
    McpServerOauthLoginCompleted => "mcpServer/oauthLogin/completed" (v2::McpServerOauthLoginCompletedNotification),
    AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
    AccountRateLimitsUpdated => "account/rateLimits/updated" (v2::AccountRateLimitsUpdatedNotification),
    ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
    ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
    ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
    ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
    DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),

    /// Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.
    WindowsWorldWritableWarning => "windows/worldWritableWarning" (v2::WindowsWorldWritableWarningNotification),

    #[serde(rename = "account/login/completed")]
    #[ts(rename = "account/login/completed")]
    #[strum(serialize = "account/login/completed")]
    AccountLoginCompleted(v2::AccountLoginCompletedNotification),

    /// DEPRECATED NOTIFICATIONS below
    AuthStatusChange(v1::AuthStatusChangeNotification),

    /// Deprecated: use `account/login/completed` instead.
    LoginChatGptComplete(v1::LoginChatGptCompleteNotification),
    SessionConfigured(v1::SessionConfiguredNotification),
}

client_notification_definitions! {
    Initialized,
}

#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;
    use codex_protocol::ConversationId;
    use codex_protocol::account::PlanType;
    use codex_protocol::parse_command::ParsedCommand;
    use codex_protocol::protocol::AskForApproval;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use std::path::PathBuf;

    #[test]
    fn serialize_new_conversation() -> Result<()> {
        let request = ClientRequest::NewConversation {
            request_id: RequestId::Integer(42),
            params: v1::NewConversationParams {
                model: Some("gpt-5.1-codex-max".to_string()),
                model_provider: None,
                profile: None,
                cwd: None,
                approval_policy: Some(AskForApproval::OnRequest),
                sandbox: None,
                config: None,
                base_instructions: None,
                developer_instructions: None,
                compact_prompt: None,
                include_apply_patch_tool: None,
            },
        };
        assert_eq!(
            json!({
                "method": "newConversation",
                "id": 42,
                "params": {
                    "model": "gpt-5.1-codex-max",
                    "modelProvider": null,
                    "profile": null,
                    "cwd": null,
                    "approvalPolicy": "on-request",
                    "sandbox": null,
                    "config": null,
                    "baseInstructions": null,
                    "includeApplyPatchTool": null
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn conversation_id_serializes_as_plain_string() -> Result<()> {
        let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;

        assert_eq!(
            json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
            serde_json::to_value(id)?
        );
        Ok(())
    }

    #[test]
    fn conversation_id_deserializes_from_plain_string() -> Result<()> {
        let id: ConversationId =
            serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;

        assert_eq!(
            ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
            id,
        );
        Ok(())
    }

    #[test]
    fn serialize_client_notification() -> Result<()> {
        let notification = ClientNotification::Initialized;
        // Note there is no "params" field for this notification.
        assert_eq!(
            json!({
                "method": "initialized",
            }),
            serde_json::to_value(&notification)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_server_request() -> Result<()> {
        let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
        let params = v1::ExecCommandApprovalParams {
            conversation_id,
            call_id: "call-42".to_string(),
            command: vec!["echo".to_string(), "hello".to_string()],
            cwd: PathBuf::from("/tmp"),
            reason: Some("because tests".to_string()),
            parsed_cmd: vec![ParsedCommand::Unknown {
                cmd: "echo hello".to_string(),
            }],
        };
        let request = ServerRequest::ExecCommandApproval {
            request_id: RequestId::Integer(7),
            params: params.clone(),
        };

        assert_eq!(
            json!({
                "method": "execCommandApproval",
                "id": 7,
                "params": {
                    "conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
                    "callId": "call-42",
                    "command": ["echo", "hello"],
                    "cwd": "/tmp",
                    "reason": "because tests",
                    "parsedCmd": [
                        {
                            "type": "unknown",
                            "cmd": "echo hello"
                        }
                    ]
                }
            }),
            serde_json::to_value(&request)?,
        );

        let payload = ServerRequestPayload::ExecCommandApproval(params);
        assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
        Ok(())
    }

    #[test]
    fn serialize_get_account_rate_limits() -> Result<()> {
        let request = ClientRequest::GetAccountRateLimits {
            request_id: RequestId::Integer(1),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/rateLimits/read",
                "id": 1,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_login_api_key() -> Result<()> {
        let request = ClientRequest::LoginAccount {
            request_id: RequestId::Integer(2),
            params: v2::LoginAccountParams::ApiKey {
                api_key: "secret".to_string(),
            },
        };
        assert_eq!(
            json!({
                "method": "account/login/start",
                "id": 2,
                "params": {
                    "type": "apiKey",
                    "apiKey": "secret"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_login_chatgpt() -> Result<()> {
        let request = ClientRequest::LoginAccount {
            request_id: RequestId::Integer(3),
            params: v2::LoginAccountParams::Chatgpt,
        };
        assert_eq!(
            json!({
                "method": "account/login/start",
                "id": 3,
                "params": {
                    "type": "chatgpt"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_logout() -> Result<()> {
        let request = ClientRequest::LogoutAccount {
            request_id: RequestId::Integer(4),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/logout",
                "id": 4,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_get_account() -> Result<()> {
        let request = ClientRequest::GetAccount {
            request_id: RequestId::Integer(5),
            params: v2::GetAccountParams {
                refresh_token: false,
            },
        };
        assert_eq!(
            json!({
                "method": "account/read",
                "id": 5,
                "params": {
                    "refreshToken": false
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn account_serializes_fields_in_camel_case() -> Result<()> {
        let api_key = v2::Account::ApiKey {};
        assert_eq!(
            json!({
                "type": "apiKey",
            }),
            serde_json::to_value(&api_key)?,
        );

        let chatgpt = v2::Account::Chatgpt {
            email: "user@example.com".to_string(),
            plan_type: PlanType::Plus,
        };
        assert_eq!(
            json!({
                "type": "chatgpt",
                "email": "user@example.com",
                "planType": "plus",
            }),
            serde_json::to_value(&chatgpt)?,
        );

        Ok(())
    }

    #[test]
    fn serialize_list_models() -> Result<()> {
        let request = ClientRequest::ModelList {
            request_id: RequestId::Integer(6),
            params: v2::ModelListParams::default(),
        };
        assert_eq!(
            json!({
                "method": "model/list",
                "id": 6,
                "params": {
                    "limit": null,
                    "cursor": null
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }
}

15 codex-rs/app-server-protocol/src/protocol/mappers.rs Normal file
@@ -0,0 +1,15 @@
use crate::protocol::v1;
use crate::protocol::v2;

impl From<v1::ExecOneOffCommandParams> for v2::CommandExecParams {
    fn from(value: v1::ExecOneOffCommandParams) -> Self {
        Self {
            command: value.command,
            timeout_ms: value
                .timeout_ms
                .map(|timeout| i64::try_from(timeout).unwrap_or(60_000)),
            cwd: value.cwd,
            sandbox_policy: value.sandbox_policy.map(std::convert::Into::into),
        }
    }
}
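For orientation, a minimal sketch of the mapper above in use; the field values here are invented for illustration:

// Hypothetical values for illustration only.
let v1_params = v1::ExecOneOffCommandParams {
    command: vec!["echo".to_string(), "hi".to_string()],
    timeout_ms: Some(5_000),
    cwd: None,
    sandbox_policy: None,
};
// The From impl converts the u64 timeout to i64, falling back to
// 60_000 ms if the value does not fit.
let v2_params: v2::CommandExecParams = v1_params.into();
assert_eq!(v2_params.timeout_ms, Some(5_000));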
8 codex-rs/app-server-protocol/src/protocol/mod.rs Normal file
@@ -0,0 +1,8 @@
// Module declarations for the app-server protocol namespace.
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.

pub mod common;
mod mappers;
pub mod thread_history;
pub mod v1;
pub mod v2;
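A rough sketch of what the comment above implies for downstream crates, assuming `lib.rs` re-exports the common types as stated (the import below matches how the test client later in this diff pulls in shared types):

// Assumes the `pub use protocol::common::*;` re-export mentioned above,
// so shared types are importable from the crate root.
use codex_app_server_protocol::RequestId;

let id = RequestId::Integer(1);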
413 codex-rs/app-server-protocol/src/protocol/thread_history.rs Normal file
@@ -0,0 +1,413 @@
use crate::protocol::v2::ThreadItem;
use crate::protocol::v2::Turn;
use crate::protocol::v2::TurnError;
use crate::protocol::v2::TurnStatus;
use crate::protocol::v2::UserInput;
use codex_protocol::protocol::AgentReasoningEvent;
use codex_protocol::protocol::AgentReasoningRawContentEvent;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::TurnAbortedEvent;
use codex_protocol::protocol::UserMessageEvent;

/// Convert persisted [`EventMsg`] entries into a sequence of [`Turn`] values.
///
/// The purpose of this is to convert the EventMsgs persisted in a rollout file
/// into a sequence of Turns and ThreadItems, which allows the client to render
/// the historical messages when resuming a thread.
pub fn build_turns_from_event_msgs(events: &[EventMsg]) -> Vec<Turn> {
    let mut builder = ThreadHistoryBuilder::new();
    for event in events {
        builder.handle_event(event);
    }
    builder.finish()
}

struct ThreadHistoryBuilder {
    turns: Vec<Turn>,
    current_turn: Option<PendingTurn>,
    next_turn_index: i64,
    next_item_index: i64,
}

impl ThreadHistoryBuilder {
    fn new() -> Self {
        Self {
            turns: Vec::new(),
            current_turn: None,
            next_turn_index: 1,
            next_item_index: 1,
        }
    }

    fn finish(mut self) -> Vec<Turn> {
        self.finish_current_turn();
        self.turns
    }

    /// This function should handle all EventMsg variants that can be persisted in a rollout file.
    /// See `should_persist_event_msg` in `codex-rs/core/rollout/policy.rs`.
    fn handle_event(&mut self, event: &EventMsg) {
        match event {
            EventMsg::UserMessage(payload) => self.handle_user_message(payload),
            EventMsg::AgentMessage(payload) => self.handle_agent_message(payload.message.clone()),
            EventMsg::AgentReasoning(payload) => self.handle_agent_reasoning(payload),
            EventMsg::AgentReasoningRawContent(payload) => {
                self.handle_agent_reasoning_raw_content(payload)
            }
            EventMsg::TokenCount(_) => {}
            EventMsg::EnteredReviewMode(_) => {}
            EventMsg::ExitedReviewMode(_) => {}
            EventMsg::UndoCompleted(_) => {}
            EventMsg::TurnAborted(payload) => self.handle_turn_aborted(payload),
            _ => {}
        }
    }

    fn handle_user_message(&mut self, payload: &UserMessageEvent) {
        self.finish_current_turn();
        let mut turn = self.new_turn();
        let id = self.next_item_id();
        let content = self.build_user_inputs(payload);
        turn.items.push(ThreadItem::UserMessage { id, content });
        self.current_turn = Some(turn);
    }

    fn handle_agent_message(&mut self, text: String) {
        if text.is_empty() {
            return;
        }

        let id = self.next_item_id();
        self.ensure_turn()
            .items
            .push(ThreadItem::AgentMessage { id, text });
    }

    fn handle_agent_reasoning(&mut self, payload: &AgentReasoningEvent) {
        if payload.text.is_empty() {
            return;
        }

        // If the last item is a reasoning item, add the new text to the summary.
        if let Some(ThreadItem::Reasoning { summary, .. }) = self.ensure_turn().items.last_mut() {
            summary.push(payload.text.clone());
            return;
        }

        // Otherwise, create a new reasoning item.
        let id = self.next_item_id();
        self.ensure_turn().items.push(ThreadItem::Reasoning {
            id,
            summary: vec![payload.text.clone()],
            content: Vec::new(),
        });
    }

    fn handle_agent_reasoning_raw_content(&mut self, payload: &AgentReasoningRawContentEvent) {
        if payload.text.is_empty() {
            return;
        }

        // If the last item is a reasoning item, add the new text to the content.
        if let Some(ThreadItem::Reasoning { content, .. }) = self.ensure_turn().items.last_mut() {
            content.push(payload.text.clone());
            return;
        }

        // Otherwise, create a new reasoning item.
        let id = self.next_item_id();
        self.ensure_turn().items.push(ThreadItem::Reasoning {
            id,
            summary: Vec::new(),
            content: vec![payload.text.clone()],
        });
    }

    fn handle_turn_aborted(&mut self, _payload: &TurnAbortedEvent) {
        let Some(turn) = self.current_turn.as_mut() else {
            return;
        };
        turn.status = TurnStatus::Interrupted;
    }

    fn finish_current_turn(&mut self) {
        if let Some(turn) = self.current_turn.take() {
            if turn.items.is_empty() {
                return;
            }
            self.turns.push(turn.into());
        }
    }

    fn new_turn(&mut self) -> PendingTurn {
        PendingTurn {
            id: self.next_turn_id(),
            items: Vec::new(),
            error: None,
            status: TurnStatus::Completed,
        }
    }

    fn ensure_turn(&mut self) -> &mut PendingTurn {
        if self.current_turn.is_none() {
            let turn = self.new_turn();
            return self.current_turn.insert(turn);
        }

        if let Some(turn) = self.current_turn.as_mut() {
            return turn;
        }

        unreachable!("current turn must exist after initialization");
    }

    fn next_turn_id(&mut self) -> String {
        let id = format!("turn-{}", self.next_turn_index);
        self.next_turn_index += 1;
        id
    }

    fn next_item_id(&mut self) -> String {
        let id = format!("item-{}", self.next_item_index);
        self.next_item_index += 1;
        id
    }

    fn build_user_inputs(&self, payload: &UserMessageEvent) -> Vec<UserInput> {
        let mut content = Vec::new();
        if !payload.message.trim().is_empty() {
            content.push(UserInput::Text {
                text: payload.message.clone(),
            });
        }
        if let Some(images) = &payload.images {
            for image in images {
                content.push(UserInput::Image { url: image.clone() });
            }
        }
        content
    }
}

struct PendingTurn {
    id: String,
    items: Vec<ThreadItem>,
    error: Option<TurnError>,
    status: TurnStatus,
}

impl From<PendingTurn> for Turn {
    fn from(value: PendingTurn) -> Self {
        Self {
            id: value.id,
            items: value.items,
            error: value.error,
            status: value.status,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_protocol::protocol::AgentMessageEvent;
    use codex_protocol::protocol::AgentReasoningEvent;
    use codex_protocol::protocol::AgentReasoningRawContentEvent;
    use codex_protocol::protocol::TurnAbortReason;
    use codex_protocol::protocol::TurnAbortedEvent;
    use codex_protocol::protocol::UserMessageEvent;
    use pretty_assertions::assert_eq;

    #[test]
    fn builds_multiple_turns_with_reasoning_items() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "First turn".into(),
                images: Some(vec!["https://example.com/one.png".into()]),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Hi there".into(),
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "thinking".into(),
            }),
            EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
                text: "full reasoning".into(),
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Second turn".into(),
                images: None,
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Reply two".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 2);

        let first = &turns[0];
        assert_eq!(first.id, "turn-1");
        assert_eq!(first.status, TurnStatus::Completed);
        assert_eq!(first.items.len(), 3);
        assert_eq!(
            first.items[0],
            ThreadItem::UserMessage {
                id: "item-1".into(),
                content: vec![
                    UserInput::Text {
                        text: "First turn".into(),
                    },
                    UserInput::Image {
                        url: "https://example.com/one.png".into(),
                    }
                ],
            }
        );
        assert_eq!(
            first.items[1],
            ThreadItem::AgentMessage {
                id: "item-2".into(),
                text: "Hi there".into(),
            }
        );
        assert_eq!(
            first.items[2],
            ThreadItem::Reasoning {
                id: "item-3".into(),
                summary: vec!["thinking".into()],
                content: vec!["full reasoning".into()],
            }
        );

        let second = &turns[1];
        assert_eq!(second.id, "turn-2");
        assert_eq!(second.items.len(), 2);
        assert_eq!(
            second.items[0],
            ThreadItem::UserMessage {
                id: "item-4".into(),
                content: vec![UserInput::Text {
                    text: "Second turn".into()
                }],
            }
        );
        assert_eq!(
            second.items[1],
            ThreadItem::AgentMessage {
                id: "item-5".into(),
                text: "Reply two".into(),
            }
        );
    }

    #[test]
    fn splits_reasoning_when_interleaved() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "Turn start".into(),
                images: None,
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "first summary".into(),
            }),
            EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
                text: "first content".into(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "interlude".into(),
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "second summary".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 1);
        let turn = &turns[0];
        assert_eq!(turn.items.len(), 4);

        assert_eq!(
            turn.items[1],
            ThreadItem::Reasoning {
                id: "item-2".into(),
                summary: vec!["first summary".into()],
                content: vec!["first content".into()],
            }
        );
        assert_eq!(
            turn.items[3],
            ThreadItem::Reasoning {
                id: "item-4".into(),
                summary: vec!["second summary".into()],
                content: Vec::new(),
            }
        );
    }

    #[test]
    fn marks_turn_as_interrupted_when_aborted() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "Please do the thing".into(),
                images: None,
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Working...".into(),
            }),
            EventMsg::TurnAborted(TurnAbortedEvent {
                reason: TurnAbortReason::Replaced,
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Let's try again".into(),
                images: None,
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Second attempt complete.".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 2);

        let first_turn = &turns[0];
        assert_eq!(first_turn.status, TurnStatus::Interrupted);
        assert_eq!(first_turn.items.len(), 2);
        assert_eq!(
            first_turn.items[0],
            ThreadItem::UserMessage {
                id: "item-1".into(),
                content: vec![UserInput::Text {
                    text: "Please do the thing".into()
                }],
            }
        );
        assert_eq!(
            first_turn.items[1],
            ThreadItem::AgentMessage {
                id: "item-2".into(),
                text: "Working...".into(),
            }
        );

        let second_turn = &turns[1];
        assert_eq!(second_turn.status, TurnStatus::Completed);
        assert_eq!(second_turn.items.len(), 2);
        assert_eq!(
            second_turn.items[0],
            ThreadItem::UserMessage {
                id: "item-3".into(),
                content: vec![UserInput::Text {
                    text: "Let's try again".into()
                }],
            }
        );
        assert_eq!(
            second_turn.items[1],
            ThreadItem::AgentMessage {
                id: "item-4".into(),
                text: "Second attempt complete.".into(),
            }
        );
    }
}
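A minimal sketch of the helper above in use when resuming a thread; the event values are invented, and the expected ids follow the `turn-N`/`item-N` counters in the builder:

// Hypothetical rollout contents: one user message followed by one reply.
let events = vec![
    EventMsg::UserMessage(UserMessageEvent {
        message: "hello".into(),
        images: None,
    }),
    EventMsg::AgentMessage(AgentMessageEvent {
        message: "hi!".into(),
    }),
];
let turns = build_turns_from_event_msgs(&events);
// A user message opens a turn; with no abort event it is reported as completed.
assert_eq!(turns.len(), 1);
assert_eq!(turns[0].id, "turn-1");
assert_eq!(turns[0].status, TurnStatus::Completed);
assert_eq!(turns[0].items.len(), 2);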
461 codex-rs/app-server-protocol/src/protocol/v1.rs Normal file
@@ -0,0 +1,461 @@
use std::collections::HashMap;
use std::path::PathBuf;

use codex_protocol::ConversationId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::models::ResponseItem;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::TurnAbortReason;
use codex_utils_absolute_path::AbsolutePathBuf;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
use uuid::Uuid;

// Reuse shared types defined in `common.rs`.
use crate::protocol::common::AuthMode;
use crate::protocol::common::GitSha;

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeParams {
    pub client_info: ClientInfo,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ClientInfo {
    pub name: String,
    pub title: Option<String>,
    pub version: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationParams {
    pub model: Option<String>,
    pub model_provider: Option<String>,
    pub profile: Option<String>,
    pub cwd: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub sandbox: Option<SandboxMode>,
    pub config: Option<HashMap<String, serde_json::Value>>,
    pub base_instructions: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub developer_instructions: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub compact_prompt: Option<String>,
    pub include_apply_patch_tool: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationResponse {
    pub conversation_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationResponse {
    pub conversation_id: ConversationId,
    pub model: String,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(untagged)]
pub enum GetConversationSummaryParams {
    RolloutPath {
        #[serde(rename = "rolloutPath")]
        rollout_path: PathBuf,
    },
    ConversationId {
        #[serde(rename = "conversationId")]
        conversation_id: ConversationId,
    },
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetConversationSummaryResponse {
    pub summary: ConversationSummary,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsParams {
    pub page_size: Option<usize>,
    pub cursor: Option<String>,
    pub model_providers: Option<Vec<String>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ConversationSummary {
    pub conversation_id: ConversationId,
    pub path: PathBuf,
    pub preview: String,
    pub timestamp: Option<String>,
    pub model_provider: String,
    pub cwd: PathBuf,
    pub cli_version: String,
    pub source: SessionSource,
    pub git_info: Option<ConversationGitInfo>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
pub struct ConversationGitInfo {
    pub sha: Option<String>,
    pub branch: Option<String>,
    pub origin_url: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsResponse {
    pub items: Vec<ConversationSummary>,
    pub next_cursor: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationParams {
    pub path: Option<PathBuf>,
    pub conversation_id: Option<ConversationId>,
    pub history: Option<Vec<ResponseItem>>,
    pub overrides: Option<NewConversationParams>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationSubscriptionResponse {
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationParams {
    pub conversation_id: ConversationId,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationSubscriptionResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyParams {
    pub api_key: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginChatGptResponse {
    #[schemars(with = "String")]
    pub login_id: Uuid,
    pub auth_url: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteResponse {
    pub sha: GitSha,
    pub diff: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
    /// and [codex_core::protocol::PatchApplyEndEvent].
    pub call_id: String,
    pub file_changes: HashMap<PathBuf, FileChange>,
    /// Optional explanatory reason (e.g. request for extra write access).
    pub reason: Option<String>,
    /// When set, the agent is asking the user to allow writes under this root
    /// for the remainder of the session (unclear if this is honored today).
    pub grant_root: Option<PathBuf>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecCommandApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
    /// and [codex_core::protocol::ExecCommandEndEvent].
    pub call_id: String,
    pub command: Vec<String>,
    pub cwd: PathBuf,
    pub reason: Option<String>,
    pub parsed_cmd: Vec<ParsedCommand>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ExecCommandApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptParams {
    #[schemars(with = "String")]
    pub login_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteParams {
    pub cwd: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptParams {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusParams {
    pub include_token: Option<bool>,
    pub refresh_token: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandParams {
    pub command: Vec<String>,
    pub timeout_ms: Option<u64>,
    pub cwd: Option<PathBuf>,
    pub sandbox_policy: Option<SandboxPolicy>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandResponse {
    pub exit_code: i32,
    pub stdout: String,
    pub stderr: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusResponse {
    pub auth_method: Option<AuthMode>,
    pub auth_token: Option<String>,
    pub requires_openai_auth: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserAgentResponse {
    pub user_agent: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserInfoResponse {
    pub alleged_user_email: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserSavedConfigResponse {
    pub config: UserSavedConfig,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelParams {
    pub model: Option<String>,
    pub reasoning_effort: Option<ReasoningEffort>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelResponse {}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserSavedConfig {
    pub approval_policy: Option<AskForApproval>,
    pub sandbox_mode: Option<SandboxMode>,
    pub sandbox_settings: Option<SandboxSettings>,
    pub forced_chatgpt_workspace_id: Option<String>,
    pub forced_login_method: Option<ForcedLoginMethod>,
    pub model: Option<String>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub tools: Option<Tools>,
    pub profile: Option<String>,
    pub profiles: HashMap<String, Profile>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Profile {
    pub model: Option<String>,
    pub model_provider: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub chatgpt_base_url: Option<String>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Tools {
    pub web_search: Option<bool>,
    pub view_image: Option<bool>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SandboxSettings {
    #[serde(default)]
    pub writable_roots: Vec<AbsolutePathBuf>,
    pub network_access: Option<bool>,
    pub exclude_tmpdir_env_var: Option<bool>,
    pub exclude_slash_tmp: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageParams {
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnParams {
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
    pub cwd: PathBuf,
    pub approval_policy: AskForApproval,
    pub sandbox_policy: SandboxPolicy,
    pub model: String,
    pub effort: Option<ReasoningEffort>,
    pub summary: ReasoningSummary,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationParams {
    pub conversation_id: ConversationId,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationResponse {
    pub abort_reason: TurnAbortReason,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationListenerParams {
    pub conversation_id: ConversationId,
    #[serde(default)]
    pub experimental_raw_events: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationListenerParams {
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type", content = "data")]
pub enum InputItem {
    Text { text: String },
    Image { image_url: String },
    LocalImage { path: PathBuf },
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
/// Deprecated in favor of AccountLoginCompletedNotification.
pub struct LoginChatGptCompleteNotification {
    #[schemars(with = "String")]
    pub login_id: Uuid,
    pub success: bool,
    pub error: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SessionConfiguredNotification {
    pub session_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub history_log_id: u64,
    #[ts(type = "number")]
    pub history_entry_count: usize,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
/// Deprecated notification. Use AccountUpdatedNotification instead.
pub struct AuthStatusChangeNotification {
    pub auth_method: Option<AuthMode>,
}
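To make the serde attributes on the `InputItem` enum in the file above concrete, a hedged sketch of the wire shape they should produce (the payload text is invented):

// Hedged sketch: `InputItem` is adjacently tagged, so the camelCased variant
// name lands under "type" and the variant's fields under "data".
let item = InputItem::Text { text: "hi".to_string() };
let encoded = serde_json::to_value(&item).expect("InputItem should serialize");
assert_eq!(
    encoded,
    serde_json::json!({ "type": "text", "data": { "text": "hi" } })
);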
2100 codex-rs/app-server-protocol/src/protocol/v2.rs Normal file
File diff suppressed because it is too large
1298 codex-rs/app-server-test-client/Cargo.lock generated Normal file
File diff suppressed because it is too large
17 codex-rs/app-server-test-client/Cargo.toml Normal file
@@ -0,0 +1,17 @@
[package]
name = "codex-app-server-test-client"
version.workspace = true
edition.workspace = true
license.workspace = true

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive", "env"] }
codex-app-server-protocol = { workspace = true }
codex-protocol = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
uuid = { workspace = true, features = ["v4"] }
2 codex-rs/app-server-test-client/README.md Normal file
@@ -0,0 +1,2 @@
# App Server Test Client
Exercises simple `codex app-server` flows end-to-end, logging JSON-RPC messages sent between client and server to stdout.
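A hedged invocation example (the package name comes from the `Cargo.toml` above; the kebab-case subcommand names are the clap derive defaults from `src/main.rs` below): `cargo run -p codex-app-server-test-client -- send-message "hello"`.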
864 codex-rs/app-server-test-client/src/main.rs Normal file
@@ -0,0 +1,864 @@
use std::collections::VecDeque;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Write;
use std::process::Child;
use std::process::ChildStdin;
use std::process::ChildStdout;
use std::process::Command;
use std::process::Stdio;
use std::thread;
use std::time::Duration;

use anyhow::Context;
use anyhow::Result;
use anyhow::bail;
use clap::Parser;
use clap::Subcommand;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AddConversationSubscriptionResponse;
use codex_app_server_protocol::ApprovalDecision;
use codex_app_server_protocol::AskForApproval;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
use codex_app_server_protocol::FileChangeRequestApprovalParams;
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
use codex_app_server_protocol::GetAccountRateLimitsResponse;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::InitializeResponse;
use codex_app_server_protocol::InputItem;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCRequest;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginChatGptCompleteNotification;
use codex_app_server_protocol::LoginChatGptResponse;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SandboxPolicy;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_protocol::ConversationId;
use codex_protocol::protocol::Event;
use codex_protocol::protocol::EventMsg;
use serde::Serialize;
use serde::de::DeserializeOwned;
use serde_json::Value;
use uuid::Uuid;

/// Minimal launcher that initializes the Codex app-server and logs the handshake.
#[derive(Parser)]
#[command(author = "Codex", version, about = "Bootstrap Codex app-server", long_about = None)]
struct Cli {
    /// Path to the `codex` CLI binary.
    #[arg(long, env = "CODEX_BIN", default_value = "codex")]
    codex_bin: String,

    #[command(subcommand)]
    command: CliCommand,
}

#[derive(Subcommand)]
enum CliCommand {
    /// Send a user message through the Codex app-server.
    SendMessage {
        /// User message to send to Codex.
        #[arg()]
        user_message: String,
    },
    /// Send a user message through the app-server V2 thread/turn APIs.
    SendMessageV2 {
        /// User message to send to Codex.
        #[arg()]
        user_message: String,
    },
    /// Start a V2 turn that elicits an ExecCommand approval.
    #[command(name = "trigger-cmd-approval")]
    TriggerCmdApproval {
        /// Optional prompt; defaults to a simple python command.
        #[arg()]
        user_message: Option<String>,
    },
    /// Start a V2 turn that elicits an ApplyPatch approval.
    #[command(name = "trigger-patch-approval")]
    TriggerPatchApproval {
        /// Optional prompt; defaults to creating a file via apply_patch.
        #[arg()]
        user_message: Option<String>,
    },
    /// Start a V2 turn that should not elicit an ExecCommand approval.
    #[command(name = "no-trigger-cmd-approval")]
    NoTriggerCmdApproval,
    /// Send two sequential V2 turns in the same thread to test follow-up behavior.
    SendFollowUpV2 {
        /// Initial user message for the first turn.
        #[arg()]
        first_message: String,
        /// Follow-up user message for the second turn.
        #[arg()]
        follow_up_message: String,
    },
    /// Trigger the ChatGPT login flow and wait for completion.
    TestLogin,
    /// Fetch the current account rate limits from the Codex app-server.
    GetAccountRateLimits,
}

fn main() -> Result<()> {
    let Cli { codex_bin, command } = Cli::parse();

    match command {
        CliCommand::SendMessage { user_message } => send_message(codex_bin, user_message),
        CliCommand::SendMessageV2 { user_message } => send_message_v2(codex_bin, user_message),
        CliCommand::TriggerCmdApproval { user_message } => {
            trigger_cmd_approval(codex_bin, user_message)
        }
        CliCommand::TriggerPatchApproval { user_message } => {
            trigger_patch_approval(codex_bin, user_message)
        }
        CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(codex_bin),
        CliCommand::SendFollowUpV2 {
            first_message,
            follow_up_message,
        } => send_follow_up_v2(codex_bin, first_message, follow_up_message),
        CliCommand::TestLogin => test_login(codex_bin),
        CliCommand::GetAccountRateLimits => get_account_rate_limits(codex_bin),
    }
}

fn send_message(codex_bin: String, user_message: String) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let conversation = client.new_conversation()?;
    println!("< newConversation response: {conversation:?}");

    let subscription = client.add_conversation_listener(&conversation.conversation_id)?;
    println!("< addConversationListener response: {subscription:?}");

    let send_response = client.send_user_message(&conversation.conversation_id, &user_message)?;
    println!("< sendUserMessage response: {send_response:?}");

    client.stream_conversation(&conversation.conversation_id)?;

    client.remove_conversation_listener(subscription.subscription_id)?;

    Ok(())
}

fn send_message_v2(codex_bin: String, user_message: String) -> Result<()> {
    send_message_v2_with_policies(codex_bin, user_message, None, None)
}

fn trigger_cmd_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
    let default_prompt =
        "Run `touch /tmp/should-trigger-approval` so I can confirm the file exists.";
    let message = user_message.unwrap_or_else(|| default_prompt.to_string());
    send_message_v2_with_policies(
        codex_bin,
        message,
        Some(AskForApproval::OnRequest),
        Some(SandboxPolicy::ReadOnly),
    )
}

fn trigger_patch_approval(codex_bin: String, user_message: Option<String>) -> Result<()> {
    let default_prompt =
        "Create a file named APPROVAL_DEMO.txt containing a short hello message using apply_patch.";
    let message = user_message.unwrap_or_else(|| default_prompt.to_string());
    send_message_v2_with_policies(
        codex_bin,
        message,
        Some(AskForApproval::OnRequest),
        Some(SandboxPolicy::ReadOnly),
    )
}

fn no_trigger_cmd_approval(codex_bin: String) -> Result<()> {
    let prompt = "Run `touch should_not_trigger_approval.txt`";
    send_message_v2_with_policies(codex_bin, prompt.to_string(), None, None)
}

fn send_message_v2_with_policies(
    codex_bin: String,
    user_message: String,
    approval_policy: Option<AskForApproval>,
    sandbox_policy: Option<SandboxPolicy>,
) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let thread_response = client.thread_start(ThreadStartParams::default())?;
    println!("< thread/start response: {thread_response:?}");
    let mut turn_params = TurnStartParams {
        thread_id: thread_response.thread.id.clone(),
        input: vec![V2UserInput::Text { text: user_message }],
        ..Default::default()
    };
    turn_params.approval_policy = approval_policy;
    turn_params.sandbox_policy = sandbox_policy;

    let turn_response = client.turn_start(turn_params)?;
    println!("< turn/start response: {turn_response:?}");

    client.stream_turn(&thread_response.thread.id, &turn_response.turn.id)?;

    Ok(())
}

fn send_follow_up_v2(
    codex_bin: String,
    first_message: String,
    follow_up_message: String,
) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let thread_response = client.thread_start(ThreadStartParams::default())?;
    println!("< thread/start response: {thread_response:?}");

    let first_turn_params = TurnStartParams {
        thread_id: thread_response.thread.id.clone(),
        input: vec![V2UserInput::Text {
            text: first_message,
        }],
        ..Default::default()
    };
    let first_turn_response = client.turn_start(first_turn_params)?;
    println!("< turn/start response (initial): {first_turn_response:?}");
    client.stream_turn(&thread_response.thread.id, &first_turn_response.turn.id)?;

    let follow_up_params = TurnStartParams {
        thread_id: thread_response.thread.id.clone(),
        input: vec![V2UserInput::Text {
            text: follow_up_message,
        }],
        ..Default::default()
    };
    let follow_up_response = client.turn_start(follow_up_params)?;
    println!("< turn/start response (follow-up): {follow_up_response:?}");
    client.stream_turn(&thread_response.thread.id, &follow_up_response.turn.id)?;

    Ok(())
}

fn test_login(codex_bin: String) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let login_response = client.login_chat_gpt()?;
    println!("< loginChatGpt response: {login_response:?}");
    println!(
        "Open the following URL in your browser to continue:\n{}",
        login_response.auth_url
    );

    let completion = client.wait_for_login_completion(&login_response.login_id)?;
    println!("< loginChatGptComplete notification: {completion:?}");

    if completion.success {
        println!("Login succeeded.");
        Ok(())
    } else {
        bail!(
            "login failed: {}",
            completion
                .error
                .as_deref()
                .unwrap_or("unknown error from loginChatGptComplete")
        );
    }
}

fn get_account_rate_limits(codex_bin: String) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let response = client.get_account_rate_limits()?;
    println!("< account/rateLimits/read response: {response:?}");

    Ok(())
}

struct CodexClient {
    child: Child,
    stdin: Option<ChildStdin>,
    stdout: BufReader<ChildStdout>,
    pending_notifications: VecDeque<JSONRPCNotification>,
}

impl CodexClient {
    fn spawn(codex_bin: String) -> Result<Self> {
        let mut codex_app_server = Command::new(&codex_bin)
            .arg("app-server")
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::inherit())
            .spawn()
            .with_context(|| format!("failed to start `{codex_bin}` app-server"))?;

        let stdin = codex_app_server
            .stdin
            .take()
            .context("codex app-server stdin unavailable")?;
        let stdout = codex_app_server
            .stdout
            .take()
            .context("codex app-server stdout unavailable")?;

        Ok(Self {
            child: codex_app_server,
            stdin: Some(stdin),
            stdout: BufReader::new(stdout),
            pending_notifications: VecDeque::new(),
        })
    }

    fn initialize(&mut self) -> Result<InitializeResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::Initialize {
            request_id: request_id.clone(),
            params: InitializeParams {
                client_info: ClientInfo {
                    name: "codex-toy-app-server".to_string(),
                    title: Some("Codex Toy App Server".to_string()),
                    version: env!("CARGO_PKG_VERSION").to_string(),
                },
            },
        };

        self.send_request(request, request_id, "initialize")
    }

    fn new_conversation(&mut self) -> Result<NewConversationResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::NewConversation {
            request_id: request_id.clone(),
            params: NewConversationParams::default(),
        };

        self.send_request(request, request_id, "newConversation")
    }

    fn add_conversation_listener(
        &mut self,
        conversation_id: &ConversationId,
    ) -> Result<AddConversationSubscriptionResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::AddConversationListener {
            request_id: request_id.clone(),
            params: AddConversationListenerParams {
                conversation_id: *conversation_id,
                experimental_raw_events: false,
            },
        };

        self.send_request(request, request_id, "addConversationListener")
    }

    fn remove_conversation_listener(&mut self, subscription_id: Uuid) -> Result<()> {
        let request_id = self.request_id();
        let request = ClientRequest::RemoveConversationListener {
            request_id: request_id.clone(),
            params: codex_app_server_protocol::RemoveConversationListenerParams { subscription_id },
        };

        self.send_request::<codex_app_server_protocol::RemoveConversationSubscriptionResponse>(
            request,
            request_id,
            "removeConversationListener",
        )?;

        Ok(())
    }

    fn send_user_message(
        &mut self,
        conversation_id: &ConversationId,
        message: &str,
    ) -> Result<SendUserMessageResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::SendUserMessage {
            request_id: request_id.clone(),
            params: SendUserMessageParams {
                conversation_id: *conversation_id,
                items: vec![InputItem::Text {
                    text: message.to_string(),
                }],
            },
        };

        self.send_request(request, request_id, "sendUserMessage")
    }

    fn thread_start(&mut self, params: ThreadStartParams) -> Result<ThreadStartResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::ThreadStart {
            request_id: request_id.clone(),
            params,
        };

        self.send_request(request, request_id, "thread/start")
    }

    fn turn_start(&mut self, params: TurnStartParams) -> Result<TurnStartResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::TurnStart {
            request_id: request_id.clone(),
            params,
        };

        self.send_request(request, request_id, "turn/start")
    }

    fn login_chat_gpt(&mut self) -> Result<LoginChatGptResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::LoginChatGpt {
            request_id: request_id.clone(),
            params: None,
        };

        self.send_request(request, request_id, "loginChatGpt")
    }

    fn get_account_rate_limits(&mut self) -> Result<GetAccountRateLimitsResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::GetAccountRateLimits {
            request_id: request_id.clone(),
            params: None,
        };

        self.send_request(request, request_id, "account/rateLimits/read")
    }

    fn stream_conversation(&mut self, conversation_id: &ConversationId) -> Result<()> {
        loop {
            let notification = self.next_notification()?;

            if !notification.method.starts_with("codex/event/") {
                continue;
            }

            if let Some(event) = self.extract_event(notification, conversation_id)? {
                match &event.msg {
                    EventMsg::AgentMessage(event) => {
                        println!("{}", event.message);
                    }
                    EventMsg::AgentMessageDelta(event) => {
                        print!("{}", event.delta);
                        std::io::stdout().flush().ok();
                    }
                    EventMsg::TaskComplete(event) => {
                        println!("\n[task complete: {event:?}]");
                        break;
                    }
                    EventMsg::TurnAborted(event) => {
                        println!("\n[turn aborted: {:?}]", event.reason);
                        break;
                    }
                    EventMsg::Error(event) => {
                        println!("[error] {event:?}");
                    }
                    _ => {
                        println!("[UNKNOWN EVENT] {:?}", event.msg);
                    }
                }
            }
        }

        Ok(())
    }

    fn wait_for_login_completion(
        &mut self,
        expected_login_id: &Uuid,
    ) -> Result<LoginChatGptCompleteNotification> {
        loop {
            let notification = self.next_notification()?;

            if let Ok(server_notification) = ServerNotification::try_from(notification) {
                match server_notification {
                    ServerNotification::LoginChatGptComplete(completion) => {
                        if &completion.login_id == expected_login_id {
                            return Ok(completion);
                        }

                        println!(
                            "[ignoring loginChatGptComplete for unexpected login_id: {}]",
                            completion.login_id
                        );
                    }
                    ServerNotification::AuthStatusChange(status) => {
                        println!("< authStatusChange notification: {status:?}");
                    }
                    ServerNotification::AccountRateLimitsUpdated(snapshot) => {
                        println!("< accountRateLimitsUpdated notification: {snapshot:?}");
                    }
                    ServerNotification::SessionConfigured(_) => {
                        // SessionConfigured notifications are unrelated to login; skip.
                    }
                    _ => {}
                }
            }

            // Not a server notification (likely a conversation event); keep waiting.
        }
    }

    fn stream_turn(&mut self, thread_id: &str, turn_id: &str) -> Result<()> {
        loop {
            let notification = self.next_notification()?;

            let Ok(server_notification) = ServerNotification::try_from(notification) else {
                continue;
            };

            match server_notification {
                ServerNotification::ThreadStarted(payload) => {
                    if payload.thread.id == thread_id {
                        println!("< thread/started notification: {:?}", payload.thread);
                    }
                }
                ServerNotification::TurnStarted(payload) => {
                    if payload.turn.id == turn_id {
                        println!("< turn/started notification: {:?}", payload.turn.status);
                    }
                }
                ServerNotification::AgentMessageDelta(delta) => {
                    print!("{}", delta.delta);
                    std::io::stdout().flush().ok();
                }
                ServerNotification::CommandExecutionOutputDelta(delta) => {
                    print!("{}", delta.delta);
                    std::io::stdout().flush().ok();
                }
                ServerNotification::TerminalInteraction(delta) => {
                    println!("[stdin sent: {}]", delta.stdin);
                    std::io::stdout().flush().ok();
                }
                ServerNotification::ItemStarted(payload) => {
                    println!("\n< item started: {:?}", payload.item);
                }
                ServerNotification::ItemCompleted(payload) => {
                    println!("< item completed: {:?}", payload.item);
                }
                ServerNotification::TurnCompleted(payload) => {
                    if payload.turn.id == turn_id {
                        println!("\n< turn/completed notification: {:?}", payload.turn.status);
                        if payload.turn.status == TurnStatus::Failed
                            && let Some(error) = payload.turn.error
                        {
                            println!("[turn error] {}", error.message);
                        }
                        break;
                    }
                }
                ServerNotification::McpToolCallProgress(payload) => {
                    println!("< MCP tool progress: {}", payload.message);
                }
                _ => {
                    println!("[UNKNOWN SERVER NOTIFICATION] {server_notification:?}");
                }
            }
        }

        Ok(())
    }

    fn extract_event(
        &self,
        notification: JSONRPCNotification,
        conversation_id: &ConversationId,
    ) -> Result<Option<Event>> {
        let params = notification
            .params
            .context("event notification missing params")?;

        let mut map = match params {
            Value::Object(map) => map,
            other => bail!("unexpected params shape: {other:?}"),
        };

        let conversation_value = map
            .remove("conversationId")
            .context("event missing conversationId")?;
        let notification_conversation: ConversationId = serde_json::from_value(conversation_value)
            .context("conversationId was not a valid UUID")?;

        if &notification_conversation != conversation_id {
            return Ok(None);
        }

        let event_value = Value::Object(map);
        let event: Event =
            serde_json::from_value(event_value).context("failed to decode event payload")?;
        Ok(Some(event))
    }

    fn send_request<T>(
        &mut self,
        request: ClientRequest,
        request_id: RequestId,
        method: &str,
    ) -> Result<T>
    where
        T: DeserializeOwned,
    {
        self.write_request(&request)?;
        self.wait_for_response(request_id, method)
    }

    fn write_request(&mut self, request: &ClientRequest) -> Result<()> {
        let request_json = serde_json::to_string(request)?;
        let request_pretty = serde_json::to_string_pretty(request)?;
        print_multiline_with_prefix("> ", &request_pretty);

        if let Some(stdin) = self.stdin.as_mut() {
            writeln!(stdin, "{request_json}")?;
            stdin
                .flush()
                .context("failed to flush request to codex app-server")?;
        } else {
            bail!("codex app-server stdin closed");
        }

        Ok(())
    }

    fn wait_for_response<T>(&mut self, request_id: RequestId, method: &str) -> Result<T>
    where
        T: DeserializeOwned,
    {
        loop {
            let message = self.read_jsonrpc_message()?;

            match message {
                JSONRPCMessage::Response(JSONRPCResponse { id, result }) => {
                    if id == request_id {
                        return serde_json::from_value(result)
                            .with_context(|| format!("{method} response missing payload"));
                    }
                }
                JSONRPCMessage::Error(err) => {
                    if err.id == request_id {
                        bail!("{method} failed: {err:?}");
                    }
                }
                JSONRPCMessage::Notification(notification) => {
                    self.pending_notifications.push_back(notification);
                }
                JSONRPCMessage::Request(request) => {
                    self.handle_server_request(request)?;
                }
            }
        }
    }

    fn next_notification(&mut self) -> Result<JSONRPCNotification> {
        if let Some(notification) = self.pending_notifications.pop_front() {
            return Ok(notification);
        }

        loop {
            let message = self.read_jsonrpc_message()?;

            match message {
                JSONRPCMessage::Notification(notification) => return Ok(notification),
                JSONRPCMessage::Response(_) | JSONRPCMessage::Error(_) => {
                    // No outstanding requests, so ignore stray responses/errors for now.
                    continue;
                }
                JSONRPCMessage::Request(request) => {
                    self.handle_server_request(request)?;
                }
            }
        }
    }

    fn read_jsonrpc_message(&mut self) -> Result<JSONRPCMessage> {
        loop {
            let mut response_line = String::new();
            let bytes = self
                .stdout
                .read_line(&mut response_line)
                .context("failed to read from codex app-server")?;

            if bytes == 0 {
                bail!("codex app-server closed stdout");
            }

            let trimmed = response_line.trim();
            if trimmed.is_empty() {
                continue;
            }

            let parsed: Value =
                serde_json::from_str(trimmed).context("response was not valid JSON-RPC")?;
            let pretty = serde_json::to_string_pretty(&parsed)?;
            print_multiline_with_prefix("< ", &pretty);
            let message: JSONRPCMessage = serde_json::from_value(parsed)
                .context("response was not a valid JSON-RPC message")?;
|
||||
return Ok(message);
|
||||
}
|
||||
}
|
||||
|
||||
fn request_id(&self) -> RequestId {
|
||||
RequestId::String(Uuid::new_v4().to_string())
|
||||
}
|
||||
|
||||
fn handle_server_request(&mut self, request: JSONRPCRequest) -> Result<()> {
|
||||
let server_request = ServerRequest::try_from(request)
|
||||
.context("failed to deserialize ServerRequest from JSONRPCRequest")?;
|
||||
|
||||
match server_request {
|
||||
ServerRequest::CommandExecutionRequestApproval { request_id, params } => {
|
||||
self.handle_command_execution_request_approval(request_id, params)?;
|
||||
}
|
||||
ServerRequest::FileChangeRequestApproval { request_id, params } => {
|
||||
self.approve_file_change_request(request_id, params)?;
|
||||
}
|
||||
other => {
|
||||
bail!("received unsupported server request: {other:?}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_command_execution_request_approval(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
params: CommandExecutionRequestApprovalParams,
|
||||
) -> Result<()> {
|
||||
let CommandExecutionRequestApprovalParams {
|
||||
thread_id,
|
||||
turn_id,
|
||||
item_id,
|
||||
reason,
|
||||
proposed_execpolicy_amendment,
|
||||
} = params;
|
||||
|
||||
println!(
|
||||
"\n< commandExecution approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
|
||||
);
|
||||
if let Some(reason) = reason.as_deref() {
|
||||
println!("< reason: {reason}");
|
||||
}
|
||||
if let Some(execpolicy_amendment) = proposed_execpolicy_amendment.as_ref() {
|
||||
println!("< proposed execpolicy amendment: {execpolicy_amendment:?}");
|
||||
}
|
||||
|
||||
let response = CommandExecutionRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Accept,
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved commandExecution request for item {item_id}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn approve_file_change_request(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
params: FileChangeRequestApprovalParams,
|
||||
) -> Result<()> {
|
||||
let FileChangeRequestApprovalParams {
|
||||
thread_id,
|
||||
turn_id,
|
||||
item_id,
|
||||
reason,
|
||||
grant_root,
|
||||
} = params;
|
||||
|
||||
println!(
|
||||
"\n< fileChange approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
|
||||
);
|
||||
if let Some(reason) = reason.as_deref() {
|
||||
println!("< reason: {reason}");
|
||||
}
|
||||
if let Some(grant_root) = grant_root.as_deref() {
|
||||
println!("< grant root: {}", grant_root.display());
|
||||
}
|
||||
|
||||
let response = FileChangeRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Accept,
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved fileChange request for item {item_id}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_server_request_response<T>(&mut self, request_id: RequestId, response: &T) -> Result<()>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
let message = JSONRPCMessage::Response(JSONRPCResponse {
|
||||
id: request_id,
|
||||
result: serde_json::to_value(response)?,
|
||||
});
|
||||
self.write_jsonrpc_message(message)
|
||||
}
|
||||
|
||||
fn write_jsonrpc_message(&mut self, message: JSONRPCMessage) -> Result<()> {
|
||||
let payload = serde_json::to_string(&message)?;
|
||||
let pretty = serde_json::to_string_pretty(&message)?;
|
||||
print_multiline_with_prefix("> ", &pretty);
|
||||
|
||||
if let Some(stdin) = self.stdin.as_mut() {
|
||||
writeln!(stdin, "{payload}")?;
|
||||
stdin
|
||||
.flush()
|
||||
.context("failed to flush response to codex app-server")?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
bail!("codex app-server stdin closed")
|
||||
}
|
||||
}
|
||||
|
||||
fn print_multiline_with_prefix(prefix: &str, payload: &str) {
|
||||
for line in payload.lines() {
|
||||
println!("{prefix}{line}");
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for CodexClient {
|
||||
fn drop(&mut self) {
|
||||
let _ = self.stdin.take();
|
||||
|
||||
if let Ok(Some(status)) = self.child.try_wait() {
|
||||
println!("[codex app-server exited: {status}]");
|
||||
return;
|
||||
}
|
||||
|
||||
thread::sleep(Duration::from_millis(100));
|
||||
|
||||
if let Ok(Some(status)) = self.child.try_wait() {
|
||||
println!("[codex app-server exited: {status}]");
|
||||
return;
|
||||
}
|
||||
|
||||
let _ = self.child.kill();
|
||||
let _ = self.child.wait();
|
||||
}
|
||||
}
|
||||
codex-rs/app-server/Cargo.toml (new file, 59 lines)

[package]
name = "codex-app-server"
version.workspace = true
edition.workspace = true
license.workspace = true

[[bin]]
name = "codex-app-server"
path = "src/main.rs"

[lib]
name = "codex_app_server"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
codex-arg0 = { workspace = true }
codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-backend-client = { workspace = true }
codex-file-search = { workspace = true }
codex-login = { workspace = true }
codex-protocol = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-feedback = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-json-to-toml = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
mcp-types = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
    "macros",
    "process",
    "rt-multi-thread",
    "signal",
] }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
uuid = { workspace = true, features = ["serde", "v7"] }

[dev-dependencies]
app_test_support = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
core_test_support = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }
pretty_assertions = { workspace = true }
serial_test = { workspace = true }
wiremock = { workspace = true }
shlex = { workspace = true }

codex-rs/app-server/README.md (new file, 499 lines)

# codex-app-server

`codex app-server` is the interface Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt).

## Table of Contents

- [Protocol](#protocol)
- [Message Schema](#message-schema)
- [Core Primitives](#core-primitives)
- [Lifecycle Overview](#lifecycle-overview)
- [Initialization](#initialization)
- [API Overview](#api-overview)
- [Events](#events)
- [Approvals](#approvals)
- [Auth endpoints](#auth-endpoints)

## Protocol

Similar to [MCP](https://modelcontextprotocol.io/), `codex app-server` supports bidirectional communication, streaming JSONL over stdio. The protocol is JSON-RPC 2.0, though the `"jsonrpc":"2.0"` header is omitted.

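For illustration, a minimal sketch of this framing (assuming the `serde_json` crate; the `clientInfo` values are placeholders): each message is serialized onto a single line, the `"jsonrpc":"2.0"` field is simply absent, and a newline terminates the frame.

```rust
use std::io::Write;

fn main() {
    // One JSON-RPC message per line; note there is no "jsonrpc":"2.0" field.
    let request = serde_json::json!({
        "method": "initialize",
        "id": 0,
        "params": { "clientInfo": { "name": "demo", "title": "Demo", "version": "0.0.1" } }
    });
    let line = serde_json::to_string(&request).expect("request serializes");
    writeln!(std::io::stdout().lock(), "{line}").expect("write frame");
}
```
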
## Message Schema

Currently, you can dump a TypeScript version of the schema using `codex app-server generate-ts`, or a JSON Schema bundle via `codex app-server generate-json-schema`. Each output is specific to the version of Codex you used to run the command, so the generated artifacts are guaranteed to match that version.

```
codex app-server generate-ts --out DIR
codex app-server generate-json-schema --out DIR
```

## Core Primitives

The API exposes three top-level primitives representing an interaction between a user and Codex:

- **Thread**: A conversation between a user and the Codex agent. Each thread contains multiple turns.
- **Turn**: One turn of the conversation, typically starting with a user message and finishing with an agent message. Each turn contains multiple items.
- **Item**: Represents user inputs and agent outputs within a turn, persisted and used as context for future conversations. Example items include user messages, agent reasoning, agent messages, shell commands, and file edits.

Use the thread APIs to create, list, or archive conversations. Drive a conversation with turn APIs and stream progress via turn notifications.

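As a rough mental model only (the generated schema from the Message Schema section is authoritative), the hierarchy can be sketched in Rust along these lines. Field names are taken from the wire examples later in this document, and `ItemKind` is a hypothetical stand-in for the full `ThreadItem` union described under Events:

```rust
#![allow(dead_code)]

// Sketch of the Thread -> Turn -> Item hierarchy; consult the generated
// schema for the actual field sets and variants.
struct Thread {
    id: String,             // e.g. "thr_123"
    preview: String,        // first-message preview shown in history UIs
    model_provider: String, // e.g. "openai"
    created_at: u64,        // Unix seconds
}

struct Turn {
    id: String,            // e.g. "turn_456"
    status: TurnStatus,
    items: Vec<ItemKind>,
    error: Option<String>, // populated when the turn fails
}

enum TurnStatus {
    InProgress,
    Completed,
    Interrupted,
    Failed,
}

// Hypothetical stand-in for the `ThreadItem` tagged union.
enum ItemKind {
    UserMessage { id: String },
    AgentMessage { id: String, text: String },
    CommandExecution { id: String, command: String },
    // ...plus the other variants listed under "Items" below.
}
```
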
## Lifecycle Overview

- Initialize once: Immediately after launching the codex app-server process, send an `initialize` request with your client metadata, then emit an `initialized` notification. Any other request sent before this handshake is rejected.
- Start (or resume) a thread: Call `thread/start` to open a fresh conversation. The response returns the thread object, and you’ll also get a `thread/started` notification. If you’re continuing an existing conversation, call `thread/resume` with its ID instead.
- Begin a turn: To send user input, call `turn/start` with the target `threadId` and the user's input. Optional fields let you override the model, cwd, sandbox policy, etc. This immediately returns the new turn object and triggers a `turn/started` notification.
- Stream events: After `turn/start`, keep reading JSON-RPC notifications on stdout. You’ll see `item/started`, `item/completed`, deltas like `item/agentMessage/delta`, tool progress, etc. These represent streaming model output plus any side effects (commands, tool calls, reasoning notes).
- Finish the turn: When the model is done (or the turn is interrupted via the `turn/interrupt` call), the server sends `turn/completed` with the final turn state and token usage. (A minimal end-to-end sketch follows this list.)

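A minimal sketch of that sequence over stdio, assuming the `anyhow` and `serde_json` crates and a `codex` binary on `PATH`. Response correlation and error handling are elided, and the hard-coded `threadId` stands in for the id returned by `thread/start`:

```rust
use std::io::{BufRead, BufReader, Write};
use std::process::{Command, Stdio};

fn main() -> anyhow::Result<()> {
    // Launch the server and speak line-delimited JSON-RPC over its stdio.
    let mut child = Command::new("codex")
        .arg("app-server")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;
    let mut stdin = child.stdin.take().expect("stdin piped");
    let stdout = BufReader::new(child.stdout.take().expect("stdout piped"));

    let mut send = |msg: serde_json::Value| -> anyhow::Result<()> {
        writeln!(stdin, "{}", serde_json::to_string(&msg)?)?;
        Ok(())
    };

    // 1. Handshake: `initialize`, then the `initialized` notification.
    send(serde_json::json!({ "method": "initialize", "id": 0,
        "params": { "clientInfo": { "name": "demo", "version": "0.0.1" } } }))?;
    send(serde_json::json!({ "method": "initialized" }))?;

    // 2. Start a thread, then a turn with one text input.
    send(serde_json::json!({ "method": "thread/start", "id": 1, "params": {} }))?;
    send(serde_json::json!({ "method": "turn/start", "id": 2, "params": {
        "threadId": "thr_123", // in practice, read this from the thread/start response
        "input": [ { "type": "text", "text": "Run tests" } ]
    } }))?;

    // 3. Stream notifications until the turn completes.
    for line in stdout.lines() {
        let msg: serde_json::Value = serde_json::from_str(&line?)?;
        println!("< {msg}");
        if msg["method"] == "turn/completed" {
            break;
        }
    }
    Ok(())
}
```
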
## Initialization

Clients must send a single `initialize` request before invoking any other method, then acknowledge with an `initialized` notification. The server returns the user agent string it will present to upstream services; subsequent requests issued before initialization receive a `"Not initialized"` error, and repeated `initialize` calls receive an `"Already initialized"` error.

Applications building on top of `codex app-server` should identify themselves via the `clientInfo` parameter.

Example (from OpenAI's official VSCode extension):

```json
{
  "method": "initialize",
  "id": 0,
  "params": {
    "clientInfo": {
      "name": "codex-vscode",
      "title": "Codex VS Code Extension",
      "version": "0.1.0"
    }
  }
}
```

## API Overview

- `thread/start` — create a new thread; emits `thread/started` and auto-subscribes you to turn/item events for that thread.
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
- `review/start` — kick off Codex’s automated reviewer for a thread; responds like `turn/start` and emits `item/started`/`item/completed` notifications with `enteredReviewMode` and `exitedReviewMode` items, plus a final assistant `agentMessage` containing the review.
- `command/exec` — run a single command under the server sandbox without starting a thread/turn (handy for utilities and validation).
- `model/list` — list available models (with reasoning effort options).
- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
- `mcpServer/oauth/login` — start an OAuth login for a configured MCP server; returns an `authorization_url` and later emits `mcpServer/oauthLogin/completed` once the browser flow finishes.
- `mcpServerStatus/list` — enumerate configured MCP servers with their tools, resources, resource templates, and auth status; supports cursor+limit pagination.
- `feedback/upload` — submit a feedback report (classification plus optional reason/logs and conversation_id); returns the tracking thread id.
- `config/read` — fetch the effective config on disk after resolving config layering.
- `config/value/write` — write a single config key/value to the user's config.toml on disk.
- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk.

### Example: Start or resume a thread

Start a fresh thread when you need a new Codex conversation.

```json
{ "method": "thread/start", "id": 10, "params": {
  // Optionally set config settings. If not specified, the user's
  // current config settings are used.
  "model": "gpt-5.1-codex",
  "cwd": "/Users/me/project",
  "approvalPolicy": "never",
  "sandbox": "workspaceWrite"
} }
{ "id": 10, "result": {
  "thread": {
    "id": "thr_123",
    "preview": "",
    "modelProvider": "openai",
    "createdAt": 1730910000
  }
} }
{ "method": "thread/started", "params": { "thread": { … } } }
```

To continue a stored session, call `thread/resume` with the `thread.id` you previously recorded. The response shape matches `thread/start`, and no additional notifications are emitted:

```json
{ "method": "thread/resume", "id": 11, "params": { "threadId": "thr_123" } }
{ "id": 11, "result": { "thread": { "id": "thr_123", … } } }
```

### Example: List threads (with pagination & filters)

`thread/list` lets you render a history UI. Pass any combination of:

- `cursor` — opaque string from a prior response; omit for the first page.
- `limit` — the server defaults to a reasonable page size if unset.
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array includes all providers.

Example:

```json
{ "method": "thread/list", "id": 20, "params": {
  "cursor": null,
  "limit": 25
} }
{ "id": 20, "result": {
  "data": [
    { "id": "thr_a", "preview": "Create a TUI", "modelProvider": "openai", "createdAt": 1730831111 },
    { "id": "thr_b", "preview": "Fix tests", "modelProvider": "openai", "createdAt": 1730750000 }
  ],
  "nextCursor": "opaque-token-or-null"
} }
```

When `nextCursor` is `null`, you’ve reached the final page.

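A hedged sketch of the pagination loop; `rpc` stands for a hypothetical helper that sends one request (assigning its `id`) and returns the matching JSON-RPC `result`:

```rust
// Page through all threads by following `nextCursor` until it is null.
fn list_all_threads(
    rpc: &mut impl FnMut(serde_json::Value) -> anyhow::Result<serde_json::Value>,
) -> anyhow::Result<Vec<serde_json::Value>> {
    let mut threads = Vec::new();
    let mut cursor = serde_json::Value::Null;
    loop {
        let result = rpc(serde_json::json!({
            "method": "thread/list",
            "params": { "cursor": cursor, "limit": 25 }
        }))?;
        threads.extend(result["data"].as_array().cloned().unwrap_or_default());
        cursor = result["nextCursor"].clone();
        if cursor.is_null() {
            // A null nextCursor marks the final page.
            break;
        }
    }
    Ok(threads)
}
```
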
### Example: Archive a thread

Use `thread/archive` to move the persisted rollout (stored as a JSONL file on disk) into the archived sessions directory.

```json
{ "method": "thread/archive", "id": 21, "params": { "threadId": "thr_b" } }
{ "id": 21, "result": {} }
```

An archived thread will not appear in future calls to `thread/list`.

### Example: Start a turn (send user input)

Turns attach user input (text or images) to a thread and trigger Codex generation. The `input` field is a list of discriminated unions:

- `{"type":"text","text":"Explain this diff"}`
- `{"type":"image","url":"https://…png"}`
- `{"type":"localImage","path":"/tmp/screenshot.png"}`

You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread.

```json
{ "method": "turn/start", "id": 30, "params": {
  "threadId": "thr_123",
  "input": [ { "type": "text", "text": "Run tests" } ],
  // Below are optional config overrides
  "cwd": "/Users/me/project",
  "approvalPolicy": "unlessTrusted",
  "sandboxPolicy": {
    "type": "workspaceWrite",
    "writableRoots": ["/Users/me/project"],
    "networkAccess": true
  },
  "model": "gpt-5.1-codex",
  "effort": "medium",
  "summary": "concise"
} }
{ "id": 30, "result": { "turn": {
  "id": "turn_456",
  "status": "inProgress",
  "items": [],
  "error": null
} } }
```

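For Rust clients, the `input` union maps naturally onto a serde-tagged enum. This is an illustrative sketch derived from the three shapes above, not the actual type from the Codex crates:

```rust
use serde::{Deserialize, Serialize};

// `type` is the serde tag; `rename_all` makes the variant tags camelCase
// ("text", "image", "localImage") to match the wire format.
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "type", rename_all = "camelCase")]
enum UserInput {
    Text { text: String },
    Image { url: String },
    LocalImage { path: std::path::PathBuf },
}

fn main() {
    let input = vec![
        UserInput::Text { text: "Explain this diff".into() },
        UserInput::LocalImage { path: "/tmp/screenshot.png".into() },
    ];
    // [{"type":"text","text":"Explain this diff"},{"type":"localImage","path":"/tmp/screenshot.png"}]
    println!("{}", serde_json::to_string(&input).expect("serializes"));
}
```
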
### Example: Interrupt an active turn

You can cancel a running turn with `turn/interrupt`.

```json
{ "method": "turn/interrupt", "id": 31, "params": {
  "threadId": "thr_123",
  "turnId": "turn_456"
} }
{ "id": 31, "result": {} }
```

The server requests cancellation of running subprocesses, then emits a `turn/completed` event with `status: "interrupted"`. Rely on `turn/completed` to know when Codex-side cleanup is done.

### Example: Request a code review

Use `review/start` to run Codex’s reviewer on the currently checked-out project. The request takes the thread id plus a `target` describing what should be reviewed:

- `{"type":"uncommittedChanges"}` — staged, unstaged, and untracked files.
- `{"type":"baseBranch","branch":"main"}` — diff against the provided branch’s upstream (see the reviewer prompt for the exact `git merge-base`/`git diff` instructions Codex will run).
- `{"type":"commit","sha":"abc1234","title":"Optional subject"}` — review a specific commit.
- `{"type":"custom","instructions":"Free-form reviewer instructions"}` — fallback prompt equivalent to the legacy manual review request.

The request also accepts `delivery` (`"inline"` or `"detached"`, default `"inline"`), which controls where the review runs:

- `"inline"`: run the review as a new turn on the existing thread. The response’s `reviewThreadId` equals the original `threadId`, and no new `thread/started` notification is emitted.
- `"detached"`: fork a new review thread from the parent conversation and run the review there. The response’s `reviewThreadId` is the id of this new review thread, and the server emits a `thread/started` notification for it before streaming review items.

Example request/response:

```json
{ "method": "review/start", "id": 40, "params": {
  "threadId": "thr_123",
  "delivery": "inline",
  "target": { "type": "commit", "sha": "1234567deadbeef", "title": "Polish tui colors" }
} }
{ "id": 40, "result": {
  "turn": {
    "id": "turn_900",
    "status": "inProgress",
    "items": [
      { "type": "userMessage", "id": "turn_900", "content": [ { "type": "text", "text": "Review commit 1234567: Polish tui colors" } ] }
    ],
    "error": null
  },
  "reviewThreadId": "thr_123"
} }
```

For a detached review, use `"delivery": "detached"`. The response is the same shape, but `reviewThreadId` will be the id of the new review thread (different from the original `threadId`). The server also emits a `thread/started` notification for that new thread before streaming the review turn.

Codex streams the usual `turn/started` notification followed by an `item/started` with an `enteredReviewMode` item so clients can show progress:

```json
{
  "method": "item/started",
  "params": {
    "item": {
      "type": "enteredReviewMode",
      "id": "turn_900",
      "review": "current changes"
    }
  }
}
```

When the reviewer finishes, the server emits `item/started` and `item/completed` containing an `exitedReviewMode` item with the final review text:

```json
{
  "method": "item/completed",
  "params": {
    "item": {
      "type": "exitedReviewMode",
      "id": "turn_900",
      "review": "Looks solid overall...\n\n- Prefer Stylize helpers — app.rs:10-20\n ..."
    }
  }
}
```

The `review` string is plain text that already bundles the overall explanation plus a bullet list for each structured finding (matching `ThreadItem::ExitedReviewMode` in the generated schema). Use this notification to render the reviewer output in your client.

### Example: One-off command execution

Run a standalone command (argv vector) in the server’s sandbox without creating a thread or turn:

```json
{ "method": "command/exec", "id": 32, "params": {
  "command": ["ls", "-la"],
  "cwd": "/Users/me/project", // optional; defaults to server cwd
  "sandboxPolicy": { "type": "workspaceWrite" }, // optional; defaults to user config
  "timeoutMs": 10000 // optional; ms timeout; defaults to server timeout
} }
{ "id": 32, "result": { "exitCode": 0, "stdout": "...", "stderr": "" } }
```

For clients that are already sandboxed externally, set `sandboxPolicy` to `{"type":"externalSandbox","networkAccess":"enabled"}` (or omit `networkAccess` to keep it restricted). Codex will not enforce its own sandbox in this mode; it tells the model it has full file-system access and passes the `networkAccess` state through `environment_context`.

Notes:

- Empty `command` arrays are rejected.
- `sandboxPolicy` accepts the same shape used by `turn/start` (e.g., `dangerFullAccess`, `readOnly`, `workspaceWrite` with flags, `externalSandbox` with `networkAccess` `restricted|enabled`).
- When omitted, `timeoutMs` falls back to the server default.

## Events

Event notifications are the server-initiated event stream for thread lifecycles, turn lifecycles, and the items within them. After you start or resume a thread, keep reading stdout for `thread/started`, `turn/*`, and `item/*` notifications.

### Turn events

The app-server streams JSON-RPC notifications while a turn is running. Each turn starts with `turn/started` (initial `turn`) and ends with `turn/completed` (final `turn` status). Token usage events stream separately via `thread/tokenUsage/updated`. Clients subscribe to the events they care about, rendering each item incrementally as updates arrive. The per-item lifecycle is always: `item/started` → zero or more item-specific deltas → `item/completed`.

- `turn/started` — `{ turn }` with the turn id, empty `items`, and `status: "inProgress"`.
- `turn/completed` — `{ turn }` where `turn.status` is `completed`, `interrupted`, or `failed`; failures carry `{ error: { message, codexErrorInfo? } }`.
- `turn/diff/updated` — `{ threadId, turnId, diff }` is the up-to-date snapshot of the turn-level unified diff, emitted after every `fileChange` item. `diff` is the latest aggregated unified diff across every file change in the turn. UIs can render this to show the full "what changed" view without stitching individual `fileChange` items together.
- `turn/plan/updated` — `{ turnId, explanation?, plan }` whenever the agent shares or changes its plan; each `plan` entry is `{ step, status }` with `status` in `pending`, `inProgress`, or `completed`.

Today both `turn/started` and `turn/completed` carry an empty `items` array even when item events were streamed; rely on `item/*` notifications for the canonical item list until this is fixed.

#### Items

`ThreadItem` is the tagged union carried in turn responses and `item/*` notifications. Currently we support events for the following items:

- `userMessage` — `{id, content}` where `content` is a list of user inputs (`text`, `image`, or `localImage`).
- `agentMessage` — `{id, text}` containing the accumulated agent reply.
- `reasoning` — `{id, summary, content}` where `summary` holds streamed reasoning summaries (applicable to most OpenAI models) and `content` holds raw reasoning blocks (applicable to e.g. open-source models).
- `commandExecution` — `{id, command, cwd, status, commandActions, aggregatedOutput?, exitCode?, durationMs?}` for sandboxed commands; `status` is `inProgress`, `completed`, `failed`, or `declined`.
- `fileChange` — `{id, changes, status}` describing proposed edits; `changes` lists `{path, kind, diff}` and `status` is `inProgress`, `completed`, `failed`, or `declined`.
- `mcpToolCall` — `{id, server, tool, status, arguments, result?, error?}` describing MCP calls; `status` is `inProgress`, `completed`, or `failed`.
- `webSearch` — `{id, query}` for a web search request issued by the agent.
- `imageView` — `{id, path}` emitted when the agent invokes the image viewer tool.
- `enteredReviewMode` — `{id, review}` sent when the reviewer starts; `review` is a short user-facing label such as `"current changes"` or the requested target description.
- `exitedReviewMode` — `{id, review}` emitted when the reviewer finishes; `review` is the full plain-text review (usually overall notes plus bullet-point findings).
- `compacted` — `{threadId, turnId}` when Codex compacts the conversation history. This can happen automatically.

All items emit two shared lifecycle events:

- `item/started` — emits the full `item` when a new unit of work begins so the UI can render it immediately; the `item.id` in this payload matches the `itemId` used by deltas.
- `item/completed` — sends the final `item` once that work finishes (e.g., after a tool call or message completes); treat this as the authoritative state.

There are additional item-specific events:

#### agentMessage

- `item/agentMessage/delta` — appends streamed text for the agent message; concatenate `delta` values for the same `itemId` in order to reconstruct the full reply (see the sketch below).

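A small sketch of that accumulation, assuming each delta notification carries `itemId` and `delta` fields (consult the generated schema for the exact payload shape); `item/completed` remains the authoritative final text:

```rust
use std::collections::HashMap;

// Accumulate streamed agent-message text keyed by item id.
#[derive(Default)]
struct AgentMessageBuffer {
    by_item: HashMap<String, String>,
}

impl AgentMessageBuffer {
    // Append one `item/agentMessage/delta` payload in arrival order.
    fn apply_delta(&mut self, item_id: &str, delta: &str) {
        self.by_item.entry(item_id.to_string()).or_default().push_str(delta);
    }

    fn text(&self, item_id: &str) -> Option<&str> {
        self.by_item.get(item_id).map(String::as_str)
    }
}

fn main() {
    let mut buf = AgentMessageBuffer::default();
    buf.apply_delta("item_1", "Hello, ");
    buf.apply_delta("item_1", "world!");
    assert_eq!(buf.text("item_1"), Some("Hello, world!"));
}
```
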
#### reasoning

- `item/reasoning/summaryTextDelta` — streams readable reasoning summaries; `summaryIndex` increments when a new summary section opens.
- `item/reasoning/summaryPartAdded` — marks the boundary between reasoning summary sections for an `itemId`; subsequent `summaryTextDelta` entries share the same `summaryIndex`.
- `item/reasoning/textDelta` — streams raw reasoning text (only applicable to e.g. open-source models); use `contentIndex` to group deltas that belong together before showing them in the UI.

#### commandExecution

- `item/commandExecution/outputDelta` — streams stdout/stderr for the command; append deltas in order to render live output alongside `aggregatedOutput` in the final item.

Final `commandExecution` items include parsed `commandActions`, `status`, `exitCode`, and `durationMs` so the UI can summarize what ran and whether it succeeded.

#### fileChange

- `item/fileChange/outputDelta` — contains the tool call response of the underlying `apply_patch` tool call.

### Errors

An `error` event is emitted whenever the server hits an error mid-turn (for example, upstream model errors or quota limits). It carries the same `{ error: { message, codexErrorInfo? } }` payload as `turn.status: "failed"` and may precede that terminal notification.

`codexErrorInfo` maps to the `CodexErrorInfo` enum. Common values:

- `ContextWindowExceeded`
- `UsageLimitExceeded`
- `HttpConnectionFailed { httpStatusCode? }`: upstream HTTP failures, including 4xx/5xx
- `ResponseStreamConnectionFailed { httpStatusCode? }`: failure to connect to the response SSE stream
- `ResponseStreamDisconnected { httpStatusCode? }`: disconnect of the response SSE stream mid-turn, before completion
- `ResponseTooManyFailedAttempts { httpStatusCode? }`
- `BadRequest`
- `Unauthorized`
- `SandboxError`
- `InternalServerError`
- `Other`: all unclassified errors

When an upstream HTTP status is available (for example, from the Responses API or a provider), it is forwarded in `httpStatusCode` on the relevant `codexErrorInfo` variant.

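As an illustration of how a client might branch on these values, here is a sketch of a retry predicate. The JSON representation probed below (unit variants as plain strings, data-carrying variants as single-key objects) is an assumption; check the generated schema before relying on it:

```rust
// Decide whether a failed turn looks transient enough to retry, based on the
// `codexErrorInfo` payload carried by `error` events and failed turns.
fn is_retryable(error: &serde_json::Value) -> bool {
    let info = &error["codexErrorInfo"];
    if let Some(name) = info.as_str() {
        // Unit variants such as "InternalServerError".
        return name == "InternalServerError";
    }
    if let Some(obj) = info.as_object() {
        // Data-carrying variants such as {"ResponseStreamDisconnected": {...}}.
        return obj.contains_key("ResponseStreamDisconnected")
            || obj.contains_key("HttpConnectionFailed");
    }
    false
}

fn main() {
    let error = serde_json::json!({
        "message": "stream dropped",
        "codexErrorInfo": { "ResponseStreamDisconnected": { "httpStatusCode": 502 } }
    });
    assert!(is_retryable(&error));
}
```
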
## Approvals

Certain actions (shell commands or modifying files) may require explicit user approval depending on the user's config. When `turn/start` is used, the app-server drives an approval flow by sending a server-initiated JSON-RPC request to the client. The client must respond to tell Codex whether to proceed. UIs should present these requests inline with the active turn so users can review the proposed command or diff before choosing.

- Requests include `threadId` and `turnId` — use them to scope UI state to the active conversation.
- Respond with a single `{ "decision": "accept" | "decline" }` payload (plus optional `acceptSettings` on command executions). The server resumes or declines the work and ends the item with `item/completed`.

### Command execution approvals

Order of messages:

1. `item/started` — shows the pending `commandExecution` item with `command`, `cwd`, and other fields so you can render the proposed action.
2. `item/commandExecution/requestApproval` (request) — carries the same `itemId`, `threadId`, `turnId`, optionally `reason` or `risk`, plus `parsedCmd` for friendly display.
3. Client response — `{ "decision": "accept", "acceptSettings": { "forSession": false } }` or `{ "decision": "decline" }`.
4. `item/completed` — final `commandExecution` item with `status: "completed" | "failed" | "declined"` and execution output. Render this as the authoritative result.

### File change approvals

Order of messages:

1. `item/started` — emits a `fileChange` item with `changes` (diff chunk summaries) and `status: "inProgress"`. Show the proposed edits and paths to the user.
2. `item/fileChange/requestApproval` (request) — includes `itemId`, `threadId`, `turnId`, and an optional `reason`.
3. Client response — `{ "decision": "accept" }` or `{ "decision": "decline" }`.
4. `item/completed` — returns the same `fileChange` item with `status` updated to `completed`, `failed`, or `declined` after the patch attempt. Rely on this to show success/failure and finalize the diff state in your UI.

UI guidance for IDEs: surface an approval dialog as soon as the request arrives. The turn will proceed after the server receives a response to the approval request, and the terminal `item/completed` notification will be sent with the appropriate status.

## Auth endpoints

The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, log out, and inspect ChatGPT rate limits.

### API Overview

- `account/read` — fetch current account info; optionally refresh tokens.
- `account/login/start` — begin login (`apiKey` or `chatgpt`).
- `account/login/completed` (notify) — emitted when a login attempt finishes (success or error).
- `account/login/cancel` — cancel a pending ChatGPT login by `loginId`.
- `account/logout` — sign out; triggers `account/updated`.
- `account/updated` (notify) — emitted whenever the auth mode changes (`authMode`: `apikey`, `chatgpt`, or `null`).
- `account/rateLimits/read` — fetch ChatGPT rate limits; updates arrive via `account/rateLimits/updated` (notify).
- `account/rateLimits/updated` (notify) — emitted whenever a user's ChatGPT rate limits change.
- `mcpServer/oauthLogin/completed` (notify) — emitted after an `mcpServer/oauth/login` flow finishes for a server; the payload includes `{ name, success, error? }`.

### 1) Check auth state

Request:

```json
{ "method": "account/read", "id": 1, "params": { "refreshToken": false } }
```

Response examples:

```json
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": false } } // No OpenAI auth needed (e.g., OSS/local models)
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": true } } // OpenAI auth required (typical for OpenAI-hosted models)
{ "id": 1, "result": { "account": { "type": "apiKey" }, "requiresOpenaiAuth": true } }
{ "id": 1, "result": { "account": { "type": "chatgpt", "email": "user@example.com", "planType": "pro" }, "requiresOpenaiAuth": true } }
```

Field notes:

- `refreshToken` (bool): set `true` to force a token refresh.
- `requiresOpenaiAuth` reflects the active provider; when `false`, Codex can run without OpenAI credentials.

### 2) Log in with an API key

1. Send:

   ```json
   {
     "method": "account/login/start",
     "id": 2,
     "params": { "type": "apiKey", "apiKey": "sk-…" }
   }
   ```

2. Expect:

   ```json
   { "id": 2, "result": { "type": "apiKey" } }
   ```

3. Notifications:

   ```json
   { "method": "account/login/completed", "params": { "loginId": null, "success": true, "error": null } }
   { "method": "account/updated", "params": { "authMode": "apikey" } }
   ```

### 3) Log in with ChatGPT (browser flow)

1. Start:

   ```json
   { "method": "account/login/start", "id": 3, "params": { "type": "chatgpt" } }
   { "id": 3, "result": { "type": "chatgpt", "loginId": "<uuid>", "authUrl": "https://chatgpt.com/…&redirect_uri=http%3A%2F%2Flocalhost%3A<port>%2Fauth%2Fcallback" } }
   ```

2. Open `authUrl` in a browser; the app-server hosts the local callback.

3. Wait for notifications:

   ```json
   { "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": true, "error": null } }
   { "method": "account/updated", "params": { "authMode": "chatgpt" } }
   ```

### 4) Cancel a ChatGPT login

```json
{ "method": "account/login/cancel", "id": 4, "params": { "loginId": "<uuid>" } }
{ "method": "account/login/completed", "params": { "loginId": "<uuid>", "success": false, "error": "…" } }
```

### 5) Logout

```json
{ "method": "account/logout", "id": 5 }
{ "id": 5, "result": {} }
{ "method": "account/updated", "params": { "authMode": null } }
```

### 6) Rate limits (ChatGPT)

```json
{ "method": "account/rateLimits/read", "id": 6 }
{ "id": 6, "result": { "rateLimits": { "primary": { "usedPercent": 25, "windowDurationMins": 15, "resetsAt": 1730947200 }, "secondary": null } } }
{ "method": "account/rateLimits/updated", "params": { "rateLimits": { … } } }
```

Field notes:

- `usedPercent` is the current usage within the OpenAI quota window.
- `windowDurationMins` is the quota window length.
- `resetsAt` is a Unix timestamp (seconds) for the next reset.

codex-rs/app-server/src/bespoke_event_handling.rs (new file, 1983 lines; diff suppressed because it is too large)