Mirror of https://github.com/anomalyco/opencode.git (synced 2026-02-09 10:24:11 +00:00)

Compare commits: 832 commits
[Commit table omitted: the compare view captured only the abbreviated SHA1 column for the 832 commits; the Author, Date, and message columns were empty.]

.editorconfig (new file, 9 lines)
@@ -0,0 +1,9 @@
root = true

[*]
charset = utf-8
insert_final_newline = true
end_of_line = lf
indent_style = space
indent_size = 2
max_line_length = 80

.github/workflows/deploy.yml (7 changed lines)
@@ -3,7 +3,8 @@ name: deploy
on:
push:
branches:
- dontlook
- dev
- production
workflow_dispatch:

concurrency: ${{ github.workflow }}-${{ github.ref }}
@@ -16,10 +17,10 @@ jobs:

- uses: oven-sh/setup-bun@v1
with:
bun-version: latest
bun-version: 1.2.17

- run: bun install

- run: bun sst deploy --stage=dev
- run: bun sst deploy --stage=${{ github.ref_name }}
env:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}

.github/workflows/notify-discord.yml (new file, 14 lines)
@@ -0,0 +1,14 @@
name: discord

on:
  release:
    types: [published] # fires only when a release is published

jobs:
  notify:
    runs-on: ubuntu-latest
    steps:
      - name: Send nicely-formatted embed to Discord
        uses: SethCohen/github-releases-to-discord@v1
        with:
          webhook_url: ${{ secrets.DISCORD_WEBHOOK }}

.github/workflows/opencode.yml (new file, 24 lines)
@@ -0,0 +1,24 @@
name: opencode

on:
  issue_comment:
    types: [created]

jobs:
  opencode:
    if: startsWith(github.event.comment.body, 'hey opencode')
    runs-on: ubuntu-latest
    permissions:
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run opencode
        uses: sst/opencode/sdks/github@github-v1
        env:
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
        with:
          model: anthropic/claude-sonnet-4-20250514

.github/workflows/publish-github-action.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
name: publish-github-action

on:
  workflow_dispatch:
  push:
    tags:
      - "github-v*.*.*"
      - "!github-v1"

concurrency: ${{ github.workflow }}-${{ github.ref }}

permissions:
  contents: write

jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - run: git fetch --force --tags

      - name: Publish
        run: |
          git config --global user.email "opencode@sst.dev"
          git config --global user.name "opencode"
          ./script/publish
        working-directory: ./sdks/github

.github/workflows/publish-vscode.yml (new file, 36 lines)
@@ -0,0 +1,36 @@
name: publish-vscode

on:
  workflow_dispatch:
  push:
    tags:
      - "vscode-v*.*.*"

concurrency: ${{ github.workflow }}-${{ github.ref }}

permissions:
  contents: write

jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: 1.2.17

      - run: git fetch --force --tags
      - run: bun install -g @vscode/vsce

      - name: Publish
        run: |
          bun install
          ./script/publish
        working-directory: ./sdks/vscode
        env:
          VSCE_PAT: ${{ secrets.VSCE_PAT }}
          OPENVSX_TOKEN: ${{ secrets.OPENVSX_TOKEN }}

.github/workflows/publish.yml (4 changed lines)
@@ -7,6 +7,8 @@ on:
- dev
tags:
- "*"
- "!vscode-v*"
- "!github-v*"

concurrency: ${{ github.workflow }}-${{ github.ref }}

@@ -32,7 +34,7 @@ jobs:

- uses: oven-sh/setup-bun@v2
with:
bun-version: 1.2.16
bun-version: 1.2.17

- name: Install makepkg
run: |

.github/workflows/stats.yml (new file, 32 lines)
@@ -0,0 +1,32 @@
name: stats

on:
  schedule:
    - cron: "0 12 * * *" # Run daily at 12:00 UTC
  workflow_dispatch: # Allow manual trigger

jobs:
  stats:
    runs-on: ubuntu-latest
    permissions:
      contents: write

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest

      - name: Run stats script
        run: bun scripts/stats.ts

      - name: Commit stats
        run: |
          git config --local user.email "action@github.com"
          git config --local user.name "GitHub Action"
          git add STATS.md
          git diff --staged --quiet || git commit -m "ignore: update download stats $(date -I)"
          git push

.gitignore (3 changed lines)
@@ -3,3 +3,6 @@ node_modules
.opencode
.sst
.env
.idea
.vscode
openapi.json

AGENTS.md (new file, 15 lines)
@@ -0,0 +1,15 @@
# TUI Agent Guidelines

## Style

- prefer single word variable/function names
- avoid try catch where possible - prefer to let exceptions bubble up
- avoid else statements where possible
- do not make useless helper functions - inline functionality unless the
  function is reusable or composable
- prefer Bun apis

## Workflow

- you can regenerate the golang sdk by calling ./scripts/stainless.ts
- we use bun for everything

LICENSE (2 changed lines)
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2025 OpenCode
Copyright (c) 2025 opencode

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

README.md (179 changed lines)
@@ -1,8 +1,22 @@
[](https://github.com/sst/opencode)
<p align="center">
<a href="https://opencode.ai">
<picture>
<source srcset="packages/web/src/assets/logo-ornate-dark.svg" media="(prefers-color-scheme: dark)">
<source srcset="packages/web/src/assets/logo-ornate-light.svg" media="(prefers-color-scheme: light)">
<img src="packages/web/src/assets/logo-ornate-light.svg" alt="opencode logo">
</picture>
</a>
</p>
<p align="center">AI coding agent, built for the terminal.</p>
<p align="center">
<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
<a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
<a href="https://github.com/sst/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/sst/opencode/publish.yml?style=flat-square&branch=dev" /></a>
</p>

AI coding agent, built for the terminal.
[](https://opencode.ai)

⚠️ **Note:** version 0.1.x is a full rewrite and we do not have proper documentation for it yet. Should have this out week of June 17th 2025 📚
---

### Installation

@@ -16,128 +30,81 @@ brew install sst/tap/opencode # macOS
paru -S opencode-bin # Arch Linux
```

> **Note:** Remove previous versions < 0.1.x first if installed
> [!TIP]
> Remove versions older than 0.1.x before installing.

### Providers
#### Installation Directory

The recommended approach is to sign up for claude pro or max and do `opencode auth login` and select Anthropic. It is the most cost effective way to use this tool.
The install script respects the following priority order for the installation path:

Additionally opencode is powered by the provider list at [models.dev](https://models.dev) so you can use `opencode auth login` to configure api keys for any provider you'd like to use. This is stored in `~/.local/share/opencode/auth.json`
1. `$OPENCODE_INSTALL_DIR` - Custom installation directory
2. `$XDG_BIN_DIR` - XDG Base Directory Specification compliant path
3. `$HOME/bin` - Standard user binary directory (if exists or can be created)
4. `$HOME/.opencode/bin` - Default fallback

```bash
$ opencode auth login

┌ Add credential
│
◆ Select provider
│ ● Anthropic (recommended)
│ ○ OpenAI
│ ○ Google
│ ○ Amazon Bedrock
│ ○ Azure
│ ○ DeepSeek
│ ○ Groq
│ ...
└
# Examples
OPENCODE_INSTALL_DIR=/usr/local/bin curl -fsSL https://opencode.ai/install | bash
XDG_BIN_DIR=$HOME/.local/bin curl -fsSL https://opencode.ai/install | bash
```

The models.dev dataset is also used to detect common environment variables like `OPENAI_API_KEY` to autoload that provider.
### Documentation

If there are additional providers you want to use you can submit a PR to the [models.dev repo](https://github.com/sst/models.dev). If configuring just for yourself check out the Config section below

### Project Config

Project configuration is optional. You can place an `opencode.json` file in the root of your repo and it will be loaded.

```json title="opencode.json"
{
"$schema": "http://opencode.ai/config.json"
}
```

#### MCP

```json title="opencode.json"
{
"$schema": "http://opencode.ai/config.json",
"mcp": {
"localmcp": {
"type": "local",
"command": ["bun", "x", "my-mcp-command"],
"environment": {
"MY_ENV_VAR": "my_env_var_value"
}
},
"remotemcp": {
"type": "remote",
"url": "https://my-mcp-server.com"
}
}
}
```

#### Providers

You can use opencode with any provider listed at [here](https://ai-sdk.dev/providers/ai-sdk-providers). Use the npm package name as the key in your config. Note we use v5 of the ai-sdk and not all providers support that yet.

```json title="opencode.json"
{
"$schema": "http://opencode.ai/config.json",
"provider": {
"@ai-sdk/openai-compatible": {
"name": "MySpecialProvider",
"options": {
"apiKey": "xxx",
"baseURL": "https://api.provider.com/v1"
},
"models": {
"my-model-name": {
"name": "My Model Name"
}
}
}
}
}
```
For more info on how to configure opencode [**head over to our docs**](https://opencode.ai/docs).

### Contributing

To run opencode locally you need
opencode is an opinionated tool so any fundamental feature needs to go through a
design process with the core team.

- bun
- golang 1.24.x
> [!IMPORTANT]
> We do not accept PRs for core features.

To run
However we still merge a ton of PRs - you can contribute:

```
- Bug fixes
- Improvements to LLM performance
- Support for new providers
- Fixes for env specific quirks
- Missing standard behavior
- Documentation

Take a look at the git history to see what kind of PRs we end up merging.

> [!NOTE]
> If you do not follow the above guidelines we might close your PR.

To run opencode locally you need.

- Bun
- Golang 1.24.x

And run.

```bash
$ bun install
$ cd packages/opencode
$ bun run src/index.ts
$ bun run packages/opencode/src/index.ts
```

#### Development Notes

**API Client**: After making changes to the TypeScript API endpoints in `packages/opencode/src/server/server.ts`, you will need the opencode team to generate a new stainless sdk for the clients.

### FAQ

#### How do I use this with OpenRouter
#### How is this different than Claude Code?

Theoretically you can use this with OpenRouter with config like this
It's very similar to Claude Code in terms of capability. Here are the key differences:

```json title="opencode.json"
{
"$schema": "https://opencode.ai/config.json",
"provider": {
"@openrouter/ai-sdk-provider": {
"name": "OpenRouter",
"options": {
"apiKey": "sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
},
"models": {
"anthropic/claude-3.5-sonnet": {
"name": "Claude 3.5 Sonnet"
}
}
}
}
}
```
- 100% open source
- Not coupled to any provider. Although Anthropic is recommended, opencode can be used with OpenAI, Google or even local models. As models evolve the gaps between them will close and pricing will drop so being provider agnostic is important.
- A focus on TUI. opencode is built by neovim users and the creators of [terminal.shop](https://terminal.shop); we are going to push the limits of what's possible in the terminal.
- A client/server architecture. This for example can allow opencode to run on your computer, while you can drive it remotely from a mobile app. Meaning that the TUI frontend is just one of the possible clients.

However we are using [ai-sdk v5](https://ai-sdk.dev) which OpenRouter does not support yet. The moment they do this will work
#### What's the other repo?

The other confusingly named repo has no relation to this one. You can [read the story behind it here](https://x.com/thdxr/status/1933561254481666466).

---

**Join our community** [Discord](https://discord.gg/opencode) | [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)

STATS.md (new file, 25 lines)
@@ -0,0 +1,25 @@
# Download Stats

| Date | GitHub Downloads | npm Downloads | Total |
| ---------- | ---------------- | ---------------- | ----------------- |
| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
| 2025-07-10 | 43,796 (+5,744) | 71,402 (+6,934) | 115,198 (+12,678) |
| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |
| 2025-07-12 | 49,302 (+2,320) | 82,177 (+4,715) | 131,479 (+7,035) |
| 2025-07-13 | 50,803 (+1,501) | 86,394 (+4,217) | 137,197 (+5,718) |
| 2025-07-14 | 53,283 (+2,480) | 87,860 (+1,466) | 141,143 (+3,946) |
| 2025-07-15 | 57,590 (+4,307) | 91,036 (+3,176) | 148,626 (+7,483) |
| 2025-07-16 | 62,313 (+4,723) | 95,258 (+4,222) | 157,571 (+8,945) |
| 2025-07-17 | 66,684 (+4,371) | 100,048 (+4,790) | 166,732 (+9,161) |
| 2025-07-18 | 70,379 (+3,695) | 102,587 (+2,539) | 172,966 (+6,234) |
| 2025-07-18 | 70,380 (+1) | 102,587 (+0) | 172,967 (+1) |
| 2025-07-19 | 73,497 (+3,117) | 105,904 (+3,317) | 179,401 (+6,434) |
| 2025-07-20 | 76,453 (+2,956) | 109,044 (+3,140) | 185,497 (+6,096) |

infra/app.ts (25 changed lines)
@@ -4,13 +4,18 @@ export const domain = (() => {
return `${$app.stage}.dev.opencode.ai`
})()

const GITHUB_APP_ID = new sst.Secret("GITHUB_APP_ID")
const GITHUB_APP_PRIVATE_KEY = new sst.Secret("GITHUB_APP_PRIVATE_KEY")
const bucket = new sst.cloudflare.Bucket("Bucket")

export const api = new sst.cloudflare.Worker("Api", {
domain: `api.${domain}`,
handler: "packages/function/src/api.ts",
environment: {
WEB_DOMAIN: domain,
},
url: true,
link: [bucket],
link: [bucket, GITHUB_APP_ID, GITHUB_APP_PRIVATE_KEY],
transform: {
worker: (args) => {
args.logpush = true
@@ -23,29 +28,21 @@ export const api = new sst.cloudflare.Worker("Api", {
},
])
args.migrations = {
oldTag: "v1",
newTag: "v1",
// Note: when releasing the next tag, make sure all stages use tag v2
oldTag: $app.stage === "production" ? "" : "v1",
newTag: $app.stage === "production" ? "" : "v1",
//newSqliteClasses: ["SyncServer"],
}
},
},
})

// new sst.cloudflare.StaticSite("Web", {
// path: "packages/web",
// domain,
// environment: {
// VITE_API_URL: api.url,
// },
// build: {
// command: "bun run build",
// output: "dist",
// },
// })
new sst.cloudflare.x.Astro("Web", {
domain,
path: "packages/web",
environment: {
// For astro config
SST_STAGE: $app.stage,
VITE_API_URL: api.url,
},
})

install (3 changed lines)
@@ -48,7 +48,7 @@ if [ -z "$requested_version" ]; then
url="https://github.com/sst/opencode/releases/latest/download/$filename"
specific_version=$(curl -s https://api.github.com/repos/sst/opencode/releases/latest | awk -F'"' '/"tag_name": "/ {gsub(/^v/, "", $4); print $4}')

if [[ $? -ne 0 ]]; then
if [[ $? -ne 0 || -z "$specific_version" ]]; then
echo "${RED}Failed to fetch version information${NC}"
exit 1
fi
@@ -186,4 +186,3 @@ if [ -n "${GITHUB_ACTIONS-}" ] && [ "${GITHUB_ACTIONS}" == "true" ]; then
echo "$INSTALL_DIR" >> $GITHUB_PATH
print_message info "Added $INSTALL_DIR to \$GITHUB_PATH"
fi

(file name not captured)
@@ -1,16 +1,23 @@
{
"$schema": "https://opencode.ai/config.json",
"provider": {
"@openrouter/ai-sdk-provider": {
"name": "OpenRouter",
"options": {
"apiKey": "sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
},
"openrouter": {
"models": {
"anthropic/claude-3.5-sonnet": {
"name": "claude-3.5-sonnet"
"moonshotai/kimi-k2": {
"options": {
"provider": {
"order": ["baseten"],
"allow_fallbacks": false
}
}
}
}
}
},
"mcp": {
"weather": {
"type": "local",
"command": ["opencode", "x", "@h1deya/mcp-server-weather"]
}
}
}

package.json (20 changed lines)
@@ -5,7 +5,10 @@
"type": "module",
"packageManager": "bun@1.2.14",
"scripts": {
"typecheck": "bun run --filter='*' typecheck"
"dev": "bun run packages/opencode/src/index.ts",
"typecheck": "bun run --filter='*' typecheck",
"stainless": "./scripts/stainless",
"postinstall": "./scripts/hooks"
},
"workspaces": {
"packages": [
@@ -14,13 +17,13 @@
"catalog": {
"typescript": "5.8.2",
"@types/node": "22.13.9",
"zod": "3.24.2",
"ai": "4.3.16"
"zod": "3.25.49",
"ai": "5.0.0-beta.21"
}
},
"devDependencies": {
"prettier": "3.5.3",
"sst": "3.17.4"
"sst": "3.17.8"
},
"repository": {
"type": "git",
@@ -28,14 +31,13 @@
},
"license": "MIT",
"prettier": {
"semi": false
},
"overrides": {
"zod": "3.24.2"
"semi": false,
"printWidth": 120
},
"trustedDependencies": [
"esbuild",
"protobufjs",
"sharp"
]
],
"patchedDependencies": {}
}

(file name not captured)
@@ -8,5 +8,10 @@
"@cloudflare/workers-types": "4.20250522.0",
"typescript": "catalog:",
"@types/node": "catalog:"
},
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "22.0.0",
"jose": "6.0.11"
}
}

(file name not captured)
@@ -1,9 +1,14 @@
import { DurableObject } from "cloudflare:workers"
import { randomUUID } from "node:crypto"
import { jwtVerify, createRemoteJWKSet } from "jose"
import { createAppAuth } from "@octokit/auth-app"
import { Octokit } from "@octokit/rest"
import { Resource } from "sst"

type Env = {
SYNC_SERVER: DurableObjectNamespace<SyncServer>
Bucket: R2Bucket
WEB_DOMAIN: string
}

export class SyncServer extends DurableObject<Env> {
@@ -19,9 +24,9 @@ export class SyncServer extends DurableObject<Env> {
this.ctx.acceptWebSocket(server)

const data = await this.ctx.storage.list()
for (const [key, content] of data.entries()) {
server.send(JSON.stringify({ key, content }))
}
Array.from(data.entries())
.filter(([key, _]) => key.startsWith("session/"))
.map(([key, content]) => server.send(JSON.stringify({ key, content })))

return new Response(null, {
status: 101,
@@ -35,12 +40,12 @@ export class SyncServer extends DurableObject<Env> {
ws.close(code, "Durable Object is closing WebSocket")
}

async publish(secret: string, key: string, content: any) {
if (secret !== (await this.getSecret())) throw new Error("Invalid secret")
async publish(key: string, content: any) {
const sessionID = await this.getSessionID()
if (
!key.startsWith(`session/info/${sessionID}`) &&
!key.startsWith(`session/message/${sessionID}/`)
!key.startsWith(`session/message/${sessionID}/`) &&
!key.startsWith(`session/part/${sessionID}/`)
)
return new Response("Error: Invalid key", { status: 400 })

@@ -70,12 +75,14 @@ export class SyncServer extends DurableObject<Env> {
}

public async getData() {
const data = await this.ctx.storage.list()
const messages = []
for (const [key, content] of data.entries()) {
messages.push({ key, content })
}
return messages
const data = (await this.ctx.storage.list()) as Map<string, any>
return Array.from(data.entries())
.filter(([key, _]) => key.startsWith("session/"))
.map(([key, content]) => ({ key, content }))
}

public async assertSecret(secret: string) {
if (secret !== (await this.getSecret())) throw new Error("Invalid secret")
}

private async getSecret() {
@@ -86,22 +93,26 @@ export class SyncServer extends DurableObject<Env> {
return this.ctx.storage.get<string>("sessionID")
}

async clear(secret: string) {
await this.assertSecret(secret)
async clear() {
const sessionID = await this.getSessionID()
const list = await this.env.Bucket.list({
prefix: `session/message/${sessionID}/`,
limit: 1000,
})
for (const item of list.objects) {
await this.env.Bucket.delete(item.key)
}
await this.env.Bucket.delete(`session/info/${sessionID}`)
await this.ctx.storage.deleteAll()
}

private async assertSecret(secret: string) {
if (secret !== (await this.getSecret())) throw new Error("Invalid secret")
}

static shortName(id: string) {
return id.substring(id.length - 8)
}
}

export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext) {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
const url = new URL(request.url)
const splits = url.pathname.split("/")
const method = splits[1]
@@ -122,7 +133,7 @@ export default {
return new Response(
JSON.stringify({
secret,
url: "https://dev.opencode.ai/s/" + short,
url: `https://${env.WEB_DOMAIN}/s/${short}`,
}),
{
headers: { "Content-Type": "application/json" },
@@ -136,7 +147,17 @@ export default {
const secret = body.secret
const id = env.SYNC_SERVER.idFromName(SyncServer.shortName(sessionID))
const stub = env.SYNC_SERVER.get(id)
await stub.clear(secret)
await stub.assertSecret(secret)
await stub.clear()
return new Response(JSON.stringify({}), {
headers: { "Content-Type": "application/json" },
})
}

if (request.method === "POST" && method === "share_delete_admin") {
const id = env.SYNC_SERVER.idFromName("oVF8Rsiv")
const stub = env.SYNC_SERVER.get(id)
await stub.clear()
return new Response(JSON.stringify({}), {
headers: { "Content-Type": "application/json" },
})
@@ -152,7 +173,8 @@ export default {
const name = SyncServer.shortName(body.sessionID)
const id = env.SYNC_SERVER.idFromName(name)
const stub = env.SYNC_SERVER.get(id)
await stub.publish(body.secret, body.key, body.content)
await stub.assertSecret(body.secret)
await stub.publish(body.key, body.content)
return new Response(JSON.stringify({}), {
headers: { "Content-Type": "application/json" },
})
@@ -167,8 +189,7 @@ export default {
}
const id = url.searchParams.get("id")
console.log("share_poll", id)
if (!id)
return new Response("Error: Share ID is required", { status: 400 })
if (!id) return new Response("Error: Share ID is required", { status: 400 })
const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id))
return stub.fetch(request)
}
@@ -176,8 +197,7 @@ export default {
if (request.method === "GET" && method === "share_data") {
const id = url.searchParams.get("id")
console.log("share_data", id)
if (!id)
return new Response("Error: Share ID is required", { status: 400 })
if (!id) return new Response("Error: Share ID is required", { status: 400 })
const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id))
const data = await stub.getData()

@@ -191,8 +211,13 @@ export default {
return
}
if (type === "message") {
const [, messageID] = splits
messages[messageID] = d.content
messages[d.content.id] = {
parts: [],
...d.content,
}
}
if (type === "part") {
messages[d.content.messageID].parts.push(d.content)
}
})

@@ -206,5 +231,95 @@ export default {
},
)
}

/**
* Used by the GitHub action to get GitHub installation access token given the OIDC token
*/
if (request.method === "POST" && method === "exchange_github_app_token") {
const EXPECTED_AUDIENCE = "opencode-github-action"
const GITHUB_ISSUER = "https://token.actions.githubusercontent.com"
const JWKS_URL = `${GITHUB_ISSUER}/.well-known/jwks`

// get Authorization header
const authHeader = request.headers.get("Authorization")
const token = authHeader?.replace(/^Bearer /, "")
if (!token)
return new Response(JSON.stringify({ error: "Authorization header is required" }), {
status: 401,
headers: { "Content-Type": "application/json" },
})

// verify token
const JWKS = createRemoteJWKSet(new URL(JWKS_URL))
let owner, repo
try {
const { payload } = await jwtVerify(token, JWKS, {
issuer: GITHUB_ISSUER,
audience: EXPECTED_AUDIENCE,
})
const sub = payload.sub // e.g. 'repo:my-org/my-repo:ref:refs/heads/main'
const parts = sub.split(":")[1].split("/")
owner = parts[0]
repo = parts[1]
} catch (err) {
console.error("Token verification failed:", err)
return new Response(JSON.stringify({ error: "Invalid or expired token" }), {
status: 403,
headers: { "Content-Type": "application/json" },
})
}

// Create app JWT token
const auth = createAppAuth({
appId: Resource.GITHUB_APP_ID.value,
privateKey: Resource.GITHUB_APP_PRIVATE_KEY.value,
})
const appAuth = await auth({ type: "app" })

// Lookup installation
const octokit = new Octokit({ auth: appAuth.token })
const { data: installation } = await octokit.apps.getRepoInstallation({ owner, repo })

// Get installation token
const installationAuth = await auth({ type: "installation", installationId: installation.id })

return new Response(JSON.stringify({ token: installationAuth.token }), {
headers: { "Content-Type": "application/json" },
})
}

/**
* Used by the opencode CLI to check if the GitHub app is installed
*/
if (request.method === "GET" && method === "get_github_app_installation") {
const owner = url.searchParams.get("owner")
const repo = url.searchParams.get("repo")

const auth = createAppAuth({
appId: Resource.GITHUB_APP_ID.value,
privateKey: Resource.GITHUB_APP_PRIVATE_KEY.value,
})
const appAuth = await auth({ type: "app" })

// Lookup installation
const octokit = new Octokit({ auth: appAuth.token })
let installation
try {
const ret = await octokit.apps.getRepoInstallation({ owner, repo })
installation = ret.data
} catch (err) {
if (err instanceof Error && err.message.includes("Not Found")) {
// not installed
} else {
throw err
}
}

return new Response(JSON.stringify({ installation }), {
headers: { "Content-Type": "application/json" },
})
}

return new Response("Not Found", { status: 404 })
},
}

packages/function/sst-env.d.ts (8 changed lines)
@@ -6,6 +6,14 @@
import "sst"
declare module "sst" {
export interface Resource {
"GITHUB_APP_ID": {
"type": "sst.sst.Secret"
"value": string
}
"GITHUB_APP_PRIVATE_KEY": {
"type": "sst.sst.Secret"
"value": string
}
"Web": {
"type": "sst.cloudflare.Astro"
"url": string

packages/opencode/.gitignore (1 changed line)
@@ -1,4 +1,3 @@
node_modules
research
dist
gen

(file name not captured)
@@ -1,4 +1,4 @@
# OpenCode Agent Guidelines
# opencode agent guidelines

## Build/Test Commands

@@ -16,9 +16,19 @@
- **Naming**: camelCase for variables/functions, PascalCase for classes/namespaces
- **Error handling**: Use Result patterns, avoid throwing exceptions in tools
- **File structure**: Namespace-based organization (e.g., `Tool.define()`, `Session.create()`)

## IMPORTANT

- Try to keep things in one function unless composable or reusable
- DO NOT do unnecessary destructuring of variables
- DO NOT use else statements unless necessary
- DO NOT use try catch if it can be avoided
- DO NOT use `else` statements unless necessary
- DO NOT use `try`/`catch` if it can be avoided
- AVOID `try`/`catch` where possible
- AVOID `else` statements
- AVOID using `any` type
- AVOID `let` statements
- PREFER single word variable names where possible
- Use as many bun apis as possible like Bun.file()

## Architecture

@@ -27,4 +37,4 @@
- **Validation**: All inputs validated with Zod schemas
- **Logging**: Use `Log.create({ service: "name" })` pattern
- **Storage**: Use `Storage` namespace for persistence

- **API Client**: Go TUI communicates with TypeScript server via stainless SDK. When adding/modifying server endpoints in `packages/opencode/src/server/server.ts`, ask the user to generate a new client SDK to proceed with client-side changes.

(file name not captured)
@@ -49,7 +49,7 @@ else
done

if [ -z "$resolved" ]; then
printf "It seems that your package manager failed to install the right version of the OpenCode CLI for your platform. You can try manually installing the \"%s\" package\n" "$name" >&2
printf "It seems that your package manager failed to install the right version of the opencode CLI for your platform. You can try manually installing the \"%s\" package\n" "$name" >&2
exit 1
fi
fi

packages/opencode/bin/opencode.cmd (new file, 56 lines)
@@ -0,0 +1,56 @@
@echo off
setlocal enabledelayedexpansion

if defined OPENCODE_BIN_PATH (
  set "resolved=%OPENCODE_BIN_PATH%"
  goto :execute
)

rem Get the directory of this script
set "script_dir=%~dp0"
set "script_dir=%script_dir:~0,-1%"

rem Detect platform and architecture
set "platform=win32"

rem Detect architecture
if "%PROCESSOR_ARCHITECTURE%"=="AMD64" (
  set "arch=x64"
) else if "%PROCESSOR_ARCHITECTURE%"=="ARM64" (
  set "arch=arm64"
) else if "%PROCESSOR_ARCHITECTURE%"=="x86" (
  set "arch=x86"
) else (
  set "arch=x64"
)

set "name=opencode-!platform!-!arch!"
set "binary=opencode.exe"

rem Search for the binary starting from script location
set "resolved="
set "current_dir=%script_dir%"

:search_loop
set "candidate=%current_dir%\node_modules\%name%\bin\%binary%"
if exist "%candidate%" (
  set "resolved=%candidate%"
  goto :execute
)

rem Move up one directory
for %%i in ("%current_dir%") do set "parent_dir=%%~dpi"
set "parent_dir=%parent_dir:~0,-1%"

rem Check if we've reached the root
if "%current_dir%"=="%parent_dir%" goto :not_found
set "current_dir=%parent_dir%"
goto :search_loop

:not_found
echo It seems that your package manager failed to install the right version of the opencode CLI for your platform. You can try manually installing the "%name%" package >&2
exit /b 1

:execute
rem Execute the binary with all arguments
"%resolved%" %*

(file name not captured)
@@ -1,152 +0,0 @@
{
"type": "object",
"properties": {
"$schema": {
"type": "string"
},
"provider": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"env": {
"type": "array",
"items": {
"type": "string"
}
},
"id": {
"type": "string"
},
"models": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"attachment": {
"type": "boolean"
},
"reasoning": {
"type": "boolean"
},
"temperature": {
"type": "boolean"
},
"cost": {
"type": "object",
"properties": {
"input": {
"type": "number"
},
"output": {
"type": "number"
},
"inputCached": {
"type": "number"
},
"outputCached": {
"type": "number"
}
},
"required": [
"input",
"output",
"inputCached",
"outputCached"
],
"additionalProperties": false
},
"limit": {
"type": "object",
"properties": {
"context": {
"type": "number"
},
"output": {
"type": "number"
}
},
"required": [
"context",
"output"
],
"additionalProperties": false
},
"id": {
"type": "string"
}
},
"additionalProperties": false
}
},
"options": {
"type": "object",
"additionalProperties": {}
}
},
"required": [
"models"
],
"additionalProperties": false
}
},
"mcp": {
"type": "object",
"additionalProperties": {
"anyOf": [
{
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "local"
},
"command": {
"type": "array",
"items": {
"type": "string"
}
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"required": [
"type",
"command"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "remote"
},
"url": {
"type": "string"
}
},
"required": [
"type",
"url"
],
"additionalProperties": false
}
]
}
}
},
"additionalProperties": false,
"$schema": "http://json-schema.org/draft-07/schema#"
}

(file name not captured)
@@ -1,6 +1,6 @@
{
"$schema": "https://json.schemastore.org/package.json",
"version": "0.0.0",
"version": "0.0.5",
"name": "opencode",
"type": "module",
"private": true,
@@ -8,42 +8,42 @@
"typecheck": "tsc --noEmit",
"dev": "bun run ./src/index.ts"
},
"bin": {
"opencode": "./bin/opencode"
},
"exports": {
"./*": [
"./src/*.ts",
"./src/*/index.ts"
]
"./*": "./src/*.ts"
},
"devDependencies": {
"@ai-sdk/amazon-bedrock": "2.2.10",
"@ai-sdk/anthropic": "1.2.12",
"@standard-schema/spec": "1.0.0",
"@tsconfig/bun": "1.0.7",
"@types/bun": "latest",
"@types/turndown": "5.0.5",
"@types/yargs": "17.0.33",
"typescript": "catalog:",
"vscode-languageserver-types": "3.17.5",
"zod-to-json-schema": "3.24.5"
},
"dependencies": {
"@clack/prompts": "0.11.0",
"@flystorage/file-storage": "1.1.0",
"@flystorage/local-fs": "1.1.0",
"@hono/zod-validator": "0.5.0",
"@hono/zod-validator": "0.4.2",
"@modelcontextprotocol/sdk": "1.15.1",
"@openauthjs/openauth": "0.4.3",
"@standard-schema/spec": "1.0.0",
"ai": "catalog:",
"decimal.js": "10.5.0",
"diff": "8.0.2",
"env-paths": "3.0.0",
"hono": "4.7.10",
"hono-openapi": "0.4.8",
"isomorphic-git": "1.32.1",
"open": "10.1.2",
"remeda": "2.22.3",
"ts-lsp-client": "1.0.3",
"turndown": "7.2.0",
"vscode-jsonrpc": "8.2.1",
"vscode-languageclient": "8",
"xdg-basedir": "5.1.0",
"yargs": "18.0.0",
"zod": "catalog:",
"zod-openapi": "4.2.4"
"zod-openapi": "4.1.0"
}
}

@@ -80,9 +80,9 @@ function main() {

    // Create symlink to the actual binary
    fs.symlinkSync(binaryPath, binScript)
    console.log(`OpenCode binary symlinked: ${binScript} -> ${binaryPath}`)
    console.log(`opencode binary symlinked: ${binScript} -> ${binaryPath}`)
  } catch (error) {
    console.error("Failed to create OpenCode binary symlink:", error.message)
    console.error("Failed to create opencode binary symlink:", error.message)
    process.exit(1)
  }
}
@@ -9,7 +9,7 @@ const snapshot = process.argv.includes("--snapshot")
|
||||
|
||||
const version = snapshot
|
||||
? `0.0.0-${new Date().toISOString().slice(0, 16).replace(/[-:T]/g, "")}`
|
||||
: await $`git describe --tags --exact-match HEAD`
|
||||
: await $`git describe --tags --abbrev=0`
|
||||
.text()
|
||||
.then((x) => x.substring(1).trim())
|
||||
.catch(() => {
|
||||
@@ -40,7 +40,7 @@ for (const [os, arch] of targets) {
|
||||
console.log(`building ${os}-${arch}`)
|
||||
const name = `${pkg.name}-${os}-${arch}`
|
||||
await $`mkdir -p dist/${name}/bin`
|
||||
await $`GOOS=${os} GOARCH=${GOARCH[arch]} go build -ldflags="-s -w -X main.Version=${version}" -o ../opencode/dist/${name}/bin/tui ../tui/cmd/opencode/main.go`.cwd(
|
||||
await $`CGO_ENABLED=0 GOOS=${os} GOARCH=${GOARCH[arch]} go build -ldflags="-s -w -X main.Version=${version}" -o ../opencode/dist/${name}/bin/tui ../tui/cmd/opencode/main.go`.cwd(
|
||||
"../tui",
|
||||
)
|
||||
await $`bun build --define OPENCODE_VERSION="'${version}'" --compile --minify --target=bun-${os}-${arch} --outfile=dist/${name}/bin/opencode ./src/index.ts ./dist/${name}/bin/tui`
|
||||
@@ -57,14 +57,13 @@ for (const [os, arch] of targets) {
|
||||
2,
|
||||
),
|
||||
)
|
||||
if (!dry)
|
||||
await $`cd dist/${name} && bun publish --access public --tag ${npmTag}`
|
||||
if (!dry) await $`cd dist/${name} && bun publish --access public --tag ${npmTag}`
|
||||
optionalDependencies[name] = version
|
||||
}
|
||||
|
||||
await $`mkdir -p ./dist/${pkg.name}`
|
||||
await $`cp -r ./bin ./dist/${pkg.name}/bin`
|
||||
await $`cp ./script/postinstall.js ./dist/${pkg.name}/postinstall.js`
|
||||
await $`cp ./script/postinstall.mjs ./dist/${pkg.name}/postinstall.mjs`
|
||||
await Bun.file(`./dist/${pkg.name}/package.json`).write(
|
||||
JSON.stringify(
|
||||
{
|
||||
@@ -73,7 +72,7 @@ await Bun.file(`./dist/${pkg.name}/package.json`).write(
|
||||
[pkg.name]: `./bin/${pkg.name}`,
|
||||
},
|
||||
scripts: {
|
||||
postinstall: "node ./postinstall.js",
|
||||
postinstall: "node ./postinstall.mjs",
|
||||
},
|
||||
version,
|
||||
optionalDependencies,
|
||||
@@ -82,8 +81,7 @@ await Bun.file(`./dist/${pkg.name}/package.json`).write(
|
||||
2,
|
||||
),
|
||||
)
|
||||
if (!dry)
|
||||
await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}`
|
||||
if (!dry) await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}`
|
||||
|
||||
if (!snapshot) {
|
||||
// Github Release
|
||||
@@ -91,58 +89,44 @@ if (!snapshot) {
|
||||
await $`cd dist/${key}/bin && zip -r ../../${key}.zip *`
|
||||
}
|
||||
|
||||
const previous = await fetch(
|
||||
"https://api.github.com/repos/sst/opencode/releases/latest",
|
||||
)
|
||||
const previous = await fetch("https://api.github.com/repos/sst/opencode/releases/latest")
|
||||
.then((res) => res.json())
|
||||
.then((data) => data.tag_name)
|
||||
|
||||
const commits = await fetch(
|
||||
`https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`,
|
||||
)
|
||||
const commits = await fetch(`https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`)
|
||||
.then((res) => res.json())
|
||||
.then((data) => data.commits || [])
|
||||
|
||||
const notes = commits
|
||||
.map((commit: any) => `- ${commit.commit.message.split("\n")[0]}`)
|
||||
.filter((x: string) => {
|
||||
const lower = x.toLowerCase()
|
||||
return (
|
||||
!lower.includes("chore:") &&
|
||||
!lower.includes("ci:") &&
|
||||
!lower.includes("docs:") &&
|
||||
!lower.includes("doc:")
|
||||
)
|
||||
})
|
||||
.join("\n")
|
||||
const notes =
|
||||
commits
|
||||
.map((commit: any) => `- ${commit.commit.message.split("\n")[0]}`)
|
||||
.filter((x: string) => {
|
||||
const lower = x.toLowerCase()
|
||||
return (
|
||||
!lower.includes("ignore:") &&
|
||||
!lower.includes("chore:") &&
|
||||
!lower.includes("ci:") &&
|
||||
!lower.includes("wip:") &&
|
||||
!lower.includes("docs:") &&
|
||||
!lower.includes("doc:")
|
||||
)
|
||||
})
|
||||
.join("\n") || "No notable changes"
|
||||
|
||||
if (!dry)
|
||||
await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip`
|
||||
if (!dry) await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip`
|
||||
|
||||
// Calculate SHA values
|
||||
const arm64Sha =
|
||||
await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1`
|
||||
.text()
|
||||
.then((x) => x.trim())
|
||||
const x64Sha =
|
||||
await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1`
|
||||
.text()
|
||||
.then((x) => x.trim())
|
||||
const macX64Sha =
|
||||
await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1`
|
||||
.text()
|
||||
.then((x) => x.trim())
|
||||
const macArm64Sha =
|
||||
await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`
|
||||
.text()
|
||||
.then((x) => x.trim())
|
||||
const arm64Sha = await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
|
||||
const x64Sha = await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
|
||||
const macX64Sha = await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
|
||||
const macArm64Sha = await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
|
||||
|
||||
// AUR package
|
||||
const pkgbuild = [
|
||||
"# Maintainer: dax",
|
||||
"# Maintainer: adam",
|
||||
"",
|
||||
"pkgname='opencode-bin'",
|
||||
"pkgname='${pkg}'",
|
||||
`pkgver=${version.split("-")[0]}`,
|
||||
"options=('!debug' '!strip')",
|
||||
"pkgrel=1",
|
||||
@@ -166,14 +150,16 @@ if (!snapshot) {
|
||||
"",
|
||||
].join("\n")
|
||||
|
||||
await $`rm -rf ./dist/aur-opencode-bin`
|
||||
|
||||
await $`git clone ssh://aur@aur.archlinux.org/opencode-bin.git ./dist/aur-opencode-bin`
|
||||
await Bun.file("./dist/aur-opencode-bin/PKGBUILD").write(pkgbuild)
|
||||
await $`cd ./dist/aur-opencode-bin && makepkg --printsrcinfo > .SRCINFO`
|
||||
await $`cd ./dist/aur-opencode-bin && git add PKGBUILD .SRCINFO`
|
||||
await $`cd ./dist/aur-opencode-bin && git commit -m "Update to v${version}"`
|
||||
if (!dry) await $`cd ./dist/aur-opencode-bin && git push`
|
||||
for (const pkg of ["opencode", "opencode-bin"]) {
|
||||
await $`rm -rf ./dist/aur-${pkg}`
|
||||
await $`git clone ssh://aur@aur.archlinux.org/${pkg}.git ./dist/aur-${pkg}`
|
||||
await $`cd ./dist/aur-${pkg} && git checkout master`
|
||||
await Bun.file(`./dist/aur-${pkg}/PKGBUILD`).write(pkgbuild.replace("${pkg}", pkg))
|
||||
await $`cd ./dist/aur-${pkg} && makepkg --printsrcinfo > .SRCINFO`
|
||||
await $`cd ./dist/aur-${pkg} && git add PKGBUILD .SRCINFO`
|
||||
await $`cd ./dist/aur-${pkg} && git commit -m "Update to v${version}"`
|
||||
if (!dry) await $`cd ./dist/aur-${pkg} && git push`
|
||||
}
|
||||
|
||||
// Homebrew formula
|
||||
const homebrewFormula = [
|
||||
|
||||
@@ -4,5 +4,32 @@ import "zod-openapi/extend"
import { Config } from "../src/config/config"
import { zodToJsonSchema } from "zod-to-json-schema"

const result = zodToJsonSchema(Config.Info)
await Bun.write("config.schema.json", JSON.stringify(result, null, 2))
const file = process.argv[2]

const result = zodToJsonSchema(Config.Info, {
  /**
   * We'll use the `default` values of the field as the only value in `examples`.
   * This will ensure no docs are needed to be read, as the configuration is
   * self-documenting.
   *
   * See https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.9.5
   */
  postProcess(jsonSchema) {
    const schema = jsonSchema as typeof jsonSchema & {
      examples?: unknown[]
    }
    if (schema && typeof schema === "object" && "type" in schema && schema.type === "string" && schema?.default) {
      if (!schema.examples) {
        schema.examples = [schema.default]
      }

      schema.description = [schema.description || "", `default: \`${schema.default}\``]
        .filter(Boolean)
        .join("\n\n")
        .trim()
    }

    return jsonSchema
  },
})
await Bun.write(file, JSON.stringify(result, null, 2))

@@ -1,3 +1,4 @@
|
||||
import "zod-openapi/extend"
|
||||
import { Log } from "../util/log"
|
||||
import { Context } from "../util/context"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
@@ -11,47 +12,50 @@ export namespace App {
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
user: z.string(),
|
||||
hostname: z.string(),
|
||||
git: z.boolean(),
|
||||
path: z.object({
|
||||
config: z.string(),
|
||||
data: z.string(),
|
||||
root: z.string(),
|
||||
cwd: z.string(),
|
||||
state: z.string(),
|
||||
}),
|
||||
time: z.object({
|
||||
initialized: z.number().optional(),
|
||||
}),
|
||||
})
|
||||
.openapi({
|
||||
ref: "App.Info",
|
||||
ref: "App",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
const ctx = Context.create<Awaited<ReturnType<typeof create>>>("app")
|
||||
const ctx = Context.create<{
|
||||
info: Info
|
||||
services: Map<any, { state: any; shutdown?: (input: any) => Promise<void> }>
|
||||
}>("app")
|
||||
|
||||
export const use = ctx.use
|
||||
|
||||
const APP_JSON = "app.json"
|
||||
|
||||
async function create(input: { cwd: string; version: string }) {
|
||||
export type Input = {
|
||||
cwd: string
|
||||
}
|
||||
|
||||
export const provideExisting = ctx.provide
|
||||
export async function provide<T>(input: Input, cb: (app: App.Info) => Promise<T>) {
|
||||
log.info("creating", {
|
||||
cwd: input.cwd,
|
||||
})
|
||||
const git = await Filesystem.findUp(".git", input.cwd).then(([x]) =>
|
||||
x ? path.dirname(x) : undefined,
|
||||
)
|
||||
const git = await Filesystem.findUp(".git", input.cwd).then(([x]) => (x ? path.dirname(x) : undefined))
|
||||
log.info("git", { git })
|
||||
|
||||
const data = path.join(
|
||||
Global.Path.data,
|
||||
"project",
|
||||
git ? git.split(path.sep).join("-") : "global",
|
||||
)
|
||||
const data = path.join(Global.Path.data, "project", git ? directory(git) : "global")
|
||||
const stateFile = Bun.file(path.join(data, APP_JSON))
|
||||
const state = (await stateFile.json().catch(() => ({}))) as {
|
||||
initialized: number
|
||||
version: string
|
||||
}
|
||||
state.version = input.version
|
||||
await stateFile.write(JSON.stringify(state))
|
||||
|
||||
const services = new Map<
|
||||
@@ -62,26 +66,39 @@ export namespace App {
|
||||
}
|
||||
>()
|
||||
|
||||
const root = git ?? input.cwd
|
||||
|
||||
const info: Info = {
|
||||
user: os.userInfo().username,
|
||||
hostname: os.hostname(),
|
||||
time: {
|
||||
initialized: state.initialized,
|
||||
},
|
||||
git: git !== undefined,
|
||||
path: {
|
||||
config: Global.Path.config,
|
||||
state: Global.Path.state,
|
||||
data,
|
||||
root: git ?? input.cwd,
|
||||
root,
|
||||
cwd: input.cwd,
|
||||
},
|
||||
}
|
||||
const result = {
|
||||
version: input.version,
|
||||
const app = {
|
||||
services,
|
||||
info,
|
||||
}
|
||||
|
||||
return result
|
||||
return ctx.provide(app, async () => {
|
||||
try {
|
||||
const result = await cb(app.info)
|
||||
return result
|
||||
} finally {
|
||||
for (const [key, entry] of app.services.entries()) {
|
||||
if (!entry.shutdown) continue
|
||||
log.info("shutdown", { name: key })
|
||||
await entry.shutdown?.(await entry.state)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export function state<State>(
|
||||
@@ -107,31 +124,22 @@ export namespace App {
|
||||
return ctx.use().info
|
||||
}
|
||||
|
||||
export async function provide<T>(
|
||||
input: { cwd: string; version: string },
|
||||
cb: (app: Info) => Promise<T>,
|
||||
) {
|
||||
const app = await create(input)
|
||||
return ctx.provide(app, async () => {
|
||||
const result = await cb(app.info)
|
||||
for (const [key, entry] of app.services.entries()) {
|
||||
if (!entry.shutdown) continue
|
||||
log.info("shutdown", { name: key })
|
||||
await entry.shutdown?.(await entry.state)
|
||||
}
|
||||
return result
|
||||
})
|
||||
}
|
||||
|
||||
export async function initialize() {
|
||||
const { info, version } = ctx.use()
|
||||
const { info } = ctx.use()
|
||||
info.time.initialized = Date.now()
|
||||
await Bun.write(
|
||||
path.join(info.path.data, APP_JSON),
|
||||
JSON.stringify({
|
||||
version,
|
||||
initialized: Date.now(),
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
function directory(input: string): string {
|
||||
return input
|
||||
.split(path.sep)
|
||||
.filter(Boolean)
|
||||
.join("-")
|
||||
.replace(/[^A-Za-z0-9_]/g, "-")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,24 +1,21 @@
|
||||
import { generatePKCE } from "@openauthjs/openauth/pkce"
|
||||
import fs from "fs/promises"
|
||||
import { Auth } from "./index"
|
||||
|
||||
export namespace AuthAnthropic {
|
||||
const CLIENT_ID = "9d1c250a-e61b-44d9-88ed-5944d1962f5e"
|
||||
|
||||
export async function authorize() {
|
||||
export async function authorize(mode: "max" | "console") {
|
||||
const pkce = await generatePKCE()
|
||||
const url = new URL("https://claude.ai/oauth/authorize", import.meta.url)
|
||||
|
||||
const url = new URL(
|
||||
`https://${mode === "console" ? "console.anthropic.com" : "claude.ai"}/oauth/authorize`,
|
||||
import.meta.url,
|
||||
)
|
||||
url.searchParams.set("code", "true")
|
||||
url.searchParams.set("client_id", "9d1c250a-e61b-44d9-88ed-5944d1962f5e")
|
||||
url.searchParams.set("client_id", CLIENT_ID)
|
||||
url.searchParams.set("response_type", "code")
|
||||
url.searchParams.set(
|
||||
"redirect_uri",
|
||||
"https://console.anthropic.com/oauth/code/callback",
|
||||
)
|
||||
url.searchParams.set(
|
||||
"scope",
|
||||
"org:create_api_key user:profile user:inference",
|
||||
)
|
||||
url.searchParams.set("redirect_uri", "https://console.anthropic.com/oauth/code/callback")
|
||||
url.searchParams.set("scope", "org:create_api_key user:profile user:inference")
|
||||
url.searchParams.set("code_challenge", pkce.challenge)
|
||||
url.searchParams.set("code_challenge_method", "S256")
|
||||
url.searchParams.set("state", pkce.verifier)
|
||||
@@ -39,42 +36,41 @@ export namespace AuthAnthropic {
|
||||
code: splits[0],
|
||||
state: splits[1],
|
||||
grant_type: "authorization_code",
|
||||
client_id: "9d1c250a-e61b-44d9-88ed-5944d1962f5e",
|
||||
client_id: CLIENT_ID,
|
||||
redirect_uri: "https://console.anthropic.com/oauth/code/callback",
|
||||
code_verifier: verifier,
|
||||
}),
|
||||
})
|
||||
if (!result.ok) throw new ExchangeFailed()
|
||||
const json = await result.json()
|
||||
await Auth.set("anthropic", {
|
||||
type: "oauth",
|
||||
return {
|
||||
refresh: json.refresh_token as string,
|
||||
access: json.access_token as string,
|
||||
expires: Date.now() + json.expires_in * 1000,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export async function access() {
|
||||
const info = await Auth.get("anthropic")
|
||||
if (!info || info.type !== "oauth") return
|
||||
const response = await fetch(
|
||||
"https://console.anthropic.com/v1/oauth/token",
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
grant_type: "refresh_token",
|
||||
refresh_token: info.refresh,
|
||||
client_id: CLIENT_ID,
|
||||
}),
|
||||
if (info.access && info.expires > Date.now()) return info.access
|
||||
const response = await fetch("https://console.anthropic.com/v1/oauth/token", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
)
|
||||
body: JSON.stringify({
|
||||
grant_type: "refresh_token",
|
||||
refresh_token: info.refresh,
|
||||
client_id: CLIENT_ID,
|
||||
}),
|
||||
})
|
||||
if (!response.ok) return
|
||||
const json = await response.json()
|
||||
await Auth.set("anthropic", {
|
||||
type: "oauth",
|
||||
refresh: json.refresh_token as string,
|
||||
access: json.access_token as string,
|
||||
expires: Date.now() + json.expires_in * 1000,
|
||||
})
|
||||
return json.access_token as string
|
||||
|
||||
packages/opencode/src/auth/copilot.ts (Normal file, 19 lines)
@@ -0,0 +1,19 @@
import { Global } from "../global"
import { lazy } from "../util/lazy"
import path from "path"

export const AuthCopilot = lazy(async () => {
  const file = Bun.file(path.join(Global.Path.state, "plugin", "copilot.ts"))
  const exists = await file.exists()
  const response = fetch("https://raw.githubusercontent.com/sst/opencode-github-copilot/refs/heads/main/auth.ts")
    .then((x) => Bun.write(file, x))
    .catch(() => {})

  if (!exists) {
    const worked = await response
    if (!worked) return
  }
  const result = await import(file.name!).catch(() => {})
  if (!result) return
  return result.AuthCopilot
})
packages/opencode/src/auth/github-copilot.ts (Normal file, 147 lines)
@@ -0,0 +1,147 @@
|
||||
import { z } from "zod"
|
||||
import { Auth } from "./index"
|
||||
import { NamedError } from "../util/error"
|
||||
|
||||
export namespace AuthGithubCopilot {
|
||||
const CLIENT_ID = "Iv1.b507a08c87ecfe98"
|
||||
const DEVICE_CODE_URL = "https://github.com/login/device/code"
|
||||
const ACCESS_TOKEN_URL = "https://github.com/login/oauth/access_token"
|
||||
const COPILOT_API_KEY_URL = "https://api.github.com/copilot_internal/v2/token"
|
||||
|
||||
interface DeviceCodeResponse {
|
||||
device_code: string
|
||||
user_code: string
|
||||
verification_uri: string
|
||||
expires_in: number
|
||||
interval: number
|
||||
}
|
||||
|
||||
interface AccessTokenResponse {
|
||||
access_token?: string
|
||||
error?: string
|
||||
error_description?: string
|
||||
}
|
||||
|
||||
interface CopilotTokenResponse {
|
||||
token: string
|
||||
expires_at: number
|
||||
refresh_in: number
|
||||
endpoints: {
|
||||
api: string
|
||||
}
|
||||
}
|
||||
|
||||
export async function authorize() {
|
||||
const deviceResponse = await fetch(DEVICE_CODE_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": "GitHubCopilotChat/0.26.7",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
client_id: CLIENT_ID,
|
||||
scope: "read:user",
|
||||
}),
|
||||
})
|
||||
const deviceData: DeviceCodeResponse = await deviceResponse.json()
|
||||
return {
|
||||
device: deviceData.device_code,
|
||||
user: deviceData.user_code,
|
||||
verification: deviceData.verification_uri,
|
||||
interval: deviceData.interval || 5,
|
||||
expiry: deviceData.expires_in,
|
||||
}
|
||||
}
|
||||
|
||||
export async function poll(device_code: string) {
|
||||
const response = await fetch(ACCESS_TOKEN_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": "GitHubCopilotChat/0.26.7",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
client_id: CLIENT_ID,
|
||||
device_code,
|
||||
grant_type: "urn:ietf:params:oauth:grant-type:device_code",
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) return "failed"
|
||||
|
||||
const data: AccessTokenResponse = await response.json()
|
||||
|
||||
if (data.access_token) {
|
||||
// Store the GitHub OAuth token
|
||||
await Auth.set("github-copilot", {
|
||||
type: "oauth",
|
||||
refresh: data.access_token,
|
||||
access: "",
|
||||
expires: 0,
|
||||
})
|
||||
return "complete"
|
||||
}
|
||||
|
||||
if (data.error === "authorization_pending") return "pending"
|
||||
|
||||
if (data.error) return "failed"
|
||||
|
||||
return "pending"
|
||||
}
|
||||
|
||||
export async function access() {
|
||||
const info = await Auth.get("github-copilot")
|
||||
if (!info || info.type !== "oauth") return
|
||||
if (info.access && info.expires > Date.now()) return info.access
|
||||
|
||||
// Get new Copilot API token
|
||||
const response = await fetch(COPILOT_API_KEY_URL, {
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
Authorization: `Bearer ${info.refresh}`,
|
||||
"User-Agent": "GitHubCopilotChat/0.26.7",
|
||||
"Editor-Version": "vscode/1.99.3",
|
||||
"Editor-Plugin-Version": "copilot-chat/0.26.7",
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) return
|
||||
|
||||
const tokenData: CopilotTokenResponse = await response.json()
|
||||
|
||||
// Store the Copilot API token
|
||||
await Auth.set("github-copilot", {
|
||||
type: "oauth",
|
||||
refresh: info.refresh,
|
||||
access: tokenData.token,
|
||||
expires: tokenData.expires_at * 1000,
|
||||
})
|
||||
|
||||
return tokenData.token
|
||||
}
|
||||
|
||||
export const DeviceCodeError = NamedError.create("DeviceCodeError", z.object({}))
|
||||
|
||||
export const TokenExchangeError = NamedError.create(
|
||||
"TokenExchangeError",
|
||||
z.object({
|
||||
message: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const AuthenticationError = NamedError.create(
|
||||
"AuthenticationError",
|
||||
z.object({
|
||||
message: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const CopilotTokenError = NamedError.create(
|
||||
"CopilotTokenError",
|
||||
z.object({
|
||||
message: z.string(),
|
||||
}),
|
||||
)
|
||||
}
|
||||
@@ -7,6 +7,7 @@ export namespace Auth {
  export const Oauth = z.object({
    type: z.literal("oauth"),
    refresh: z.string(),
    access: z.string(),
    expires: z.number(),
  })

@@ -3,17 +3,15 @@ import { Global } from "../global"
|
||||
import { Log } from "../util/log"
|
||||
import path from "path"
|
||||
import { NamedError } from "../util/error"
|
||||
import { readableStreamToText } from "bun"
|
||||
|
||||
export namespace BunProc {
|
||||
const log = Log.create({ service: "bun" })
|
||||
|
||||
export async function run(
|
||||
cmd: string[],
|
||||
options?: Bun.SpawnOptions.OptionsObject<any, any, any>,
|
||||
) {
|
||||
export async function run(cmd: string[], options?: Bun.SpawnOptions.OptionsObject<any, any, any>) {
|
||||
log.info("running", {
|
||||
cmd: [which(), ...cmd],
|
||||
options,
|
||||
...options,
|
||||
})
|
||||
const result = Bun.spawn([which(), ...cmd], {
|
||||
...options,
|
||||
@@ -26,6 +24,21 @@ export namespace BunProc {
|
||||
},
|
||||
})
|
||||
const code = await result.exited
|
||||
const stdout = result.stdout
|
||||
? typeof result.stdout === "number"
|
||||
? result.stdout
|
||||
: await readableStreamToText(result.stdout)
|
||||
: undefined
|
||||
const stderr = result.stderr
|
||||
? typeof result.stderr === "number"
|
||||
? result.stderr
|
||||
: await readableStreamToText(result.stderr)
|
||||
: undefined
|
||||
log.info("done", {
|
||||
code,
|
||||
stdout,
|
||||
stderr,
|
||||
})
|
||||
if (code !== 0) {
|
||||
throw new Error(`Command failed with exit code ${result.exitCode}`)
|
||||
}
|
||||
@@ -43,25 +56,37 @@ export namespace BunProc {
|
||||
version: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export async function install(pkg: string, version = "latest") {
|
||||
const mod = path.join(Global.Path.cache, "node_modules", pkg)
|
||||
const pkgjson = Bun.file(path.join(Global.Path.cache, "package.json"))
|
||||
const parsed = await pkgjson.json().catch(() => ({
|
||||
dependencies: {},
|
||||
}))
|
||||
const parsed = await pkgjson.json().catch(async () => {
|
||||
const result = { dependencies: {} }
|
||||
await Bun.write(pkgjson.name!, JSON.stringify(result, null, 2))
|
||||
return result
|
||||
})
|
||||
if (parsed.dependencies[pkg] === version) return mod
|
||||
parsed.dependencies[pkg] = version
|
||||
await Bun.write(pkgjson, JSON.stringify(parsed, null, 2))
|
||||
await BunProc.run(["install"], {
|
||||
|
||||
// Build command arguments
|
||||
const args = ["add", "--force", "--exact", "--cwd", Global.Path.cache, pkg + "@" + version]
|
||||
|
||||
// Let Bun handle registry resolution:
|
||||
// - If .npmrc files exist, Bun will use them automatically
|
||||
// - If no .npmrc files exist, Bun will default to https://registry.npmjs.org
|
||||
log.info("installing package using Bun's default registry resolution", { pkg, version })
|
||||
|
||||
await BunProc.run(args, {
|
||||
cwd: Global.Path.cache,
|
||||
}).catch((e) => {
|
||||
new InstallFailedError(
|
||||
throw new InstallFailedError(
|
||||
{ pkg, version },
|
||||
{
|
||||
cause: e,
|
||||
},
|
||||
)
|
||||
})
|
||||
parsed.dependencies[pkg] = version
|
||||
await Bun.write(pkgjson.name!, JSON.stringify(parsed, null, 2))
|
||||
return mod
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,10 +18,7 @@ export namespace Bus {
|
||||
|
||||
const registry = new Map<string, EventDefinition>()
|
||||
|
||||
export function event<Type extends string, Properties extends ZodType>(
|
||||
type: Type,
|
||||
properties: Properties,
|
||||
) {
|
||||
export function event<Type extends string, Properties extends ZodType>(type: Type, properties: Properties) {
|
||||
const result = {
|
||||
type,
|
||||
properties,
|
||||
@@ -49,7 +46,7 @@ export namespace Bus {
|
||||
)
|
||||
}
|
||||
|
||||
export function publish<Definition extends EventDefinition>(
|
||||
export async function publish<Definition extends EventDefinition>(
|
||||
def: Definition,
|
||||
properties: z.output<Definition["properties"]>,
|
||||
) {
|
||||
@@ -60,20 +57,19 @@ export namespace Bus {
|
||||
log.info("publishing", {
|
||||
type: def.type,
|
||||
})
|
||||
const pending = []
|
||||
for (const key of [def.type, "*"]) {
|
||||
const match = state().subscriptions.get(key)
|
||||
for (const sub of match ?? []) {
|
||||
sub(payload)
|
||||
pending.push(sub(payload))
|
||||
}
|
||||
}
|
||||
return Promise.all(pending)
|
||||
}
|
||||
|
||||
export function subscribe<Definition extends EventDefinition>(
|
||||
def: Definition,
|
||||
callback: (event: {
|
||||
type: Definition["type"]
|
||||
properties: z.infer<Definition["properties"]>
|
||||
}) => void,
|
||||
callback: (event: { type: Definition["type"]; properties: z.infer<Definition["properties"]> }) => void,
|
||||
) {
|
||||
return raw(def.type, callback)
|
||||
}
|
||||
|
||||
packages/opencode/src/cli/bootstrap.ts (Normal file, 16 lines)
@@ -0,0 +1,16 @@
import { App } from "../app/app"
import { ConfigHooks } from "../config/hooks"
import { Format } from "../format"
import { LSP } from "../lsp"
import { Share } from "../share/share"

export async function bootstrap<T>(input: App.Input, cb: (app: App.Info) => Promise<T>) {
  return App.provide(input, async (app) => {
    Share.init()
    Format.init()
    ConfigHooks.init()
    LSP.init()

    return cb(app)
  })
}
@@ -1,22 +1,22 @@
|
||||
import { AuthAnthropic } from "../../auth/anthropic"
|
||||
import { AuthCopilot } from "../../auth/copilot"
|
||||
import { Auth } from "../../auth"
|
||||
import { cmd } from "./cmd"
|
||||
import * as prompts from "@clack/prompts"
|
||||
import open from "open"
|
||||
import { UI } from "../ui"
|
||||
import { ModelsDev } from "../../provider/models"
|
||||
import { map, pipe, sort, sortBy, values } from "remeda"
|
||||
import { map, pipe, sortBy, values } from "remeda"
|
||||
import path from "path"
|
||||
import os from "os"
|
||||
import { Global } from "../../global"
|
||||
|
||||
export const AuthCommand = cmd({
|
||||
command: "auth",
|
||||
describe: "manage credentials",
|
||||
builder: (yargs) =>
|
||||
yargs
|
||||
.command(AuthLoginCommand)
|
||||
.command(AuthLogoutCommand)
|
||||
.command(AuthListCommand)
|
||||
.demandCommand(),
|
||||
async handler(args) {},
|
||||
yargs.command(AuthLoginCommand).command(AuthLogoutCommand).command(AuthListCommand).demandCommand(),
|
||||
async handler() {},
|
||||
})
|
||||
|
||||
export const AuthListCommand = cmd({
|
||||
@@ -25,30 +25,61 @@ export const AuthListCommand = cmd({
|
||||
describe: "list providers",
|
||||
async handler() {
|
||||
UI.empty()
|
||||
prompts.intro("Credentials")
|
||||
const authPath = path.join(Global.Path.data, "auth.json")
|
||||
const homedir = os.homedir()
|
||||
const displayPath = authPath.startsWith(homedir) ? authPath.replace(homedir, "~") : authPath
|
||||
prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`)
|
||||
const results = await Auth.all().then((x) => Object.entries(x))
|
||||
const database = await ModelsDev.get()
|
||||
|
||||
for (const [providerID, result] of results) {
|
||||
const name = database[providerID]?.name || providerID
|
||||
prompts.log.info(`${name} ${UI.Style.TEXT_DIM}(${result.type})`)
|
||||
prompts.log.info(`${name} ${UI.Style.TEXT_DIM}${result.type}`)
|
||||
}
|
||||
|
||||
prompts.outro(`${results.length} credentials`)
|
||||
|
||||
// Environment variables section
|
||||
const activeEnvVars: Array<{ provider: string; envVar: string }> = []
|
||||
|
||||
for (const [providerID, provider] of Object.entries(database)) {
|
||||
for (const envVar of provider.env) {
|
||||
if (process.env[envVar]) {
|
||||
activeEnvVars.push({
|
||||
provider: provider.name || providerID,
|
||||
envVar,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (activeEnvVars.length > 0) {
|
||||
UI.empty()
|
||||
prompts.intro("Environment")
|
||||
|
||||
for (const { provider, envVar } of activeEnvVars) {
|
||||
prompts.log.info(`${provider} ${UI.Style.TEXT_DIM}${envVar}`)
|
||||
}
|
||||
|
||||
prompts.outro(`${activeEnvVars.length} environment variables`)
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
export const AuthLoginCommand = cmd({
|
||||
command: "login",
|
||||
describe: "login to a provider",
|
||||
describe: "log in to a provider",
|
||||
async handler() {
|
||||
UI.empty()
|
||||
prompts.intro("Add credential")
|
||||
const providers = await ModelsDev.get()
|
||||
const priority: Record<string, number> = {
|
||||
anthropic: 0,
|
||||
openai: 1,
|
||||
google: 2,
|
||||
"github-copilot": 1,
|
||||
openai: 2,
|
||||
google: 3,
|
||||
openrouter: 4,
|
||||
vercel: 5,
|
||||
}
|
||||
let provider = await prompts.select({
|
||||
message: "Select provider",
|
||||
@@ -78,14 +109,20 @@ export const AuthLoginCommand = cmd({
|
||||
|
||||
if (provider === "other") {
|
||||
provider = await prompts.text({
|
||||
message: "Enter provider - must match @ai-sdk/<provider>",
|
||||
message: "Enter provider id",
|
||||
validate: (x) => (x.match(/^[0-9a-z-]+$/) ? undefined : "a-z, 0-9 and hyphens only"),
|
||||
})
|
||||
if (prompts.isCancel(provider)) throw new UI.CancelledError()
|
||||
provider = provider.replace(/^@ai-sdk\//, "")
|
||||
if (prompts.isCancel(provider)) throw new UI.CancelledError()
|
||||
prompts.log.warn(
|
||||
`This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`,
|
||||
)
|
||||
}
|
||||
|
||||
if (provider === "amazon-bedrock") {
|
||||
prompts.log.info(
|
||||
"Amazon bedrock can be configured with standard AWS environment variables like AWS_PROFILE or AWS_ACCESS_KEY_ID",
|
||||
"Amazon bedrock can be configured with standard AWS environment variables like AWS_BEARER_TOKEN_BEDROCK, AWS_PROFILE or AWS_ACCESS_KEY_ID",
|
||||
)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
@@ -97,22 +134,32 @@ export const AuthLoginCommand = cmd({
|
||||
options: [
|
||||
{
|
||||
label: "Claude Pro/Max",
|
||||
value: "oauth",
|
||||
value: "max",
|
||||
},
|
||||
{
|
||||
label: "API Key",
|
||||
label: "Create API Key",
|
||||
value: "console",
|
||||
},
|
||||
{
|
||||
label: "Manually enter API Key",
|
||||
value: "api",
|
||||
},
|
||||
],
|
||||
})
|
||||
if (prompts.isCancel(method)) throw new UI.CancelledError()
|
||||
|
||||
if (method === "oauth") {
|
||||
if (method === "max") {
|
||||
// some weird bug where program exits without this
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
const { url, verifier } = await AuthAnthropic.authorize()
|
||||
prompts.note("Opening browser...")
|
||||
await open(url)
|
||||
const { url, verifier } = await AuthAnthropic.authorize("max")
|
||||
prompts.note("Trying to open browser...")
|
||||
try {
|
||||
await open(url)
|
||||
} catch (e) {
|
||||
prompts.log.error(
|
||||
"Failed to open browser perhaps you are running without a display or X server, please open the following URL in your browser:",
|
||||
)
|
||||
}
|
||||
prompts.log.info(url)
|
||||
|
||||
const code = await prompts.text({
|
||||
@@ -121,16 +168,107 @@ export const AuthLoginCommand = cmd({
|
||||
})
|
||||
if (prompts.isCancel(code)) throw new UI.CancelledError()
|
||||
|
||||
await AuthAnthropic.exchange(code, verifier)
|
||||
.then(() => {
|
||||
prompts.log.success("Login successful")
|
||||
})
|
||||
.catch(() => {
|
||||
prompts.log.error("Invalid code")
|
||||
try {
|
||||
const credentials = await AuthAnthropic.exchange(code, verifier)
|
||||
await Auth.set("anthropic", {
|
||||
type: "oauth",
|
||||
refresh: credentials.refresh,
|
||||
access: credentials.access,
|
||||
expires: credentials.expires,
|
||||
})
|
||||
prompts.log.success("Login successful")
|
||||
} catch {
|
||||
prompts.log.error("Invalid code")
|
||||
}
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
|
||||
if (method === "console") {
|
||||
// some weird bug where program exits without this
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
const { url, verifier } = await AuthAnthropic.authorize("console")
|
||||
prompts.note("Trying to open browser...")
|
||||
try {
|
||||
await open(url)
|
||||
} catch (e) {
|
||||
prompts.log.error(
|
||||
"Failed to open browser perhaps you are running without a display or X server, please open the following URL in your browser:",
|
||||
)
|
||||
}
|
||||
prompts.log.info(url)
|
||||
|
||||
const code = await prompts.text({
|
||||
message: "Paste the authorization code here: ",
|
||||
validate: (x) => (x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(code)) throw new UI.CancelledError()
|
||||
|
||||
try {
|
||||
const credentials = await AuthAnthropic.exchange(code, verifier)
|
||||
const accessToken = credentials.access
|
||||
const response = await fetch("https://api.anthropic.com/api/oauth/claude_cli/create_api_key", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
Accept: "application/json, text/plain, */*",
|
||||
},
|
||||
})
|
||||
if (!response.ok) {
|
||||
throw new Error("Failed to create API key")
|
||||
}
|
||||
const json = await response.json()
|
||||
await Auth.set("anthropic", {
|
||||
type: "api",
|
||||
key: json.raw_key,
|
||||
})
|
||||
|
||||
prompts.log.success("Login successful - API key created and saved")
|
||||
} catch (error) {
|
||||
prompts.log.error("Invalid code or failed to create API key")
|
||||
}
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
const copilot = await AuthCopilot()
|
||||
if (provider === "github-copilot" && copilot) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
const deviceInfo = await copilot.authorize()
|
||||
|
||||
prompts.note(`Please visit: ${deviceInfo.verification}\nEnter code: ${deviceInfo.user}`)
|
||||
|
||||
const spinner = prompts.spinner()
|
||||
spinner.start("Waiting for authorization...")
|
||||
|
||||
while (true) {
|
||||
await new Promise((resolve) => setTimeout(resolve, deviceInfo.interval * 1000))
|
||||
const response = await copilot.poll(deviceInfo.device)
|
||||
if (response.status === "pending") continue
|
||||
if (response.status === "success") {
|
||||
await Auth.set("github-copilot", {
|
||||
type: "oauth",
|
||||
refresh: response.refresh,
|
||||
access: response.access,
|
||||
expires: response.expires,
|
||||
})
|
||||
spinner.stop("Login successful")
|
||||
break
|
||||
}
|
||||
if (response.status === "failed") {
|
||||
spinner.stop("Failed to authorize", 1)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
|
||||
if (provider === "vercel") {
|
||||
prompts.log.info("You can create an api key in the dashboard")
|
||||
}
|
||||
|
||||
const key = await prompts.password({
|
||||
@@ -149,7 +287,7 @@ export const AuthLoginCommand = cmd({
|
||||
|
||||
export const AuthLogoutCommand = cmd({
|
||||
command: "logout",
|
||||
describe: "logout from a configured provider",
|
||||
describe: "log out from a configured provider",
|
||||
async handler() {
|
||||
UI.empty()
|
||||
const credentials = await Auth.all().then((x) => Object.entries(x))
|
||||
@@ -162,12 +300,7 @@ export const AuthLogoutCommand = cmd({
|
||||
const providerID = await prompts.select({
|
||||
message: "Select provider",
|
||||
options: credentials.map(([key, value]) => ({
|
||||
label:
|
||||
(database[key]?.name || key) +
|
||||
UI.Style.TEXT_DIM +
|
||||
" (" +
|
||||
value.type +
|
||||
")",
|
||||
label: (database[key]?.name || key) + UI.Style.TEXT_DIM + " (" + value.type + ")",
|
||||
value: key,
|
||||
})),
|
||||
})
|
||||
|
||||
packages/opencode/src/cli/cmd/debug/file.ts (Normal file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
import { File } from "../../../file"
|
||||
import { bootstrap } from "../../bootstrap"
|
||||
import { cmd } from "../cmd"
|
||||
|
||||
const FileReadCommand = cmd({
|
||||
command: "read <path>",
|
||||
builder: (yargs) =>
|
||||
yargs.positional("path", {
|
||||
type: "string",
|
||||
demandOption: true,
|
||||
description: "File path to read",
|
||||
}),
|
||||
async handler(args) {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
const content = await File.read(args.path)
|
||||
console.log(content)
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
const FileStatusCommand = cmd({
|
||||
command: "status",
|
||||
builder: (yargs) => yargs,
|
||||
async handler() {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
const status = await File.status()
|
||||
console.log(JSON.stringify(status, null, 2))
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
export const FileCommand = cmd({
|
||||
command: "file",
|
||||
builder: (yargs) => yargs.command(FileReadCommand).command(FileStatusCommand).demandCommand(),
|
||||
async handler() {},
|
||||
})
|
||||
packages/opencode/src/cli/cmd/debug/index.ts (Normal file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
import { bootstrap } from "../../bootstrap"
|
||||
import { cmd } from "../cmd"
|
||||
import { FileCommand } from "./file"
|
||||
import { LSPCommand } from "./lsp"
|
||||
import { RipgrepCommand } from "./ripgrep"
|
||||
import { ScrapCommand } from "./scrap"
|
||||
import { SnapshotCommand } from "./snapshot"
|
||||
|
||||
export const DebugCommand = cmd({
|
||||
command: "debug",
|
||||
builder: (yargs) =>
|
||||
yargs
|
||||
.command(LSPCommand)
|
||||
.command(RipgrepCommand)
|
||||
.command(FileCommand)
|
||||
.command(ScrapCommand)
|
||||
.command(SnapshotCommand)
|
||||
.command({
|
||||
command: "wait",
|
||||
async handler() {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 1_000 * 60 * 60 * 24))
|
||||
})
|
||||
},
|
||||
})
|
||||
.demandCommand(),
|
||||
async handler() {},
|
||||
})
|
||||
packages/opencode/src/cli/cmd/debug/lsp.ts (Normal file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
import { LSP } from "../../../lsp"
|
||||
import { bootstrap } from "../../bootstrap"
|
||||
import { cmd } from "../cmd"
|
||||
import { Log } from "../../../util/log"
|
||||
|
||||
export const LSPCommand = cmd({
|
||||
command: "lsp",
|
||||
builder: (yargs) =>
|
||||
yargs.command(DiagnosticsCommand).command(SymbolsCommand).command(DocumentSymbolsCommand).demandCommand(),
|
||||
async handler() {},
|
||||
})
|
||||
|
||||
const DiagnosticsCommand = cmd({
|
||||
command: "diagnostics <file>",
|
||||
builder: (yargs) => yargs.positional("file", { type: "string", demandOption: true }),
|
||||
async handler(args) {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
await LSP.touchFile(args.file, true)
|
||||
console.log(await LSP.diagnostics())
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
export const SymbolsCommand = cmd({
|
||||
command: "symbols <query>",
|
||||
builder: (yargs) => yargs.positional("query", { type: "string", demandOption: true }),
|
||||
async handler(args) {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
using _ = Log.Default.time("symbols")
|
||||
const results = await LSP.workspaceSymbol(args.query)
|
||||
console.log(JSON.stringify(results, null, 2))
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
export const DocumentSymbolsCommand = cmd({
|
||||
command: "document-symbols <uri>",
|
||||
builder: (yargs) => yargs.positional("uri", { type: "string", demandOption: true }),
|
||||
async handler(args) {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
using _ = Log.Default.time("document-symbols")
|
||||
const results = await LSP.documentSymbol(args.uri)
|
||||
console.log(JSON.stringify(results, null, 2))
|
||||
})
|
||||
},
|
||||
})
|
||||
packages/opencode/src/cli/cmd/debug/ripgrep.ts (Normal file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
import { App } from "../../../app/app"
|
||||
import { Ripgrep } from "../../../file/ripgrep"
|
||||
import { bootstrap } from "../../bootstrap"
|
||||
import { cmd } from "../cmd"
|
||||
|
||||
export const RipgrepCommand = cmd({
|
||||
command: "rg",
|
||||
builder: (yargs) => yargs.command(TreeCommand).command(FilesCommand).command(SearchCommand).demandCommand(),
|
||||
async handler() {},
|
||||
})
|
||||
|
||||
const TreeCommand = cmd({
|
||||
command: "tree",
|
||||
builder: (yargs) =>
|
||||
yargs.option("limit", {
|
||||
type: "number",
|
||||
}),
|
||||
async handler(args) {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
const app = App.info()
|
||||
console.log(await Ripgrep.tree({ cwd: app.path.cwd, limit: args.limit }))
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
const FilesCommand = cmd({
|
||||
command: "files",
|
||||
builder: (yargs) =>
|
||||
yargs
|
||||
.option("query", {
|
||||
type: "string",
|
||||
description: "Filter files by query",
|
||||
})
|
||||
.option("glob", {
|
||||
type: "string",
|
||||
description: "Glob pattern to match files",
|
||||
})
|
||||
.option("limit", {
|
||||
type: "number",
|
||||
description: "Limit number of results",
|
||||
}),
|
||||
async handler(args) {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
const app = App.info()
|
||||
const files = await Ripgrep.files({
|
||||
cwd: app.path.cwd,
|
||||
query: args.query,
|
||||
glob: args.glob ? [args.glob] : undefined,
|
||||
limit: args.limit,
|
||||
})
|
||||
console.log(files.join("\n"))
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
const SearchCommand = cmd({
|
||||
command: "search <pattern>",
|
||||
builder: (yargs) =>
|
||||
yargs
|
||||
.positional("pattern", {
|
||||
type: "string",
|
||||
demandOption: true,
|
||||
description: "Search pattern",
|
||||
})
|
||||
.option("glob", {
|
||||
type: "array",
|
||||
description: "File glob patterns",
|
||||
})
|
||||
.option("limit", {
|
||||
type: "number",
|
||||
description: "Limit number of results",
|
||||
}),
|
||||
async handler(args) {
|
||||
const results = await Ripgrep.search({
|
||||
cwd: process.cwd(),
|
||||
pattern: args.pattern,
|
||||
glob: args.glob as string[] | undefined,
|
||||
limit: args.limit,
|
||||
})
|
||||
console.log(JSON.stringify(results, null, 2))
|
||||
},
|
||||
})
|
||||
packages/opencode/src/cli/cmd/debug/scrap.ts (Normal file, 7 lines)
@@ -0,0 +1,7 @@
import { cmd } from "../cmd"

export const ScrapCommand = cmd({
  command: "scrap",
  builder: (yargs) => yargs,
  async handler() {},
})
packages/opencode/src/cli/cmd/debug/snapshot.ts (Normal file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
import { Snapshot } from "../../../snapshot"
|
||||
import { bootstrap } from "../../bootstrap"
|
||||
import { cmd } from "../cmd"
|
||||
|
||||
export const SnapshotCommand = cmd({
|
||||
command: "snapshot",
|
||||
builder: (yargs) => yargs.command(CreateCommand).command(RestoreCommand).command(DiffCommand).demandCommand(),
|
||||
async handler() {},
|
||||
})
|
||||
|
||||
const CreateCommand = cmd({
|
||||
command: "create",
|
||||
async handler() {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
const result = await Snapshot.create("test")
|
||||
console.log(result)
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
const RestoreCommand = cmd({
|
||||
command: "restore <commit>",
|
||||
builder: (yargs) =>
|
||||
yargs.positional("commit", {
|
||||
type: "string",
|
||||
description: "commit",
|
||||
demandOption: true,
|
||||
}),
|
||||
async handler(args) {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
await Snapshot.restore("test", args.commit)
|
||||
console.log("restored")
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
export const DiffCommand = cmd({
|
||||
command: "diff <commit>",
|
||||
describe: "diff",
|
||||
builder: (yargs) =>
|
||||
yargs.positional("commit", {
|
||||
type: "string",
|
||||
description: "commit",
|
||||
demandOption: true,
|
||||
}),
|
||||
async handler(args) {
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
const diff = await Snapshot.diff("test", args.commit)
|
||||
console.log(diff)
|
||||
})
|
||||
},
|
||||
})
|
||||
@@ -2,7 +2,6 @@ import { Server } from "../../server/server"
import fs from "fs/promises"
import path from "path"
import type { CommandModule } from "yargs"
import { Config } from "../../config/config"

export const GenerateCommand = {
  command: "generate",
@@ -11,9 +10,6 @@ export const GenerateCommand = {
    const dir = "gen"
    await fs.rmdir(dir, { recursive: true }).catch(() => {})
    await fs.mkdir(dir, { recursive: true })
    await Bun.write(
      path.join(dir, "openapi.json"),
      JSON.stringify(specs, null, 2),
    )
    await Bun.write(path.join(dir, "openapi.json"), JSON.stringify(specs, null, 2))
  },
} satisfies CommandModule

packages/opencode/src/cli/cmd/install-github.ts (Normal file, 221 lines)
@@ -0,0 +1,221 @@
|
||||
import { $ } from "bun"
|
||||
import path from "path"
|
||||
import { exec } from "child_process"
|
||||
import * as prompts from "@clack/prompts"
|
||||
import { map, pipe, sortBy, values } from "remeda"
|
||||
import { UI } from "../ui"
|
||||
import { cmd } from "./cmd"
|
||||
import { ModelsDev } from "../../provider/models"
|
||||
import { App } from "../../app/app"
|
||||
|
||||
const WORKFLOW_FILE = ".github/workflows/opencode.yml"
|
||||
|
||||
export const InstallGithubCommand = cmd({
|
||||
command: "install-github",
|
||||
describe: "install the GitHub agent",
|
||||
async handler() {
|
||||
await App.provide({ cwd: process.cwd() }, async () => {
|
||||
UI.empty()
|
||||
prompts.intro("Install GitHub agent")
|
||||
const app = await getAppInfo()
|
||||
await installGitHubApp()
|
||||
|
||||
const providers = await ModelsDev.get()
|
||||
const provider = await promptProvider()
|
||||
const model = await promptModel()
|
||||
//const key = await promptKey()
|
||||
|
||||
await addWorkflowFiles()
|
||||
printNextSteps()
|
||||
|
||||
function printNextSteps() {
|
||||
let step2
|
||||
if (provider === "amazon-bedrock") {
|
||||
step2 =
|
||||
"Configure OIDC in AWS - https://docs.github.com/en/actions/how-tos/security-for-github-actions/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services"
|
||||
} else {
|
||||
const url = `https://github.com/organizations/${app.owner}/settings/secrets/actions`
|
||||
const env = providers[provider].env
|
||||
const envStr =
|
||||
env.length === 1
|
||||
? `\`${env[0]}\` secret`
|
||||
: `\`${[env.slice(0, -1).join("\`, \`"), ...env.slice(-1)].join("\` and \`")}\` secrets`
|
||||
step2 = `Add ${envStr} for ${providers[provider].name} - ${url}`
|
||||
}
|
||||
|
||||
prompts.outro(
|
||||
[
|
||||
"Next steps:",
|
||||
` 1. Commit "${WORKFLOW_FILE}" file and push`,
|
||||
` 2. ${step2}`,
|
||||
" 3. Learn how to use the GitHub agent - https://docs.opencode.ai/docs/github/getting-started",
|
||||
].join("\n"),
|
||||
)
|
||||
}
|
||||
|
||||
async function getAppInfo() {
|
||||
const app = App.info()
|
||||
if (!app.git) {
|
||||
prompts.log.error(`Could not find git repository. Please run this command from a git repository.`)
|
||||
throw new UI.CancelledError()
|
||||
}
|
||||
|
||||
// Get repo info
|
||||
const info = await $`git remote get-url origin`.quiet().nothrow().text()
|
||||
// match https or git pattern
|
||||
// ie. https://github.com/sst/opencode.git
|
||||
// ie. git@github.com:sst/opencode.git
|
||||
const parsed = info.match(/git@github\.com:(.*)\.git/) ?? info.match(/github\.com\/(.*)\.git/)
|
||||
if (!parsed) {
|
||||
prompts.log.error(`Could not find git repository. Please run this command from a git repository.`)
|
||||
throw new UI.CancelledError()
|
||||
}
|
||||
const [owner, repo] = parsed[1].split("/")
|
||||
return { owner, repo, root: app.path.root }
|
||||
}
|
||||
|
||||
async function promptProvider() {
|
||||
const priority: Record<string, number> = {
|
||||
anthropic: 0,
|
||||
"github-copilot": 1,
|
||||
openai: 2,
|
||||
google: 3,
|
||||
}
|
||||
let provider = await prompts.select({
|
||||
message: "Select provider",
|
||||
maxItems: 8,
|
||||
options: pipe(
|
||||
providers,
|
||||
values(),
|
||||
sortBy(
|
||||
(x) => priority[x.id] ?? 99,
|
||||
(x) => x.name ?? x.id,
|
||||
),
|
||||
map((x) => ({
|
||||
label: x.name,
|
||||
value: x.id,
|
||||
hint: priority[x.id] === 0 ? "recommended" : undefined,
|
||||
})),
|
||||
),
|
||||
})
|
||||
|
||||
if (prompts.isCancel(provider)) throw new UI.CancelledError()
|
||||
|
||||
return provider
|
||||
}
|
||||
|
||||
async function promptModel() {
|
||||
const providerData = providers[provider]!
|
||||
|
||||
const model = await prompts.select({
|
||||
message: "Select model",
|
||||
maxItems: 8,
|
||||
options: pipe(
|
||||
providerData.models,
|
||||
values(),
|
||||
sortBy((x) => x.name ?? x.id),
|
||||
map((x) => ({
|
||||
label: x.name ?? x.id,
|
||||
value: x.id,
|
||||
})),
|
||||
),
|
||||
})
|
||||
|
||||
if (prompts.isCancel(model)) throw new UI.CancelledError()
|
||||
return model
|
||||
}
|
||||
|
||||
async function installGitHubApp() {
|
||||
const s = prompts.spinner()
|
||||
s.start("Installing GitHub app")
|
||||
|
||||
// Get installation
|
||||
const installation = await getInstallation()
|
||||
if (installation) return s.stop("GitHub app already installed")
|
||||
|
||||
// Open browser
|
||||
const url = "https://github.com/apps/opencode-agent"
|
||||
const command =
|
||||
process.platform === "darwin"
|
||||
? `open "${url}"`
|
||||
: process.platform === "win32"
|
||||
? `start "${url}"`
|
||||
: `xdg-open "${url}"`
|
||||
|
||||
exec(command, (error) => {
|
||||
if (error) {
|
||||
prompts.log.warn(`Could not open browser. Please visit: ${url}`)
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for installation
|
||||
s.message("Waiting for GitHub app to be installed")
|
||||
const MAX_RETRIES = 60
|
||||
let retries = 0
|
||||
do {
|
||||
const installation = await getInstallation()
|
||||
if (installation) break
|
||||
|
||||
if (retries > MAX_RETRIES) {
|
||||
s.stop(
|
||||
`Failed to detect GitHub app installation. Make sure to install the app for the \`${app.owner}/${app.repo}\` repository.`,
|
||||
)
|
||||
throw new UI.CancelledError()
|
||||
}
|
||||
|
||||
retries++
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000))
|
||||
} while (true)
|
||||
|
||||
s.stop("Installed GitHub app")
|
||||
|
||||
async function getInstallation() {
|
||||
return await fetch(`https://api.opencode.ai/get_github_app_installation?owner=${app.owner}&repo=${app.repo}`)
|
||||
.then((res) => res.json())
|
||||
.then((data) => data.installation)
|
||||
}
|
||||
}
|
||||
|
||||
async function addWorkflowFiles() {
|
||||
const envStr =
|
||||
provider === "amazon-bedrock"
|
||||
? ""
|
||||
: `\n env:${providers[provider].env.map((e) => `\n ${e}: \${{ secrets.${e} }}`).join("")}`
|
||||
|
||||
await Bun.write(
|
||||
path.join(app.root, WORKFLOW_FILE),
|
||||
`
|
||||
name: opencode
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
jobs:
|
||||
opencode:
|
||||
if: |
|
||||
startsWith(github.event.comment.body, 'opencode') ||
|
||||
startsWith(github.event.comment.body, 'hi opencode') ||
|
||||
startsWith(github.event.comment.body, 'hey opencode') ||
|
||||
contains(github.event.comment.body, '@opencode-agent')
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run opencode
|
||||
uses: sst/opencode/sdks/github@github-v1${envStr}
|
||||
with:
|
||||
model: ${provider}/${model}
|
||||
`.trim(),
|
||||
)
|
||||
|
||||
prompts.log.success(`Added workflow file: "${WORKFLOW_FILE}"`)
|
||||
}
|
||||
})
|
||||
},
|
||||
})
|
||||
@@ -1,20 +0,0 @@
import { AuthAnthropic } from "../../auth/anthropic"
import { UI } from "../ui"

// Example: https://claude.ai/oauth/authorize?code=true&client_id=9d1c250a-e61b-44d9-88ed-5944d1962f5e&response_type=code&redirect_uri=https%3A%2F%2Fconsole.anthropic.com%2Foauth%2Fcode%2Fcallback&scope=org%3Acreate_api_key+user%3Aprofile+user%3Ainference&code_challenge=MdFtFgFap23AWDSN0oa3-eaKjQRFE4CaEhXx8M9fHZg&code_challenge_method=S256&state=rKLtaDzm88GSwekyEqdi0wXX-YqIr13tSzYymSzpvfs

export const LoginAnthropicCommand = {
  command: "anthropic",
  describe: "Login to Anthropic",
  handler: async () => {
    const { url, verifier } = await AuthAnthropic.authorize()

    UI.println("Login to Anthropic")
    UI.println("Open the following URL in your browser:")
    UI.println(url)
    UI.println("")

    const code = await UI.input("Paste the authorization code here: ")
    await AuthAnthropic.exchange(code, verifier)
  },
}
packages/opencode/src/cli/cmd/mcp.ts (Normal file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
import { cmd } from "./cmd"
|
||||
import { Client } from "@modelcontextprotocol/sdk/client/index.js"
|
||||
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js"
|
||||
import * as prompts from "@clack/prompts"
|
||||
import { UI } from "../ui"
|
||||
|
||||
export const McpCommand = cmd({
|
||||
command: "mcp",
|
||||
builder: (yargs) => yargs.command(McpAddCommand).demandCommand(),
|
||||
async handler() {},
|
||||
})
|
||||
|
||||
export const McpAddCommand = cmd({
|
||||
command: "add",
|
||||
describe: "add an MCP server",
|
||||
async handler() {
|
||||
UI.empty()
|
||||
prompts.intro("Add MCP server")
|
||||
|
||||
const name = await prompts.text({
|
||||
message: "Enter MCP server name",
|
||||
validate: (x) => (x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(name)) throw new UI.CancelledError()
|
||||
|
||||
const type = await prompts.select({
|
||||
message: "Select MCP server type",
|
||||
options: [
|
||||
{
|
||||
label: "Local",
|
||||
value: "local",
|
||||
hint: "Run a local command",
|
||||
},
|
||||
{
|
||||
label: "Remote",
|
||||
value: "remote",
|
||||
hint: "Connect to a remote URL",
|
||||
},
|
||||
],
|
||||
})
|
||||
if (prompts.isCancel(type)) throw new UI.CancelledError()
|
||||
|
||||
if (type === "local") {
|
||||
const command = await prompts.text({
|
||||
message: "Enter command to run",
|
||||
placeholder: "e.g., opencode x @modelcontextprotocol/server-filesystem",
|
||||
validate: (x) => (x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(command)) throw new UI.CancelledError()
|
||||
|
||||
prompts.log.info(`Local MCP server "${name}" configured with command: ${command}`)
|
||||
prompts.outro("MCP server added successfully")
|
||||
return
|
||||
}
|
||||
|
||||
if (type === "remote") {
|
||||
const url = await prompts.text({
|
||||
message: "Enter MCP server URL",
|
||||
placeholder: "e.g., https://example.com/mcp",
|
||||
validate: (x) => {
|
||||
if (x.length === 0) return "Required"
|
||||
const isValid = URL.canParse(x)
|
||||
return isValid ? undefined : "Invalid URL"
|
||||
},
|
||||
})
|
||||
if (prompts.isCancel(url)) throw new UI.CancelledError()
|
||||
|
||||
const client = new Client({
|
||||
name: "opencode",
|
||||
version: "1.0.0",
|
||||
})
|
||||
const transport = new StreamableHTTPClientTransport(new URL(url))
|
||||
await client.connect(transport)
|
||||
prompts.log.info(`Remote MCP server "${name}" configured with URL: ${url}`)
|
||||
}
|
||||
|
||||
prompts.outro("MCP server added successfully")
|
||||
},
|
||||
})
|
||||
19 packages/opencode/src/cli/cmd/models.ts Normal file
@@ -0,0 +1,19 @@
import { App } from "../../app/app"
import { Provider } from "../../provider/provider"
import { cmd } from "./cmd"

export const ModelsCommand = cmd({
  command: "models",
  describe: "list all available models",
  handler: async () => {
    await App.provide({ cwd: process.cwd() }, async () => {
      const providers = await Provider.list()

      for (const [providerID, provider] of Object.entries(providers)) {
        for (const modelID of Object.keys(provider.info.models)) {
          console.log(`${providerID}/${modelID}`)
        }
      }
    })
  },
})
|
||||
@@ -1,133 +1,190 @@
|
||||
import type { Argv } from "yargs"
|
||||
import { App } from "../../app/app"
|
||||
import { Bus } from "../../bus"
|
||||
import { Provider } from "../../provider/provider"
|
||||
import { Session } from "../../session"
|
||||
import { Share } from "../../share/share"
|
||||
import { Message } from "../../session/message"
|
||||
import { UI } from "../ui"
|
||||
import { VERSION } from "../version"
|
||||
|
||||
const COLOR = [
|
||||
UI.Style.TEXT_SUCCESS_BOLD,
|
||||
UI.Style.TEXT_INFO_BOLD,
|
||||
UI.Style.TEXT_HIGHLIGHT_BOLD,
|
||||
UI.Style.TEXT_WARNING_BOLD,
|
||||
]
|
||||
import { cmd } from "./cmd"
|
||||
import { Flag } from "../../flag/flag"
|
||||
import { Config } from "../../config/config"
|
||||
import { bootstrap } from "../bootstrap"
|
||||
import { MessageV2 } from "../../session/message-v2"
|
||||
import { Mode } from "../../session/mode"
|
||||
import { Identifier } from "../../id/id"
|
||||
|
||||
const TOOL: Record<string, [string, string]> = {
|
||||
opencode_todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD],
|
||||
opencode_todoread: ["Todo", UI.Style.TEXT_WARNING_BOLD],
|
||||
opencode_bash: ["Bash", UI.Style.TEXT_DANGER_BOLD],
|
||||
opencode_edit: ["Edit", UI.Style.TEXT_SUCCESS_BOLD],
|
||||
opencode_glob: ["Glob", UI.Style.TEXT_INFO_BOLD],
|
||||
opencode_grep: ["Grep", UI.Style.TEXT_INFO_BOLD],
|
||||
opencode_list: ["List", UI.Style.TEXT_INFO_BOLD],
|
||||
opencode_read: ["Read", UI.Style.TEXT_HIGHLIGHT_BOLD],
|
||||
opencode_write: ["Write", UI.Style.TEXT_SUCCESS_BOLD],
|
||||
todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD],
|
||||
todoread: ["Todo", UI.Style.TEXT_WARNING_BOLD],
|
||||
bash: ["Bash", UI.Style.TEXT_DANGER_BOLD],
|
||||
edit: ["Edit", UI.Style.TEXT_SUCCESS_BOLD],
|
||||
glob: ["Glob", UI.Style.TEXT_INFO_BOLD],
|
||||
grep: ["Grep", UI.Style.TEXT_INFO_BOLD],
|
||||
list: ["List", UI.Style.TEXT_INFO_BOLD],
|
||||
read: ["Read", UI.Style.TEXT_HIGHLIGHT_BOLD],
|
||||
write: ["Write", UI.Style.TEXT_SUCCESS_BOLD],
|
||||
websearch: ["Search", UI.Style.TEXT_DIM_BOLD],
|
||||
}
|
||||
|
||||
export const RunCommand = {
|
||||
export const RunCommand = cmd({
|
||||
command: "run [message..]",
|
||||
describe: "Run OpenCode with a message",
|
||||
describe: "run opencode with a message",
|
||||
builder: (yargs: Argv) => {
|
||||
return yargs
|
||||
.positional("message", {
|
||||
describe: "Message to send",
|
||||
describe: "message to send",
|
||||
type: "string",
|
||||
array: true,
|
||||
default: [],
|
||||
})
|
||||
.option("continue", {
|
||||
alias: ["c"],
|
||||
describe: "continue the last session",
|
||||
type: "boolean",
|
||||
})
|
||||
.option("session", {
|
||||
describe: "Session ID to continue",
|
||||
alias: ["s"],
|
||||
describe: "session id to continue",
|
||||
type: "string",
|
||||
})
|
||||
.option("share", {
|
||||
type: "boolean",
|
||||
describe: "share the session",
|
||||
})
|
||||
.option("model", {
|
||||
type: "string",
|
||||
alias: ["m"],
|
||||
describe: "model to use in the format of provider/model",
|
||||
})
|
||||
.option("mode", {
|
||||
type: "string",
|
||||
describe: "mode to use",
|
||||
})
|
||||
},
|
||||
handler: async (args: {
|
||||
message: string[]
|
||||
session?: string
|
||||
printLogs?: boolean
|
||||
}) => {
|
||||
const message = args.message.join(" ")
|
||||
await App.provide(
|
||||
{
|
||||
cwd: process.cwd(),
|
||||
version: VERSION,
|
||||
},
|
||||
async () => {
|
||||
await Share.init()
|
||||
const session = args.session
|
||||
? await Session.get(args.session)
|
||||
: await Session.create()
|
||||
handler: async (args) => {
|
||||
let message = args.message.join(" ")
|
||||
|
||||
UI.println(UI.Style.TEXT_HIGHLIGHT_BOLD + "◍ OpenCode", VERSION)
|
||||
UI.empty()
|
||||
UI.println(UI.Style.TEXT_NORMAL_BOLD + "> ", message)
|
||||
UI.empty()
|
||||
UI.println(
|
||||
UI.Style.TEXT_INFO_BOLD +
|
||||
"~ https://dev.opencode.ai/s/" +
|
||||
session.id.slice(-8),
|
||||
)
|
||||
UI.empty()
|
||||
if (!process.stdin.isTTY) message += "\n" + (await Bun.stdin.text())
|
||||
|
||||
function printEvent(color: string, type: string, title: string) {
|
||||
UI.println(
|
||||
color + `|`,
|
||||
UI.Style.TEXT_NORMAL +
|
||||
UI.Style.TEXT_DIM +
|
||||
` ${type.padEnd(7, " ")}`,
|
||||
"",
|
||||
UI.Style.TEXT_NORMAL + title,
|
||||
)
|
||||
await bootstrap({ cwd: process.cwd() }, async () => {
|
||||
const session = await (async () => {
|
||||
if (args.continue) {
|
||||
const list = Session.list()
|
||||
const first = await list.next()
|
||||
await list.return()
|
||||
if (first.done) return
|
||||
return first.value
|
||||
}
|
||||
|
||||
Bus.subscribe(Message.Event.PartUpdated, async (evt) => {
|
||||
if (evt.properties.sessionID !== session.id) return
|
||||
const part = evt.properties.part
|
||||
const message = await Session.getMessage(
|
||||
evt.properties.sessionID,
|
||||
evt.properties.messageID,
|
||||
)
|
||||
if (args.session) return Session.get(args.session)
|
||||
|
||||
if (
|
||||
part.type === "tool-invocation" &&
|
||||
part.toolInvocation.state === "result"
|
||||
) {
|
||||
const metadata =
|
||||
message.metadata.tool[part.toolInvocation.toolCallId]
|
||||
const [tool, color] = TOOL[part.toolInvocation.toolName] ?? [
|
||||
part.toolInvocation.toolName,
|
||||
UI.Style.TEXT_INFO_BOLD,
|
||||
]
|
||||
printEvent(color, tool, metadata.title)
|
||||
return Session.create()
|
||||
})()
|
||||
|
||||
if (!session) {
|
||||
UI.error("Session not found")
|
||||
return
|
||||
}
|
||||
|
||||
UI.empty()
|
||||
UI.println(UI.logo())
|
||||
UI.empty()
|
||||
|
||||
const cfg = await Config.get()
|
||||
if (cfg.share === "auto" || Flag.OPENCODE_AUTO_SHARE || args.share) {
|
||||
try {
|
||||
await Session.share(session.id)
|
||||
UI.println(UI.Style.TEXT_INFO_BOLD + "~ https://opencode.ai/s/" + session.id.slice(-8))
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.message.includes("disabled")) {
|
||||
UI.println(UI.Style.TEXT_DANGER_BOLD + "! " + error.message)
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
UI.empty()
|
||||
|
||||
if (part.type === "text") {
|
||||
if (part.text.includes("\n")) {
|
||||
UI.empty()
|
||||
UI.println(part.text)
|
||||
UI.empty()
|
||||
return
|
||||
}
|
||||
printEvent(UI.Style.TEXT_NORMAL_BOLD, "Text", part.text)
|
||||
const { providerID, modelID } = args.model ? Provider.parseModel(args.model) : await Provider.defaultModel()
|
||||
UI.println(UI.Style.TEXT_NORMAL_BOLD + "@ ", UI.Style.TEXT_NORMAL + `${providerID}/${modelID}`)
|
||||
UI.empty()
|
||||
|
||||
function printEvent(color: string, type: string, title: string) {
|
||||
UI.println(
|
||||
color + `|`,
|
||||
UI.Style.TEXT_NORMAL + UI.Style.TEXT_DIM + ` ${type.padEnd(7, " ")}`,
|
||||
"",
|
||||
UI.Style.TEXT_NORMAL + title,
|
||||
)
|
||||
}
|
||||
|
||||
let text = ""
|
||||
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
|
||||
if (evt.properties.part.sessionID !== session.id) return
|
||||
if (evt.properties.part.messageID === messageID) return
|
||||
const part = evt.properties.part
|
||||
|
||||
if (part.type === "tool" && part.state.status === "completed") {
|
||||
const [tool, color] = TOOL[part.tool] ?? [part.tool, UI.Style.TEXT_INFO_BOLD]
|
||||
const title =
|
||||
part.state.title || Object.keys(part.state.input).length > 0 ? JSON.stringify(part.state.input) : "Unknown"
|
||||
printEvent(color, tool, title)
|
||||
}
|
||||
|
||||
if (part.type === "text") {
|
||||
text = part.text
|
||||
|
||||
if (part.time?.end) {
|
||||
UI.empty()
|
||||
UI.println(UI.markdown(text))
|
||||
UI.empty()
|
||||
text = ""
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
const { providerID, modelID } = await Provider.defaultModel()
|
||||
await Session.chat({
|
||||
sessionID: session.id,
|
||||
providerID,
|
||||
modelID,
|
||||
parts: [
|
||||
{
|
||||
type: "text",
|
||||
text: message,
|
||||
},
|
||||
],
|
||||
})
|
||||
UI.empty()
|
||||
},
|
||||
)
|
||||
let errorMsg: string | undefined
|
||||
Bus.subscribe(Session.Event.Error, async (evt) => {
|
||||
const { sessionID, error } = evt.properties
|
||||
if (sessionID !== session.id || !error) return
|
||||
let err = String(error.name)
|
||||
|
||||
if ("data" in error && error.data && "message" in error.data) {
|
||||
err = error.data.message
|
||||
}
|
||||
errorMsg = errorMsg ? errorMsg + "\n" + err : err
|
||||
|
||||
UI.error(err)
|
||||
})
|
||||
|
||||
const mode = args.mode ? await Mode.get(args.mode) : await Mode.list().then((x) => x[0])
|
||||
|
||||
const messageID = Identifier.ascending("message")
|
||||
const result = await Session.chat({
|
||||
sessionID: session.id,
|
||||
messageID,
|
||||
...(mode.model
|
||||
? mode.model
|
||||
: {
|
||||
providerID,
|
||||
modelID,
|
||||
}),
|
||||
mode: mode.name,
|
||||
parts: [
|
||||
{
|
||||
id: Identifier.ascending("part"),
|
||||
type: "text",
|
||||
text: message,
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const isPiped = !process.stdout.isTTY
|
||||
if (isPiped) {
|
||||
const match = result.parts.findLast((x) => x.type === "text")
|
||||
if (match) process.stdout.write(UI.markdown(match.text))
|
||||
if (errorMsg) process.stdout.write(errorMsg)
|
||||
}
|
||||
UI.empty()
|
||||
})
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
@@ -1,16 +0,0 @@
import { App } from "../../app/app"
import { LSP } from "../../lsp"
import { VERSION } from "../version"
import { cmd } from "./cmd"

export const ScrapCommand = cmd({
  command: "scrap <file>",
  builder: (yargs) =>
    yargs.positional("file", { type: "string", demandOption: true }),
  async handler(args) {
    await App.provide({ cwd: process.cwd(), version: VERSION }, async (app) => {
      await LSP.touchFile(args.file, true)
      console.log(await LSP.diagnostics())
    })
  },
})
48 packages/opencode/src/cli/cmd/serve.ts Normal file
@@ -0,0 +1,48 @@
import { Provider } from "../../provider/provider"
import { Server } from "../../server/server"
import { Share } from "../../share/share"
import { bootstrap } from "../bootstrap"
import { cmd } from "./cmd"

export const ServeCommand = cmd({
  command: "serve",
  builder: (yargs) =>
    yargs
      .option("port", {
        alias: ["p"],
        type: "number",
        describe: "port to listen on",
        default: 4096,
      })
      .option("hostname", {
        alias: ["h"],
        type: "string",
        describe: "hostname to listen on",
        default: "127.0.0.1",
      }),
  describe: "starts a headless opencode server",
  handler: async (args) => {
    const cwd = process.cwd()
    await bootstrap({ cwd }, async () => {
      const providers = await Provider.list()
      if (Object.keys(providers).length === 0) {
        return "needs_provider"
      }

      const hostname = args.hostname
      const port = args.port

      await Share.init()
      const server = Server.listen({
        port,
        hostname,
      })

      console.log(`opencode server listening on http://${server.hostname}:${server.port}`)

      await new Promise(() => {})

      server.stop()
    })
  },
})
98 packages/opencode/src/cli/cmd/stats.ts Normal file
@@ -0,0 +1,98 @@
|
||||
import { cmd } from "./cmd"
|
||||
|
||||
interface SessionStats {
|
||||
totalSessions: number
|
||||
totalMessages: number
|
||||
totalCost: number
|
||||
totalTokens: {
|
||||
input: number
|
||||
output: number
|
||||
reasoning: number
|
||||
cache: {
|
||||
read: number
|
||||
write: number
|
||||
}
|
||||
}
|
||||
toolUsage: Record<string, number>
|
||||
dateRange: {
|
||||
earliest: number
|
||||
latest: number
|
||||
}
|
||||
days: number
|
||||
costPerDay: number
|
||||
}
|
||||
|
||||
export const StatsCommand = cmd({
|
||||
command: "stats",
|
||||
handler: async () => {},
|
||||
})
|
||||
|
||||
export function displayStats(stats: SessionStats) {
|
||||
const width = 56
|
||||
|
||||
function renderRow(label: string, value: string): string {
|
||||
const availableWidth = width - 1
|
||||
const paddingNeeded = availableWidth - label.length - value.length
|
||||
const padding = Math.max(0, paddingNeeded)
|
||||
return `│${label}${" ".repeat(padding)}${value} │`
|
||||
}
|
||||
|
||||
// Overview section
|
||||
console.log("┌────────────────────────────────────────────────────────┐")
|
||||
console.log("│ OVERVIEW │")
|
||||
console.log("├────────────────────────────────────────────────────────┤")
|
||||
console.log(renderRow("Sessions", stats.totalSessions.toLocaleString()))
|
||||
console.log(renderRow("Messages", stats.totalMessages.toLocaleString()))
|
||||
console.log(renderRow("Days", stats.days.toString()))
|
||||
console.log("└────────────────────────────────────────────────────────┘")
|
||||
console.log()
|
||||
|
||||
// Cost & Tokens section
|
||||
console.log("┌────────────────────────────────────────────────────────┐")
|
||||
console.log("│ COST & TOKENS │")
|
||||
console.log("├────────────────────────────────────────────────────────┤")
|
||||
const cost = isNaN(stats.totalCost) ? 0 : stats.totalCost
|
||||
const costPerDay = isNaN(stats.costPerDay) ? 0 : stats.costPerDay
|
||||
console.log(renderRow("Total Cost", `$${cost.toFixed(2)}`))
|
||||
console.log(renderRow("Cost/Day", `$${costPerDay.toFixed(2)}`))
|
||||
console.log(renderRow("Input", formatNumber(stats.totalTokens.input)))
|
||||
console.log(renderRow("Output", formatNumber(stats.totalTokens.output)))
|
||||
console.log(renderRow("Cache Read", formatNumber(stats.totalTokens.cache.read)))
|
||||
console.log(renderRow("Cache Write", formatNumber(stats.totalTokens.cache.write)))
|
||||
console.log("└────────────────────────────────────────────────────────┘")
|
||||
console.log()
|
||||
|
||||
// Tool Usage section
|
||||
if (Object.keys(stats.toolUsage).length > 0) {
|
||||
const sortedTools = Object.entries(stats.toolUsage)
|
||||
.sort(([, a], [, b]) => b - a)
|
||||
.slice(0, 10)
|
||||
|
||||
console.log("┌────────────────────────────────────────────────────────┐")
|
||||
console.log("│ TOOL USAGE │")
|
||||
console.log("├────────────────────────────────────────────────────────┤")
|
||||
|
||||
const maxCount = Math.max(...sortedTools.map(([, count]) => count))
|
||||
const totalToolUsage = Object.values(stats.toolUsage).reduce((a, b) => a + b, 0)
|
||||
|
||||
for (const [tool, count] of sortedTools) {
|
||||
const barLength = Math.max(1, Math.floor((count / maxCount) * 20))
|
||||
const bar = "█".repeat(barLength)
|
||||
const percentage = ((count / totalToolUsage) * 100).toFixed(1)
|
||||
|
||||
const content = ` ${tool.padEnd(10)} ${bar.padEnd(20)} ${count.toString().padStart(3)} (${percentage.padStart(4)}%)`
|
||||
const padding = Math.max(0, width - content.length)
|
||||
console.log(`│${content}${" ".repeat(padding)} │`)
|
||||
}
|
||||
console.log("└────────────────────────────────────────────────────────┘")
|
||||
}
|
||||
console.log()
|
||||
}
|
||||
function formatNumber(num: number): string {
|
||||
if (num >= 1000000) {
|
||||
return (num / 1000000).toFixed(1) + "M"
|
||||
} else if (num >= 1000) {
|
||||
return (num / 1000).toFixed(1) + "K"
|
||||
}
|
||||
return num.toString()
|
||||
}
|
||||
163 packages/opencode/src/cli/cmd/tui.ts Normal file
@@ -0,0 +1,163 @@
|
||||
import { Global } from "../../global"
|
||||
import { Provider } from "../../provider/provider"
|
||||
import { Server } from "../../server/server"
|
||||
import { bootstrap } from "../bootstrap"
|
||||
import { UI } from "../ui"
|
||||
import { cmd } from "./cmd"
|
||||
import path from "path"
|
||||
import fs from "fs/promises"
|
||||
import { Installation } from "../../installation"
|
||||
import { Config } from "../../config/config"
|
||||
import { Bus } from "../../bus"
|
||||
import { Log } from "../../util/log"
|
||||
import { FileWatcher } from "../../file/watch"
|
||||
import { Mode } from "../../session/mode"
|
||||
|
||||
export const TuiCommand = cmd({
|
||||
command: "$0 [project]",
|
||||
describe: "start opencode tui",
|
||||
builder: (yargs) =>
|
||||
yargs
|
||||
.positional("project", {
|
||||
type: "string",
|
||||
describe: "path to start opencode in",
|
||||
})
|
||||
.option("model", {
|
||||
type: "string",
|
||||
alias: ["m"],
|
||||
describe: "model to use in the format of provider/model",
|
||||
})
|
||||
.option("prompt", {
|
||||
alias: ["p"],
|
||||
type: "string",
|
||||
describe: "prompt to use",
|
||||
})
|
||||
.option("mode", {
|
||||
type: "string",
|
||||
describe: "mode to use",
|
||||
}),
|
||||
handler: async (args) => {
|
||||
while (true) {
|
||||
const cwd = args.project ? path.resolve(args.project) : process.cwd()
|
||||
try {
|
||||
process.chdir(cwd)
|
||||
} catch (e) {
|
||||
UI.error("Failed to change directory to " + cwd)
|
||||
return
|
||||
}
|
||||
const result = await bootstrap({ cwd }, async (app) => {
|
||||
FileWatcher.init()
|
||||
const providers = await Provider.list()
|
||||
if (Object.keys(providers).length === 0) {
|
||||
return "needs_provider"
|
||||
}
|
||||
|
||||
const server = Server.listen({
|
||||
port: 0,
|
||||
hostname: "127.0.0.1",
|
||||
})
|
||||
|
||||
let cmd = ["go", "run", "./main.go"]
|
||||
let cwd = Bun.fileURLToPath(new URL("../../../../tui/cmd/opencode", import.meta.url))
|
||||
if (Bun.embeddedFiles.length > 0) {
|
||||
const blob = Bun.embeddedFiles[0] as File
|
||||
let binaryName = blob.name
|
||||
if (process.platform === "win32" && !binaryName.endsWith(".exe")) {
|
||||
binaryName += ".exe"
|
||||
}
|
||||
const binary = path.join(Global.Path.cache, "tui", binaryName)
|
||||
const file = Bun.file(binary)
|
||||
if (!(await file.exists())) {
|
||||
await Bun.write(file, blob, { mode: 0o755 })
|
||||
await fs.chmod(binary, 0o755)
|
||||
}
|
||||
cwd = process.cwd()
|
||||
cmd = [binary]
|
||||
}
|
||||
Log.Default.info("tui", {
|
||||
cmd,
|
||||
})
|
||||
const proc = Bun.spawn({
|
||||
cmd: [
|
||||
...cmd,
|
||||
...(args.model ? ["--model", args.model] : []),
|
||||
...(args.prompt ? ["--prompt", args.prompt] : []),
|
||||
...(args.mode ? ["--mode", args.mode] : []),
|
||||
],
|
||||
cwd,
|
||||
stdout: "inherit",
|
||||
stderr: "inherit",
|
||||
stdin: "inherit",
|
||||
env: {
|
||||
...process.env,
|
||||
CGO_ENABLED: "0",
|
||||
OPENCODE_SERVER: server.url.toString(),
|
||||
OPENCODE_APP_INFO: JSON.stringify(app),
|
||||
OPENCODE_MODES: JSON.stringify(await Mode.list()),
|
||||
},
|
||||
onExit: () => {
|
||||
server.stop()
|
||||
},
|
||||
})
|
||||
|
||||
;(async () => {
|
||||
if (Installation.VERSION === "dev") return
|
||||
if (Installation.isSnapshot()) return
|
||||
const config = await Config.global()
|
||||
if (config.autoupdate === false) return
|
||||
const latest = await Installation.latest().catch(() => {})
|
||||
if (!latest) return
|
||||
if (Installation.VERSION === latest) return
|
||||
const method = await Installation.method()
|
||||
if (method === "unknown") return
|
||||
await Installation.upgrade(method, latest)
|
||||
.then(() => {
|
||||
Bus.publish(Installation.Event.Updated, { version: latest })
|
||||
})
|
||||
.catch(() => {})
|
||||
})()
|
||||
|
||||
await proc.exited
|
||||
server.stop()
|
||||
|
||||
return "done"
|
||||
})
|
||||
if (result === "done") break
|
||||
if (result === "needs_provider") {
|
||||
UI.empty()
|
||||
UI.println(UI.logo(" "))
|
||||
const result = await Bun.spawn({
|
||||
cmd: [...getOpencodeCommand(), "auth", "login"],
|
||||
cwd: process.cwd(),
|
||||
stdout: "inherit",
|
||||
stderr: "inherit",
|
||||
stdin: "inherit",
|
||||
}).exited
|
||||
if (result !== 0) return
|
||||
UI.empty()
|
||||
}
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
/**
|
||||
* Get the correct command to run opencode CLI
|
||||
* In development: ["bun", "run", "packages/opencode/src/index.ts"]
|
||||
* In production: ["/path/to/opencode"]
|
||||
*/
|
||||
function getOpencodeCommand(): string[] {
|
||||
// Check if OPENCODE_BIN_PATH is set (used by shell wrapper scripts)
|
||||
if (process.env["OPENCODE_BIN_PATH"]) {
|
||||
return [process.env["OPENCODE_BIN_PATH"]]
|
||||
}
|
||||
|
||||
const execPath = process.execPath.toLowerCase()
|
||||
|
||||
if (Installation.isDev()) {
|
||||
// In development, use bun to run the TypeScript entry point
|
||||
return [execPath, "run", process.argv[1]]
|
||||
}
|
||||
|
||||
// In production, use the current executable path
|
||||
return [process.execPath]
|
||||
}
|
||||
57 packages/opencode/src/cli/cmd/upgrade.ts Normal file
@@ -0,0 +1,57 @@
import type { Argv } from "yargs"
import { UI } from "../ui"
import * as prompts from "@clack/prompts"
import { Installation } from "../../installation"

export const UpgradeCommand = {
  command: "upgrade [target]",
  describe: "upgrade opencode to the latest or a specific version",
  builder: (yargs: Argv) => {
    return yargs
      .positional("target", {
        describe: "version to upgrade to, for ex '0.1.48' or 'v0.1.48'",
        type: "string",
      })
      .option("method", {
        alias: "m",
        describe: "installation method to use",
        type: "string",
        choices: ["curl", "npm", "pnpm", "bun", "brew"],
      })
  },
  handler: async (args: { target?: string; method?: string }) => {
    UI.empty()
    UI.println(UI.logo(" "))
    UI.empty()
    prompts.intro("Upgrade")
    const detectedMethod = await Installation.method()
    const method = (args.method as Installation.Method) ?? detectedMethod
    if (method === "unknown") {
      prompts.log.error(`opencode is installed to ${process.execPath} and seems to be managed by a package manager`)
      prompts.outro("Done")
      return
    }
    prompts.log.info("Using method: " + method)
    const target = args.target ?? (await Installation.latest())

    if (Installation.VERSION === target) {
      prompts.log.warn(`opencode upgrade skipped: ${target} is already installed`)
      prompts.outro("Done")
      return
    }

    prompts.log.info(`From ${Installation.VERSION} → ${target}`)
    const spinner = prompts.spinner()
    spinner.start("Upgrading...")
    const err = await Installation.upgrade(method, target).catch((err) => err)
    if (err) {
      spinner.stop("Upgrade failed")
      if (err instanceof Installation.UpgradeFailedError) prompts.log.error(err.data.stderr)
      else if (err instanceof Error) prompts.log.error(err.message)
      prompts.outro("Done")
      return
    }
    spinner.stop("Upgrade complete")
    prompts.outro("Done")
  },
}
16 packages/opencode/src/cli/error.ts Normal file
@@ -0,0 +1,16 @@
import { Config } from "../config/config"
import { MCP } from "../mcp"
import { UI } from "./ui"

export function FormatError(input: unknown) {
  if (MCP.Failed.isInstance(input))
    return `MCP server "${input.data.name}" failed. Note, opencode does not support MCP authentication yet.`
  if (Config.JsonError.isInstance(input)) return `Config file at ${input.data.path} is not valid JSON`
  if (Config.InvalidError.isInstance(input))
    return [
      `Config file at ${input.data.path} is invalid`,
      ...(input.data.issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []),
    ].join("\n")

  if (UI.CancelledError.isInstance(input)) return ""
}
|
||||
@@ -1,193 +0,0 @@
|
||||
import { createCli, type TrpcCliMeta } from "trpc-cli"
|
||||
import { initTRPC } from "@trpc/server"
|
||||
import { z } from "zod"
|
||||
import { Server } from "../server/server"
|
||||
import { AuthAnthropic } from "../auth/anthropic"
|
||||
import { UI } from "./ui"
|
||||
import { App } from "../app/app"
|
||||
import { Bus } from "../bus"
|
||||
import { Provider } from "../provider/provider"
|
||||
import { Session } from "../session"
|
||||
import { Share } from "../share/share"
|
||||
import { Message } from "../session/message"
|
||||
import { VERSION } from "./version"
|
||||
import { LSP } from "../lsp"
|
||||
import fs from "fs/promises"
|
||||
import path from "path"
|
||||
|
||||
const t = initTRPC.meta<TrpcCliMeta>().create()
|
||||
|
||||
export const router = t.router({
|
||||
generate: t.procedure
|
||||
.meta({
|
||||
description: "Generate OpenAPI and event specs",
|
||||
})
|
||||
.input(z.object({}))
|
||||
.mutation(async () => {
|
||||
const specs = await Server.openapi()
|
||||
const dir = "gen"
|
||||
await fs.rmdir(dir, { recursive: true }).catch(() => {})
|
||||
await fs.mkdir(dir, { recursive: true })
|
||||
await Bun.write(
|
||||
path.join(dir, "openapi.json"),
|
||||
JSON.stringify(specs, null, 2),
|
||||
)
|
||||
return "Generated OpenAPI specs in gen/ directory"
|
||||
}),
|
||||
|
||||
run: t.procedure
|
||||
.meta({
|
||||
description: "Run OpenCode with a message",
|
||||
})
|
||||
.input(
|
||||
z.object({
|
||||
message: z.array(z.string()).default([]).describe("Message to send"),
|
||||
session: z.string().optional().describe("Session ID to continue"),
|
||||
}),
|
||||
)
|
||||
.mutation(
|
||||
async ({ input }: { input: { message: string[]; session?: string } }) => {
|
||||
const message = input.message.join(" ")
|
||||
await App.provide(
|
||||
{
|
||||
cwd: process.cwd(),
|
||||
version: "0.0.0",
|
||||
},
|
||||
async () => {
|
||||
await Share.init()
|
||||
const session = input.session
|
||||
? await Session.get(input.session)
|
||||
: await Session.create()
|
||||
|
||||
UI.println(UI.Style.TEXT_HIGHLIGHT_BOLD + "◍ OpenCode", VERSION)
|
||||
UI.empty()
|
||||
UI.println(UI.Style.TEXT_NORMAL_BOLD + "> ", message)
|
||||
UI.empty()
|
||||
UI.println(
|
||||
UI.Style.TEXT_INFO_BOLD +
|
||||
"~ https://dev.opencode.ai/s?id=" +
|
||||
session.id.slice(-8),
|
||||
)
|
||||
UI.empty()
|
||||
|
||||
function printEvent(color: string, type: string, title: string) {
|
||||
UI.println(
|
||||
color + `|`,
|
||||
UI.Style.TEXT_NORMAL +
|
||||
UI.Style.TEXT_DIM +
|
||||
` ${type.padEnd(7, " ")}`,
|
||||
"",
|
||||
UI.Style.TEXT_NORMAL + title,
|
||||
)
|
||||
}
|
||||
|
||||
Bus.subscribe(Message.Event.PartUpdated, async (message) => {
|
||||
const part = message.properties.part
|
||||
if (
|
||||
part.type === "tool-invocation" &&
|
||||
part.toolInvocation.state === "result"
|
||||
) {
|
||||
if (part.toolInvocation.toolName === "opencode_todowrite")
|
||||
return
|
||||
|
||||
const args = part.toolInvocation.args as any
|
||||
const tool = part.toolInvocation.toolName
|
||||
|
||||
if (tool === "opencode_edit")
|
||||
printEvent(UI.Style.TEXT_SUCCESS_BOLD, "Edit", args.filePath)
|
||||
if (tool === "opencode_bash")
|
||||
printEvent(
|
||||
UI.Style.TEXT_WARNING_BOLD,
|
||||
"Execute",
|
||||
args.command,
|
||||
)
|
||||
if (tool === "opencode_read")
|
||||
printEvent(UI.Style.TEXT_INFO_BOLD, "Read", args.filePath)
|
||||
if (tool === "opencode_write")
|
||||
printEvent(
|
||||
UI.Style.TEXT_SUCCESS_BOLD,
|
||||
"Create",
|
||||
args.filePath,
|
||||
)
|
||||
if (tool === "opencode_list")
|
||||
printEvent(UI.Style.TEXT_INFO_BOLD, "List", args.path)
|
||||
if (tool === "opencode_glob")
|
||||
printEvent(
|
||||
UI.Style.TEXT_INFO_BOLD,
|
||||
"Glob",
|
||||
args.pattern + (args.path ? " in " + args.path : ""),
|
||||
)
|
||||
}
|
||||
|
||||
if (part.type === "text") {
|
||||
if (part.text.includes("\n")) {
|
||||
UI.empty()
|
||||
UI.println(part.text)
|
||||
UI.empty()
|
||||
return
|
||||
}
|
||||
printEvent(UI.Style.TEXT_NORMAL_BOLD, "Text", part.text)
|
||||
}
|
||||
})
|
||||
|
||||
const { providerID, modelID } = await Provider.defaultModel()
|
||||
await Session.chat({
|
||||
sessionID: session.id,
|
||||
providerID,
|
||||
modelID,
|
||||
parts: [
|
||||
{
|
||||
type: "text",
|
||||
text: message,
|
||||
},
|
||||
],
|
||||
})
|
||||
UI.empty()
|
||||
},
|
||||
)
|
||||
return "Session completed"
|
||||
},
|
||||
),
|
||||
|
||||
scrap: t.procedure
|
||||
.meta({
|
||||
description: "Test command for scraping files",
|
||||
})
|
||||
.input(
|
||||
z.object({
|
||||
file: z.string().describe("File to process"),
|
||||
}),
|
||||
)
|
||||
.mutation(async ({ input }: { input: { file: string } }) => {
|
||||
await App.provide({ cwd: process.cwd(), version: VERSION }, async () => {
|
||||
await LSP.touchFile(input.file, true)
|
||||
await LSP.diagnostics()
|
||||
})
|
||||
return `Processed file: ${input.file}`
|
||||
}),
|
||||
|
||||
login: t.router({
|
||||
anthropic: t.procedure
|
||||
.meta({
|
||||
description: "Login to Anthropic",
|
||||
})
|
||||
.input(z.object({}))
|
||||
.mutation(async () => {
|
||||
const { url, verifier } = await AuthAnthropic.authorize()
|
||||
|
||||
UI.println("Login to Anthropic")
|
||||
UI.println("Open the following URL in your browser:")
|
||||
UI.println(url)
|
||||
UI.println("")
|
||||
|
||||
const code = await UI.input("Paste the authorization code here: ")
|
||||
await AuthAnthropic.exchange(code, verifier)
|
||||
return "Successfully logged in to Anthropic"
|
||||
}),
|
||||
}),
|
||||
})
|
||||
|
||||
export function createOpenCodeCli() {
|
||||
return createCli({ router })
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import { z } from "zod"
|
||||
import { EOL } from "os"
|
||||
import { NamedError } from "../util/error"
|
||||
|
||||
export namespace UI {
|
||||
const LOGO = [
|
||||
`█▀▀█ █▀▀█ █▀▀ █▀▀▄ █▀▀ █▀▀█ █▀▀▄ █▀▀`,
|
||||
`█░░█ █░░█ █▀▀ █░░█ █░░ █░░█ █░░█ █▀▀`,
|
||||
`▀▀▀▀ █▀▀▀ ▀▀▀ ▀ ▀ ▀▀▀ ▀▀▀▀ ▀▀▀ ▀▀▀`,
|
||||
[`█▀▀█ █▀▀█ █▀▀ █▀▀▄ `, `█▀▀ █▀▀█ █▀▀▄ █▀▀`],
|
||||
[`█░░█ █░░█ █▀▀ █░░█ `, `█░░ █░░█ █░░█ █▀▀`],
|
||||
[`▀▀▀▀ █▀▀▀ ▀▀▀ ▀ ▀ `, `▀▀▀ ▀▀▀▀ ▀▀▀ ▀▀▀`],
|
||||
]
|
||||
|
||||
export const CancelledError = NamedError.create("UICancelledError", z.void())
|
||||
@@ -29,7 +30,7 @@ export namespace UI {
|
||||
|
||||
export function println(...message: string[]) {
|
||||
print(...message)
|
||||
Bun.stderr.write("\n")
|
||||
Bun.stderr.write(EOL)
|
||||
}
|
||||
|
||||
export function print(...message: string[]) {
|
||||
@@ -48,13 +49,11 @@ export namespace UI {
|
||||
const result = []
|
||||
for (const row of LOGO) {
|
||||
if (pad) result.push(pad)
|
||||
for (let i = 0; i < row.length; i++) {
|
||||
const color =
|
||||
i > 18 ? Bun.color("white", "ansi") : Bun.color("gray", "ansi")
|
||||
const char = row[i]
|
||||
result.push(color + char)
|
||||
}
|
||||
result.push("\n")
|
||||
result.push(Bun.color("gray", "ansi"))
|
||||
result.push(row[0])
|
||||
result.push("\x1b[0m")
|
||||
result.push(row[1])
|
||||
result.push(EOL)
|
||||
}
|
||||
return result.join("").trimEnd()
|
||||
}
|
||||
@@ -73,4 +72,12 @@ export namespace UI {
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
export function error(message: string) {
|
||||
println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message)
|
||||
}
|
||||
|
||||
export function markdown(text: string): string {
|
||||
return text
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +0,0 @@
declare global {
  const OPENCODE_VERSION: string
}

export const VERSION =
  typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "dev"
|
||||
@@ -1,56 +1,172 @@
|
||||
import { Log } from "../util/log"
|
||||
import path from "path"
|
||||
import { z } from "zod"
|
||||
import { App } from "../app/app"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import { ModelsDev } from "../provider/models"
|
||||
import { mergeDeep, pipe } from "remeda"
|
||||
import { Global } from "../global"
|
||||
import fs from "fs/promises"
|
||||
import { lazy } from "../util/lazy"
|
||||
import { NamedError } from "../util/error"
|
||||
|
||||
export namespace Config {
|
||||
const log = Log.create({ service: "config" })
|
||||
|
||||
export const state = App.state("config", async (app) => {
|
||||
let result: Info = {}
|
||||
let result = await global()
|
||||
for (const file of ["opencode.jsonc", "opencode.json"]) {
|
||||
const [resolved] = await Filesystem.findUp(
|
||||
file,
|
||||
app.path.cwd,
|
||||
app.path.root,
|
||||
)
|
||||
if (!resolved) continue
|
||||
try {
|
||||
result = await import(resolved).then((mod) => Info.parse(mod.default))
|
||||
log.info("found", { path: resolved })
|
||||
break
|
||||
} catch (e) {
|
||||
if (e instanceof z.ZodError) {
|
||||
for (const issue of e.issues) {
|
||||
log.info(issue.message)
|
||||
}
|
||||
throw e
|
||||
}
|
||||
continue
|
||||
const found = await Filesystem.findUp(file, app.path.cwd, app.path.root)
|
||||
for (const resolved of found.toReversed()) {
|
||||
result = mergeDeep(result, await load(resolved))
|
||||
}
|
||||
}
|
||||
|
||||
// Handle migration from autoshare to share field
|
||||
if (result.autoshare === true && !result.share) {
|
||||
result.share = "auto"
|
||||
}
|
||||
|
||||
if (!result.username) {
|
||||
const os = await import("os")
|
||||
result.username = os.userInfo().username
|
||||
}
|
||||
|
||||
log.info("loaded", result)
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
export const McpLocal = z.object({
|
||||
type: z.literal("local"),
|
||||
command: z.string().array(),
|
||||
environment: z.record(z.string(), z.string()).optional(),
|
||||
})
|
||||
export const McpLocal = z
|
||||
.object({
|
||||
type: z.literal("local").describe("Type of MCP server connection"),
|
||||
command: z.string().array().describe("Command and arguments to run the MCP server"),
|
||||
environment: z
|
||||
.record(z.string(), z.string())
|
||||
.optional()
|
||||
.describe("Environment variables to set when running the MCP server"),
|
||||
enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
|
||||
})
|
||||
.strict()
|
||||
.openapi({
|
||||
ref: "McpLocalConfig",
|
||||
})
|
||||
|
||||
export const McpRemote = z.object({
|
||||
type: z.literal("remote"),
|
||||
url: z.string(),
|
||||
})
|
||||
export const McpRemote = z
|
||||
.object({
|
||||
type: z.literal("remote").describe("Type of MCP server connection"),
|
||||
url: z.string().describe("URL of the remote MCP server"),
|
||||
enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
|
||||
headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"),
|
||||
})
|
||||
.strict()
|
||||
.openapi({
|
||||
ref: "McpRemoteConfig",
|
||||
})
|
||||
|
||||
export const Mcp = z.discriminatedUnion("type", [McpLocal, McpRemote])
|
||||
export type Mcp = z.infer<typeof Mcp>
|
||||
|
||||
export const Mode = z
|
||||
.object({
|
||||
model: z.string().optional(),
|
||||
prompt: z.string().optional(),
|
||||
tools: z.record(z.string(), z.boolean()).optional(),
|
||||
})
|
||||
.openapi({
|
||||
ref: "ModeConfig",
|
||||
})
|
||||
export type Mode = z.infer<typeof Mode>
|
||||
|
||||
export const Keybinds = z
|
||||
.object({
|
||||
leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"),
|
||||
app_help: z.string().optional().default("<leader>h").describe("Show help dialog"),
|
||||
switch_mode: z.string().optional().default("tab").describe("Next mode"),
|
||||
switch_mode_reverse: z.string().optional().default("shift+tab").describe("Previous Mode"),
|
||||
editor_open: z.string().optional().default("<leader>e").describe("Open external editor"),
|
||||
session_export: z.string().optional().default("<leader>x").describe("Export session to editor"),
|
||||
session_new: z.string().optional().default("<leader>n").describe("Create a new session"),
|
||||
session_list: z.string().optional().default("<leader>l").describe("List all sessions"),
|
||||
session_share: z.string().optional().default("<leader>s").describe("Share current session"),
|
||||
session_unshare: z.string().optional().default("<leader>u").describe("Unshare current session"),
|
||||
session_interrupt: z.string().optional().default("esc").describe("Interrupt current session"),
|
||||
session_compact: z.string().optional().default("<leader>c").describe("Compact the session"),
|
||||
tool_details: z.string().optional().default("<leader>d").describe("Toggle tool details"),
|
||||
model_list: z.string().optional().default("<leader>m").describe("List available models"),
|
||||
theme_list: z.string().optional().default("<leader>t").describe("List available themes"),
|
||||
file_list: z.string().optional().default("<leader>f").describe("List files"),
|
||||
file_close: z.string().optional().default("esc").describe("Close file"),
|
||||
file_search: z.string().optional().default("<leader>/").describe("Search file"),
|
||||
file_diff_toggle: z.string().optional().default("<leader>v").describe("Split/unified diff"),
|
||||
project_init: z.string().optional().default("<leader>i").describe("Create/update AGENTS.md"),
|
||||
input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"),
|
||||
input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"),
|
||||
input_submit: z.string().optional().default("enter").describe("Submit input"),
|
||||
input_newline: z.string().optional().default("shift+enter,ctrl+j").describe("Insert newline in input"),
|
||||
messages_page_up: z.string().optional().default("pgup").describe("Scroll messages up by one page"),
|
||||
messages_page_down: z.string().optional().default("pgdown").describe("Scroll messages down by one page"),
|
||||
messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"),
|
||||
messages_half_page_down: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("ctrl+alt+d")
|
||||
.describe("Scroll messages down by half page"),
|
||||
messages_previous: z.string().optional().default("ctrl+up").describe("Navigate to previous message"),
|
||||
messages_next: z.string().optional().default("ctrl+down").describe("Navigate to next message"),
|
||||
messages_first: z.string().optional().default("ctrl+g").describe("Navigate to first message"),
|
||||
messages_last: z.string().optional().default("ctrl+alt+g").describe("Navigate to last message"),
|
||||
messages_layout_toggle: z.string().optional().default("<leader>p").describe("Toggle layout"),
|
||||
messages_copy: z.string().optional().default("<leader>y").describe("Copy message"),
|
||||
messages_revert: z.string().optional().default("<leader>r").describe("Revert message"),
|
||||
app_exit: z.string().optional().default("ctrl+c,<leader>q").describe("Exit the application"),
|
||||
})
|
||||
.strict()
|
||||
.openapi({
|
||||
ref: "KeybindsConfig",
|
||||
})
|
||||
|
||||
export const Layout = z.enum(["auto", "stretch"]).openapi({
|
||||
ref: "LayoutConfig",
|
||||
})
|
||||
export type Layout = z.infer<typeof Layout>
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
$schema: z.string().optional(),
|
||||
$schema: z.string().optional().describe("JSON schema reference for configuration validation"),
|
||||
theme: z.string().optional().describe("Theme name to use for the interface"),
|
||||
keybinds: Keybinds.optional().describe("Custom keybind configurations"),
|
||||
share: z
|
||||
.enum(["manual", "auto", "disabled"])
|
||||
.optional()
|
||||
.describe(
|
||||
"Control sharing behavior:'manual' allows manual sharing via commands, 'auto' enables automatic sharing, 'disabled' disables all sharing",
|
||||
),
|
||||
autoshare: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe("@deprecated Use 'share' field instead. Share newly created sessions automatically"),
|
||||
autoupdate: z.boolean().optional().describe("Automatically update to the latest version"),
|
||||
disabled_providers: z.array(z.string()).optional().describe("Disable providers that are loaded automatically"),
|
||||
model: z.string().describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(),
|
||||
small_model: z
|
||||
.string()
|
||||
.describe(
|
||||
"Small model to use for tasks like summarization and title generation in the format of provider/model",
|
||||
)
|
||||
.optional(),
|
||||
username: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("Custom username to display in conversations instead of system username"),
|
||||
mode: z
|
||||
.object({
|
||||
build: Mode.optional(),
|
||||
plan: Mode.optional(),
|
||||
})
|
||||
.catchall(Mode)
|
||||
.optional()
|
||||
.describe("Modes configuration, see https://opencode.ai/docs/modes"),
|
||||
provider: z
|
||||
.record(
|
||||
ModelsDev.Provider.partial().extend({
|
||||
@@ -58,13 +174,126 @@ export namespace Config {
|
||||
options: z.record(z.any()).optional(),
|
||||
}),
|
||||
)
|
||||
.optional()
|
||||
.describe("Custom provider configurations and model overrides"),
|
||||
mcp: z.record(z.string(), Mcp).optional().describe("MCP (Model Context Protocol) server configurations"),
|
||||
instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"),
|
||||
layout: Layout.optional().describe("@deprecated Always uses stretch layout."),
|
||||
experimental: z
|
||||
.object({
|
||||
hook: z
|
||||
.object({
|
||||
file_edited: z
|
||||
.record(
|
||||
z.string(),
|
||||
z
|
||||
.object({
|
||||
command: z.string().array(),
|
||||
environment: z.record(z.string(), z.string()).optional(),
|
||||
})
|
||||
.array(),
|
||||
)
|
||||
.optional(),
|
||||
session_completed: z
|
||||
.object({
|
||||
command: z.string().array(),
|
||||
environment: z.record(z.string(), z.string()).optional(),
|
||||
})
|
||||
.array()
|
||||
.optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
.optional(),
|
||||
mcp: z.record(z.string(), Mcp).optional(),
|
||||
})
|
||||
.strict()
|
||||
.openapi({
|
||||
ref: "Config",
|
||||
})
|
||||
|
||||
export type Info = z.output<typeof Info>
|
||||
|
||||
export const global = lazy(async () => {
|
||||
let result = pipe(
|
||||
{},
|
||||
mergeDeep(await load(path.join(Global.Path.config, "config.json"))),
|
||||
mergeDeep(await load(path.join(Global.Path.config, "opencode.json"))),
|
||||
)
|
||||
|
||||
await import(path.join(Global.Path.config, "config"), {
|
||||
with: {
|
||||
type: "toml",
|
||||
},
|
||||
})
|
||||
.then(async (mod) => {
|
||||
const { provider, model, ...rest } = mod.default
|
||||
if (provider && model) result.model = `${provider}/${model}`
|
||||
result["$schema"] = "https://opencode.ai/config.json"
|
||||
result = mergeDeep(result, rest)
|
||||
await Bun.write(path.join(Global.Path.config, "config.json"), JSON.stringify(result, null, 2))
|
||||
await fs.unlink(path.join(Global.Path.config, "config"))
|
||||
})
|
||||
.catch(() => {})
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
async function load(configPath: string) {
|
||||
let text = await Bun.file(configPath)
|
||||
.text()
|
||||
.catch((err) => {
|
||||
if (err.code === "ENOENT") return
|
||||
throw new JsonError({ path: configPath }, { cause: err })
|
||||
})
|
||||
if (!text) return {}
|
||||
|
||||
text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
|
||||
return process.env[varName] || ""
|
||||
})
|
||||
|
||||
const fileMatches = text.match(/"?\{file:([^}]+)\}"?/g)
|
||||
if (fileMatches) {
|
||||
const configDir = path.dirname(configPath)
|
||||
for (const match of fileMatches) {
|
||||
const filePath = match.replace(/^"?\{file:/, "").replace(/\}"?$/, "")
|
||||
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
|
||||
const fileContent = await Bun.file(resolvedPath).text()
|
||||
text = text.replace(match, JSON.stringify(fileContent))
|
||||
}
|
||||
}
|
||||
|
||||
let data: any
|
||||
try {
|
||||
data = JSON.parse(text)
|
||||
} catch (err) {
|
||||
throw new JsonError({ path: configPath }, { cause: err as Error })
|
||||
}
|
||||
|
||||
const parsed = Info.safeParse(data)
|
||||
if (parsed.success) {
|
||||
if (!parsed.data.$schema) {
|
||||
parsed.data.$schema = "https://opencode.ai/config.json"
|
||||
await Bun.write(configPath, JSON.stringify(parsed.data, null, 2))
|
||||
}
|
||||
return parsed.data
|
||||
}
|
||||
throw new InvalidError({ path: configPath, issues: parsed.error.issues })
|
||||
}
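For illustration only (not part of the diff): a hypothetical opencode.json fragment exercising the {env:VAR} and {file:path} substitutions that load() above expands before parsing. The provider name, the apiKey option, and the prompt file path are placeholders, not values taken from this change.

{
  "$schema": "https://opencode.ai/config.json",
  "provider": {
    "anthropic": {
      "options": {
        "apiKey": "{env:ANTHROPIC_API_KEY}"
      }
    }
  },
  "mode": {
    "build": {
      "prompt": "{file:./prompts/build.txt}"
    }
  }
}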
|
||||
export const JsonError = NamedError.create(
|
||||
"ConfigJsonError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const InvalidError = NamedError.create(
|
||||
"ConfigInvalidError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
issues: z.custom<z.ZodIssue[]>().optional(),
|
||||
}),
|
||||
)
|
||||
|
||||
export function get() {
|
||||
return state()
|
||||
}
|
||||
|
||||
56 packages/opencode/src/config/hooks.ts Normal file
@@ -0,0 +1,56 @@
import { App } from "../app/app"
import { Bus } from "../bus"
import { File } from "../file"
import { Session } from "../session"
import { Log } from "../util/log"
import { Config } from "./config"
import path from "path"

export namespace ConfigHooks {
  const log = Log.create({ service: "config.hooks" })

  export function init() {
    log.info("init")
    const app = App.info()

    Bus.subscribe(File.Event.Edited, async (payload) => {
      const cfg = await Config.get()
      const ext = path.extname(payload.properties.file)
      for (const item of cfg.experimental?.hook?.file_edited?.[ext] ?? []) {
        log.info("file_edited", {
          file: payload.properties.file,
          command: item.command,
        })
        Bun.spawn({
          cmd: item.command.map((x) => x.replace("$FILE", payload.properties.file)),
          env: item.environment,
          cwd: app.path.cwd,
          stdout: "ignore",
          stderr: "ignore",
        })
      }
    })

    Bus.subscribe(Session.Event.Idle, async (payload) => {
      const cfg = await Config.get()
      if (cfg.experimental?.hook?.session_completed) {
        const session = await Session.get(payload.properties.sessionID)
        // Only fire hook for top-level sessions (not subagent sessions)
        if (session.parentID) return

        for (const item of cfg.experimental.hook.session_completed) {
          log.info("session_completed", {
            command: item.command,
          })
          Bun.spawn({
            cmd: item.command,
            cwd: App.info().path.cwd,
            env: item.environment,
            stdout: "ignore",
            stderr: "ignore",
          })
        }
      }
    })
  }
}
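For illustration only (not part of the diff): a sketch of the experimental.hook section of opencode.json that ConfigHooks.init() above reads, following the schema added in config.ts. The commands and the ".ts" extension key are placeholders; $FILE is replaced with the edited file path by the hook code itself.

{
  "experimental": {
    "hook": {
      "file_edited": {
        ".ts": [
          { "command": ["bun", "run", "prettier", "--write", "$FILE"] }
        ]
      },
      "session_completed": [
        { "command": ["notify-send", "opencode session finished"] }
      ]
    }
  }
}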
114 packages/opencode/src/external/ripgrep.ts vendored
@@ -1,114 +0,0 @@
|
||||
import { App } from "../app/app"
|
||||
import path from "path"
|
||||
import { Global } from "../global"
|
||||
import fs from "fs/promises"
|
||||
import { z } from "zod"
|
||||
import { NamedError } from "../util/error"
|
||||
import { lazy } from "../util/lazy"
|
||||
|
||||
export namespace Ripgrep {
|
||||
const PLATFORM = {
|
||||
darwin: { platform: "apple-darwin", extension: "tar.gz" },
|
||||
linux: { platform: "unknown-linux-musl", extension: "tar.gz" },
|
||||
win32: { platform: "pc-windows-msvc", extension: "zip" },
|
||||
} as const
|
||||
|
||||
export const ExtractionFailedError = NamedError.create(
|
||||
"RipgrepExtractionFailedError",
|
||||
z.object({
|
||||
filepath: z.string(),
|
||||
stderr: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const UnsupportedPlatformError = NamedError.create(
|
||||
"RipgrepUnsupportedPlatformError",
|
||||
z.object({
|
||||
platform: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const DownloadFailedError = NamedError.create(
|
||||
"RipgrepDownloadFailedError",
|
||||
z.object({
|
||||
url: z.string(),
|
||||
status: z.number(),
|
||||
}),
|
||||
)
|
||||
|
||||
const state = lazy(async () => {
|
||||
let filepath = Bun.which("rg")
|
||||
if (filepath) return { filepath }
|
||||
filepath = path.join(
|
||||
Global.Path.bin,
|
||||
"rg" + (process.platform === "win32" ? ".exe" : ""),
|
||||
)
|
||||
|
||||
const file = Bun.file(filepath)
|
||||
if (!(await file.exists())) {
|
||||
const archMap = { x64: "x86_64", arm64: "aarch64" } as const
|
||||
const arch = archMap[process.arch as keyof typeof archMap] ?? process.arch
|
||||
|
||||
const config = PLATFORM[process.platform as keyof typeof PLATFORM]
|
||||
if (!config)
|
||||
throw new UnsupportedPlatformError({ platform: process.platform })
|
||||
|
||||
const version = "14.1.1"
|
||||
const filename = `ripgrep-${version}-${arch}-${config.platform}.${config.extension}`
|
||||
const url = `https://github.com/BurntSushi/ripgrep/releases/download/${version}/${filename}`
|
||||
|
||||
const response = await fetch(url)
|
||||
if (!response.ok)
|
||||
throw new DownloadFailedError({ url, status: response.status })
|
||||
|
||||
const buffer = await response.arrayBuffer()
|
||||
const archivePath = path.join(Global.Path.bin, filename)
|
||||
await Bun.write(archivePath, buffer)
|
||||
if (config.extension === "tar.gz") {
|
||||
const args = ["tar", "-xzf", archivePath, "--strip-components=1"]
|
||||
|
||||
if (process.platform === "darwin") args.push("--include=*/rg")
|
||||
if (process.platform === "linux") args.push("--wildcards", "*/rg")
|
||||
|
||||
const proc = Bun.spawn(args, {
|
||||
cwd: Global.Path.bin,
|
||||
stderr: "pipe",
|
||||
stdout: "pipe",
|
||||
})
|
||||
await proc.exited
|
||||
if (proc.exitCode !== 0)
|
||||
throw new ExtractionFailedError({
|
||||
filepath,
|
||||
stderr: await Bun.readableStreamToText(proc.stderr),
|
||||
})
|
||||
}
|
||||
if (config.extension === "zip") {
|
||||
const proc = Bun.spawn(
|
||||
["unzip", "-j", archivePath, "*/rg.exe", "-d", Global.Path.bin],
|
||||
{
|
||||
cwd: Global.Path.bin,
|
||||
stderr: "pipe",
|
||||
stdout: "ignore",
|
||||
},
|
||||
)
|
||||
await proc.exited
|
||||
if (proc.exitCode !== 0)
|
||||
throw new ExtractionFailedError({
|
||||
filepath: archivePath,
|
||||
stderr: await Bun.readableStreamToText(proc.stderr),
|
||||
})
|
||||
}
|
||||
await fs.unlink(archivePath)
|
||||
if (process.platform !== "win32") await fs.chmod(filepath, 0o755)
|
||||
}
|
||||
|
||||
return {
|
||||
filepath,
|
||||
}
|
||||
})
|
||||
|
||||
export async function filepath() {
|
||||
const { filepath } = await state()
|
||||
return filepath
|
||||
}
|
||||
}
|
||||
@@ -5,7 +5,6 @@ import { z } from "zod"
|
||||
import { NamedError } from "../util/error"
|
||||
import { lazy } from "../util/lazy"
|
||||
import { Log } from "../util/log"
|
||||
import { $ } from "bun"
|
||||
|
||||
export namespace Fzf {
|
||||
const log = Log.create({ service: "fzf" })
|
||||
@@ -46,10 +45,7 @@ export namespace Fzf {
|
||||
log.info("found", { filepath })
|
||||
return { filepath }
|
||||
}
|
||||
filepath = path.join(
|
||||
Global.Path.bin,
|
||||
"fzf" + (process.platform === "win32" ? ".exe" : ""),
|
||||
)
|
||||
filepath = path.join(Global.Path.bin, "fzf" + (process.platform === "win32" ? ".exe" : ""))
|
||||
|
||||
const file = Bun.file(filepath)
|
||||
if (!(await file.exists())) {
|
||||
@@ -57,18 +53,15 @@ export namespace Fzf {
|
||||
const arch = archMap[process.arch as keyof typeof archMap] ?? "amd64"
|
||||
|
||||
const config = PLATFORM[process.platform as keyof typeof PLATFORM]
|
||||
if (!config)
|
||||
throw new UnsupportedPlatformError({ platform: process.platform })
|
||||
if (!config) throw new UnsupportedPlatformError({ platform: process.platform })
|
||||
|
||||
const version = VERSION
|
||||
const platformName =
|
||||
process.platform === "win32" ? "windows" : process.platform
|
||||
const platformName = process.platform === "win32" ? "windows" : process.platform
|
||||
const filename = `fzf-${version}-${platformName}_${arch}.${config.extension}`
|
||||
const url = `https://github.com/junegunn/fzf/releases/download/v${version}/${filename}`
|
||||
|
||||
const response = await fetch(url)
|
||||
if (!response.ok)
|
||||
throw new DownloadFailedError({ url, status: response.status })
|
||||
if (!response.ok) throw new DownloadFailedError({ url, status: response.status })
|
||||
|
||||
const buffer = await response.arrayBuffer()
|
||||
const archivePath = path.join(Global.Path.bin, filename)
|
||||
@@ -87,14 +80,11 @@ export namespace Fzf {
|
||||
})
|
||||
}
|
||||
if (config.extension === "zip") {
|
||||
const proc = Bun.spawn(
|
||||
["unzip", "-j", archivePath, "fzf.exe", "-d", Global.Path.bin],
|
||||
{
|
||||
cwd: Global.Path.bin,
|
||||
stderr: "pipe",
|
||||
stdout: "ignore",
|
||||
},
|
||||
)
|
||||
const proc = Bun.spawn(["unzip", "-j", archivePath, "fzf.exe", "-d", Global.Path.bin], {
|
||||
cwd: Global.Path.bin,
|
||||
stderr: "pipe",
|
||||
stdout: "ignore",
|
||||
})
|
||||
await proc.exited
|
||||
if (proc.exitCode !== 0)
|
||||
throw new ExtractionFailedError({
|
||||
@@ -115,20 +105,4 @@ export namespace Fzf {
|
||||
const { filepath } = await state()
|
||||
return filepath
|
||||
}
|
||||
|
||||
export async function search(cwd: string, query: string) {
|
||||
const results = await $`${await filepath()} --filter ${query}`
|
||||
.quiet()
|
||||
.throws(false)
|
||||
.cwd(cwd)
|
||||
.text()
|
||||
const split = results
|
||||
.trim()
|
||||
.split("\n")
|
||||
.filter((line) => line.length > 0)
|
||||
log.info("results", {
|
||||
count: split.length,
|
||||
})
|
||||
return split
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,123 @@
|
||||
import { z } from "zod"
|
||||
import { Bus } from "../bus"
|
||||
import { $ } from "bun"
|
||||
import { createPatch } from "diff"
|
||||
import path from "path"
|
||||
import * as git from "isomorphic-git"
|
||||
import { App } from "../app/app"
|
||||
import fs from "fs"
|
||||
import { Log } from "../util/log"
|
||||
|
||||
export namespace File {
|
||||
const glob = new Bun.Glob("**/*")
|
||||
export async function search(path: string, query: string) {
|
||||
for await (const entry of glob.scan({
|
||||
cwd: path,
|
||||
onlyFiles: true,
|
||||
})) {
|
||||
const log = Log.create({ service: "file" })
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
path: z.string(),
|
||||
added: z.number().int(),
|
||||
removed: z.number().int(),
|
||||
status: z.enum(["added", "deleted", "modified"]),
|
||||
})
|
||||
.openapi({
|
||||
ref: "File",
|
||||
})
|
||||
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export const Event = {
|
||||
Edited: Bus.event(
|
||||
"file.edited",
|
||||
z.object({
|
||||
file: z.string(),
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
export async function status() {
|
||||
const app = App.info()
|
||||
if (!app.git) return []
|
||||
|
||||
const diffOutput = await $`git diff --numstat HEAD`.cwd(app.path.cwd).quiet().nothrow().text()
|
||||
|
||||
const changedFiles: Info[] = []
|
||||
|
||||
if (diffOutput.trim()) {
|
||||
const lines = diffOutput.trim().split("\n")
|
||||
for (const line of lines) {
|
||||
const [added, removed, filepath] = line.split("\t")
|
||||
changedFiles.push({
|
||||
path: filepath,
|
||||
added: added === "-" ? 0 : parseInt(added, 10),
|
||||
removed: removed === "-" ? 0 : parseInt(removed, 10),
|
||||
status: "modified",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const untrackedOutput = await $`git ls-files --others --exclude-standard`.cwd(app.path.cwd).quiet().nothrow().text()
|
||||
|
||||
if (untrackedOutput.trim()) {
|
||||
const untrackedFiles = untrackedOutput.trim().split("\n")
|
||||
for (const filepath of untrackedFiles) {
|
||||
try {
|
||||
const content = await Bun.file(path.join(app.path.root, filepath)).text()
|
||||
const lines = content.split("\n").length
|
||||
changedFiles.push({
|
||||
path: filepath,
|
||||
added: lines,
|
||||
removed: 0,
|
||||
status: "added",
|
||||
})
|
||||
} catch {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get deleted files
|
||||
const deletedOutput = await $`git diff --name-only --diff-filter=D HEAD`.cwd(app.path.cwd).quiet().nothrow().text()
|
||||
|
||||
if (deletedOutput.trim()) {
|
||||
const deletedFiles = deletedOutput.trim().split("\n")
|
||||
for (const filepath of deletedFiles) {
|
||||
changedFiles.push({
|
||||
path: filepath,
|
||||
added: 0,
|
||||
removed: 0, // Could get original line count but would require another git command
|
||||
status: "deleted",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return changedFiles.map((x) => ({
|
||||
...x,
|
||||
path: path.relative(app.path.cwd, path.join(app.path.root, x.path)),
|
||||
}))
|
||||
}
|
||||
|
||||
export async function read(file: string) {
|
||||
using _ = log.time("read", { file })
|
||||
const app = App.info()
|
||||
const full = path.join(app.path.cwd, file)
|
||||
const content = await Bun.file(full)
|
||||
.text()
|
||||
.catch(() => "")
|
||||
.then((x) => x.trim())
|
||||
if (app.git) {
|
||||
const rel = path.relative(app.path.root, full)
|
||||
const diff = await git.status({
|
||||
fs,
|
||||
dir: app.path.root,
|
||||
filepath: rel,
|
||||
})
|
||||
if (diff !== "unmodified") {
|
||||
const original = await $`git show HEAD:${rel}`.cwd(app.path.root).quiet().nothrow().text()
|
||||
const patch = createPatch(file, original, content, "old", "new", {
|
||||
context: Infinity,
|
||||
})
|
||||
return { type: "patch", content: patch }
|
||||
}
|
||||
}
|
||||
return { type: "raw", content }
|
||||
}
|
||||
}
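A sketch of how the File helpers above could be consumed (not part of the diff), assuming an App context is already provided as in the rest of the codebase; the import path and file name are illustrative:

import { File } from "../file"

const changed = await File.status()
for (const f of changed) {
  console.log(`${f.status} ${f.path} (+${f.added} -${f.removed})`)
}

const view = await File.read("src/index.ts")
if (view.type === "patch") console.log(view.content) // full-context patch vs HEAD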
|
||||
packages/opencode/src/file/ripgrep.ts (new file, 336 lines)
@@ -0,0 +1,336 @@
|
||||
// Ripgrep utility functions
|
||||
import path from "path"
|
||||
import { Global } from "../global"
|
||||
import fs from "fs/promises"
|
||||
import { z } from "zod"
|
||||
import { NamedError } from "../util/error"
|
||||
import { lazy } from "../util/lazy"
|
||||
import { $ } from "bun"
|
||||
import { Fzf } from "./fzf"
|
||||
|
||||
export namespace Ripgrep {
|
||||
const Stats = z.object({
|
||||
elapsed: z.object({
|
||||
secs: z.number(),
|
||||
nanos: z.number(),
|
||||
human: z.string(),
|
||||
}),
|
||||
searches: z.number(),
|
||||
searches_with_match: z.number(),
|
||||
bytes_searched: z.number(),
|
||||
bytes_printed: z.number(),
|
||||
matched_lines: z.number(),
|
||||
matches: z.number(),
|
||||
})
|
||||
|
||||
const Begin = z.object({
|
||||
type: z.literal("begin"),
|
||||
data: z.object({
|
||||
path: z.object({
|
||||
text: z.string(),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
|
||||
export const Match = z.object({
|
||||
type: z.literal("match"),
|
||||
data: z
|
||||
.object({
|
||||
path: z.object({
|
||||
text: z.string(),
|
||||
}),
|
||||
lines: z.object({
|
||||
text: z.string(),
|
||||
}),
|
||||
line_number: z.number(),
|
||||
absolute_offset: z.number(),
|
||||
submatches: z.array(
|
||||
z.object({
|
||||
match: z.object({
|
||||
text: z.string(),
|
||||
}),
|
||||
start: z.number(),
|
||||
end: z.number(),
|
||||
}),
|
||||
),
|
||||
})
|
||||
.openapi({ ref: "Match" }),
|
||||
})
|
||||
|
||||
const End = z.object({
|
||||
type: z.literal("end"),
|
||||
data: z.object({
|
||||
path: z.object({
|
||||
text: z.string(),
|
||||
}),
|
||||
binary_offset: z.number().nullable(),
|
||||
stats: Stats,
|
||||
}),
|
||||
})
|
||||
|
||||
const Summary = z.object({
|
||||
type: z.literal("summary"),
|
||||
data: z.object({
|
||||
elapsed_total: z.object({
|
||||
human: z.string(),
|
||||
nanos: z.number(),
|
||||
secs: z.number(),
|
||||
}),
|
||||
stats: Stats,
|
||||
}),
|
||||
})
|
||||
|
||||
const Result = z.union([Begin, Match, End, Summary])
|
||||
|
||||
export type Result = z.infer<typeof Result>
|
||||
export type Match = z.infer<typeof Match>
|
||||
export type Begin = z.infer<typeof Begin>
|
||||
export type End = z.infer<typeof End>
|
||||
export type Summary = z.infer<typeof Summary>
|
||||
const PLATFORM = {
|
||||
"arm64-darwin": { platform: "aarch64-apple-darwin", extension: "tar.gz" },
|
||||
"arm64-linux": {
|
||||
platform: "aarch64-unknown-linux-gnu",
|
||||
extension: "tar.gz",
|
||||
},
|
||||
"x64-darwin": { platform: "x86_64-apple-darwin", extension: "tar.gz" },
|
||||
"x64-linux": { platform: "x86_64-unknown-linux-musl", extension: "tar.gz" },
|
||||
"x64-win32": { platform: "x86_64-pc-windows-msvc", extension: "zip" },
|
||||
} as const
|
||||
|
||||
export const ExtractionFailedError = NamedError.create(
|
||||
"RipgrepExtractionFailedError",
|
||||
z.object({
|
||||
filepath: z.string(),
|
||||
stderr: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const UnsupportedPlatformError = NamedError.create(
|
||||
"RipgrepUnsupportedPlatformError",
|
||||
z.object({
|
||||
platform: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const DownloadFailedError = NamedError.create(
|
||||
"RipgrepDownloadFailedError",
|
||||
z.object({
|
||||
url: z.string(),
|
||||
status: z.number(),
|
||||
}),
|
||||
)
|
||||
|
||||
const state = lazy(async () => {
|
||||
let filepath = Bun.which("rg")
|
||||
if (filepath) return { filepath }
|
||||
filepath = path.join(Global.Path.bin, "rg" + (process.platform === "win32" ? ".exe" : ""))
|
||||
|
||||
const file = Bun.file(filepath)
|
||||
if (!(await file.exists())) {
|
||||
const platformKey = `${process.arch}-${process.platform}` as keyof typeof PLATFORM
|
||||
const config = PLATFORM[platformKey]
|
||||
if (!config) throw new UnsupportedPlatformError({ platform: platformKey })
|
||||
|
||||
const version = "14.1.1"
|
||||
const filename = `ripgrep-${version}-${config.platform}.${config.extension}`
|
||||
const url = `https://github.com/BurntSushi/ripgrep/releases/download/${version}/${filename}`
|
||||
|
||||
const response = await fetch(url)
|
||||
if (!response.ok) throw new DownloadFailedError({ url, status: response.status })
|
||||
|
||||
const buffer = await response.arrayBuffer()
|
||||
const archivePath = path.join(Global.Path.bin, filename)
|
||||
await Bun.write(archivePath, buffer)
|
||||
if (config.extension === "tar.gz") {
|
||||
const args = ["tar", "-xzf", archivePath, "--strip-components=1"]
|
||||
|
||||
if (platformKey.endsWith("-darwin")) args.push("--include=*/rg")
|
||||
if (platformKey.endsWith("-linux")) args.push("--wildcards", "*/rg")
|
||||
|
||||
const proc = Bun.spawn(args, {
|
||||
cwd: Global.Path.bin,
|
||||
stderr: "pipe",
|
||||
stdout: "pipe",
|
||||
})
|
||||
await proc.exited
|
||||
if (proc.exitCode !== 0)
|
||||
throw new ExtractionFailedError({
|
||||
filepath,
|
||||
stderr: await Bun.readableStreamToText(proc.stderr),
|
||||
})
|
||||
}
|
||||
if (config.extension === "zip") {
|
||||
const proc = Bun.spawn(["unzip", "-j", archivePath, "*/rg.exe", "-d", Global.Path.bin], {
|
||||
cwd: Global.Path.bin,
|
||||
stderr: "pipe",
|
||||
stdout: "ignore",
|
||||
})
|
||||
await proc.exited
|
||||
if (proc.exitCode !== 0)
|
||||
throw new ExtractionFailedError({
|
||||
filepath: archivePath,
|
||||
stderr: await Bun.readableStreamToText(proc.stderr),
|
||||
})
|
||||
}
|
||||
await fs.unlink(archivePath)
|
||||
if (!platformKey.endsWith("-win32")) await fs.chmod(filepath, 0o755)
|
||||
}
|
||||
|
||||
return {
|
||||
filepath,
|
||||
}
|
||||
})
|
||||
|
||||
export async function filepath() {
|
||||
const { filepath } = await state()
|
||||
return filepath
|
||||
}
|
||||
|
||||
export async function files(input: { cwd: string; query?: string; glob?: string[]; limit?: number }) {
|
||||
const commands = [`${$.escape(await filepath())} --files --follow --hidden --glob='!.git/*'`]
|
||||
|
||||
if (input.glob) {
|
||||
for (const g of input.glob) {
|
||||
commands[0] += ` --glob='${g}'`
|
||||
}
|
||||
}
|
||||
|
||||
if (input.query) commands.push(`${await Fzf.filepath()} --filter=${input.query}`)
|
||||
if (input.limit) commands.push(`head -n ${input.limit}`)
|
||||
const joined = commands.join(" | ")
|
||||
const result = await $`${{ raw: joined }}`.cwd(input.cwd).nothrow().text()
|
||||
return result.split("\n").filter(Boolean)
|
||||
}
|
||||
|
||||
export async function tree(input: { cwd: string; limit?: number }) {
|
||||
const files = await Ripgrep.files({ cwd: input.cwd })
|
||||
interface Node {
|
||||
path: string[]
|
||||
children: Node[]
|
||||
}
|
||||
|
||||
function getPath(node: Node, parts: string[], create: boolean) {
|
||||
if (parts.length === 0) return node
|
||||
let current = node
|
||||
for (const part of parts) {
|
||||
let existing = current.children.find((x) => x.path.at(-1) === part)
|
||||
if (!existing) {
|
||||
if (!create) return
|
||||
existing = {
|
||||
path: current.path.concat(part),
|
||||
children: [],
|
||||
}
|
||||
current.children.push(existing)
|
||||
}
|
||||
current = existing
|
||||
}
|
||||
return current
|
||||
}
|
||||
|
||||
const root: Node = {
|
||||
path: [],
|
||||
children: [],
|
||||
}
|
||||
for (const file of files) {
|
||||
const parts = file.split(path.sep)
|
||||
getPath(root, parts, true)
|
||||
}
|
||||
|
||||
function sort(node: Node) {
|
||||
node.children.sort((a, b) => {
|
||||
if (!a.children.length && b.children.length) return 1
|
||||
if (!b.children.length && a.children.length) return -1
|
||||
return a.path.at(-1)!.localeCompare(b.path.at(-1)!)
|
||||
})
|
||||
for (const child of node.children) {
|
||||
sort(child)
|
||||
}
|
||||
}
|
||||
sort(root)
|
||||
|
||||
let current = [root]
|
||||
const result: Node = {
|
||||
path: [],
|
||||
children: [],
|
||||
}
|
||||
|
||||
let processed = 0
|
||||
const limit = input.limit ?? 50
|
||||
while (current.length > 0) {
|
||||
const next = []
|
||||
for (const node of current) {
|
||||
if (node.children.length) next.push(...node.children)
|
||||
}
|
||||
const max = Math.max(...current.map((x) => x.children.length))
|
||||
for (let i = 0; i < max && processed < limit; i++) {
|
||||
for (const node of current) {
|
||||
const child = node.children[i]
|
||||
if (!child) continue
|
||||
getPath(result, child.path, true)
|
||||
processed++
|
||||
if (processed >= limit) break
|
||||
}
|
||||
}
|
||||
if (processed >= limit) {
|
||||
for (const node of [...current, ...next]) {
|
||||
const compare = getPath(result, node.path, false)
|
||||
if (!compare) continue
|
||||
if (compare?.children.length !== node.children.length) {
|
||||
const diff = node.children.length - compare.children.length
|
||||
compare.children.push({
|
||||
path: compare.path.concat(`[${diff} truncated]`),
|
||||
children: [],
|
||||
})
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
current = next
|
||||
}
|
||||
|
||||
const lines: string[] = []
|
||||
|
||||
function render(node: Node, depth: number) {
|
||||
const indent = "\t".repeat(depth)
|
||||
lines.push(indent + node.path.at(-1) + (node.children.length ? "/" : ""))
|
||||
for (const child of node.children) {
|
||||
render(child, depth + 1)
|
||||
}
|
||||
}
|
||||
result.children.map((x) => render(x, 0))
|
||||
|
||||
return lines.join("\n")
|
||||
}
|
||||
|
||||
export async function search(input: { cwd: string; pattern: string; glob?: string[]; limit?: number }) {
|
||||
const args = [`${await filepath()}`, "--json", "--hidden", "--glob='!.git/*'"]
|
||||
|
||||
if (input.glob) {
|
||||
for (const g of input.glob) {
|
||||
args.push(`--glob=${g}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (input.limit) {
|
||||
args.push(`--max-count=${input.limit}`)
|
||||
}
|
||||
|
||||
args.push(input.pattern)
|
||||
|
||||
const command = args.join(" ")
|
||||
const result = await $`${{ raw: command }}`.cwd(input.cwd).quiet().nothrow()
|
||||
if (result.exitCode !== 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
const lines = result.text().trim().split("\n").filter(Boolean)
|
||||
// Parse JSON lines from ripgrep output
|
||||
|
||||
return lines
|
||||
.map((line) => JSON.parse(line))
|
||||
.map((parsed) => Result.parse(parsed))
|
||||
.filter((r) => r.type === "match")
|
||||
.map((r) => r.data)
|
||||
}
|
||||
}
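Illustrative calls into the Ripgrep helpers above (not part of the diff); the cwd, globs, pattern and limits are placeholders rather than values used by real callers:

import { Ripgrep } from "./ripgrep"

const files = await Ripgrep.files({ cwd: process.cwd(), query: "index", limit: 20 })
const outline = await Ripgrep.tree({ cwd: process.cwd(), limit: 50 })
const matches = await Ripgrep.search({ cwd: process.cwd(), pattern: "TODO", glob: ["*.ts"] })
console.log(files.length, matches.length)
console.log(outline)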
@@ -1,6 +1,8 @@
|
||||
import { App } from "../../app/app"
|
||||
import { App } from "../app/app"
|
||||
import { Log } from "../util/log"
|
||||
|
||||
export namespace FileTimes {
|
||||
export namespace FileTime {
|
||||
const log = Log.create({ service: "file.time" })
|
||||
export const state = App.state("tool.filetimes", () => {
|
||||
const read: {
|
||||
[sessionID: string]: {
|
||||
@@ -13,6 +15,7 @@ export namespace FileTimes {
|
||||
})
|
||||
|
||||
export function read(sessionID: string, file: string) {
|
||||
log.info("read", { sessionID, file })
|
||||
const { read } = state()
|
||||
read[sessionID] = read[sessionID] || {}
|
||||
read[sessionID][file] = new Date()
|
||||
@@ -24,10 +27,7 @@ export namespace FileTimes {
|
||||
|
||||
export async function assert(sessionID: string, filepath: string) {
|
||||
const time = get(sessionID, filepath)
|
||||
if (!time)
|
||||
throw new Error(
|
||||
`You must read the file ${filepath} before overwriting it. Use the Read tool first`,
|
||||
)
|
||||
if (!time) throw new Error(`You must read the file ${filepath} before overwriting it. Use the Read tool first`)
|
||||
const stats = await Bun.file(filepath).stat()
|
||||
if (stats.mtime.getTime() > time.getTime()) {
|
||||
throw new Error(
|
||||
packages/opencode/src/file/watch.ts (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
import { z } from "zod"
|
||||
import { Bus } from "../bus"
|
||||
import fs from "fs"
|
||||
import { App } from "../app/app"
|
||||
import { Log } from "../util/log"
|
||||
import { Flag } from "../flag/flag"
|
||||
|
||||
export namespace FileWatcher {
|
||||
const log = Log.create({ service: "file.watcher" })
|
||||
|
||||
export const Event = {
|
||||
Updated: Bus.event(
|
||||
"file.watcher.updated",
|
||||
z.object({
|
||||
file: z.string(),
|
||||
event: z.union([z.literal("rename"), z.literal("change")]),
|
||||
}),
|
||||
),
|
||||
}
|
||||
const state = App.state(
|
||||
"file.watcher",
|
||||
() => {
|
||||
const app = App.use()
|
||||
if (!app.info.git) return {}
|
||||
try {
|
||||
const watcher = fs.watch(app.info.path.cwd, { recursive: true }, (event, file) => {
|
||||
log.info("change", { file, event })
|
||||
if (!file) return
|
||||
// for some reason async local storage is lost here
|
||||
// https://github.com/oven-sh/bun/issues/20754
|
||||
App.provideExisting(app, async () => {
|
||||
Bus.publish(Event.Updated, {
|
||||
file,
|
||||
event,
|
||||
})
|
||||
})
|
||||
})
|
||||
return { watcher }
|
||||
} catch {
|
||||
return {}
|
||||
}
|
||||
},
|
||||
async (state) => {
|
||||
state.watcher?.close()
|
||||
},
|
||||
)
|
||||
|
||||
export function init() {
|
||||
if (Flag.OPENCODE_DISABLE_WATCHER || true) return
|
||||
state()
|
||||
}
|
||||
}
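A sketch of consuming the watcher events (not part of the diff; note the hard-coded `|| true` above currently short-circuits init). The subscription mirrors the Bus.subscribe pattern used by Format.init further down:

import { Bus } from "../bus"
import { FileWatcher } from "./watch"

FileWatcher.init()
Bus.subscribe(FileWatcher.Event.Updated, async (payload) => {
  console.log("changed", payload.properties.file, payload.properties.event)
})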
@@ -1,5 +1,6 @@
|
||||
export namespace Flag {
|
||||
export const OPENCODE_AUTO_SHARE = truthy("OPENCODE_AUTO_SHARE")
|
||||
export const OPENCODE_DISABLE_WATCHER = truthy("OPENCODE_DISABLE_WATCHER")
|
||||
|
||||
function truthy(key: string) {
|
||||
const value = process.env[key]?.toLowerCase()
|
||||
|
||||
packages/opencode/src/format/formatter.ts (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
import { App } from "../app/app"
|
||||
import { BunProc } from "../bun"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import path from "path"
|
||||
|
||||
export interface Info {
|
||||
name: string
|
||||
command: string[]
|
||||
environment?: Record<string, string>
|
||||
extensions: string[]
|
||||
enabled(): Promise<boolean>
|
||||
}
|
||||
|
||||
export const gofmt: Info = {
|
||||
name: "gofmt",
|
||||
command: ["gofmt", "-w", "$FILE"],
|
||||
extensions: [".go"],
|
||||
async enabled() {
|
||||
return Bun.which("gofmt") !== null
|
||||
},
|
||||
}
|
||||
|
||||
export const mix: Info = {
|
||||
name: "mix",
|
||||
command: ["mix", "format", "$FILE"],
|
||||
extensions: [".ex", ".exs", ".eex", ".heex", ".leex", ".neex", ".sface"],
|
||||
async enabled() {
|
||||
return Bun.which("mix") !== null
|
||||
},
|
||||
}
|
||||
|
||||
export const prettier: Info = {
|
||||
name: "prettier",
|
||||
command: [BunProc.which(), "x", "prettier", "--write", "$FILE"],
|
||||
environment: {
|
||||
BUN_BE_BUN: "1",
|
||||
},
|
||||
extensions: [
|
||||
".js",
|
||||
".jsx",
|
||||
".mjs",
|
||||
".cjs",
|
||||
".ts",
|
||||
".tsx",
|
||||
".mts",
|
||||
".cts",
|
||||
".html",
|
||||
".htm",
|
||||
".css",
|
||||
".scss",
|
||||
".sass",
|
||||
".less",
|
||||
".vue",
|
||||
".svelte",
|
||||
".json",
|
||||
".jsonc",
|
||||
".yaml",
|
||||
".yml",
|
||||
".toml",
|
||||
".xml",
|
||||
".md",
|
||||
".mdx",
|
||||
".graphql",
|
||||
".gql",
|
||||
],
|
||||
async enabled() {
|
||||
const app = App.info()
|
||||
const nms = await Filesystem.findUp("node_modules", app.path.cwd, app.path.root)
|
||||
for (const item of nms) {
|
||||
if (await Bun.file(path.join(item, ".bin", "prettier")).exists()) return true
|
||||
}
|
||||
return false
|
||||
},
|
||||
}
|
||||
|
||||
export const zig: Info = {
|
||||
name: "zig",
|
||||
command: ["zig", "fmt", "$FILE"],
|
||||
extensions: [".zig", ".zon"],
|
||||
async enabled() {
|
||||
return Bun.which("zig") !== null
|
||||
},
|
||||
}
|
||||
|
||||
export const clang: Info = {
|
||||
name: "clang-format",
|
||||
command: ["clang-format", "-i", "$FILE"],
|
||||
extensions: [".c", ".cc", ".cpp", ".cxx", ".c++", ".h", ".hh", ".hpp", ".hxx", ".h++", ".ino", ".C", ".H"],
|
||||
async enabled() {
|
||||
return Bun.which("clang-format") !== null
|
||||
},
|
||||
}
|
||||
|
||||
export const ktlint: Info = {
|
||||
name: "ktlint",
|
||||
command: ["ktlint", "-F", "$FILE"],
|
||||
extensions: [".kt", ".kts"],
|
||||
async enabled() {
|
||||
return Bun.which("ktlint") !== null
|
||||
},
|
||||
}
|
||||
|
||||
export const ruff: Info = {
|
||||
name: "ruff",
|
||||
command: ["ruff", "format", "$FILE"],
|
||||
extensions: [".py", ".pyi"],
|
||||
async enabled() {
|
||||
if (!Bun.which("ruff")) return false
|
||||
const app = App.info()
|
||||
const configs = ["pyproject.toml", "ruff.toml", ".ruff.toml"]
|
||||
for (const config of configs) {
|
||||
const found = await Filesystem.findUp(config, app.path.cwd, app.path.root)
|
||||
if (found.length > 0) {
|
||||
if (config === "pyproject.toml") {
|
||||
const content = await Bun.file(found[0]).text()
|
||||
if (content.includes("[tool.ruff]")) return true
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
const deps = ["requirements.txt", "pyproject.toml", "Pipfile"]
|
||||
for (const dep of deps) {
|
||||
const found = await Filesystem.findUp(dep, app.path.cwd, app.path.root)
|
||||
if (found.length > 0) {
|
||||
const content = await Bun.file(found[0]).text()
|
||||
if (content.includes("ruff")) return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
},
|
||||
}
|
||||
|
||||
export const rubocop: Info = {
|
||||
name: "rubocop",
|
||||
command: ["rubocop", "--autocorrect", "$FILE"],
|
||||
extensions: [".rb", ".rake", ".gemspec", ".ru"],
|
||||
async enabled() {
|
||||
return Bun.which("rubocop") !== null
|
||||
},
|
||||
}
|
||||
|
||||
export const standardrb: Info = {
|
||||
name: "standardrb",
|
||||
command: ["standardrb", "--fix", "$FILE"],
|
||||
extensions: [".rb", ".rake", ".gemspec", ".ru"],
|
||||
async enabled() {
|
||||
return Bun.which("standardrb") !== null
|
||||
},
|
||||
}
|
||||
|
||||
export const htmlbeautifier: Info = {
|
||||
name: "htmlbeautifier",
|
||||
command: ["htmlbeautifier", "$FILE"],
|
||||
extensions: [".erb", ".html.erb"],
|
||||
async enabled() {
|
||||
return Bun.which("htmlbeautifier") !== null
|
||||
},
|
||||
}
|
||||
packages/opencode/src/format/index.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
import { App } from "../app/app"
|
||||
import { Bus } from "../bus"
|
||||
import { File } from "../file"
|
||||
import { Log } from "../util/log"
|
||||
import path from "path"
|
||||
|
||||
import * as Formatter from "./formatter"
|
||||
|
||||
export namespace Format {
|
||||
const log = Log.create({ service: "format" })
|
||||
|
||||
const state = App.state("format", () => {
|
||||
const enabled: Record<string, boolean> = {}
|
||||
|
||||
return {
|
||||
enabled,
|
||||
}
|
||||
})
|
||||
|
||||
async function isEnabled(item: Formatter.Info) {
|
||||
const s = state()
|
||||
let status = s.enabled[item.name]
|
||||
if (status === undefined) {
|
||||
status = await item.enabled()
|
||||
s.enabled[item.name] = status
|
||||
}
|
||||
return status
|
||||
}
|
||||
|
||||
async function getFormatter(ext: string) {
|
||||
const result = []
|
||||
for (const item of Object.values(Formatter)) {
|
||||
if (!item.extensions.includes(ext)) continue
|
||||
if (!(await isEnabled(item))) continue
|
||||
result.push(item)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
export function init() {
|
||||
log.info("init")
|
||||
Bus.subscribe(File.Event.Edited, async (payload) => {
|
||||
const file = payload.properties.file
|
||||
log.info("formatting", { file })
|
||||
const ext = path.extname(file)
|
||||
|
||||
for (const item of await getFormatter(ext)) {
|
||||
log.info("running", { command: item.command })
|
||||
const proc = Bun.spawn({
|
||||
cmd: item.command.map((x) => x.replace("$FILE", file)),
|
||||
cwd: App.info().path.cwd,
|
||||
env: item.environment,
|
||||
stdout: "ignore",
|
||||
stderr: "ignore",
|
||||
})
|
||||
const exit = await proc.exited
|
||||
if (exit !== 0)
|
||||
log.error("failed", {
|
||||
command: item.command,
|
||||
...item.environment,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
}
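Sketch of how the hook is driven end to end (assumed wiring, not part of the diff, mirroring the other init() calls): once Format.init has subscribed, publishing File.Event.Edited runs every matching, enabled formatter on that file.

import { Bus } from "../bus"
import { File } from "../file"
import { Format } from "../format"

Format.init()
Bus.publish(File.Event.Edited, { file: "src/example.ts" }) // path illustrative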
@@ -1,5 +1,5 @@
|
||||
import fs from "fs/promises"
|
||||
import { xdgData, xdgCache, xdgConfig } from "xdg-basedir"
|
||||
import { xdgData, xdgCache, xdgConfig, xdgState } from "xdg-basedir"
|
||||
import path from "path"
|
||||
|
||||
const app = "opencode"
|
||||
@@ -7,18 +7,33 @@ const app = "opencode"
|
||||
const data = path.join(xdgData!, app)
|
||||
const cache = path.join(xdgCache!, app)
|
||||
const config = path.join(xdgConfig!, app)
|
||||
|
||||
await Promise.all([
|
||||
fs.mkdir(data, { recursive: true }),
|
||||
fs.mkdir(config, { recursive: true }),
|
||||
fs.mkdir(cache, { recursive: true }),
|
||||
])
|
||||
const state = path.join(xdgState!, app)
|
||||
|
||||
export namespace Global {
|
||||
export const Path = {
|
||||
data,
|
||||
bin: path.join(data, "bin"),
|
||||
providers: path.join(config, "providers"),
|
||||
cache,
|
||||
config,
|
||||
state,
|
||||
} as const
|
||||
}
|
||||
|
||||
await Promise.all([
|
||||
fs.mkdir(Global.Path.data, { recursive: true }),
|
||||
fs.mkdir(Global.Path.config, { recursive: true }),
|
||||
fs.mkdir(Global.Path.providers, { recursive: true }),
|
||||
fs.mkdir(Global.Path.state, { recursive: true }),
|
||||
])
|
||||
|
||||
const CACHE_VERSION = "3"
|
||||
|
||||
const version = await Bun.file(path.join(Global.Path.cache, "version"))
|
||||
.text()
|
||||
.catch(() => "0")
|
||||
|
||||
if (version !== CACHE_VERSION) {
|
||||
await fs.rm(Global.Path.cache, { recursive: true, force: true })
|
||||
await Bun.file(path.join(Global.Path.cache, "version")).write(CACHE_VERSION)
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ export namespace Identifier {
|
||||
session: "ses",
|
||||
message: "msg",
|
||||
user: "usr",
|
||||
part: "prt",
|
||||
} as const
|
||||
|
||||
export function schema(prefix: keyof typeof prefixes) {
|
||||
@@ -26,11 +27,7 @@ export namespace Identifier {
|
||||
return generateID(prefix, true, given)
|
||||
}
|
||||
|
||||
function generateID(
|
||||
prefix: keyof typeof prefixes,
|
||||
descending: boolean,
|
||||
given?: string,
|
||||
): string {
|
||||
function generateID(prefix: keyof typeof prefixes, descending: boolean, given?: string): string {
|
||||
if (!given) {
|
||||
return generateNewID(prefix, descending)
|
||||
}
|
||||
@@ -41,10 +38,17 @@ export namespace Identifier {
|
||||
return given
|
||||
}
|
||||
|
||||
function generateNewID(
|
||||
prefix: keyof typeof prefixes,
|
||||
descending: boolean,
|
||||
): string {
|
||||
function randomBase62(length: number): string {
|
||||
const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
|
||||
let result = ""
|
||||
const bytes = randomBytes(length)
|
||||
for (let i = 0; i < length; i++) {
|
||||
result += chars[bytes[i] % 62]
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
function generateNewID(prefix: keyof typeof prefixes, descending: boolean): string {
|
||||
const currentTimestamp = Date.now()
|
||||
|
||||
if (currentTimestamp !== lastTimestamp) {
|
||||
@@ -62,14 +66,6 @@ export namespace Identifier {
|
||||
timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff))
|
||||
}
|
||||
|
||||
const randLength = (LENGTH - 12) / 2
|
||||
const random = randomBytes(randLength)
|
||||
|
||||
return (
|
||||
prefixes[prefix] +
|
||||
"_" +
|
||||
timeBytes.toString("hex") +
|
||||
random.toString("hex")
|
||||
)
|
||||
return prefixes[prefix] + "_" + timeBytes.toString("hex") + randomBase62(LENGTH - 12)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,125 +1,124 @@
|
||||
import "zod-openapi/extend"
|
||||
import { App } from "./app/app"
|
||||
import { Server } from "./server/server"
|
||||
import fs from "fs/promises"
|
||||
import path from "path"
|
||||
|
||||
import { Share } from "./share/share"
|
||||
|
||||
import { Global } from "./global"
|
||||
|
||||
import yargs from "yargs"
|
||||
import { hideBin } from "yargs/helpers"
|
||||
import { RunCommand } from "./cli/cmd/run"
|
||||
import { GenerateCommand } from "./cli/cmd/generate"
|
||||
import { VERSION } from "./cli/version"
|
||||
import { ScrapCommand } from "./cli/cmd/scrap"
|
||||
import { Log } from "./util/log"
|
||||
import { AuthCommand, AuthLoginCommand } from "./cli/cmd/auth"
|
||||
import { Provider } from "./provider/provider"
|
||||
import { AuthCommand } from "./cli/cmd/auth"
|
||||
import { UpgradeCommand } from "./cli/cmd/upgrade"
|
||||
import { ModelsCommand } from "./cli/cmd/models"
|
||||
import { UI } from "./cli/ui"
|
||||
import { Installation } from "./installation"
|
||||
import { NamedError } from "./util/error"
|
||||
import { FormatError } from "./cli/error"
|
||||
import { ServeCommand } from "./cli/cmd/serve"
|
||||
import { TuiCommand } from "./cli/cmd/tui"
|
||||
import { DebugCommand } from "./cli/cmd/debug"
|
||||
import { StatsCommand } from "./cli/cmd/stats"
|
||||
import { McpCommand } from "./cli/cmd/mcp"
|
||||
import { InstallGithubCommand } from "./cli/cmd/install-github"
|
||||
import { Trace } from "./trace"
|
||||
|
||||
Trace.init()
|
||||
|
||||
const cancel = new AbortController()
|
||||
|
||||
process.on("unhandledRejection", (e) => {
|
||||
Log.Default.error("rejection", {
|
||||
e: e instanceof Error ? e.message : e,
|
||||
})
|
||||
})
|
||||
|
||||
process.on("uncaughtException", (e) => {
|
||||
Log.Default.error("exception", {
|
||||
e: e instanceof Error ? e.message : e,
|
||||
})
|
||||
})
|
||||
|
||||
const cli = yargs(hideBin(process.argv))
|
||||
.scriptName("opencode")
|
||||
.version(VERSION)
|
||||
.help("help", "show help")
|
||||
.version("version", "show version number", Installation.VERSION)
|
||||
.alias("version", "v")
|
||||
.option("print-logs", {
|
||||
describe: "Print logs to stderr",
|
||||
describe: "print logs to stderr",
|
||||
type: "boolean",
|
||||
})
|
||||
.middleware(async () => {
|
||||
await Log.init({ print: process.argv.includes("--print-logs") })
|
||||
.option("log-level", {
|
||||
describe: "log level",
|
||||
type: "string",
|
||||
choices: ["DEBUG", "INFO", "WARN", "ERROR"],
|
||||
})
|
||||
.middleware(async (opts) => {
|
||||
await Log.init({
|
||||
print: process.argv.includes("--print-logs"),
|
||||
dev: Installation.isDev(),
|
||||
level: (() => {
|
||||
if (opts.logLevel) return opts.logLevel as Log.Level
|
||||
if (Installation.isDev()) return "DEBUG"
|
||||
return "INFO"
|
||||
})(),
|
||||
})
|
||||
|
||||
Log.Default.info("opencode", {
|
||||
version: VERSION,
|
||||
version: Installation.VERSION,
|
||||
args: process.argv.slice(2),
|
||||
})
|
||||
})
|
||||
.usage("\n" + UI.logo())
|
||||
.command({
|
||||
command: "$0 [project]",
|
||||
describe: "Start OpenCode TUI",
|
||||
builder: (yargs) =>
|
||||
yargs.positional("project", {
|
||||
type: "string",
|
||||
describe: "path to start opencode in",
|
||||
}),
|
||||
handler: async (args) => {
|
||||
while (true) {
|
||||
const cwd = args.project ? path.resolve(args.project) : process.cwd()
|
||||
process.chdir(cwd)
|
||||
const result = await App.provide(
|
||||
{ cwd, version: VERSION },
|
||||
async () => {
|
||||
const providers = await Provider.list()
|
||||
if (Object.keys(providers).length === 0) {
|
||||
return "needs_provider"
|
||||
}
|
||||
|
||||
await Share.init()
|
||||
const server = Server.listen()
|
||||
|
||||
let cmd = ["go", "run", "./main.go"]
|
||||
let cwd = new URL("../../tui/cmd/opencode", import.meta.url)
|
||||
.pathname
|
||||
if (Bun.embeddedFiles.length > 0) {
|
||||
const blob = Bun.embeddedFiles[0] as File
|
||||
const binary = path.join(Global.Path.cache, "tui", blob.name)
|
||||
const file = Bun.file(binary)
|
||||
if (!(await file.exists())) {
|
||||
await Bun.write(file, blob, { mode: 0o755 })
|
||||
await fs.chmod(binary, 0o755)
|
||||
}
|
||||
cwd = process.cwd()
|
||||
cmd = [binary]
|
||||
}
|
||||
const proc = Bun.spawn({
|
||||
cmd: [...cmd, ...process.argv.slice(2)],
|
||||
cwd,
|
||||
stdout: "inherit",
|
||||
stderr: "inherit",
|
||||
stdin: "inherit",
|
||||
env: {
|
||||
...process.env,
|
||||
OPENCODE_SERVER: server.url.toString(),
|
||||
},
|
||||
onExit: () => {
|
||||
server.stop()
|
||||
},
|
||||
})
|
||||
await proc.exited
|
||||
await server.stop()
|
||||
|
||||
return "done"
|
||||
},
|
||||
)
|
||||
if (result === "done") break
|
||||
if (result === "needs_provider") {
|
||||
UI.empty()
|
||||
UI.println(UI.logo(" "))
|
||||
UI.empty()
|
||||
await AuthLoginCommand.handler(args)
|
||||
}
|
||||
}
|
||||
},
|
||||
})
|
||||
.command(McpCommand)
|
||||
.command(TuiCommand)
|
||||
.command(RunCommand)
|
||||
.command(GenerateCommand)
|
||||
.command(ScrapCommand)
|
||||
.command(DebugCommand)
|
||||
.command(AuthCommand)
|
||||
.fail((msg, err) => {
|
||||
if (
|
||||
msg.startsWith("Unknown argument") ||
|
||||
msg.startsWith("Not enough non-option arguments")
|
||||
) {
|
||||
.command(UpgradeCommand)
|
||||
.command(ServeCommand)
|
||||
.command(ModelsCommand)
|
||||
.command(StatsCommand)
|
||||
.command(InstallGithubCommand)
|
||||
.fail((msg) => {
|
||||
if (msg.startsWith("Unknown argument") || msg.startsWith("Not enough non-option arguments")) {
|
||||
cli.showHelp("log")
|
||||
}
|
||||
Log.Default.error(msg, {
|
||||
err,
|
||||
})
|
||||
})
|
||||
.strict()
|
||||
|
||||
try {
|
||||
await cli.parse()
|
||||
} catch (e) {
|
||||
Log.Default.error(e)
|
||||
let data: Record<string, any> = {}
|
||||
if (e instanceof NamedError) {
|
||||
const obj = e.toObject()
|
||||
Object.assign(data, {
|
||||
...obj.data,
|
||||
})
|
||||
}
|
||||
|
||||
if (e instanceof Error) {
|
||||
Object.assign(data, {
|
||||
name: e.name,
|
||||
message: e.message,
|
||||
cause: e.cause?.toString(),
|
||||
})
|
||||
}
|
||||
|
||||
if (e instanceof ResolveMessage) {
|
||||
Object.assign(data, {
|
||||
name: e.name,
|
||||
message: e.message,
|
||||
code: e.code,
|
||||
specifier: e.specifier,
|
||||
referrer: e.referrer,
|
||||
position: e.position,
|
||||
importKind: e.importKind,
|
||||
})
|
||||
}
|
||||
Log.Default.error("fatal", data)
|
||||
const formatted = FormatError(e)
|
||||
if (formatted) UI.error(formatted)
|
||||
if (formatted === undefined) UI.error("Unexpected error, check log file at " + Log.file() + " for more details")
|
||||
process.exitCode = 1
|
||||
}
|
||||
|
||||
cancel.abort()
|
||||
|
||||
packages/opencode/src/installation/index.ts (new file, 151 lines)
@@ -0,0 +1,151 @@
|
||||
import path from "path"
|
||||
import { $ } from "bun"
|
||||
import { z } from "zod"
|
||||
import { NamedError } from "../util/error"
|
||||
import { Bus } from "../bus"
|
||||
import { Log } from "../util/log"
|
||||
|
||||
declare global {
|
||||
const OPENCODE_VERSION: string
|
||||
}
|
||||
|
||||
export namespace Installation {
|
||||
const log = Log.create({ service: "installation" })
|
||||
|
||||
export type Method = Awaited<ReturnType<typeof method>>
|
||||
|
||||
export const Event = {
|
||||
Updated: Bus.event(
|
||||
"installation.updated",
|
||||
z.object({
|
||||
version: z.string(),
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
version: z.string(),
|
||||
latest: z.string(),
|
||||
})
|
||||
.openapi({
|
||||
ref: "InstallationInfo",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export async function info() {
|
||||
return {
|
||||
version: VERSION,
|
||||
latest: await latest(),
|
||||
}
|
||||
}
|
||||
|
||||
export function isSnapshot() {
|
||||
return VERSION.startsWith("0.0.0")
|
||||
}
|
||||
|
||||
export function isDev() {
|
||||
return VERSION === "dev"
|
||||
}
|
||||
|
||||
export async function method() {
|
||||
if (process.execPath.includes(path.join(".opencode", "bin"))) return "curl"
|
||||
const exec = process.execPath.toLowerCase()
|
||||
|
||||
const checks = [
|
||||
{
|
||||
name: "npm" as const,
|
||||
command: () => $`npm list -g --depth=0`.throws(false).text(),
|
||||
},
|
||||
{
|
||||
name: "yarn" as const,
|
||||
command: () => $`yarn global list`.throws(false).text(),
|
||||
},
|
||||
{
|
||||
name: "pnpm" as const,
|
||||
command: () => $`pnpm list -g --depth=0`.throws(false).text(),
|
||||
},
|
||||
{
|
||||
name: "bun" as const,
|
||||
command: () => $`bun pm ls -g`.throws(false).text(),
|
||||
},
|
||||
{
|
||||
name: "brew" as const,
|
||||
command: () => $`brew list --formula opencode-ai`.throws(false).text(),
|
||||
},
|
||||
]
|
||||
|
||||
checks.sort((a, b) => {
|
||||
const aMatches = exec.includes(a.name)
|
||||
const bMatches = exec.includes(b.name)
|
||||
if (aMatches && !bMatches) return -1
|
||||
if (!aMatches && bMatches) return 1
|
||||
return 0
|
||||
})
|
||||
|
||||
for (const check of checks) {
|
||||
const output = await check.command()
|
||||
if (output.includes("opencode-ai")) {
|
||||
return check.name
|
||||
}
|
||||
}
|
||||
|
||||
return "unknown"
|
||||
}
|
||||
|
||||
export const UpgradeFailedError = NamedError.create(
|
||||
"UpgradeFailedError",
|
||||
z.object({
|
||||
stderr: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export async function upgrade(method: Method, target: string) {
|
||||
const cmd = (() => {
|
||||
switch (method) {
|
||||
case "curl":
|
||||
return $`curl -fsSL https://opencode.ai/install | bash`.env({
|
||||
...process.env,
|
||||
VERSION: target,
|
||||
})
|
||||
case "npm":
|
||||
return $`npm install -g opencode-ai@${target}`
|
||||
case "pnpm":
|
||||
return $`pnpm install -g opencode-ai@${target}`
|
||||
case "bun":
|
||||
return $`bun install -g opencode-ai@${target}`
|
||||
case "brew":
|
||||
return $`brew install sst/tap/opencode`.env({
|
||||
HOMEBREW_NO_AUTO_UPDATE: "1",
|
||||
})
|
||||
default:
|
||||
throw new Error(`Unknown method: ${method}`)
|
||||
}
|
||||
})()
|
||||
const result = await cmd.quiet().throws(false)
|
||||
log.info("upgraded", {
|
||||
method,
|
||||
target,
|
||||
stdout: result.stdout.toString(),
|
||||
stderr: result.stderr.toString(),
|
||||
})
|
||||
if (result.exitCode !== 0)
|
||||
throw new UpgradeFailedError({
|
||||
stderr: result.stderr.toString("utf8"),
|
||||
})
|
||||
}
|
||||
|
||||
export const VERSION = typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "dev"
|
||||
|
||||
export async function latest() {
|
||||
return fetch("https://api.github.com/repos/sst/opencode/releases/latest")
|
||||
.then((res) => res.json())
|
||||
.then((data) => {
|
||||
if (typeof data.tag_name !== "string") {
|
||||
log.error("GitHub API error", data)
|
||||
throw new Error("failed to fetch latest version")
|
||||
}
|
||||
return data.tag_name.slice(1) as string
|
||||
})
|
||||
}
|
||||
}
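An illustrative upgrade flow built only from the exports above (not part of the diff); the error handling a real caller would add is omitted:

import { Installation } from "../installation"

const { version, latest } = await Installation.info()
if (!Installation.isDev() && version !== latest) {
  const method = await Installation.method() // "curl" | "npm" | "bun" | ... | "unknown"
  if (method !== "unknown") await Installation.upgrade(method, latest)
}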
@@ -1,9 +1,5 @@
|
||||
import path from "path"
|
||||
import {
|
||||
createMessageConnection,
|
||||
StreamMessageReader,
|
||||
StreamMessageWriter,
|
||||
} from "vscode-jsonrpc/node"
|
||||
import { createMessageConnection, StreamMessageReader, StreamMessageWriter } from "vscode-jsonrpc/node"
|
||||
import type { Diagnostic as VSCodeDiagnostic } from "vscode-languageserver-types"
|
||||
import { App } from "../app/app"
|
||||
import { Log } from "../util/log"
|
||||
@@ -11,6 +7,8 @@ import { LANGUAGE_EXTENSIONS } from "./language"
|
||||
import { Bus } from "../bus"
|
||||
import z from "zod"
|
||||
import type { LSPServer } from "./server"
|
||||
import { NamedError } from "../util/error"
|
||||
import { withTimeout } from "../util/timeout"
|
||||
|
||||
export namespace LSPClient {
|
||||
const log = Log.create({ service: "lsp.client" })
|
||||
@@ -19,6 +17,13 @@ export namespace LSPClient {
|
||||
|
||||
export type Diagnostic = VSCodeDiagnostic
|
||||
|
||||
export const InitializeError = NamedError.create(
|
||||
"LSPInitializeError",
|
||||
z.object({
|
||||
serverID: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const Event = {
|
||||
Diagnostics: Bus.event(
|
||||
"lsp.client.diagnostics",
|
||||
@@ -29,150 +34,158 @@ export namespace LSPClient {
|
||||
),
|
||||
}
|
||||
|
||||
export async function create(serverID: string, server: LSPServer.Handle) {
|
||||
export async function create(input: { serverID: string; server: LSPServer.Handle; root: string }) {
|
||||
const app = App.info()
|
||||
log.info("starting client", { id: serverID })
|
||||
const l = log.clone().tag("serverID", input.serverID)
|
||||
l.info("starting client")
|
||||
|
||||
const connection = createMessageConnection(
|
||||
new StreamMessageReader(server.process.stdout),
|
||||
new StreamMessageWriter(server.process.stdin),
|
||||
new StreamMessageReader(input.server.process.stdout),
|
||||
new StreamMessageWriter(input.server.process.stdin),
|
||||
)
|
||||
|
||||
const diagnostics = new Map<string, Diagnostic[]>()
|
||||
connection.onNotification("textDocument/publishDiagnostics", (params) => {
|
||||
const path = new URL(params.uri).pathname
|
||||
log.info("textDocument/publishDiagnostics", {
|
||||
l.info("textDocument/publishDiagnostics", {
|
||||
path,
|
||||
})
|
||||
const exists = diagnostics.has(path)
|
||||
diagnostics.set(path, params.diagnostics)
|
||||
Bus.publish(Event.Diagnostics, { path, serverID })
|
||||
if (!exists && input.serverID === "typescript") return
|
||||
Bus.publish(Event.Diagnostics, { path, serverID: input.serverID })
|
||||
})
|
||||
connection.onRequest("window/workDoneProgress/create", (params) => {
|
||||
l.info("window/workDoneProgress/create", params)
|
||||
return null
|
||||
})
|
||||
connection.onRequest("workspace/configuration", async () => {
|
||||
return [{}]
|
||||
})
|
||||
connection.listen()
|
||||
|
||||
await connection.sendRequest("initialize", {
|
||||
processId: server.process.pid,
|
||||
workspaceFolders: [
|
||||
l.info("sending initialize")
|
||||
await withTimeout(
|
||||
connection.sendRequest("initialize", {
|
||||
rootUri: "file://" + input.root,
|
||||
processId: input.server.process.pid,
|
||||
workspaceFolders: [
|
||||
{
|
||||
name: "workspace",
|
||||
uri: "file://" + input.root,
|
||||
},
|
||||
],
|
||||
initializationOptions: {
|
||||
...input.server.initialization,
|
||||
},
|
||||
capabilities: {
|
||||
window: {
|
||||
workDoneProgress: true,
|
||||
},
|
||||
workspace: {
|
||||
configuration: true,
|
||||
},
|
||||
textDocument: {
|
||||
synchronization: {
|
||||
didOpen: true,
|
||||
didChange: true,
|
||||
},
|
||||
publishDiagnostics: {
|
||||
versionSupport: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
5_000,
|
||||
).catch((err) => {
|
||||
l.error("initialize error", { error: err })
|
||||
throw new InitializeError(
|
||||
{ serverID: input.serverID },
|
||||
{
|
||||
name: "workspace",
|
||||
uri: "file://" + app.path.cwd,
|
||||
cause: err,
|
||||
},
|
||||
],
|
||||
initializationOptions: {
|
||||
...server.initialization,
|
||||
},
|
||||
capabilities: {
|
||||
workspace: {
|
||||
configuration: true,
|
||||
},
|
||||
textDocument: {
|
||||
synchronization: {
|
||||
didOpen: true,
|
||||
didChange: true,
|
||||
},
|
||||
publishDiagnostics: {
|
||||
versionSupport: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
await connection.sendNotification("initialized", {})
|
||||
log.info("initialized")
|
||||
|
||||
const files: {
|
||||
[path: string]: number
|
||||
} = {}
|
||||
|
||||
const result = {
|
||||
root: input.root,
|
||||
get serverID() {
|
||||
return serverID
|
||||
return input.serverID
|
||||
},
|
||||
get connection() {
|
||||
return connection
|
||||
},
|
||||
notify: {
|
||||
async open(input: { path: string }) {
|
||||
input.path = path.isAbsolute(input.path)
|
||||
? input.path
|
||||
: path.resolve(app.path.cwd, input.path)
|
||||
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path)
|
||||
const file = Bun.file(input.path)
|
||||
const text = await file.text()
|
||||
const version = files[input.path]
|
||||
if (version === undefined) {
|
||||
log.info("textDocument/didOpen", input)
|
||||
if (version !== undefined) {
|
||||
diagnostics.delete(input.path)
|
||||
const extension = path.extname(input.path)
|
||||
const languageId = LANGUAGE_EXTENSIONS[extension] ?? "plaintext"
|
||||
await connection.sendNotification("textDocument/didOpen", {
|
||||
await connection.sendNotification("textDocument/didClose", {
|
||||
textDocument: {
|
||||
uri: `file://` + input.path,
|
||||
languageId,
|
||||
version: 0,
|
||||
text,
|
||||
},
|
||||
})
|
||||
files[input.path] = 0
|
||||
return
|
||||
}
|
||||
|
||||
log.info("textDocument/didChange", input)
|
||||
log.info("textDocument/didOpen", input)
|
||||
diagnostics.delete(input.path)
|
||||
await connection.sendNotification("textDocument/didChange", {
|
||||
const extension = path.extname(input.path)
|
||||
const languageId = LANGUAGE_EXTENSIONS[extension] ?? "plaintext"
|
||||
await connection.sendNotification("textDocument/didOpen", {
|
||||
textDocument: {
|
||||
uri: `file://` + input.path,
|
||||
version: ++files[input.path],
|
||||
languageId,
|
||||
version: 0,
|
||||
text,
|
||||
},
|
||||
contentChanges: [
|
||||
{
|
||||
text,
|
||||
},
|
||||
],
|
||||
})
|
||||
files[input.path] = 0
|
||||
return
|
||||
},
|
||||
},
|
||||
get diagnostics() {
|
||||
return diagnostics
|
||||
},
|
||||
async waitForDiagnostics(input: { path: string }) {
|
||||
input.path = path.isAbsolute(input.path)
|
||||
? input.path
|
||||
: path.resolve(app.path.cwd, input.path)
|
||||
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path)
|
||||
log.info("waiting for diagnostics", input)
|
||||
let unsub: () => void
|
||||
let timeout: NodeJS.Timeout
|
||||
return await Promise.race([
|
||||
new Promise<void>(async (resolve) => {
|
||||
return await withTimeout(
|
||||
new Promise<void>((resolve) => {
|
||||
unsub = Bus.subscribe(Event.Diagnostics, (event) => {
|
||||
if (
|
||||
event.properties.path === input.path &&
|
||||
event.properties.serverID === result.serverID
|
||||
) {
|
||||
if (event.properties.path === input.path && event.properties.serverID === result.serverID) {
|
||||
log.info("got diagnostics", input)
|
||||
clearTimeout(timeout)
|
||||
unsub?.()
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
}),
|
||||
new Promise<void>((resolve) => {
|
||||
timeout = setTimeout(() => {
|
||||
log.info("timed out refreshing diagnostics", input)
|
||||
unsub?.()
|
||||
resolve()
|
||||
}, 5000)
|
||||
}),
|
||||
])
|
||||
3000,
|
||||
)
|
||||
.catch(() => {})
|
||||
.finally(() => {
|
||||
unsub?.()
|
||||
})
|
||||
},
|
||||
async shutdown() {
|
||||
log.info("shutting down")
|
||||
l.info("shutting down")
|
||||
connection.end()
|
||||
connection.dispose()
|
||||
server.process.kill("SIGKILL")
|
||||
input.server.process.kill()
|
||||
l.info("shutdown")
|
||||
},
|
||||
}
|
||||
|
||||
l.info("initialized")
|
||||
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,48 +3,115 @@ import { Log } from "../util/log"
|
||||
import { LSPClient } from "./client"
|
||||
import path from "path"
|
||||
import { LSPServer } from "./server"
|
||||
import { z } from "zod"
|
||||
|
||||
export namespace LSP {
|
||||
const log = Log.create({ service: "lsp" })
|
||||
|
||||
export const Range = z
|
||||
.object({
|
||||
start: z.object({
|
||||
line: z.number(),
|
||||
character: z.number(),
|
||||
}),
|
||||
end: z.object({
|
||||
line: z.number(),
|
||||
character: z.number(),
|
||||
}),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Range",
|
||||
})
|
||||
export type Range = z.infer<typeof Range>
|
||||
|
||||
export const Symbol = z
|
||||
.object({
|
||||
name: z.string(),
|
||||
kind: z.number(),
|
||||
location: z.object({
|
||||
uri: z.string(),
|
||||
range: Range,
|
||||
}),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Symbol",
|
||||
})
|
||||
export type Symbol = z.infer<typeof Symbol>
|
||||
|
||||
export const DocumentSymbol = z
|
||||
.object({
|
||||
name: z.string(),
|
||||
detail: z.string().optional(),
|
||||
kind: z.number(),
|
||||
range: Range,
|
||||
selectionRange: Range,
|
||||
})
|
||||
.openapi({
|
||||
ref: "DocumentSymbol",
|
||||
})
|
||||
export type DocumentSymbol = z.infer<typeof DocumentSymbol>
|
||||
|
||||
const state = App.state(
|
||||
"lsp",
|
||||
async () => {
|
||||
log.info("initializing")
|
||||
const clients = new Map<string, LSPClient.Info>()
|
||||
|
||||
const clients: LSPClient.Info[] = []
|
||||
return {
|
||||
broken: new Set<string>(),
|
||||
clients,
|
||||
}
|
||||
},
|
||||
async (state) => {
|
||||
for (const client of state.clients.values()) {
|
||||
for (const client of state.clients) {
|
||||
await client.shutdown()
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
export async function touchFile(input: string, waitForDiagnostics?: boolean) {
|
||||
const extension = path.parse(input).ext
|
||||
export async function init() {
|
||||
return state()
|
||||
}
|
||||
|
||||
async function getClients(file: string) {
|
||||
const s = await state()
|
||||
const matches = LSPServer.All.filter((x) =>
|
||||
x.extensions.includes(extension),
|
||||
)
|
||||
for (const match of matches) {
|
||||
const existing = s.clients.get(match.id)
|
||||
if (existing) continue
|
||||
const handle = await match.spawn(App.info())
|
||||
const extension = path.parse(file).ext
|
||||
const result: LSPClient.Info[] = []
|
||||
for (const server of Object.values(LSPServer)) {
|
||||
if (!server.extensions.includes(extension)) continue
|
||||
const root = await server.root(file, App.info())
|
||||
if (!root) continue
|
||||
if (s.broken.has(root + server.id)) continue
|
||||
|
||||
const match = s.clients.find((x) => x.root === root && x.serverID === server.id)
|
||||
if (match) {
|
||||
result.push(match)
|
||||
continue
|
||||
}
|
||||
const handle = await server.spawn(App.info(), root)
|
||||
if (!handle) continue
|
||||
const client = await LSPClient.create(match.id, handle)
|
||||
s.clients.set(match.id, client)
|
||||
}
|
||||
if (waitForDiagnostics) {
|
||||
await run(async (client) => {
|
||||
const wait = client.waitForDiagnostics({ path: input })
|
||||
await client.notify.open({ path: input })
|
||||
return wait
|
||||
const client = await LSPClient.create({
|
||||
serverID: server.id,
|
||||
server: handle,
|
||||
root,
|
||||
}).catch((err) => {
|
||||
s.broken.add(root + server.id)
|
||||
handle.process.kill()
|
||||
log.error("", { error: err })
|
||||
})
|
||||
if (!client) continue
|
||||
s.clients.push(client)
|
||||
result.push(client)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
export async function touchFile(input: string, waitForDiagnostics?: boolean) {
|
||||
const clients = await getClients(input)
|
||||
await run(async (client) => {
|
||||
if (!clients.includes(client)) return
|
||||
const wait = waitForDiagnostics ? client.waitForDiagnostics({ path: input }) : Promise.resolve()
|
||||
await client.notify.open({ path: input })
|
||||
return wait
|
||||
})
|
||||
}
|
||||
|
||||
export async function diagnostics() {
|
||||
@@ -59,11 +126,7 @@ export namespace LSP {
|
||||
return results
|
||||
}
|
||||
|
||||
export async function hover(input: {
|
||||
file: string
|
||||
line: number
|
||||
character: number
|
||||
}) {
|
||||
export async function hover(input: { file: string; line: number; character: number }) {
|
||||
return run((client) => {
|
||||
return client.connection.sendRequest("textDocument/hover", {
|
||||
textDocument: {
|
||||
@@ -77,10 +140,74 @@ export namespace LSP {
|
||||
})
|
||||
}
|
||||
|
||||
async function run<T>(
|
||||
input: (client: LSPClient.Info) => Promise<T>,
|
||||
): Promise<T[]> {
|
||||
const clients = await state().then((x) => [...x.clients.values()])
|
||||
enum SymbolKind {
|
||||
File = 1,
|
||||
Module = 2,
|
||||
Namespace = 3,
|
||||
Package = 4,
|
||||
Class = 5,
|
||||
Method = 6,
|
||||
Property = 7,
|
||||
Field = 8,
|
||||
Constructor = 9,
|
||||
Enum = 10,
|
||||
Interface = 11,
|
||||
Function = 12,
|
||||
Variable = 13,
|
||||
Constant = 14,
|
||||
String = 15,
|
||||
Number = 16,
|
||||
Boolean = 17,
|
||||
Array = 18,
|
||||
Object = 19,
|
||||
Key = 20,
|
||||
Null = 21,
|
||||
EnumMember = 22,
|
||||
Struct = 23,
|
||||
Event = 24,
|
||||
Operator = 25,
|
||||
TypeParameter = 26,
|
||||
}
|
||||
|
||||
const kinds = [
|
||||
SymbolKind.Class,
|
||||
SymbolKind.Function,
|
||||
SymbolKind.Method,
|
||||
SymbolKind.Interface,
|
||||
SymbolKind.Variable,
|
||||
SymbolKind.Constant,
|
||||
SymbolKind.Struct,
|
||||
SymbolKind.Enum,
|
||||
]
|
||||
|
||||
export async function workspaceSymbol(query: string) {
|
||||
return run((client) =>
|
||||
client.connection
|
||||
.sendRequest("workspace/symbol", {
|
||||
query,
|
||||
})
|
||||
.then((result: any) => result.filter((x: LSP.Symbol) => kinds.includes(x.kind)))
|
||||
.then((result: any) => result.slice(0, 10))
|
||||
.catch(() => []),
|
||||
).then((result) => result.flat() as LSP.Symbol[])
|
||||
}
|
||||
|
||||
export async function documentSymbol(uri: string) {
|
||||
return run((client) =>
|
||||
client.connection
|
||||
.sendRequest("textDocument/documentSymbol", {
|
||||
textDocument: {
|
||||
uri,
|
||||
},
|
||||
})
|
||||
.catch(() => []),
|
||||
)
|
||||
.then((result) => result.flat() as (LSP.DocumentSymbol | LSP.Symbol)[])
|
||||
.then((result) => result.filter(Boolean))
|
||||
}
|
||||
|
||||
async function run<T>(input: (client: LSPClient.Info) => Promise<T>): Promise<T[]> {
|
||||
const clients = await state().then((x) => x.clients)
|
||||
const tasks = clients.map((x) => input(x))
|
||||
return Promise.all(tasks)
|
||||
}
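A sketch of the LSP facade in use (not part of the diff; file path, query, and import path are illustrative):

import { LSP } from "../lsp"

await LSP.touchFile("src/index.ts", true) // open the file and wait for diagnostics
const diagnostics = await LSP.diagnostics()
const symbols = await LSP.workspaceSymbol("App")
console.log(diagnostics, symbols.length)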
|
||||
@@ -63,6 +63,14 @@ export const LANGUAGE_EXTENSIONS: Record<string, string> = {
|
||||
".cshtml": "razor",
|
||||
".razor": "razor",
|
||||
".rb": "ruby",
|
||||
".rake": "ruby",
|
||||
".gemspec": "ruby",
|
||||
".ru": "ruby",
|
||||
".erb": "erb",
|
||||
".html.erb": "erb",
|
||||
".js.erb": "erb",
|
||||
".css.erb": "erb",
|
||||
".json.erb": "erb",
|
||||
".rs": "rust",
|
||||
".scss": "scss",
|
||||
".sass": "sass",
|
||||
@@ -86,4 +94,6 @@ export const LANGUAGE_EXTENSIONS: Record<string, string> = {
|
||||
".yml": "yaml",
|
||||
".mjs": "javascript",
|
||||
".cjs": "javascript",
|
||||
".zig": "zig",
|
||||
".zon": "zig",
|
||||
} as const
|
||||
|
||||
@@ -4,6 +4,9 @@ import path from "path"
|
||||
import { Global } from "../global"
|
||||
import { Log } from "../util/log"
|
||||
import { BunProc } from "../bun"
|
||||
import { $ } from "bun"
|
||||
import fs from "fs/promises"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
|
||||
export namespace LSPServer {
|
||||
const log = Log.create({ service: "lsp.server" })
|
||||
@@ -13,81 +16,310 @@ export namespace LSPServer {
initialization?: Record<string, any>
}

type RootFunction = (file: string, app: App.Info) => Promise<string | undefined>

const NearestRoot = (patterns: string[]): RootFunction => {
return async (file, app) => {
const files = Filesystem.up({
targets: patterns,
start: path.dirname(file),
stop: app.path.root,
})
const first = await files.next()
await files.return()
if (!first.value) return app.path.root
return path.dirname(first.value)
}
}

export interface Info {
id: string
extensions: string[]
spawn(app: App.Info): Promise<Handle | undefined>
global?: boolean
root: RootFunction
spawn(app: App.Info, root: string): Promise<Handle | undefined>
}

export const All: Info[] = [
{
id: "typescript",
extensions: [
".ts",
".tsx",
".js",
".jsx",
".mjs",
".cjs",
".mts",
".cts",
],
async spawn(app) {
const tsserver = await Bun.resolve(
"typescript/lib/tsserver.js",
app.path.cwd,
).catch(() => {})
if (!tsserver) return
const proc = spawn(
BunProc.which(),
["x", "typescript-language-server", "--stdio"],
{
env: {
...process.env,
BUN_BE_BUN: "1",
},
export const Typescript: Info = {
id: "typescript",
root: NearestRoot(["tsconfig.json", "package.json", "jsconfig.json"]),
extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"],
async spawn(app, root) {
const tsserver = await Bun.resolve("typescript/lib/tsserver.js", app.path.cwd).catch(() => {})
if (!tsserver) return
const proc = spawn(BunProc.which(), ["x", "typescript-language-server", "--stdio"], {
cwd: root,
env: {
...process.env,
BUN_BE_BUN: "1",
},
})
return {
process: proc,
initialization: {
tsserver: {
path: tsserver,
},
)
return {
process: proc,
initialization: {
tsserver: {
path: tsserver,
},
},
}
},
},
}
},
{
id: "golang",
extensions: [".go"],
async spawn() {
let bin = Bun.which("gopls", {
PATH: process.env["PATH"] + ":" + Global.Path.bin,
}

export const Gopls: Info = {
id: "golang",
root: async (file, app) => {
const work = await NearestRoot(["go.work"])(file, app)
if (work) return work
return NearestRoot(["go.mod", "go.sum"])(file, app)
},
extensions: [".go"],
async spawn(_, root) {
let bin = Bun.which("gopls", {
PATH: process.env["PATH"] + ":" + Global.Path.bin,
})
if (!bin) {
if (!Bun.which("go")) return
log.info("installing gopls")
const proc = Bun.spawn({
cmd: ["go", "install", "golang.org/x/tools/gopls@latest"],
env: { ...process.env, GOBIN: Global.Path.bin },
stdout: "pipe",
stderr: "pipe",
stdin: "pipe",
})
if (!bin) {
log.info("installing gopls")
const proc = Bun.spawn({
cmd: ["go", "install", "golang.org/x/tools/gopls@latest"],
env: { ...process.env, GOBIN: Global.Path.bin },
})
const exit = await proc.exited
if (exit !== 0) {
log.error("Failed to install gopls")
const exit = await proc.exited
if (exit !== 0) {
log.error("Failed to install gopls")
return
}
bin = path.join(Global.Path.bin, "gopls" + (process.platform === "win32" ? ".exe" : ""))
log.info(`installed gopls`, {
bin,
})
}
return {
process: spawn(bin!, {
cwd: root,
}),
}
},
}

export const RubyLsp: Info = {
id: "ruby-lsp",
root: NearestRoot(["Gemfile"]),
extensions: [".rb", ".rake", ".gemspec", ".ru"],
async spawn(_, root) {
let bin = Bun.which("ruby-lsp", {
PATH: process.env["PATH"] + ":" + Global.Path.bin,
})
if (!bin) {
const ruby = Bun.which("ruby")
const gem = Bun.which("gem")
if (!ruby || !gem) {
log.info("Ruby not found, please install Ruby first")
return
}
log.info("installing ruby-lsp")
const proc = Bun.spawn({
cmd: ["gem", "install", "ruby-lsp", "--bindir", Global.Path.bin],
stdout: "pipe",
stderr: "pipe",
stdin: "pipe",
})
const exit = await proc.exited
if (exit !== 0) {
log.error("Failed to install ruby-lsp")
return
}
bin = path.join(Global.Path.bin, "ruby-lsp" + (process.platform === "win32" ? ".exe" : ""))
log.info(`installed ruby-lsp`, {
bin,
})
}
return {
process: spawn(bin!, ["--stdio"], {
cwd: root,
}),
}
},
}

export const Pyright: Info = {
id: "pyright",
extensions: [".py", ".pyi"],
root: NearestRoot(["pyproject.toml", "setup.py", "setup.cfg", "requirements.txt", "Pipfile", "pyrightconfig.json"]),
async spawn(_, root) {
const proc = spawn(BunProc.which(), ["x", "pyright-langserver", "--stdio"], {
cwd: root,
env: {
...process.env,
BUN_BE_BUN: "1",
},
})
return {
process: proc,
}
},
}

export const ElixirLS: Info = {
id: "elixir-ls",
extensions: [".ex", ".exs"],
root: NearestRoot(["mix.exs", "mix.lock"]),
async spawn(_, root) {
let binary = Bun.which("elixir-ls")
if (!binary) {
const elixirLsPath = path.join(Global.Path.bin, "elixir-ls")
binary = path.join(
Global.Path.bin,
"elixir-ls-master",
"release",
process.platform === "win32" ? "language_server.bar" : "language_server.sh",
)

if (!(await Bun.file(binary).exists())) {
const elixir = Bun.which("elixir")
if (!elixir) {
log.error("elixir is required to run elixir-ls")
return
}
bin = path.join(
Global.Path.bin,
"gopls" + (process.platform === "win32" ? ".exe" : ""),
)
log.info(`installed gopls`, {
bin,

log.info("downloading elixir-ls from GitHub releases")

const response = await fetch("https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip")
if (!response.ok) return
const zipPath = path.join(Global.Path.bin, "elixir-ls.zip")
await Bun.file(zipPath).write(response)

await $`unzip -o -q ${zipPath}`.cwd(Global.Path.bin).nothrow()

await fs.rm(zipPath, {
force: true,
recursive: true,
})

await $`mix deps.get && mix compile && mix elixir_ls.release2 -o release`
.quiet()
.cwd(path.join(Global.Path.bin, "elixir-ls-master"))
.env({ MIX_ENV: "prod", ...process.env })

log.info(`installed elixir-ls`, {
path: elixirLsPath,
})
}
return {
process: spawn(bin!),
}
},
}

return {
process: spawn(binary, {
cwd: root,
}),
}
},
]
}

export const Zls: Info = {
id: "zls",
extensions: [".zig", ".zon"],
root: NearestRoot(["build.zig"]),
async spawn(_, root) {
let bin = Bun.which("zls", {
PATH: process.env["PATH"] + ":" + Global.Path.bin,
})

if (!bin) {
const zig = Bun.which("zig")
if (!zig) {
log.error("Zig is required to use zls. Please install Zig first.")
return
}

log.info("downloading zls from GitHub releases")

const releaseResponse = await fetch("https://api.github.com/repos/zigtools/zls/releases/latest")
if (!releaseResponse.ok) {
log.error("Failed to fetch zls release info")
return
}

const release = await releaseResponse.json()

const platform = process.platform
const arch = process.arch
let assetName = ""

let zlsArch: string = arch
if (arch === "arm64") zlsArch = "aarch64"
else if (arch === "x64") zlsArch = "x86_64"
else if (arch === "ia32") zlsArch = "x86"

let zlsPlatform: string = platform
if (platform === "darwin") zlsPlatform = "macos"
else if (platform === "win32") zlsPlatform = "windows"

const ext = platform === "win32" ? "zip" : "tar.xz"

assetName = `zls-${zlsArch}-${zlsPlatform}.${ext}`

const supportedCombos = [
"zls-x86_64-linux.tar.xz",
"zls-x86_64-macos.tar.xz",
"zls-x86_64-windows.zip",
"zls-aarch64-linux.tar.xz",
"zls-aarch64-macos.tar.xz",
"zls-aarch64-windows.zip",
"zls-x86-linux.tar.xz",
"zls-x86-windows.zip",
]

if (!supportedCombos.includes(assetName)) {
log.error(`Platform ${platform} and architecture ${arch} is not supported by zls`)
return
}

const asset = release.assets.find((a: any) => a.name === assetName)
if (!asset) {
log.error(`Could not find asset ${assetName} in latest zls release`)
return
}

const downloadUrl = asset.browser_download_url
const downloadResponse = await fetch(downloadUrl)
if (!downloadResponse.ok) {
log.error("Failed to download zls")
return
}

const tempPath = path.join(Global.Path.bin, assetName)
await Bun.file(tempPath).write(downloadResponse)

if (ext === "zip") {
await $`unzip -o -q ${tempPath}`.cwd(Global.Path.bin).nothrow()
} else {
await $`tar -xf ${tempPath}`.cwd(Global.Path.bin).nothrow()
}

await fs.rm(tempPath, { force: true })

bin = path.join(Global.Path.bin, "zls" + (platform === "win32" ? ".exe" : ""))

if (!(await Bun.file(bin).exists())) {
log.error("Failed to extract zls binary")
return
}

if (platform !== "win32") {
await $`chmod +x ${bin}`.nothrow()
}

log.info(`installed zls`, { bin })
}

return {
process: spawn(bin, {
cwd: root,
}),
}
},
}
}
@@ -2,8 +2,22 @@ import { experimental_createMCPClient, type Tool } from "ai"
import { Experimental_StdioMCPTransport } from "ai/mcp-stdio"
import { App } from "../app/app"
import { Config } from "../config/config"
import { Log } from "../util/log"
import { NamedError } from "../util/error"
import { z } from "zod"
import { Session } from "../session"
import { Bus } from "../bus"

export namespace MCP {
const log = Log.create({ service: "mcp" })

export const Failed = NamedError.create(
"MCPFailed",
z.object({
name: z.string(),
}),
)

const state = App.state(
"mcp",
async () => {
@@ -12,27 +26,61 @@ export namespace MCP {
[name: string]: Awaited<ReturnType<typeof experimental_createMCPClient>>
} = {}
for (const [key, mcp] of Object.entries(cfg.mcp ?? {})) {
if (mcp.enabled === false) {
log.info("mcp server disabled", { key })
continue
}
log.info("found", { key, type: mcp.type })
if (mcp.type === "remote") {
clients[key] = await experimental_createMCPClient({
const client = await experimental_createMCPClient({
name: key,
transport: {
type: "sse",
url: mcp.url,
headers: mcp.headers,
},
})
}).catch(() => {})
if (!client) {
Bus.publish(Session.Event.Error, {
error: {
name: "UnknownError",
data: {
message: `MCP server ${key} failed to start`,
},
},
})
continue
}
clients[key] = client
}

if (mcp.type === "local") {
const [cmd, ...args] = mcp.command
clients[key] = await experimental_createMCPClient({
const client = await experimental_createMCPClient({
name: key,
transport: new Experimental_StdioMCPTransport({
stderr: "ignore",
command: cmd,
args,
env: mcp.environment,
env: {
...process.env,
...(cmd === "opencode" ? { BUN_BE_BUN: "1" } : {}),
...mcp.environment,
},
}),
})
}).catch(() => {})
if (!client) {
Bus.publish(Session.Event.Error, {
error: {
name: "UnknownError",
data: {
message: `MCP server ${key} failed to start`,
},
},
})
continue
}
clients[key] = client
}
}
@@ -64,6 +64,7 @@ export namespace Permission {
title: Info["title"]
metadata: Info["metadata"]
}) {
return
const { pending, approved } = state()
log.info("asking", {
sessionID: input.sessionID,
packages/opencode/src/provider/models-macro.ts (new file, +4)
@@ -0,0 +1,4 @@
export async function data() {
const json = await fetch("https://models.dev/api.json").then((x) => x.text())
return json
}
@@ -1,8 +1,8 @@
import { Global } from "../global"
import { lazy } from "../util/lazy"
import { Log } from "../util/log"
import path from "path"
import { z } from "zod"
import { data } from "./models-macro" with { type: "macro" }

export namespace ModelsDev {
const log = Log.create({ service: "models.dev" })
@@ -10,36 +10,41 @@ export namespace ModelsDev {

export const Model = z
.object({
id: z.string(),
name: z.string(),
release_date: z.string(),
attachment: z.boolean(),
reasoning: z.boolean(),
temperature: z.boolean(),
tool_call: z.boolean(),
cost: z.object({
input: z.number(),
output: z.number(),
inputCached: z.number(),
outputCached: z.number(),
cache_read: z.number().optional(),
cache_write: z.number().optional(),
}),
limit: z.object({
context: z.number(),
output: z.number(),
}),
id: z.string(),
options: z.record(z.any()),
})
.openapi({
ref: "Model.Info",
ref: "Model",
})
export type Model = z.infer<typeof Model>

export const Provider = z
.object({
api: z.string().optional(),
name: z.string(),
env: z.array(z.string()),
id: z.string(),
npm: z.string().optional(),
models: z.record(Model),
})
.openapi({
ref: "Provider.Info",
ref: "Provider",
})

export type Provider = z.infer<typeof Provider>
@@ -51,42 +56,15 @@ export namespace ModelsDev {
refresh()
return result as Record<string, Provider>
}
await refresh()
return get()
refresh()
const json = await data()
return JSON.parse(json) as Record<string, Provider>
}

async function refresh() {
const file = Bun.file(filepath)
log.info("refreshing")
const result = await fetch("https://models.dev/api.json")
if (!result.ok)
throw new Error(`Failed to fetch models.dev: ${result.statusText}`)
await Bun.write(file, result)
}

const aisdk = lazy(async () => {
log.info("fetching ai-sdk")
const response = await fetch(
"https://registry.npmjs.org/-/v1/search?text=scope:@ai-sdk",
)
if (!response.ok)
throw new Error(
`Failed to fetch ai-sdk information: ${response.statusText}`,
)
const result = await response.json()
log.info("found ai-sdk", result.objects.length)
return result.objects
.filter((obj: any) => obj.package.name.startsWith("@ai-sdk/"))
.reduce((acc: any, obj: any) => {
acc[obj.package.name] = obj
return acc
}, {})
})

export async function pkg(providerID: string): Promise<[string, string]> {
const packages = await aisdk()
const match = packages[`@ai-sdk/${providerID}`]
if (match) return [match.package.name, "latest"]
return [providerID, "latest"]
const result = await fetch("https://models.dev/api.json").catch(() => {})
if (result && result.ok) await Bun.write(file, result)
}
}
@@ -11,14 +11,13 @@ import { WebFetchTool } from "../tool/webfetch"
|
||||
import { GlobTool } from "../tool/glob"
|
||||
import { GrepTool } from "../tool/grep"
|
||||
import { ListTool } from "../tool/ls"
|
||||
import { LspDiagnosticTool } from "../tool/lsp-diagnostics"
|
||||
import { LspHoverTool } from "../tool/lsp-hover"
|
||||
import { PatchTool } from "../tool/patch"
|
||||
import { ReadTool } from "../tool/read"
|
||||
import type { Tool } from "../tool/tool"
|
||||
import { WriteTool } from "../tool/write"
|
||||
import { TodoReadTool, TodoWriteTool } from "../tool/todo"
|
||||
import { AuthAnthropic } from "../auth/anthropic"
|
||||
import { AuthCopilot } from "../auth/copilot"
|
||||
import { ModelsDev } from "./models"
|
||||
import { NamedError } from "../util/error"
|
||||
import { Auth } from "../auth"
|
||||
@@ -29,38 +28,182 @@ export namespace Provider {
|
||||
|
||||
type CustomLoader = (
|
||||
provider: ModelsDev.Provider,
|
||||
) => Promise<Record<string, any> | false>
|
||||
api?: string,
|
||||
) => Promise<{
|
||||
autoload: boolean
|
||||
getModel?: (sdk: any, modelID: string) => Promise<any>
|
||||
options?: Record<string, any>
|
||||
}>
|
||||
|
||||
type Source = "env" | "config" | "custom" | "api"
|
||||
|
||||
const CUSTOM_LOADERS: Record<string, CustomLoader> = {
|
||||
async anthropic(provider) {
|
||||
const access = await AuthAnthropic.access()
|
||||
if (!access) return false
|
||||
if (!access) return { autoload: false }
|
||||
for (const model of Object.values(provider.models)) {
|
||||
model.cost = {
|
||||
input: 0,
|
||||
inputCached: 0,
|
||||
output: 0,
|
||||
outputCached: 0,
|
||||
}
|
||||
}
|
||||
return {
|
||||
apiKey: "",
|
||||
headers: {
|
||||
authorization: `Bearer ${access}`,
|
||||
"anthropic-beta": "oauth-2025-04-20",
|
||||
autoload: true,
|
||||
options: {
|
||||
apiKey: "",
|
||||
async fetch(input: any, init: any) {
|
||||
const access = await AuthAnthropic.access()
|
||||
const headers = {
|
||||
...init.headers,
|
||||
authorization: `Bearer ${access}`,
|
||||
"anthropic-beta": "oauth-2025-04-20",
|
||||
}
|
||||
delete headers["x-api-key"]
|
||||
return fetch(input, {
|
||||
...init,
|
||||
headers,
|
||||
})
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
"amazon-bedrock": async () => {
|
||||
if (!process.env["AWS_PROFILE"]) return false
|
||||
const { fromNodeProviderChain } = await import(
|
||||
await BunProc.install("@aws-sdk/credential-providers")
|
||||
)
|
||||
"github-copilot": async (provider) => {
|
||||
const copilot = await AuthCopilot()
|
||||
if (!copilot) return { autoload: false }
|
||||
let info = await Auth.get("github-copilot")
|
||||
if (!info || info.type !== "oauth") return { autoload: false }
|
||||
|
||||
if (provider && provider.models) {
|
||||
for (const model of Object.values(provider.models)) {
|
||||
model.cost = {
|
||||
input: 0,
|
||||
output: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
region: process.env["AWS_REGION"] ?? "us-east-1",
|
||||
credentialProvider: fromNodeProviderChain(),
|
||||
autoload: true,
|
||||
options: {
|
||||
apiKey: "",
|
||||
async fetch(input: any, init: any) {
|
||||
const info = await Auth.get("github-copilot")
|
||||
if (!info || info.type !== "oauth") return
|
||||
if (!info.access || info.expires < Date.now()) {
|
||||
const tokens = await copilot.access(info.refresh)
|
||||
if (!tokens) throw new Error("GitHub Copilot authentication expired")
|
||||
await Auth.set("github-copilot", {
|
||||
type: "oauth",
|
||||
...tokens,
|
||||
})
|
||||
info.access = tokens.access
|
||||
}
|
||||
let isAgentCall = false
|
||||
let isVisionRequest = false
|
||||
try {
|
||||
const body = typeof init.body === "string" ? JSON.parse(init.body) : init.body
|
||||
if (body?.messages) {
|
||||
isAgentCall = body.messages.some((msg: any) => msg.role && ["tool", "assistant"].includes(msg.role))
|
||||
isVisionRequest = body.messages.some(
|
||||
(msg: any) =>
|
||||
Array.isArray(msg.content) && msg.content.some((part: any) => part.type === "image_url"),
|
||||
)
|
||||
}
|
||||
} catch {}
|
||||
const headers: Record<string, string> = {
|
||||
...init.headers,
|
||||
...copilot.HEADERS,
|
||||
Authorization: `Bearer ${info.access}`,
|
||||
"Openai-Intent": "conversation-edits",
|
||||
"X-Initiator": isAgentCall ? "agent" : "user",
|
||||
}
|
||||
if (isVisionRequest) {
|
||||
headers["Copilot-Vision-Request"] = "true"
|
||||
}
|
||||
delete headers["x-api-key"]
|
||||
return fetch(input, {
|
||||
...init,
|
||||
headers,
|
||||
})
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
openai: async () => {
|
||||
return {
|
||||
autoload: false,
|
||||
async getModel(sdk: any, modelID: string) {
|
||||
return sdk.responses(modelID)
|
||||
},
|
||||
options: {},
|
||||
}
|
||||
},
|
||||
"amazon-bedrock": async () => {
|
||||
if (!process.env["AWS_PROFILE"] && !process.env["AWS_ACCESS_KEY_ID"] && !process.env["AWS_BEARER_TOKEN_BEDROCK"])
|
||||
return { autoload: false }
|
||||
|
||||
const region = process.env["AWS_REGION"] ?? "us-east-1"
|
||||
|
||||
const { fromNodeProviderChain } = await import(await BunProc.install("@aws-sdk/credential-providers"))
|
||||
return {
|
||||
autoload: true,
|
||||
options: {
|
||||
region,
|
||||
credentialProvider: fromNodeProviderChain(),
|
||||
},
|
||||
async getModel(sdk: any, modelID: string) {
|
||||
let regionPrefix = region.split("-")[0]
|
||||
|
||||
switch (regionPrefix) {
|
||||
case "us": {
|
||||
const modelRequiresPrefix = ["claude", "deepseek"].some((m) => modelID.includes(m))
|
||||
if (modelRequiresPrefix) {
|
||||
modelID = `${regionPrefix}.${modelID}`
|
||||
}
|
||||
break
|
||||
}
|
||||
case "eu": {
|
||||
const regionRequiresPrefix = [
|
||||
"eu-west-1",
|
||||
"eu-west-3",
|
||||
"eu-north-1",
|
||||
"eu-central-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
].some((r) => region.includes(r))
|
||||
const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "llama3", "pixtral"].some((m) =>
|
||||
modelID.includes(m),
|
||||
)
|
||||
if (regionRequiresPrefix && modelRequiresPrefix) {
|
||||
modelID = `${regionPrefix}.${modelID}`
|
||||
}
|
||||
break
|
||||
}
|
||||
case "ap": {
|
||||
const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) =>
|
||||
modelID.includes(m),
|
||||
)
|
||||
if (modelRequiresPrefix) {
|
||||
regionPrefix = "apac"
|
||||
modelID = `${regionPrefix}.${modelID}`
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return sdk.languageModel(modelID)
|
||||
},
|
||||
}
|
||||
},
|
||||
openrouter: async () => {
|
||||
return {
|
||||
autoload: false,
|
||||
options: {
|
||||
headers: {
|
||||
"HTTP-Referer": "https://opencode.ai/",
|
||||
"X-Title": "opencode",
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -73,43 +216,49 @@ export namespace Provider {
|
||||
[providerID: string]: {
|
||||
source: Source
|
||||
info: ModelsDev.Provider
|
||||
getModel?: (sdk: any, modelID: string) => Promise<any>
|
||||
options: Record<string, any>
|
||||
}
|
||||
} = {}
|
||||
const models = new Map<
|
||||
string,
|
||||
{ info: ModelsDev.Model; language: LanguageModel }
|
||||
>()
|
||||
const models = new Map<string, { info: ModelsDev.Model; language: LanguageModel }>()
|
||||
const sdk = new Map<string, SDK>()
|
||||
|
||||
log.info("loading")
|
||||
log.info("init")
|
||||
|
||||
function mergeProvider(
|
||||
id: string,
|
||||
options: Record<string, any>,
|
||||
source: Source,
|
||||
getModel?: (sdk: any, modelID: string) => Promise<any>,
|
||||
) {
|
||||
const provider = providers[id]
|
||||
if (!provider) {
|
||||
const info = database[id]
|
||||
if (!info) return
|
||||
if (info.api && !options["baseURL"]) options["baseURL"] = info.api
|
||||
providers[id] = {
|
||||
source,
|
||||
info: database[id],
|
||||
info,
|
||||
options,
|
||||
getModel,
|
||||
}
|
||||
return
|
||||
}
|
||||
provider.options = mergeDeep(provider.options, options)
|
||||
provider.source = source
|
||||
provider.getModel = getModel ?? provider.getModel
|
||||
}
|
||||
|
||||
for (const [providerID, provider] of Object.entries(
|
||||
config.provider ?? {},
|
||||
)) {
|
||||
const configProviders = Object.entries(config.provider ?? {})
|
||||
|
||||
for (const [providerID, provider] of configProviders) {
|
||||
const existing = database[providerID]
|
||||
const parsed: ModelsDev.Provider = {
|
||||
id: providerID,
|
||||
npm: provider.npm ?? existing?.npm,
|
||||
name: provider.name ?? existing?.name ?? providerID,
|
||||
env: provider.env ?? existing?.env ?? [],
|
||||
api: provider.api ?? existing?.api,
|
||||
models: existing?.models ?? {},
|
||||
}
|
||||
|
||||
@@ -118,16 +267,29 @@ export namespace Provider {
|
||||
const parsedModel: ModelsDev.Model = {
|
||||
id: modelID,
|
||||
name: model.name ?? existing?.name ?? modelID,
|
||||
release_date: model.release_date ?? existing?.release_date,
|
||||
attachment: model.attachment ?? existing?.attachment ?? false,
|
||||
reasoning: model.reasoning ?? existing?.reasoning ?? false,
|
||||
temperature: model.temperature ?? existing?.temperature ?? false,
|
||||
cost: model.cost ??
|
||||
existing?.cost ?? {
|
||||
input: 0,
|
||||
output: 0,
|
||||
inputCached: 0,
|
||||
outputCached: 0,
|
||||
},
|
||||
tool_call: model.tool_call ?? existing?.tool_call ?? true,
|
||||
cost:
|
||||
!model.cost && !existing?.cost
|
||||
? {
|
||||
input: 0,
|
||||
output: 0,
|
||||
cache_read: 0,
|
||||
cache_write: 0,
|
||||
}
|
||||
: {
|
||||
cache_read: 0,
|
||||
cache_write: 0,
|
||||
...existing?.cost,
|
||||
...model.cost,
|
||||
},
|
||||
options: {
|
||||
...existing?.options,
|
||||
...model.options,
|
||||
},
|
||||
limit: model.limit ??
|
||||
existing?.limit ?? {
|
||||
context: 0,
|
||||
@@ -139,15 +301,23 @@ export namespace Provider {
|
||||
database[providerID] = parsed
|
||||
}
|
||||
|
||||
const disabled = await Config.get().then((cfg) => new Set(cfg.disabled_providers ?? []))
|
||||
// load env
|
||||
for (const [providerID, provider] of Object.entries(database)) {
|
||||
if (provider.env.some((item) => process.env[item])) {
|
||||
mergeProvider(providerID, {}, "env")
|
||||
}
|
||||
if (disabled.has(providerID)) continue
|
||||
const apiKey = provider.env.map((item) => process.env[item]).at(0)
|
||||
if (!apiKey) continue
|
||||
mergeProvider(
|
||||
providerID,
|
||||
// only include apiKey if there's only one potential option
|
||||
provider.env.length === 1 ? { apiKey } : {},
|
||||
"env",
|
||||
)
|
||||
}
|
||||
|
||||
// load apikeys
|
||||
for (const [providerID, provider] of Object.entries(await Auth.all())) {
|
||||
if (disabled.has(providerID)) continue
|
||||
if (provider.type === "api") {
|
||||
mergeProvider(providerID, { apiKey: provider.key }, "api")
|
||||
}
|
||||
@@ -155,19 +325,24 @@ export namespace Provider {
|
||||
|
||||
// load custom
|
||||
for (const [providerID, fn] of Object.entries(CUSTOM_LOADERS)) {
|
||||
if (disabled.has(providerID)) continue
|
||||
const result = await fn(database[providerID])
|
||||
if (result) mergeProvider(providerID, result, "custom")
|
||||
if (result && (result.autoload || providers[providerID])) {
|
||||
mergeProvider(providerID, result.options ?? {}, "custom", result.getModel)
|
||||
}
|
||||
}
|
||||
|
||||
// load config
|
||||
for (const [providerID, provider] of Object.entries(
|
||||
config.provider ?? {},
|
||||
)) {
|
||||
for (const [providerID, provider] of configProviders) {
|
||||
mergeProvider(providerID, provider.options ?? {}, "config")
|
||||
}
|
||||
|
||||
for (const providerID of Object.keys(providers)) {
|
||||
log.info("loaded", { providerID })
|
||||
for (const [providerID, provider] of Object.entries(providers)) {
|
||||
if (Object.keys(provider.info.models).length === 0) {
|
||||
delete providers[providerID]
|
||||
continue
|
||||
}
|
||||
log.info("found", { providerID })
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -181,19 +356,25 @@ export namespace Provider {
|
||||
return state().then((state) => state.providers)
|
||||
}
|
||||
|
||||
async function getSDK(providerID: string) {
|
||||
async function getSDK(provider: ModelsDev.Provider) {
|
||||
return (async () => {
|
||||
using _ = log.time("getSDK", {
|
||||
providerID: provider.id,
|
||||
})
|
||||
const s = await state()
|
||||
const existing = s.sdk.get(providerID)
|
||||
const existing = s.sdk.get(provider.id)
|
||||
if (existing) return existing
|
||||
const [pkg, version] = await ModelsDev.pkg(providerID)
|
||||
const mod = await import(await BunProc.install(pkg, version))
|
||||
const pkg = provider.npm ?? provider.id
|
||||
const mod = await import(await BunProc.install(pkg, "beta"))
|
||||
const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!]
|
||||
const loaded = fn(s.providers[providerID]?.options)
|
||||
s.sdk.set(providerID, loaded)
|
||||
const loaded = fn({
|
||||
name: provider.id,
|
||||
...s.providers[provider.id]?.options,
|
||||
})
|
||||
s.sdk.set(provider.id, loaded)
|
||||
return loaded as SDK
|
||||
})().catch((e) => {
|
||||
throw new InitError({ providerID: providerID }, { cause: e })
|
||||
throw new InitError({ providerID: provider.id }, { cause: e })
|
||||
})
|
||||
}
|
||||
|
||||
@@ -202,7 +383,7 @@ export namespace Provider {
|
||||
const s = await state()
|
||||
if (s.models.has(key)) return s.models.get(key)!
|
||||
|
||||
log.info("loading", {
|
||||
log.info("getModel", {
|
||||
providerID,
|
||||
modelID,
|
||||
})
|
||||
@@ -211,13 +392,10 @@ export namespace Provider {
|
||||
if (!provider) throw new ModelNotFoundError({ providerID, modelID })
|
||||
const info = provider.info.models[modelID]
|
||||
if (!info) throw new ModelNotFoundError({ providerID, modelID })
|
||||
|
||||
const sdk = await getSDK(providerID)
|
||||
const sdk = await getSDK(provider.info)
|
||||
|
||||
try {
|
||||
const language =
|
||||
// @ts-expect-error
|
||||
"responses" in sdk ? sdk.responses(modelID) : sdk.languageModel(modelID)
|
||||
const language = provider.getModel ? await provider.getModel(sdk, modelID) : sdk.languageModel(modelID)
|
||||
log.info("found", { providerID, modelID })
|
||||
s.models.set(key, {
|
||||
info,
|
||||
@@ -240,21 +418,40 @@ export namespace Provider {
|
||||
}
|
||||
}
|
||||
|
||||
export async function getSmallModel(providerID: string) {
|
||||
const cfg = await Config.get()
|
||||
|
||||
if (cfg.small_model) {
|
||||
const parsed = parseModel(cfg.small_model)
|
||||
return getModel(parsed.providerID, parsed.modelID)
|
||||
}
|
||||
|
||||
const provider = await state().then((state) => state.providers[providerID])
|
||||
if (!provider) return
|
||||
const priority = ["3-5-haiku", "3.5-haiku", "gemini-2.5-flash"]
|
||||
for (const item of priority) {
|
||||
for (const model of Object.keys(provider.info.models)) {
|
||||
if (model.includes(item)) return getModel(providerID, model)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const priority = ["gemini-2.5-pro-preview", "codex-mini", "claude-sonnet-4"]
|
||||
export function sort(models: ModelsDev.Model[]) {
|
||||
return sortBy(
|
||||
models,
|
||||
[
|
||||
(model) => priority.findIndex((filter) => model.id.includes(filter)),
|
||||
"desc",
|
||||
],
|
||||
[(model) => priority.findIndex((filter) => model.id.includes(filter)), "desc"],
|
||||
[(model) => (model.id.includes("latest") ? 0 : 1), "asc"],
|
||||
[(model) => model.id, "desc"],
|
||||
)
|
||||
}
|
||||
|
||||
export async function defaultModel() {
|
||||
const [provider] = await list().then((val) => Object.values(val))
|
||||
const cfg = await Config.get()
|
||||
if (cfg.model) return parseModel(cfg.model)
|
||||
const provider = await list()
|
||||
.then((val) => Object.values(val))
|
||||
.then((x) => x.find((p) => !cfg.provider || Object.keys(cfg.provider).includes(p.info.id)))
|
||||
if (!provider) throw new Error("no providers found")
|
||||
const [model] = sort(Object.values(provider.info.models))
|
||||
if (!model) throw new Error("no models found")
|
||||
@@ -264,6 +461,14 @@ export namespace Provider {
|
||||
}
|
||||
}
|
||||
|
||||
export function parseModel(model: string) {
|
||||
const [providerID, ...rest] = model.split("/")
|
||||
return {
|
||||
providerID: providerID,
|
||||
modelID: rest.join("/"),
|
||||
}
|
||||
}
|
||||
|
||||
const TOOLS = [
|
||||
BashTool,
|
||||
EditTool,
|
||||
@@ -271,22 +476,33 @@ export namespace Provider {
|
||||
GlobTool,
|
||||
GrepTool,
|
||||
ListTool,
|
||||
LspDiagnosticTool,
|
||||
LspHoverTool,
|
||||
// LspDiagnosticTool,
|
||||
// LspHoverTool,
|
||||
PatchTool,
|
||||
ReadTool,
|
||||
EditTool,
|
||||
// MultiEditTool,
|
||||
WriteTool,
|
||||
TodoWriteTool,
|
||||
TaskTool,
|
||||
TodoReadTool,
|
||||
TaskTool,
|
||||
]
|
||||
|
||||
const TOOL_MAPPING: Record<string, Tool.Info[]> = {
|
||||
anthropic: TOOLS.filter((t) => t.id !== "opencode.patch"),
|
||||
openai: TOOLS,
|
||||
google: TOOLS,
|
||||
anthropic: TOOLS.filter((t) => t.id !== "patch"),
|
||||
openai: TOOLS.map((t) => ({
|
||||
...t,
|
||||
parameters: optionalToNullable(t.parameters),
|
||||
})),
|
||||
azure: TOOLS.map((t) => ({
|
||||
...t,
|
||||
parameters: optionalToNullable(t.parameters),
|
||||
})),
|
||||
google: TOOLS.map((t) => ({
|
||||
...t,
|
||||
parameters: sanitizeGeminiParameters(t.parameters),
|
||||
})),
|
||||
}
|
||||
|
||||
export async function tools(providerID: string) {
|
||||
/*
|
||||
const cfg = await Config.get()
|
||||
@@ -298,6 +514,94 @@ export namespace Provider {
|
||||
return TOOL_MAPPING[providerID] ?? TOOLS
|
||||
}
|
||||
|
||||
function sanitizeGeminiParameters(schema: z.ZodTypeAny, visited = new Set()): z.ZodTypeAny {
|
||||
if (!schema || visited.has(schema)) {
|
||||
return schema
|
||||
}
|
||||
visited.add(schema)
|
||||
|
||||
if (schema instanceof z.ZodDefault) {
|
||||
const innerSchema = schema.removeDefault()
|
||||
// Handle Gemini's incompatibility with `default` on `anyOf` (unions).
|
||||
if (innerSchema instanceof z.ZodUnion) {
|
||||
// The schema was `z.union(...).default(...)`, which is not allowed.
|
||||
// We strip the default and return the sanitized union.
|
||||
return sanitizeGeminiParameters(innerSchema, visited)
|
||||
}
|
||||
// Otherwise, the default is on a regular type, which is allowed.
|
||||
// We recurse on the inner type and then re-apply the default.
|
||||
return sanitizeGeminiParameters(innerSchema, visited).default(schema._def.defaultValue())
|
||||
}
|
||||
|
||||
if (schema instanceof z.ZodOptional) {
|
||||
return z.optional(sanitizeGeminiParameters(schema.unwrap(), visited))
|
||||
}
|
||||
|
||||
if (schema instanceof z.ZodObject) {
|
||||
const newShape: Record<string, z.ZodTypeAny> = {}
|
||||
for (const [key, value] of Object.entries(schema.shape)) {
|
||||
newShape[key] = sanitizeGeminiParameters(value as z.ZodTypeAny, visited)
|
||||
}
|
||||
return z.object(newShape)
|
||||
}
|
||||
|
||||
if (schema instanceof z.ZodArray) {
|
||||
return z.array(sanitizeGeminiParameters(schema.element, visited))
|
||||
}
|
||||
|
||||
if (schema instanceof z.ZodUnion) {
|
||||
// This schema corresponds to `anyOf` in JSON Schema.
|
||||
// We recursively sanitize each option in the union.
|
||||
const sanitizedOptions = schema.options.map((option: z.ZodTypeAny) => sanitizeGeminiParameters(option, visited))
|
||||
return z.union(sanitizedOptions as [z.ZodTypeAny, z.ZodTypeAny, ...z.ZodTypeAny[]])
|
||||
}
|
||||
|
||||
if (schema instanceof z.ZodString) {
|
||||
const newSchema = z.string({ description: schema.description })
|
||||
const safeChecks = ["min", "max", "length", "regex", "startsWith", "endsWith", "includes", "trim"]
|
||||
// rome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||
;(newSchema._def as any).checks = (schema._def as z.ZodStringDef).checks.filter((check) =>
|
||||
safeChecks.includes(check.kind),
|
||||
)
|
||||
return newSchema
|
||||
}
|
||||
|
||||
return schema
|
||||
}
|
||||
function optionalToNullable(schema: z.ZodTypeAny): z.ZodTypeAny {
|
||||
if (schema instanceof z.ZodObject) {
|
||||
const shape = schema.shape
|
||||
const newShape: Record<string, z.ZodTypeAny> = {}
|
||||
|
||||
for (const [key, value] of Object.entries(shape)) {
|
||||
const zodValue = value as z.ZodTypeAny
|
||||
if (zodValue instanceof z.ZodOptional) {
|
||||
newShape[key] = zodValue.unwrap().nullable()
|
||||
} else {
|
||||
newShape[key] = optionalToNullable(zodValue)
|
||||
}
|
||||
}
|
||||
|
||||
return z.object(newShape)
|
||||
}
|
||||
|
||||
if (schema instanceof z.ZodArray) {
|
||||
return z.array(optionalToNullable(schema.element))
|
||||
}
|
||||
|
||||
if (schema instanceof z.ZodUnion) {
|
||||
return z.union(
|
||||
schema.options.map((option: z.ZodTypeAny) => optionalToNullable(option)) as [
|
||||
z.ZodTypeAny,
|
||||
z.ZodTypeAny,
|
||||
...z.ZodTypeAny[],
|
||||
],
|
||||
)
|
||||
}
|
||||
|
||||
return schema
|
||||
}
|
||||
|
||||
export const ModelNotFoundError = NamedError.create(
|
||||
"ProviderModelNotFoundError",
|
||||
z.object({
|
||||
@@ -312,12 +616,4 @@ export namespace Provider {
|
||||
providerID: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const AuthError = NamedError.create(
|
||||
"ProviderAuthError",
|
||||
z.object({
|
||||
providerID: z.string(),
|
||||
message: z.string(),
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
packages/opencode/src/provider/transform.ts (new file, +30)
@@ -0,0 +1,30 @@
import type { ModelMessage } from "ai"
import { unique } from "remeda"

export namespace ProviderTransform {
export function message(msgs: ModelMessage[], providerID: string, modelID: string) {
if (providerID === "anthropic" || modelID.includes("anthropic")) {
const system = msgs.filter((msg) => msg.role === "system").slice(0, 2)
const final = msgs.filter((msg) => msg.role !== "system").slice(-2)

for (const msg of unique([...system, ...final])) {
msg.providerOptions = {
...msg.providerOptions,
anthropic: {
cacheControl: { type: "ephemeral" },
},
openrouter: {
cache_control: { type: "ephemeral" },
},
bedrock: {
cachePoint: { type: "ephemeral" },
},
openaiCompatible: {
cache_control: { type: "ephemeral" },
},
}
}
}
return msgs
}
}
@@ -6,14 +6,17 @@ import { streamSSE } from "hono/streaming"
|
||||
import { Session } from "../session"
|
||||
import { resolver, validator as zValidator } from "hono-openapi/zod"
|
||||
import { z } from "zod"
|
||||
import { Message } from "../session/message"
|
||||
import { Provider } from "../provider/provider"
|
||||
import { App } from "../app/app"
|
||||
import { Global } from "../global"
|
||||
import { mapValues } from "remeda"
|
||||
import { NamedError } from "../util/error"
|
||||
import { Fzf } from "../external/fzf"
|
||||
import { ModelsDev } from "../provider/models"
|
||||
import { Ripgrep } from "../file/ripgrep"
|
||||
import { Config } from "../config/config"
|
||||
import { File } from "../file"
|
||||
import { LSP } from "../lsp"
|
||||
import { MessageV2 } from "../session/message-v2"
|
||||
import { Mode } from "../session/mode"
|
||||
|
||||
const ERRORS = {
|
||||
400: {
|
||||
@@ -49,12 +52,9 @@ export namespace Server {
|
||||
status: 400,
|
||||
})
|
||||
}
|
||||
return c.json(
|
||||
new NamedError.Unknown({ message: err.toString() }).toObject(),
|
||||
{
|
||||
status: 400,
|
||||
},
|
||||
)
|
||||
return c.json(new NamedError.Unknown({ message: err.toString() }).toObject(), {
|
||||
status: 400,
|
||||
})
|
||||
})
|
||||
.use(async (c, next) => {
|
||||
log.info("request", {
|
||||
@@ -68,12 +68,12 @@ export namespace Server {
|
||||
})
|
||||
})
|
||||
.get(
|
||||
"/openapi",
|
||||
"/doc",
|
||||
openAPISpecs(app, {
|
||||
documentation: {
|
||||
info: {
|
||||
title: "opencode",
|
||||
version: "1.0.0",
|
||||
version: "0.0.3",
|
||||
description: "opencode api",
|
||||
},
|
||||
openapi: "3.0.0",
|
||||
@@ -120,8 +120,8 @@ export namespace Server {
|
||||
})
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/app_info",
|
||||
.get(
|
||||
"/app",
|
||||
describeRoute({
|
||||
description: "Get app info",
|
||||
responses: {
|
||||
@@ -140,7 +140,7 @@ export namespace Server {
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/app_initialize",
|
||||
"/app/init",
|
||||
describeRoute({
|
||||
description: "Initialize the app",
|
||||
responses: {
|
||||
@@ -159,144 +159,27 @@ export namespace Server {
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session_initialize",
|
||||
.get(
|
||||
"/config",
|
||||
describeRoute({
|
||||
description: "Analyze the app and create an AGENTS.md file",
|
||||
description: "Get config info",
|
||||
responses: {
|
||||
200: {
|
||||
description: "200",
|
||||
description: "Get config info",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"json",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
providerID: z.string(),
|
||||
modelID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
await Session.initialize(body)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/path_get",
|
||||
describeRoute({
|
||||
description: "Get paths",
|
||||
responses: {
|
||||
200: {
|
||||
description: "200",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
root: z.string(),
|
||||
data: z.string(),
|
||||
cwd: z.string(),
|
||||
config: z.string(),
|
||||
}),
|
||||
),
|
||||
schema: resolver(Config.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const app = App.info()
|
||||
return c.json({
|
||||
root: app.path.root,
|
||||
data: app.path.data,
|
||||
cwd: app.path.cwd,
|
||||
config: Global.Path.data,
|
||||
})
|
||||
return c.json(await Config.get())
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session_create",
|
||||
describeRoute({
|
||||
description: "Create a new session",
|
||||
responses: {
|
||||
...ERRORS,
|
||||
200: {
|
||||
description: "Successfully created session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const session = await Session.create()
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session_share",
|
||||
describeRoute({
|
||||
description: "Share the session",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully shared session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"json",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
await Session.share(body.sessionID)
|
||||
const session = await Session.get(body.sessionID)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session_messages",
|
||||
describeRoute({
|
||||
description: "Get messages for a session",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully created session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Message.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"json",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const messages = await Session.messages(c.req.valid("json").sessionID)
|
||||
return c.json(messages)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session_list",
|
||||
.get(
|
||||
"/session",
|
||||
describeRoute({
|
||||
description: "List all sessions",
|
||||
responses: {
|
||||
@@ -316,7 +199,90 @@ export namespace Server {
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session_abort",
|
||||
"/session",
|
||||
describeRoute({
|
||||
description: "Create a new session",
|
||||
responses: {
|
||||
...ERRORS,
|
||||
200: {
|
||||
description: "Successfully created session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const session = await Session.create()
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/session/:id",
|
||||
describeRoute({
|
||||
description: "Delete a session and all its data",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully deleted session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"param",
|
||||
z.object({
|
||||
id: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
await Session.remove(c.req.valid("param").id)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session/:id/init",
|
||||
describeRoute({
|
||||
description: "Analyze the app and create an AGENTS.md file",
|
||||
responses: {
|
||||
200: {
|
||||
description: "200",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"param",
|
||||
z.object({
|
||||
id: z.string().openapi({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
zValidator(
|
||||
"json",
|
||||
z.object({
|
||||
messageID: z.string(),
|
||||
providerID: z.string(),
|
||||
modelID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").id
|
||||
const body = c.req.valid("json")
|
||||
await Session.initialize({ ...body, sessionID })
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session/:id/abort",
|
||||
describeRoute({
|
||||
description: "Abort a session",
|
||||
responses: {
|
||||
@@ -331,23 +297,78 @@ export namespace Server {
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"json",
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
id: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
return c.json(Session.abort(body.sessionID))
|
||||
return c.json(Session.abort(c.req.valid("param").id))
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session_summarize",
|
||||
"/session/:id/share",
|
||||
describeRoute({
|
||||
description: "Share a session",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully shared session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"param",
|
||||
z.object({
|
||||
id: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const id = c.req.valid("param").id
|
||||
await Session.share(id)
|
||||
const session = await Session.get(id)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/session/:id/share",
|
||||
describeRoute({
|
||||
description: "Unshare the session",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully unshared session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"param",
|
||||
z.object({
|
||||
id: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const id = c.req.valid("param").id
|
||||
await Session.unshare(id)
|
||||
const session = await Session.get(id)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session/:id/summarize",
|
||||
describeRoute({
|
||||
description: "Summarize the session",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Summarize the session",
|
||||
description: "Summarized session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
@@ -356,52 +377,90 @@ export namespace Server {
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"param",
|
||||
z.object({
|
||||
id: z.string().openapi({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
zValidator(
|
||||
"json",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
providerID: z.string(),
|
||||
modelID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const id = c.req.valid("param").id
|
||||
const body = c.req.valid("json")
|
||||
await Session.summarize(body)
|
||||
await Session.summarize({ ...body, sessionID: id })
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/session_chat",
|
||||
.get(
|
||||
"/session/:id/message",
|
||||
describeRoute({
|
||||
description: "Chat with a model",
|
||||
description: "List messages for a session",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Chat with a model",
|
||||
description: "List of messages",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Message.Info),
|
||||
schema: resolver(
|
||||
z
|
||||
.object({
|
||||
info: MessageV2.Info,
|
||||
parts: MessageV2.Part.array(),
|
||||
})
|
||||
.array(),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"json",
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
providerID: z.string(),
|
||||
modelID: z.string(),
|
||||
parts: Message.Part.array(),
|
||||
id: z.string().openapi({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
const msg = await Session.chat(body)
|
||||
return c.json(msg)
|
||||
const messages = await Session.messages(c.req.valid("param").id)
|
||||
return c.json(messages)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/provider_list",
|
||||
"/session/:id/message",
|
||||
describeRoute({
|
||||
description: "Create and send a new message to a session",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Created message",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MessageV2.Assistant),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"param",
|
||||
z.object({
|
||||
id: z.string().openapi({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
zValidator("json", Session.ChatInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").id
|
||||
const body = c.req.valid("json")
|
||||
const msg = await Session.chat({ ...body, sessionID })
|
||||
return c.json(msg)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/config/providers",
|
||||
describeRoute({
|
||||
description: "List all providers",
|
||||
responses: {
|
||||
@@ -421,25 +480,52 @@ export namespace Server {
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const providers = await Provider.list().then((x) =>
|
||||
mapValues(x, (item) => item.info),
|
||||
)
|
||||
const providers = await Provider.list().then((x) => mapValues(x, (item) => item.info))
|
||||
return c.json({
|
||||
providers: Object.values(providers),
|
||||
default: mapValues(
|
||||
providers,
|
||||
(item) => Provider.sort(Object.values(item.models))[0].id,
|
||||
),
|
||||
default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id),
|
||||
})
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/file_search",
|
||||
.get(
|
||||
"/find",
|
||||
describeRoute({
|
||||
description: "Search for files",
|
||||
description: "Find text in files",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Search for files",
|
||||
description: "Matches",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Ripgrep.Match.shape.data.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"query",
|
||||
z.object({
|
||||
pattern: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const app = App.info()
|
||||
const pattern = c.req.valid("query").pattern
|
||||
const result = await Ripgrep.search({
|
||||
cwd: app.path.cwd,
|
||||
pattern,
|
||||
limit: 10,
|
||||
})
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/find/file",
|
||||
describeRoute({
|
||||
description: "Find files",
|
||||
responses: {
|
||||
200: {
|
||||
description: "File paths",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.string().array()),
|
||||
@@ -449,18 +535,174 @@ export namespace Server {
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"json",
|
||||
"query",
|
||||
z.object({
|
||||
query: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
const query = c.req.valid("query").query
|
||||
const app = App.info()
|
||||
const result = await Fzf.search(app.path.cwd, body.query)
|
||||
const result = await Ripgrep.files({
|
||||
cwd: app.path.cwd,
|
||||
query,
|
||||
limit: 10,
|
||||
})
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/find/symbol",
|
||||
describeRoute({
|
||||
description: "Find workspace symbols",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Symbols",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(LSP.Symbol.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"query",
|
||||
z.object({
|
||||
query: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const query = c.req.valid("query").query
|
||||
const result = await LSP.workspaceSymbol(query)
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/file",
|
||||
describeRoute({
|
||||
description: "Read a file",
|
||||
responses: {
|
||||
200: {
|
||||
description: "File content",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
type: z.enum(["raw", "patch"]),
|
||||
content: z.string(),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"query",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const path = c.req.valid("query").path
|
||||
const content = await File.read(path)
|
||||
log.info("read file", {
|
||||
path,
|
||||
content: content.content,
|
||||
})
|
||||
return c.json(content)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/file/status",
|
||||
describeRoute({
|
||||
description: "Get file status",
|
||||
responses: {
|
||||
200: {
|
||||
description: "File status",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(File.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const content = await File.status()
|
||||
return c.json(content)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/log",
|
||||
describeRoute({
|
||||
description: "Write a log entry to the server logs",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Log entry written successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
zValidator(
|
||||
"json",
|
||||
z.object({
|
||||
service: z.string().openapi({ description: "Service name for the log entry" }),
|
||||
level: z.enum(["debug", "info", "error", "warn"]).openapi({ description: "Log level" }),
|
||||
message: z.string().openapi({ description: "Log message" }),
|
||||
extra: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.openapi({ description: "Additional metadata for the log entry" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const { service, level, message, extra } = c.req.valid("json")
|
||||
const logger = Log.create({ service })
|
||||
|
||||
switch (level) {
|
||||
case "debug":
|
||||
logger.debug(message, extra)
|
||||
break
|
||||
case "info":
|
||||
logger.info(message, extra)
|
||||
break
|
||||
case "error":
|
||||
logger.error(message, extra)
|
||||
break
|
||||
case "warn":
|
||||
logger.warn(message, extra)
|
||||
break
|
||||
}
|
||||
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/mode",
|
||||
describeRoute({
|
||||
description: "List all modes",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of modes",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Mode.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const modes = await Mode.list()
|
||||
return c.json(modes)
|
||||
},
|
||||
)
|
||||
|
||||
return result
|
||||
}
|
||||
@@ -480,10 +722,10 @@ export namespace Server {
|
||||
return result
|
||||
}
|
||||
|
||||
export function listen() {
|
||||
export function listen(opts: { port: number; hostname: string }) {
|
||||
const server = Bun.serve({
|
||||
port: 0,
|
||||
hostname: "0.0.0.0",
|
||||
port: opts.port,
|
||||
hostname: opts.hostname,
|
||||
idleTimeout: 0,
|
||||
fetch: app().fetch,
|
||||
})
|
||||
|
||||
@@ -1,19 +0,0 @@
import { App } from "../app/app"
import { Filesystem } from "../util/filesystem"

export namespace SessionContext {
const FILES = [
"AGENTS.md",
"CLAUDE.md",
"CONTEXT.md", // deprecated
]
export async function find() {
const { cwd, root } = App.info().path
const found = []
for (const item of FILES) {
const matches = await Filesystem.findUp(item, cwd, root)
found.push(...matches.map((x) => Bun.file(x).text()))
}
return Promise.all(found).then((parts) => parts.join("\n\n"))
}
}
File diff suppressed because it is too large
packages/opencode/src/session/message-v2.ts (new file, +496)
@@ -0,0 +1,496 @@
import z from "zod"
import { Bus } from "../bus"
import { NamedError } from "../util/error"
import { Message } from "./message"
import { convertToModelMessages, type ModelMessage, type UIMessage } from "ai"
import { Identifier } from "../id/id"
import { LSP } from "../lsp"

export namespace MessageV2 {
export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({}))
export const AbortedError = NamedError.create("MessageAbortedError", z.object({}))
export const AuthError = NamedError.create(
"ProviderAuthError",
z.object({
providerID: z.string(),
message: z.string(),
}),
)

export const ToolStatePending = z
.object({
status: z.literal("pending"),
})
.openapi({
ref: "ToolStatePending",
})

export type ToolStatePending = z.infer<typeof ToolStatePending>

export const ToolStateRunning = z
.object({
status: z.literal("running"),
input: z.any(),
title: z.string().optional(),
metadata: z.record(z.any()).optional(),
time: z.object({
start: z.number(),
}),
})
.openapi({
ref: "ToolStateRunning",
})
export type ToolStateRunning = z.infer<typeof ToolStateRunning>

export const ToolStateCompleted = z
.object({
status: z.literal("completed"),
input: z.record(z.any()),
output: z.string(),
title: z.string(),
metadata: z.record(z.any()),
time: z.object({
start: z.number(),
end: z.number(),
}),
})
.openapi({
ref: "ToolStateCompleted",
})
export type ToolStateCompleted = z.infer<typeof ToolStateCompleted>

export const ToolStateError = z
.object({
status: z.literal("error"),
input: z.record(z.any()),
error: z.string(),
time: z.object({
start: z.number(),
end: z.number(),
}),
})
.openapi({
ref: "ToolStateError",
})
export type ToolStateError = z.infer<typeof ToolStateError>

export const ToolState = z
.discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError])
.openapi({
ref: "ToolState",
})

const PartBase = z.object({
id: z.string(),
sessionID: z.string(),
messageID: z.string(),
})

export const SnapshotPart = PartBase.extend({
type: z.literal("snapshot"),
snapshot: z.string(),
}).openapi({
ref: "SnapshotPart",
})
export type SnapshotPart = z.infer<typeof SnapshotPart>

export const TextPart = PartBase.extend({
type: z.literal("text"),
text: z.string(),
synthetic: z.boolean().optional(),
time: z
.object({
start: z.number(),
end: z.number().optional(),
})
.optional(),
}).openapi({
ref: "TextPart",
})
export type TextPart = z.infer<typeof TextPart>

export const ToolPart = PartBase.extend({
type: z.literal("tool"),
callID: z.string(),
tool: z.string(),
state: ToolState,
}).openapi({
ref: "ToolPart",
})
export type ToolPart = z.infer<typeof ToolPart>

const FilePartSourceBase = z.object({
text: z
.object({
value: z.string(),
start: z.number().int(),
end: z.number().int(),
})
.openapi({
ref: "FilePartSourceText",
}),
})

export const FileSource = FilePartSourceBase.extend({
type: z.literal("file"),
path: z.string(),
}).openapi({
ref: "FileSource",
})

export const SymbolSource = FilePartSourceBase.extend({
type: z.literal("symbol"),
path: z.string(),
range: LSP.Range,
name: z.string(),
kind: z.number().int(),
}).openapi({
ref: "SymbolSource",
})

export const FilePartSource = z.discriminatedUnion("type", [FileSource, SymbolSource]).openapi({
ref: "FilePartSource",
})

export const FilePart = PartBase.extend({
type: z.literal("file"),
mime: z.string(),
filename: z.string().optional(),
url: z.string(),
source: FilePartSource.optional(),
}).openapi({
ref: "FilePart",
})
export type FilePart = z.infer<typeof FilePart>

export const StepStartPart = PartBase.extend({
type: z.literal("step-start"),
}).openapi({
ref: "StepStartPart",
})
export type StepStartPart = z.infer<typeof StepStartPart>

export const StepFinishPart = PartBase.extend({
type: z.literal("step-finish"),
cost: z.number(),
tokens: z.object({
input: z.number(),
output: z.number(),
reasoning: z.number(),
cache: z.object({
read: z.number(),
write: z.number(),
}),
}),
}).openapi({
ref: "StepFinishPart",
})
|
||||
export type StepFinishPart = z.infer<typeof StepFinishPart>
|
||||
|
||||
const Base = z.object({
|
||||
id: z.string(),
|
||||
sessionID: z.string(),
|
||||
})
|
||||
|
||||
export const User = Base.extend({
|
||||
role: z.literal("user"),
|
||||
time: z.object({
|
||||
created: z.number(),
|
||||
}),
|
||||
}).openapi({
|
||||
ref: "UserMessage",
|
||||
})
|
||||
export type User = z.infer<typeof User>
|
||||
|
||||
export const Part = z
|
||||
.discriminatedUnion("type", [TextPart, FilePart, ToolPart, StepStartPart, StepFinishPart, SnapshotPart])
|
||||
.openapi({
|
||||
ref: "Part",
|
||||
})
|
||||
export type Part = z.infer<typeof Part>
|
||||
|
||||
export const Assistant = Base.extend({
|
||||
role: z.literal("assistant"),
|
||||
time: z.object({
|
||||
created: z.number(),
|
||||
completed: z.number().optional(),
|
||||
}),
|
||||
error: z
|
||||
.discriminatedUnion("name", [
|
||||
AuthError.Schema,
|
||||
NamedError.Unknown.Schema,
|
||||
OutputLengthError.Schema,
|
||||
AbortedError.Schema,
|
||||
])
|
||||
.optional(),
|
||||
system: z.string().array(),
|
||||
modelID: z.string(),
|
||||
providerID: z.string(),
|
||||
path: z.object({
|
||||
cwd: z.string(),
|
||||
root: z.string(),
|
||||
}),
|
||||
summary: z.boolean().optional(),
|
||||
cost: z.number(),
|
||||
tokens: z.object({
|
||||
input: z.number(),
|
||||
output: z.number(),
|
||||
reasoning: z.number(),
|
||||
cache: z.object({
|
||||
read: z.number(),
|
||||
write: z.number(),
|
||||
}),
|
||||
}),
|
||||
}).openapi({
|
||||
ref: "AssistantMessage",
|
||||
})
|
||||
export type Assistant = z.infer<typeof Assistant>
|
||||
|
||||
export const Info = z.discriminatedUnion("role", [User, Assistant]).openapi({
|
||||
ref: "Message",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export const Event = {
|
||||
Updated: Bus.event(
|
||||
"message.updated",
|
||||
z.object({
|
||||
info: Info,
|
||||
}),
|
||||
),
|
||||
Removed: Bus.event(
|
||||
"message.removed",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
messageID: z.string(),
|
||||
}),
|
||||
),
|
||||
PartUpdated: Bus.event(
|
||||
"message.part.updated",
|
||||
z.object({
|
||||
part: Part,
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
export function fromV1(v1: Message.Info) {
|
||||
if (v1.role === "assistant") {
|
||||
const info: Assistant = {
|
||||
id: v1.id,
|
||||
sessionID: v1.metadata.sessionID,
|
||||
role: "assistant",
|
||||
time: {
|
||||
created: v1.metadata.time.created,
|
||||
completed: v1.metadata.time.completed,
|
||||
},
|
||||
cost: v1.metadata.assistant!.cost,
|
||||
path: v1.metadata.assistant!.path,
|
||||
summary: v1.metadata.assistant!.summary,
|
||||
tokens: v1.metadata.assistant!.tokens,
|
||||
modelID: v1.metadata.assistant!.modelID,
|
||||
providerID: v1.metadata.assistant!.providerID,
|
||||
system: v1.metadata.assistant!.system,
|
||||
error: v1.metadata.error,
|
||||
}
|
||||
const parts = v1.parts.flatMap((part): Part[] => {
|
||||
const base = {
|
||||
id: Identifier.ascending("part"),
|
||||
messageID: v1.id,
|
||||
sessionID: v1.metadata.sessionID,
|
||||
}
|
||||
if (part.type === "text") {
|
||||
return [
|
||||
{
|
||||
...base,
|
||||
type: "text",
|
||||
text: part.text,
|
||||
},
|
||||
]
|
||||
}
|
||||
if (part.type === "step-start") {
|
||||
return [
|
||||
{
|
||||
...base,
|
||||
type: "step-start",
|
||||
},
|
||||
]
|
||||
}
|
||||
if (part.type === "tool-invocation") {
|
||||
return [
|
||||
{
|
||||
...base,
|
||||
type: "tool",
|
||||
callID: part.toolInvocation.toolCallId,
|
||||
tool: part.toolInvocation.toolName,
|
||||
state: (() => {
|
||||
if (part.toolInvocation.state === "partial-call") {
|
||||
return {
|
||||
status: "pending",
|
||||
}
|
||||
}
|
||||
|
||||
const { title, time, ...metadata } = v1.metadata.tool[part.toolInvocation.toolCallId] ?? {}
|
||||
if (part.toolInvocation.state === "call") {
|
||||
return {
|
||||
status: "running",
|
||||
input: part.toolInvocation.args,
|
||||
time: {
|
||||
start: time?.start,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (part.toolInvocation.state === "result") {
|
||||
return {
|
||||
status: "completed",
|
||||
input: part.toolInvocation.args,
|
||||
output: part.toolInvocation.result,
|
||||
title,
|
||||
time,
|
||||
metadata,
|
||||
}
|
||||
}
|
||||
throw new Error("unknown tool invocation state")
|
||||
})(),
|
||||
},
|
||||
]
|
||||
}
|
||||
return []
|
||||
})
|
||||
return {
|
||||
info,
|
||||
parts,
|
||||
}
|
||||
}
|
||||
|
||||
if (v1.role === "user") {
|
||||
const info: User = {
|
||||
id: v1.id,
|
||||
sessionID: v1.metadata.sessionID,
|
||||
role: "user",
|
||||
time: {
|
||||
created: v1.metadata.time.created,
|
||||
},
|
||||
}
|
||||
const parts = v1.parts.flatMap((part): Part[] => {
|
||||
const base = {
|
||||
id: Identifier.ascending("part"),
|
||||
messageID: v1.id,
|
||||
sessionID: v1.metadata.sessionID,
|
||||
}
|
||||
if (part.type === "text") {
|
||||
return [
|
||||
{
|
||||
...base,
|
||||
type: "text",
|
||||
text: part.text,
|
||||
},
|
||||
]
|
||||
}
|
||||
if (part.type === "file") {
|
||||
return [
|
||||
{
|
||||
...base,
|
||||
type: "file",
|
||||
mime: part.mediaType,
|
||||
filename: part.filename,
|
||||
url: part.url,
|
||||
},
|
||||
]
|
||||
}
|
||||
return []
|
||||
})
|
||||
return { info, parts }
|
||||
}
|
||||
|
||||
throw new Error("unknown message type")
|
||||
}
|
||||
|
||||
export function toModelMessage(
|
||||
input: {
|
||||
info: Info
|
||||
parts: Part[]
|
||||
}[],
|
||||
): ModelMessage[] {
|
||||
const result: UIMessage[] = []
|
||||
|
||||
for (const msg of input) {
|
||||
if (msg.parts.length === 0) continue
|
||||
|
||||
if (msg.info.role === "user") {
|
||||
result.push({
|
||||
id: msg.info.id,
|
||||
role: "user",
|
||||
parts: msg.parts.flatMap((part): UIMessage["parts"] => {
|
||||
if (part.type === "text")
|
||||
return [
|
||||
{
|
||||
type: "text",
|
||||
text: part.text,
|
||||
},
|
||||
]
|
||||
// text/plain files are converted into text parts, ignore them
|
||||
if (part.type === "file" && part.mime !== "text/plain")
|
||||
return [
|
||||
{
|
||||
type: "file",
|
||||
url: part.url,
|
||||
mediaType: part.mime,
|
||||
filename: part.filename,
|
||||
},
|
||||
]
|
||||
return []
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
if (msg.info.role === "assistant") {
|
||||
result.push({
|
||||
id: msg.info.id,
|
||||
role: "assistant",
|
||||
parts: msg.parts.flatMap((part): UIMessage["parts"] => {
|
||||
if (part.type === "text")
|
||||
return [
|
||||
{
|
||||
type: "text",
|
||||
text: part.text,
|
||||
},
|
||||
]
|
||||
if (part.type === "step-start")
|
||||
return [
|
||||
{
|
||||
type: "step-start",
|
||||
},
|
||||
]
|
||||
if (part.type === "tool") {
|
||||
if (part.state.status === "completed")
|
||||
return [
|
||||
{
|
||||
type: ("tool-" + part.tool) as `tool-${string}`,
|
||||
state: "output-available",
|
||||
toolCallId: part.callID,
|
||||
input: part.state.input,
|
||||
output: part.state.output,
|
||||
},
|
||||
]
|
||||
if (part.state.status === "error")
|
||||
return [
|
||||
{
|
||||
type: ("tool-" + part.tool) as `tool-${string}`,
|
||||
state: "output-error",
|
||||
toolCallId: part.callID,
|
||||
input: part.state.input,
|
||||
errorText: part.state.error,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
return []
|
||||
}),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return convertToModelMessages(result)
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,16 @@
|
||||
import z from "zod"
|
||||
import { Bus } from "../bus"
|
||||
import { Provider } from "../provider/provider"
|
||||
import { NamedError } from "../util/error"
|
||||
|
||||
export namespace Message {
|
||||
export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({}))
|
||||
export const AuthError = NamedError.create(
|
||||
"ProviderAuthError",
|
||||
z.object({
|
||||
providerID: z.string(),
|
||||
message: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const ToolCall = z
|
||||
.object({
|
||||
state: z.literal("call"),
|
||||
@@ -13,7 +20,7 @@ export namespace Message {
|
||||
args: z.custom<Required<unknown>>(),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.ToolInvocation.ToolCall",
|
||||
ref: "ToolCall",
|
||||
})
|
||||
export type ToolCall = z.infer<typeof ToolCall>
|
||||
|
||||
@@ -26,7 +33,7 @@ export namespace Message {
|
||||
args: z.custom<Required<unknown>>(),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.ToolInvocation.ToolPartialCall",
|
||||
ref: "ToolPartialCall",
|
||||
})
|
||||
export type ToolPartialCall = z.infer<typeof ToolPartialCall>
|
||||
|
||||
@@ -40,15 +47,13 @@ export namespace Message {
|
||||
result: z.string(),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.ToolInvocation.ToolResult",
|
||||
ref: "ToolResult",
|
||||
})
|
||||
export type ToolResult = z.infer<typeof ToolResult>
|
||||
|
||||
export const ToolInvocation = z
|
||||
.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult])
|
||||
.openapi({
|
||||
ref: "Message.ToolInvocation",
|
||||
})
|
||||
export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).openapi({
|
||||
ref: "ToolInvocation",
|
||||
})
|
||||
export type ToolInvocation = z.infer<typeof ToolInvocation>
|
||||
|
||||
export const TextPart = z
|
||||
@@ -57,7 +62,7 @@ export namespace Message {
|
||||
text: z.string(),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.Part.Text",
|
||||
ref: "TextPart",
|
||||
})
|
||||
export type TextPart = z.infer<typeof TextPart>
|
||||
|
||||
@@ -68,7 +73,7 @@ export namespace Message {
|
||||
providerMetadata: z.record(z.any()).optional(),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.Part.Reasoning",
|
||||
ref: "ReasoningPart",
|
||||
})
|
||||
export type ReasoningPart = z.infer<typeof ReasoningPart>
|
||||
|
||||
@@ -78,7 +83,7 @@ export namespace Message {
|
||||
toolInvocation: ToolInvocation,
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.Part.ToolInvocation",
|
||||
ref: "ToolInvocationPart",
|
||||
})
|
||||
export type ToolInvocationPart = z.infer<typeof ToolInvocationPart>
|
||||
|
||||
@@ -91,7 +96,7 @@ export namespace Message {
|
||||
providerMetadata: z.record(z.any()).optional(),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.Part.SourceUrl",
|
||||
ref: "SourceUrlPart",
|
||||
})
|
||||
export type SourceUrlPart = z.infer<typeof SourceUrlPart>
|
||||
|
||||
@@ -103,7 +108,7 @@ export namespace Message {
|
||||
url: z.string(),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.Part.File",
|
||||
ref: "FilePart",
|
||||
})
|
||||
export type FilePart = z.infer<typeof FilePart>
|
||||
|
||||
@@ -112,88 +117,73 @@ export namespace Message {
|
||||
type: z.literal("step-start"),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.Part.StepStart",
|
||||
ref: "StepStartPart",
|
||||
})
|
||||
export type StepStartPart = z.infer<typeof StepStartPart>
|
||||
|
||||
export const Part = z
|
||||
.discriminatedUnion("type", [
|
||||
TextPart,
|
||||
ReasoningPart,
|
||||
ToolInvocationPart,
|
||||
SourceUrlPart,
|
||||
FilePart,
|
||||
StepStartPart,
|
||||
])
|
||||
export const MessagePart = z
|
||||
.discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart])
|
||||
.openapi({
|
||||
ref: "Message.Part",
|
||||
ref: "MessagePart",
|
||||
})
|
||||
export type Part = z.infer<typeof Part>
|
||||
export type MessagePart = z.infer<typeof MessagePart>
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
role: z.enum(["user", "assistant"]),
|
||||
parts: z.array(Part),
|
||||
metadata: z.object({
|
||||
time: z.object({
|
||||
created: z.number(),
|
||||
completed: z.number().optional(),
|
||||
}),
|
||||
error: z
|
||||
.discriminatedUnion("name", [
|
||||
Provider.AuthError.Schema,
|
||||
NamedError.Unknown.Schema,
|
||||
])
|
||||
.optional(),
|
||||
sessionID: z.string(),
|
||||
tool: z.record(
|
||||
z.string(),
|
||||
z
|
||||
parts: z.array(MessagePart),
|
||||
metadata: z
|
||||
.object({
|
||||
time: z.object({
|
||||
created: z.number(),
|
||||
completed: z.number().optional(),
|
||||
}),
|
||||
error: z
|
||||
.discriminatedUnion("name", [AuthError.Schema, NamedError.Unknown.Schema, OutputLengthError.Schema])
|
||||
.optional(),
|
||||
sessionID: z.string(),
|
||||
tool: z.record(
|
||||
z.string(),
|
||||
z
|
||||
.object({
|
||||
title: z.string(),
|
||||
snapshot: z.string().optional(),
|
||||
time: z.object({
|
||||
start: z.number(),
|
||||
end: z.number(),
|
||||
}),
|
||||
})
|
||||
.catchall(z.any()),
|
||||
),
|
||||
assistant: z
|
||||
.object({
|
||||
title: z.string(),
|
||||
time: z.object({
|
||||
start: z.number(),
|
||||
end: z.number(),
|
||||
system: z.string().array(),
|
||||
modelID: z.string(),
|
||||
providerID: z.string(),
|
||||
path: z.object({
|
||||
cwd: z.string(),
|
||||
root: z.string(),
|
||||
}),
|
||||
cost: z.number(),
|
||||
summary: z.boolean().optional(),
|
||||
tokens: z.object({
|
||||
input: z.number(),
|
||||
output: z.number(),
|
||||
reasoning: z.number(),
|
||||
cache: z.object({
|
||||
read: z.number(),
|
||||
write: z.number(),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
.catchall(z.any()),
|
||||
),
|
||||
assistant: z
|
||||
.object({
|
||||
system: z.string().array(),
|
||||
modelID: z.string(),
|
||||
providerID: z.string(),
|
||||
path: z.object({
|
||||
cwd: z.string(),
|
||||
root: z.string(),
|
||||
}),
|
||||
cost: z.number(),
|
||||
summary: z.boolean().optional(),
|
||||
tokens: z.object({
|
||||
input: z.number(),
|
||||
output: z.number(),
|
||||
reasoning: z.number(),
|
||||
}),
|
||||
})
|
||||
.optional(),
|
||||
}),
|
||||
.optional(),
|
||||
snapshot: z.string().optional(),
|
||||
})
|
||||
.openapi({ ref: "MessageMetadata" }),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Message.Info",
|
||||
ref: "Message",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export const Event = {
|
||||
Updated: Bus.event(
|
||||
"message.updated",
|
||||
z.object({
|
||||
info: Info,
|
||||
}),
|
||||
),
|
||||
PartUpdated: Bus.event(
|
||||
"message.part.updated",
|
||||
z.object({ part: Part, sessionID: z.string(), messageID: z.string() }),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
69
packages/opencode/src/session/mode.ts
Normal file
69
packages/opencode/src/session/mode.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { mergeDeep } from "remeda"
|
||||
import { App } from "../app/app"
|
||||
import { Config } from "../config/config"
|
||||
import z from "zod"
|
||||
|
||||
export namespace Mode {
|
||||
export const Info = z
|
||||
.object({
|
||||
name: z.string(),
|
||||
model: z
|
||||
.object({
|
||||
modelID: z.string(),
|
||||
providerID: z.string(),
|
||||
})
|
||||
.optional(),
|
||||
prompt: z.string().optional(),
|
||||
tools: z.record(z.boolean()),
|
||||
})
|
||||
.openapi({
|
||||
ref: "Mode",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
const state = App.state("mode", async () => {
|
||||
const cfg = await Config.get()
|
||||
const mode = mergeDeep(
|
||||
{
|
||||
build: {},
|
||||
plan: {
|
||||
tools: {
|
||||
write: false,
|
||||
edit: false,
|
||||
patch: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
cfg.mode ?? {},
|
||||
)
|
||||
const result: Record<string, Info> = {}
|
||||
for (const [key, value] of Object.entries(mode)) {
|
||||
let item = result[key]
|
||||
if (!item)
|
||||
item = result[key] = {
|
||||
name: key,
|
||||
tools: {},
|
||||
}
|
||||
const model = value.model ?? cfg.model
|
||||
if (model) {
|
||||
const [providerID, ...rest] = model.split("/")
|
||||
const modelID = rest.join("/")
|
||||
item.model = {
|
||||
modelID,
|
||||
providerID,
|
||||
}
|
||||
}
|
||||
if (value.prompt) item.prompt = value.prompt
|
||||
if (value.tools) item.tools = value.tools
|
||||
}
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
export async function get(mode: string) {
|
||||
return state().then((x) => x[mode])
|
||||
}
|
||||
|
||||
export async function list() {
|
||||
return state().then((x) => Object.values(x))
|
||||
}
|
||||
}
|
||||
@@ -134,7 +134,7 @@ The user will primarily request you perform software engineering tasks. This inc
|
||||
- Use the available search tools to understand the codebase and the user's query. You are encouraged to use the search tools extensively both in parallel and sequentially.
|
||||
- Implement the solution using all tools available to you
|
||||
- Verify the solution if possible with tests. NEVER assume specific test framework or test script. Check the README or search codebase to determine the testing approach.
|
||||
- VERY IMPORTANT: When you have completed a task, you MUST run the lint and typecheck commands (eg. npm run lint, npm run typecheck, ruff, etc.) with Bash if they were provided to you to ensure your code is correct. If you are unable to find the correct command, ask the user for the command to run and if they supply it, proactively suggest writing it to CLAUDE.md so that you will know to run it next time.
|
||||
- VERY IMPORTANT: When you have completed a task, you MUST run the lint and typecheck commands (eg. npm run lint, npm run typecheck, ruff, etc.) with Bash if they were provided to you to ensure your code is correct. If you are unable to find the correct command, ask the user for the command to run and if they supply it, proactively suggest writing it to AGENTS.md so that you will know to run it next time.
|
||||
NEVER commit changes unless the user explicitly asks you to. It is VERY IMPORTANT to only commit when explicitly asked, otherwise the user will feel that you are being too proactive.
|
||||
|
||||
- Tool results and user messages may include <system-reminder> tags. <system-reminder> tags contain useful information and reminders. They are NOT part of the user's provided input or the tool result.
|
||||
|
||||
117
packages/opencode/src/session/prompt/beast.txt
Normal file
117
packages/opencode/src/session/prompt/beast.txt
Normal file
@@ -0,0 +1,117 @@
|
||||
You are opencode, an autonomous agent - please keep going until the user's query is completely resolved, before ending your turn and yielding back to the user.
|
||||
|
||||
Your thinking should be thorough and so it's fine if it's very long. However, avoid unnecessary repetition and verbosity. You should be concise, but thorough.
|
||||
|
||||
You MUST iterate and keep going until the problem is solved.
|
||||
|
||||
You have everything you need to resolve this problem. I want you to fully solve this autonomously before coming back to me.
|
||||
|
||||
Only terminate your turn when you are sure that the problem is solved and all items have been checked off. Use the TodoWrite and TodoRead tools to track and manage steps. Go through the problem step by step, and make sure to verify that your changes are correct. Once each step is finished mark it as completed with the TodoWrite tool. NEVER end your turn without having truly and completely solved the problem, use the TodoRead tool to make sure all steps are complete, and when you say you are going to make a tool call, make sure you ACTUALLY make the tool call, instead of ending your turn. If a step is impossible to complete, mark it as cancelled using the TodoWrite tool.
|
||||
|
||||
THE PROBLEM CAN NOT BE SOLVED WITHOUT EXTENSIVE INTERNET RESEARCH.
|
||||
|
||||
You must use the webfetch tool to recursively gather all information from URLs provided to you by the user, as well as any links you find in the content of those pages.
|
||||
|
||||
Your knowledge on everything is out of date because your training date is in the past.
|
||||
|
||||
You CANNOT successfully complete this task without using Bing to verify your understanding of third party packages and dependencies is up to date. You must use the webfetch tool to search bing for how to properly use libraries, packages, frameworks, dependencies, etc. every single time you install or implement one. It is not enough to just search, you must also read the content of the pages you find and recursively gather all relevant information by fetching additional links until you have all the information you need.
|
||||
|
||||
If the user request is "resume" or "continue" or "try again",use the TodoRead tool to find the next pending step. Continue from that step, and do not hand back control to the user until the entire todo list is complete and all steps are marked as complete or cancelled. Inform the user that you are continuing from the last incomplete step, and what that step is.
|
||||
|
||||
Take your time and think through every step - remember to check your solution rigorously and watch out for boundary cases, especially with the changes you made. Use the sequential thinking tool if available. Your solution must be perfect. If not, continue working on it. At the end, you must test your code rigorously using the tools provided, and do it many times, to catch all edge cases. If it is not robust, update the plan and iterate more and make it perfect. Failing to test your code sufficiently rigorously is the NUMBER ONE failure mode on these types of tasks; run the build, and verify that the changes you made actually build; make sure you handle all edge cases, and run existing tests if they are provided.
|
||||
|
||||
You MUST plan extensively before each tool call, and reflect extensively on the outcomes of the previous tool calls. DO NOT do this entire process by making tool calls only, as this can impair your ability to solve the problem and think insightfully.
|
||||
|
||||
You MUST keep working until the problem is completely solved, and all steps in the todo list are complete. Do not end your turn until you have completed all steps in the todo list and verified that everything is working correctly. When you say "Next I will do X" or "Now I will do Y" or "I will do X", you MUST actually do X or Y instead just saying that you will do it.
|
||||
|
||||
You MUST use the ToolRead tool to verify that all steps are complete or cancelled before ending your turn. If any steps are incomplete, you MUST continue working on them until they are all complete.
|
||||
|
||||
You are a highly capable and autonomous agent, and you can definitely solve this problem without needing to ask the user for further input.
|
||||
|
||||
# Workflow
|
||||
1. Fetch any URL's provided by the user using the `webfetch` tool.
|
||||
2. Understand the problem deeply. Carefully read the issue and think critically about what is required. Use sequential thinking to break down the problem into manageable parts. Consider the following:
|
||||
- What is the expected behavior?
|
||||
- What are the edge cases?
|
||||
- What are the potential pitfalls?
|
||||
- How does this fit into the larger context of the codebase?
|
||||
- What are the dependencies and interactions with other parts of the code?
|
||||
3. Investigate the codebase. Explore relevant files, search for key functions, and gather context.
|
||||
4. Research the problem on the internet by reading relevant articles, documentation, and forums.
|
||||
5. Develop a clear, step-by-step plan. Break down the fix into manageable, incremental steps. Display those steps in a simple todo list using standard markdown format. Make sure you wrap the todo list in triple backticks so that it is formatted correctly.
|
||||
6. Implement the fix incrementally. Make small, testable code changes.
|
||||
7. Debug as needed. Use debugging techniques to isolate and resolve issues.
|
||||
8. Test frequently. Run tests after each change to verify correctness.
|
||||
9. Iterate until the root cause is fixed and all tests pass.
|
||||
10. Reflect and validate comprehensively. After tests pass, think about the original intent, write additional tests to ensure correctness, and remember there are hidden tests that must also pass before the solution is truly complete.
|
||||
|
||||
Refer to the detailed sections below for more information on each step.
|
||||
|
||||
## 1. Fetch Provided URLs
|
||||
- If the user provides a URL, use the `webfetch` tool to retrieve the content of the provided URL.
|
||||
- After fetching, review the content returned by the fetch tool.
|
||||
- If you find any additional URLs or links that are relevant, use the `webfetch` tool again to retrieve those links.
|
||||
- Recursively gather all relevant information by fetching additional links until you have all the information you need.
|
||||
|
||||
## 2. Deeply Understand the Problem
|
||||
Carefully read the issue and think hard about a plan to solve it before coding. Use the sequential thinking tool if available.
|
||||
|
||||
## 3. Codebase Investigation
|
||||
- Explore relevant files and directories.
|
||||
- Search for key functions, classes, or variables related to the issue.
|
||||
- Read and understand relevant code snippets.
|
||||
- Identify the root cause of the problem.
|
||||
- Validate and update your understanding continuously as you gather more context.
|
||||
|
||||
## 4. Internet Research
|
||||
- Use the `webfetch` tool to search bing by fetching the URL `https://www.bing.com/search?q=your+search+query`.
|
||||
- After fetching, review the content returned by the fetch tool.
|
||||
- If you find any additional URLs or links that are relevant, use the `webfetch` tool again to retrieve those links.
|
||||
- Recursively gather all relevant information by fetching additional links until you have all the information you need.
|
||||
|
||||
## 5. Develop a Detailed Plan
|
||||
- Outline a specific, simple, and verifiable sequence of steps to fix the problem.
|
||||
- Add steps using the TodoWrite tool.
|
||||
- Each time you complete a step, mark it as complete using the TodoWrite tool.
|
||||
- Each time you check off a step, use the TodoRead tool and display the updated todo list to the user in markdown format.
|
||||
- You MUST continue on to the next step after checking off a step instead of ending your turn and asking the user what they want to do next.
|
||||
- You may only end your turn when all steps in the todo list are marked as complete or cancelled.
|
||||
|
||||
## 6. Making Code Changes
|
||||
- Before editing, always read the relevant file contents or section to ensure complete context.
|
||||
- Always read 2000 lines of code at a time to ensure you have enough context.
|
||||
- Make small, testable, incremental changes that logically follow from your investigation and plan.
|
||||
- When using the edit tool, include 3-5 lines of unchanged code before and after the string you want to replace, to make it unambiguous which part of the file should be edited.
|
||||
- If a patch or edit is not applied correctly, attempt to reapply it.
|
||||
- Always validate that your changes build and pass tests after each change.
|
||||
- If the build fails or test fail, debug why before proceeding, update the plan as needed.
|
||||
|
||||
## 7. Debugging
|
||||
- Use the `lsp_diagnostics` tool to check for any problems in the code.
|
||||
- Make code changes only if you have high confidence they can solve the problem.
|
||||
- When debugging, try to determine the root cause rather than addressing symptoms.
|
||||
- Debug for as long as needed to identify the root cause and identify a fix.
|
||||
- Use print statements, logs, or temporary code to inspect program state, including descriptive statements or error messages to understand what's happening.
|
||||
- To test hypotheses, you can also add test statements or functions.
|
||||
- Revisit your assumptions if unexpected behavior occurs.
|
||||
|
||||
# How to create a Todo List
|
||||
Use the following format to show the todo list:
|
||||
```markdown
|
||||
- [ ] Step 1: Description of the first step
|
||||
- [ ] Step 2: Description of the second step
|
||||
- [ ] Step 3: Description of the third step
|
||||
```
|
||||
Do not ever use HTML tags or any other formatting for the todo list, as it will not be rendered correctly. Always use the markdown format shown above.
|
||||
|
||||
# Communication Guidelines
|
||||
Always communicate clearly and concisely in a casual, friendly yet professional tone.
|
||||
|
||||
<examples>
|
||||
"Let me fetch the URL you provided to gather more information."
|
||||
"Ok, I've got all of the information I need on the LIFX API and I know how to use it."
|
||||
"Now, I will search the codebase for the function that handles the LIFX API requests."
|
||||
"I need to update several files here - stand by"
|
||||
"OK! Now let's run the tests to make sure everything is working correctly."
|
||||
"Whelp - I see we have some problems. Let's fix those up."
|
||||
</examples>
|
||||
155
packages/opencode/src/session/prompt/gemini.txt
Normal file
155
packages/opencode/src/session/prompt/gemini.txt
Normal file
@@ -0,0 +1,155 @@
|
||||
You are opencode, an interactive CLI agent specializing in software engineering tasks. Your primary goal is to help users safely and efficiently, adhering strictly to the following instructions and utilizing your available tools.
|
||||
|
||||
# Core Mandates
|
||||
|
||||
- **Conventions:** Rigorously adhere to existing project conventions when reading or modifying code. Analyze surrounding code, tests, and configuration first.
|
||||
- **Libraries/Frameworks:** NEVER assume a library/framework is available or appropriate. Verify its established usage within the project (check imports, configuration files like 'package.json', 'Cargo.toml', 'requirements.txt', 'build.gradle', etc., or observe neighboring files) before employing it.
|
||||
- **Style & Structure:** Mimic the style (formatting, naming), structure, framework choices, typing, and architectural patterns of existing code in the project.
|
||||
- **Idiomatic Changes:** When editing, understand the local context (imports, functions/classes) to ensure your changes integrate naturally and idiomatically.
|
||||
- **Comments:** Add code comments sparingly. Focus on *why* something is done, especially for complex logic, rather than *what* is done. Only add high-value comments if necessary for clarity or if requested by the user. Do not edit comments that are separate from the code you are changing. *NEVER* talk to the user or describe your changes through comments.
|
||||
- **Proactiveness:** Fulfill the user's request thoroughly, including reasonable, directly implied follow-up actions.
|
||||
- **Confirm Ambiguity/Expansion:** Do not take significant actions beyond the clear scope of the request without confirming with the user. If asked *how* to do something, explain first, don't just do it.
|
||||
- **Explaining Changes:** After completing a code modification or file operation *do not* provide summaries unless asked.
|
||||
- **Path Construction:** Before using any file system tool (e.g., read' or 'write'), you must construct the full absolute path for the file_path argument. Always combine the absolute path of the project's root directory with the file's path relative to the root. For example, if the project root is /path/to/project/ and the file is foo/bar/baz.txt, the final path you must use is /path/to/project/foo/bar/baz.txt. If the user provides a relative path, you must resolve it against the root directory to create an absolute path.
|
||||
- **Do Not revert changes:** Do not revert changes to the codebase unless asked to do so by the user. Only revert changes made by you if they have resulted in an error or if the user has explicitly asked you to revert the changes.
|
||||
|
||||
# Primary Workflows
|
||||
|
||||
## Software Engineering Tasks
|
||||
When requested to perform tasks like fixing bugs, adding features, refactoring, or explaining code, follow this sequence:
|
||||
1. **Understand:** Think about the user's request and the relevant codebase context. Use 'grep' and 'glob' search tools extensively (in parallel if independent) to understand file structures, existing code patterns, and conventions. Use 'read' to understand context and validate any assumptions you may have.
|
||||
2. **Plan:** Build a coherent and grounded (based on the understanding in step 1) plan for how you intend to resolve the user's task. Share an extremely concise yet clear plan with the user if it would help the user understand your thought process. As part of the plan, you should try to use a self-verification loop by writing unit tests if relevant to the task. Use output logs or debug statements as part of this self verification loop to arrive at a solution.
|
||||
3. **Implement:** Use the available tools (e.g., 'edit', 'write' 'bash' ...) to act on the plan, strictly adhering to the project's established conventions (detailed under 'Core Mandates').
|
||||
4. **Verify (Tests):** If applicable and feasible, verify the changes using the project's testing procedures. Identify the correct test commands and frameworks by examining 'README' files, build/package configuration (e.g., 'package.json'), or existing test execution patterns. NEVER assume standard test commands.
|
||||
5. **Verify (Standards):** VERY IMPORTANT: After making code changes, execute the project-specific build, linting and type-checking commands (e.g., 'tsc', 'npm run lint', 'ruff check .') that you have identified for this project (or obtained from the user). This ensures code quality and adherence to standards. If unsure about these commands, you can ask the user if they'd like you to run them and if so how to.
|
||||
|
||||
## New Applications
|
||||
|
||||
**Goal:** Autonomously implement and deliver a visually appealing, substantially complete, and functional prototype. Utilize all tools at your disposal to implement the application. Some tools you may especially find useful are 'write', 'edit' and 'bash'.
|
||||
|
||||
1. **Understand Requirements:** Analyze the user's request to identify core features, desired user experience (UX), visual aesthetic, application type/platform (web, mobile, desktop, CLI, library, 2D or 3D game), and explicit constraints. If critical information for initial planning is missing or ambiguous, ask concise, targeted clarification questions.
|
||||
2. **Propose Plan:** Formulate an internal development plan. Present a clear, concise, high-level summary to the user. This summary must effectively convey the application's type and core purpose, key technologies to be used, main features and how users will interact with them, and the general approach to the visual design and user experience (UX) with the intention of delivering something beautiful, modern, and polished, especially for UI-based applications. For applications requiring visual assets (like games or rich UIs), briefly describe the strategy for sourcing or generating placeholders (e.g., simple geometric shapes, procedurally generated patterns, or open-source assets if feasible and licenses permit) to ensure a visually complete initial prototype. Ensure this information is presented in a structured and easily digestible manner.
|
||||
3. **User Approval:** Obtain user approval for the proposed plan.
|
||||
4. **Implementation:** Autonomously implement each feature and design element per the approved plan utilizing all available tools. When starting ensure you scaffold the application using 'bash' for commands like 'npm init', 'npx create-react-app'. Aim for full scope completion. Proactively create or source necessary placeholder assets (e.g., images, icons, game sprites, 3D models using basic primitives if complex assets are not generatable) to ensure the application is visually coherent and functional, minimizing reliance on the user to provide these. If the model can generate simple assets (e.g., a uniformly colored square sprite, a simple 3D cube), it should do so. Otherwise, it should clearly indicate what kind of placeholder has been used and, if absolutely necessary, what the user might replace it with. Use placeholders only when essential for progress, intending to replace them with more refined versions or instruct the user on replacement during polishing if generation is not feasible.
|
||||
5. **Verify:** Review work against the original request, the approved plan. Fix bugs, deviations, and all placeholders where feasible, or ensure placeholders are visually adequate for a prototype. Ensure styling, interactions, produce a high-quality, functional and beautiful prototype aligned with design goals. Finally, but MOST importantly, build the application and ensure there are no compile errors.
|
||||
6. **Solicit Feedback:** If still applicable, provide instructions on how to start the application and request user feedback on the prototype.
|
||||
|
||||
# Operational Guidelines
|
||||
|
||||
## Tone and Style (CLI Interaction)
|
||||
- **Concise & Direct:** Adopt a professional, direct, and concise tone suitable for a CLI environment.
|
||||
- **Minimal Output:** Aim for fewer than 3 lines of text output (excluding tool use/code generation) per response whenever practical. Focus strictly on the user's query.
|
||||
- **Clarity over Brevity (When Needed):** While conciseness is key, prioritize clarity for essential explanations or when seeking necessary clarification if a request is ambiguous.
|
||||
- **No Chitchat:** Avoid conversational filler, preambles ("Okay, I will now..."), or postambles ("I have finished the changes..."). Get straight to the action or answer.
|
||||
- **Formatting:** Use GitHub-flavored Markdown. Responses will be rendered in monospace.
|
||||
- **Tools vs. Text:** Use tools for actions, text output *only* for communication. Do not add explanatory comments within tool calls or code blocks unless specifically part of the required code/command itself.
|
||||
- **Handling Inability:** If unable/unwilling to fulfill a request, state so briefly (1-2 sentences) without excessive justification. Offer alternatives if appropriate.
|
||||
|
||||
## Security and Safety Rules
|
||||
- **Explain Critical Commands:** Before executing commands with 'bash' that modify the file system, codebase, or system state, you *must* provide a brief explanation of the command's purpose and potential impact. Prioritize user understanding and safety. You should not ask permission to use the tool; the user will be presented with a confirmation dialogue upon use (you do not need to tell them this).
|
||||
- **Security First:** Always apply security best practices. Never introduce code that exposes, logs, or commits secrets, API keys, or other sensitive information.
|
||||
|
||||
## Tool Usage
|
||||
- **File Paths:** Always use absolute paths when referring to files with tools like 'read' or 'write'. Relative paths are not supported. You must provide an absolute path.
|
||||
- **Parallelism:** Execute multiple independent tool calls in parallel when feasible (i.e. searching the codebase).
|
||||
- **Command Execution:** Use the 'bash' tool for running shell commands, remembering the safety rule to explain modifying commands first.
|
||||
- **Background Processes:** Use background processes (via \`&\`) for commands that are unlikely to stop on their own, e.g. \`node server.js &\`. If unsure, ask the user.
|
||||
- **Interactive Commands:** Try to avoid shell commands that are likely to require user interaction (e.g. \`git rebase -i\`). Use non-interactive versions of commands (e.g. \`npm init -y\` instead of \`npm init\`) when available, and otherwise remind the user that interactive shell commands are not supported and may cause hangs until canceled by the user.
|
||||
- **Respect User Confirmations:** Most tool calls (also denoted as 'function calls') will first require confirmation from the user, where they will either approve or cancel the function call. If a user cancels a function call, respect their choice and do _not_ try to make the function call again. It is okay to request the tool call again _only_ if the user requests that same tool call on a subsequent prompt. When a user cancels a function call, assume best intentions from the user and consider inquiring if they prefer any alternative paths forward.
|
||||
|
||||
## Interaction Details
|
||||
- **Help Command:** The user can use '/help' to display help information.
|
||||
- **Feedback:** To report a bug or provide feedback, please use the /bug command.
|
||||
|
||||
# Examples (Illustrating Tone and Workflow)
|
||||
<example>
|
||||
user: 1 + 2
|
||||
model: 3
|
||||
</example>
|
||||
|
||||
<example>
|
||||
user: is 13 a prime number?
|
||||
model: true
|
||||
</example>
|
||||
|
||||
<example>
|
||||
user: list files here.
|
||||
model: [tool_call: ls for path '/path/to/project']
|
||||
</example>
|
||||
|
||||
<example>
|
||||
user: start the server implemented in server.js
|
||||
model: [tool_call: bash for 'node server.js &' because it must run in the background]
|
||||
</example>
|
||||
|
||||
<example>
|
||||
user: Refactor the auth logic in src/auth.py to use the requests library instead of urllib.
|
||||
model: Okay, I can refactor 'src/auth.py'.
|
||||
First, I'll analyze the code and check for a test safety net before planning any changes.
|
||||
[tool_call: glob for path 'tests/test_auth.py']
|
||||
[tool_call: read for absolute_path '/path/to/tests/test_auth.py']
|
||||
(After analysis)
|
||||
Great, 'tests/test_auth.py' exists and covers the core authentication logic. With this safety net in place, I can safely plan the refactoring.
|
||||
I'll also confirm 'requests' is a dependency.
|
||||
[tool_call: read for absolute_path '/path/to/requirements.txt']
|
||||
(After analysis)
|
||||
Looks good, 'requests' is available.
|
||||
|
||||
Here's the plan:
|
||||
1. Replace the 'urllib' calls with 'requests'.
|
||||
2. Add proper 'try...except' error handling for the new network calls.
|
||||
3. Remove the old 'urllib' import.
|
||||
4. Run the project's linter and tests to verify the changes.
|
||||
|
||||
Should I proceed?
|
||||
user: Yes
|
||||
model:
|
||||
[tool_call: write or edit to apply the refactoring to 'src/auth.py']
|
||||
Refactoring complete. Running verification...
|
||||
[tool_call: bash for 'ruff check src/auth.py && pytest']
|
||||
(After verification passes)
|
||||
All checks passed. This is a stable checkpoint.
|
||||
|
||||
<example>
|
||||
user: Delete the temp directory.
|
||||
model: I can run \`rm -rf /path/to/project/temp\`. This will permanently delete the directory and all its contents.
|
||||
</example>
|
||||
|
||||
<example>
|
||||
user: Write tests for someFile.ts
|
||||
model:
|
||||
Okay, I can write those tests. First, I'll read \`someFile.ts\` to understand its functionality.
|
||||
[tool_call: read for absolute_path '/path/to/someFile.ts' or use glob to find \`someFile.ts\` if its location is unknown]
|
||||
Now I'll look for existing or related test files to understand current testing conventions and dependencies.
|
||||
[tool_call: glob for paths ['**/*.test.ts', 'src/**/*.spec.ts'] assuming someFile.ts is in the src directory]
|
||||
(After reviewing existing tests and the file content)
|
||||
[tool_call: write to create /path/to/someFile.test.ts with the test code]
|
||||
I've written the tests. Now I'll run the project's test command to verify them.
|
||||
[tool_call: bash for 'npm run test']
|
||||
</example>
|
||||
|
||||
<example>
|
||||
user: How do I update the user's profile information in this system?
|
||||
model:
|
||||
I'm not immediately sure how user profile information is updated. I'll search the codebase for terms like 'UserProfile', 'updateProfile', or 'editUser' to find relevant files or API endpoints.
|
||||
[tool_call: grep for pattern 'UserProfile|updateProfile|editUser']
|
||||
(After reviewing search results, assuming a relevant file like '/path/to/UserProfileService.java' was found)
|
||||
Okay, \`/path/to/UserProfileService.java\` seems like the most relevant file. I'll read its content to understand how updates are handled.
|
||||
[tool_call: read for absolute_path '/path/to/UserProfileService.java']
|
||||
(After reading the file)
|
||||
It appears the \`updateUserProfile\` method in \`UserProfileService.java\` is responsible for this. It expects a user ID and a \`UserProfileDTO\` object...
|
||||
</example>
|
||||
|
||||
<example>
|
||||
user: Where are all the 'app.config' files in this project? I need to check their settings.
|
||||
model:
|
||||
[tool_call: glob for pattern '**/app.config']
|
||||
(Assuming GlobTool returns a list of paths like ['/path/to/moduleA/app.config', '/path/to/moduleB/app.config'])
|
||||
I found the following 'app.config' files:
|
||||
- /path/to/moduleA/app.config
|
||||
- /path/to/moduleB/app.config
|
||||
To help you check their settings, I can read their contents. Which one would you like to start with, or should I read all of them?
|
||||
</example>
|
||||
|
||||
# Final Reminder
|
||||
Your core function is efficient and safe assistance. Balance extreme conciseness with the crucial need for clarity, especially regarding safety and potential system modifications. Always prioritize user control and project conventions. Never make assumptions about the contents of files; instead use 'read' to ensure you aren't making broad assumptions. Finally, you are an agent - please keep going until the user's query is completely resolved.
|
||||
3
packages/opencode/src/session/prompt/plan.txt
Normal file
3
packages/opencode/src/session/prompt/plan.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
<system-reminder>
|
||||
Plan mode is active. The user indicated that they do not want you to execute yet -- you MUST NOT make any edits, run any non-readonly tools (including changing configs or making commits), or otherwise make any changes to the system. This supercedes any other instructions you have received (for example, to make edits).
|
||||
</system-reminder>
|
||||
@@ -1,7 +1,31 @@
|
||||
you will generate a short title based on the first message a user begins a conversation with
|
||||
- ensure it is not more than 50 characters long
|
||||
- the title should be a summary of the user's message
|
||||
- it should be one line long
|
||||
- do not use quotes or colons
|
||||
- the entire text you return will be used as the title
|
||||
- never return anything that is more than one sentence (one line) long
|
||||
<task>
|
||||
Generate a conversation thread title from the user message.
|
||||
</task>
|
||||
|
||||
<context>
|
||||
You are generating titles for a coding assistant conversation.
|
||||
</context>
|
||||
|
||||
<rules>
|
||||
- Max 50 chars, single line
|
||||
- Focus on the specific action or question
|
||||
- Keep technical terms, numbers, and filenames exactly as written
|
||||
- Preserve HTTP status codes (401, 404, 500, etc) as numbers
|
||||
- For file references, include the filename
|
||||
- Avoid filler words: the, this, my, a, an, properly
|
||||
- NEVER assume their tech stack or domain
|
||||
- Use -ing verbs consistently for actions
|
||||
- Write like a chat thread title, not a blog post
|
||||
</rules>
|
||||
|
||||
<examples>
|
||||
"debug 500 errors in production" → "Debugging production 500 errors"
|
||||
"refactor user service" → "Refactoring user service"
|
||||
"why is app.js failing" → "Analyzing app.js failure"
|
||||
"implement rate limiting" → "Implementing rate limiting"
|
||||
</examples>
|
||||
|
||||
<format>
|
||||
Return only the thread title text on a single line with no newlines, explanations, or additional formatting.
|
||||
You should NEVER reply to the user's message. You can only generate titles.
|
||||
</format>
|
||||
|
||||
@@ -1,28 +1,26 @@
|
||||
import { App } from "../app/app"
|
||||
import { ListTool } from "../tool/ls"
|
||||
import { Ripgrep } from "../file/ripgrep"
|
||||
import { Global } from "../global"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import { Config } from "../config/config"
|
||||
import path from "path"
|
||||
import os from "os"
|
||||
|
||||
import PROMPT_ANTHROPIC from "./prompt/anthropic.txt"
|
||||
import PROMPT_BEAST from "./prompt/beast.txt"
|
||||
import PROMPT_GEMINI from "./prompt/gemini.txt"
|
||||
import PROMPT_ANTHROPIC_SPOOF from "./prompt/anthropic_spoof.txt"
|
||||
import PROMPT_SUMMARIZE from "./prompt/summarize.txt"
|
||||
import PROMPT_TITLE from "./prompt/title.txt"
|
||||
|
||||
export namespace SystemPrompt {
|
||||
export function provider(providerID: string) {
|
||||
const result = []
|
||||
switch (providerID) {
|
||||
case "anthropic":
|
||||
result.push(PROMPT_ANTHROPIC_SPOOF.trim())
|
||||
result.push(PROMPT_ANTHROPIC)
|
||||
break
|
||||
default:
|
||||
result.push(PROMPT_ANTHROPIC)
|
||||
break
|
||||
}
|
||||
return result
|
||||
export function provider(modelID: string) {
|
||||
if (modelID.includes("gpt-") || modelID.includes("o1") || modelID.includes("o3")) return [PROMPT_BEAST]
|
||||
if (modelID.includes("gemini-")) return [PROMPT_GEMINI]
|
||||
return [PROMPT_ANTHROPIC]
|
||||
}
|
||||
|
||||
export async function environment(sessionID: string) {
|
||||
export async function environment() {
|
||||
const app = App.info()
|
||||
return [
|
||||
[
|
||||
@@ -34,7 +32,14 @@ export namespace SystemPrompt {
|
||||
` Today's date: ${new Date().toDateString()}`,
|
||||
`</env>`,
|
||||
`<project>`,
|
||||
` ${app.git ? await ListTool.execute({ path: app.path.cwd, ignore: [] }, { sessionID: sessionID, messageID: "", abort: AbortSignal.any([]) }).then((x) => x.output) : ""}`,
|
||||
` ${
|
||||
app.git
|
||||
? await Ripgrep.tree({
|
||||
cwd: app.path.cwd,
|
||||
limit: 200,
|
||||
})
|
||||
: ""
|
||||
}`,
|
||||
`</project>`,
|
||||
].join("\n"),
|
||||
]
|
||||
@@ -45,14 +50,38 @@ export namespace SystemPrompt {
|
||||
"CLAUDE.md",
|
||||
"CONTEXT.md", // deprecated
|
||||
]
|
||||
|
||||
export async function custom() {
|
||||
const { cwd, root } = App.info().path
|
||||
const config = await Config.get()
|
||||
const found = []
|
||||
for (const item of CUSTOM_FILES) {
|
||||
const matches = await Filesystem.findUp(item, cwd, root)
|
||||
found.push(...matches.map((x) => Bun.file(x).text()))
|
||||
}
|
||||
return Promise.all(found)
|
||||
found.push(
|
||||
Bun.file(path.join(Global.Path.config, "AGENTS.md"))
|
||||
.text()
|
||||
.catch(() => ""),
|
||||
)
|
||||
found.push(
|
||||
Bun.file(path.join(os.homedir(), ".claude", "CLAUDE.md"))
|
||||
.text()
|
||||
.catch(() => ""),
|
||||
)
|
||||
|
||||
if (config.instructions) {
|
||||
for (const instruction of config.instructions) {
|
||||
try {
|
||||
const matches = await Filesystem.globUp(instruction, cwd, root)
|
||||
found.push(...matches.map((x) => Bun.file(x).text()))
|
||||
} catch {
|
||||
continue // Skip invalid glob patterns
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Promise.all(found).then((result) => result.filter(Boolean))
|
||||
}
|
||||
|
||||
export function summarize(providerID: string) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { App } from "../app/app"
|
||||
import { Bus } from "../bus"
|
||||
import { Installation } from "../installation"
|
||||
import { Session } from "../session"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { Log } from "../util/log"
|
||||
@@ -10,19 +10,14 @@ export namespace Share {
|
||||
let queue: Promise<void> = Promise.resolve()
|
||||
const pending = new Map<string, any>()
|
||||
|
||||
const state = App.state("share", async () => {
|
||||
Bus.subscribe(Storage.Event.Write, async (payload) => {
|
||||
await sync(payload.properties.key, payload.properties.content)
|
||||
})
|
||||
})
|
||||
|
||||
export async function sync(key: string, content: any) {
|
||||
const [root, ...splits] = key.split("/")
|
||||
if (root !== "session") return
|
||||
const [, sessionID] = splits
|
||||
const session = await Session.get(sessionID)
|
||||
if (!session.share) return
|
||||
const { secret } = session.share
|
||||
const [sub, sessionID] = splits
|
||||
if (sub === "share") return
|
||||
const share = await Session.getShare(sessionID).catch(() => {})
|
||||
if (!share) return
|
||||
const { secret } = share
|
||||
pending.set(key, content)
|
||||
queue = queue
|
||||
.then(async () => {
|
||||
@@ -50,12 +45,15 @@ export namespace Share {
|
||||
})
|
||||
}
|
||||
|
||||
export async function init() {
|
||||
await state()
|
||||
export function init() {
|
||||
Bus.subscribe(Storage.Event.Write, async (payload) => {
|
||||
await sync(payload.properties.key, payload.properties.content)
|
||||
})
|
||||
}
|
||||
|
||||
export const URL =
|
||||
process.env["OPENCODE_API"] ?? "https://api.dev.opencode.ai"
|
||||
process.env["OPENCODE_API"] ??
|
||||
(Installation.isSnapshot() || Installation.isDev() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")
|
||||
|
||||
export async function create(sessionID: string) {
|
||||
return fetch(`${URL}/share_create`, {
|
||||
@@ -65,4 +63,11 @@ export namespace Share {
|
||||
.then((x) => x.json())
|
||||
.then((x) => x as { url: string; secret: string })
|
||||
}
|
||||
|
||||
export async function remove(sessionID: string, secret: string) {
|
||||
return fetch(`${URL}/share_delete`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ sessionID, secret }),
|
||||
}).then((x) => x.json())
|
||||
}
|
||||
}
|
||||
|
||||
packages/opencode/src/snapshot/index.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
import { App } from "../app/app"
import { $ } from "bun"
import path from "path"
import fs from "fs/promises"
import { Ripgrep } from "../file/ripgrep"
import { Log } from "../util/log"

export namespace Snapshot {
  const log = Log.create({ service: "snapshot" })

  export async function create(sessionID: string) {
    log.info("creating snapshot")
    const app = App.info()

    // not a git repo, check if too big to snapshot
    if (!app.git) {
      const files = await Ripgrep.files({
        cwd: app.path.cwd,
        limit: 1000,
      })
      log.info("found files", { count: files.length })
      if (files.length >= 1000) return
    }

    const git = gitdir(sessionID)
    if (await fs.mkdir(git, { recursive: true })) {
      await $`git init`
        .env({
          ...process.env,
          GIT_DIR: git,
          GIT_WORK_TREE: app.path.root,
        })
        .quiet()
        .nothrow()
      log.info("initialized")
    }

    await $`git --git-dir ${git} add .`.quiet().cwd(app.path.cwd).nothrow()
    log.info("added files")

    const result =
      await $`git --git-dir ${git} commit -m "snapshot" --no-gpg-sign --author="opencode <mail@opencode.ai>"`
        .quiet()
        .cwd(app.path.cwd)
        .nothrow()

    const match = result.stdout.toString().match(/\[.+ ([a-f0-9]+)\]/)
    if (!match) return
    return match![1]
  }

  export async function restore(sessionID: string, snapshot: string) {
    log.info("restore", { commit: snapshot })
    const app = App.info()
    const git = gitdir(sessionID)
    await $`git --git-dir=${git} checkout ${snapshot} --force`.quiet().cwd(app.path.root)
  }

  export async function diff(sessionID: string, commit: string) {
    const git = gitdir(sessionID)
    const result = await $`git --git-dir=${git} diff -R ${commit}`.quiet().cwd(App.info().path.root)
    return result.stdout.toString("utf8")
  }

  function gitdir(sessionID: string) {
    const app = App.info()
    return path.join(app.path.data, "snapshot", sessionID)
  }
}
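A minimal sketch of how the new Snapshot module might be driven, assuming it runs inside the usual App context and that "ses_example" stands in for a real session id; create() returns undefined when the directory is too large to snapshot or no commit was produced:

import { Snapshot } from "./snapshot"

const sessionID = "ses_example"
// Commit the current worktree into the per-session git dir and keep the commit hash.
const commit = await Snapshot.create(sessionID)
if (commit) {
  // diff() returns a reverse patch against the snapshot, restore() force-checks it out.
  console.log(await Snapshot.diff(sessionID, commit))
  await Snapshot.restore(sessionID, commit)
}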
@@ -4,53 +4,136 @@ import { Bus } from "../bus"
import path from "path"
import z from "zod"
import fs from "fs/promises"
import { MessageV2 } from "../session/message-v2"
import { Identifier } from "../id/id"

export namespace Storage {
  const log = Log.create({ service: "storage" })

  export const Event = {
    Write: Bus.event(
      "storage.write",
      z.object({ key: z.string(), content: z.any() }),
    ),
    Write: Bus.event("storage.write", z.object({ key: z.string(), content: z.any() })),
  }

  const state = App.state("storage", () => {
  type Migration = (dir: string) => Promise<void>

  const MIGRATIONS: Migration[] = [
    async (dir: string) => {
      try {
        const files = new Bun.Glob("session/message/*/*.json").scanSync({
          cwd: dir,
          absolute: true,
        })
        for (const file of files) {
          const content = await Bun.file(file).json()
          if (!content.metadata) continue
          log.info("migrating to v2 message", { file })
          try {
            const result = MessageV2.fromV1(content)
            await Bun.write(
              file,
              JSON.stringify(
                {
                  ...result.info,
                  parts: result.parts,
                },
                null,
                2,
              ),
            )
          } catch (e) {
            await fs.rename(file, file.replace("storage", "broken"))
          }
        }
      } catch {}
    },
    async (dir: string) => {
      const files = new Bun.Glob("session/message/*/*.json").scanSync({
        cwd: dir,
        absolute: true,
      })
      for (const file of files) {
        try {
          const { parts, ...info } = await Bun.file(file).json()
          if (!parts) continue
          for (const part of parts) {
            const id = Identifier.ascending("part")
            await Bun.write(
              [dir, "session", "part", info.sessionID, info.id, id + ".json"].join("/"),
              JSON.stringify({
                ...part,
                id,
                sessionID: info.sessionID,
                messageID: info.id,
                ...(part.type === "tool" ? { callID: part.id } : {}),
              }),
            )
          }
          await Bun.write(file, JSON.stringify(info, null, 2))
        } catch (e) {}
      }
    },
  ]

  const state = App.state("storage", async () => {
    const app = App.info()
    const dir = path.join(app.path.data, "storage")
    log.info("init", { path: dir })
    const dir = path.normalize(path.join(app.path.data, "storage"))
    await fs.mkdir(dir, { recursive: true })
    const migration = await Bun.file(path.join(dir, "migration"))
      .json()
      .then((x) => parseInt(x))
      .catch(() => 0)
    for (let index = migration; index < MIGRATIONS.length; index++) {
      log.info("running migration", { index })
      const migration = MIGRATIONS[index]
      await migration(dir)
      await Bun.write(path.join(dir, "migration"), (index + 1).toString())
    }
    return {
      dir,
    }
  })

  const locks = new Map<string, Promise<void>>()
  export async function remove(key: string) {
    const dir = await state().then((x) => x.dir)
    const target = path.join(dir, key + ".json")
    await fs.unlink(target).catch(() => {})
  }

  export async function removeDir(key: string) {
    const dir = await state().then((x) => x.dir)
    const target = path.join(dir, key)
    await fs.rm(target, { recursive: true, force: true }).catch(() => {})
  }

  export async function readJSON<T>(key: string) {
    return Bun.file(path.join(state().dir, key + ".json")).json() as Promise<T>
    const dir = await state().then((x) => x.dir)
    return Bun.file(path.join(dir, key + ".json")).json() as Promise<T>
  }

  export async function writeJSON<T>(key: string, content: T) {
    const target = path.join(state().dir, key + ".json")
    const dir = await state().then((x) => x.dir)
    const target = path.join(dir, key + ".json")
    const tmp = target + Date.now() + ".tmp"
    await Bun.write(tmp, JSON.stringify(content))
    await Bun.write(tmp, JSON.stringify(content, null, 2))
    await fs.rename(tmp, target).catch(() => {})
    await fs.unlink(tmp).catch(() => {})
    Bus.publish(Event.Write, { key, content })
  }

  const glob = new Bun.Glob("**/*")
  export async function* list(prefix: string) {
  export async function list(prefix: string) {
    const dir = await state().then((x) => x.dir)
    try {
      for await (const item of glob.scan({
        cwd: path.join(state().dir, prefix),
        onlyFiles: true,
      })) {
        const result = path.join(prefix, item.slice(0, -5))
        yield result
      }
      const result = await Array.fromAsync(
        glob.scan({
          cwd: path.join(dir, prefix),
          onlyFiles: true,
        }),
      ).then((items) => items.map((item) => path.join(prefix, item.slice(0, -5))))
      result.sort()
      return result
    } catch {
      return
      return []
    }
  }
}
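A short usage sketch of the reworked Storage API, assuming the diff applies as shown; the key names and import path are placeholders. The points worth noting: state() is now async and runs any pending migrations once, writeJSON stages into a .tmp file before renaming it into place and then publishes Storage.Event.Write, and list() resolves to a sorted array of keys rather than acting as an async generator:

import { Storage } from "./storage"

// Write, read back, and enumerate keys under a prefix.
await Storage.writeJSON("session/info/ses_example", { title: "demo" })
const info = await Storage.readJSON<{ title: string }>("session/info/ses_example")
console.log(info.title)
for (const key of await Storage.list("session/info")) {
  console.log(key)
}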
Some files were not shown because too many files have changed in this diff.