Compare commits

...

1439 Commits

Author SHA1 Message Date
Frank
b4c7042c17 wip: vscode extension 2025-07-20 13:27:37 -04:00
Frank
6965787b33 wip: vscode extension 2025-07-20 13:17:51 -04:00
Frank
ce064b8b0e wip: github action 2025-07-20 13:14:14 -04:00
Frank
0fc546fc6b wip: vscode extension 2025-07-20 13:13:18 -04:00
Frank
77ac9e5ec2 wip: github action 2025-07-20 13:13:00 -04:00
Frank
af2c0b3695 wip: github action 2025-07-20 13:07:48 -04:00
Frank
811b22367d wip: github action 2025-07-20 12:41:02 -04:00
Frank
933d50e25a wip: github actions 2025-07-20 12:36:53 -04:00
Frank
800bee2722 wip: vscode extension 2025-07-20 12:00:09 -04:00
Dax Raad
5b4fb96c2e wip: make api logger sort correctly 2025-07-20 11:54:56 -04:00
Frank
1d20bf343d wip: vscode extension 2025-07-20 11:54:30 -04:00
Frank
79d9bf57f7 wip: vscode extension 2025-07-20 11:47:18 -04:00
Frank
7b63db6a13 wip: vscode extension 2025-07-20 11:45:35 -04:00
Frank
0e1565449e wip: vscode extension 2025-07-20 11:33:44 -04:00
GitHub Action
f9a47fe5a3 ignore: update download stats 2025-07-20 2025-07-20 12:04:10 +00:00
adamdotdevin
2bf9d5d4ec wip: file part source in server/api (optional) 2025-07-20 05:39:18 -05:00
adamdotdevin
c18f9ece69 chore: updated tui gitignore 2025-07-20 05:39:18 -05:00
adamdotdevin
4e3c73c4f5 chore: updated stainless script 2025-07-20 05:39:18 -05:00
b0tmtl
8bf2eeccd0 fix(windows): resolve numlock and French keyboard input issues (#1165) 2025-07-20 05:28:15 -05:00
Dax Raad
6232e0fc58 fix bad layout on first render of chat history 2025-07-19 22:38:36 -04:00
Dax Raad
a8b4aed446 fix bash tool rendering 2025-07-19 22:25:15 -04:00
Aiden Cline
03de0c406d fix: title generation for certain providers (#1159) 2025-07-19 20:01:55 -05:00
Aiden Cline
faf8da8743 fix: adjust editor parsing to handle flags like --wait (#1160) 2025-07-19 20:01:25 -05:00
Dax Raad
3386908fd6 ci: ignore 2025-07-19 19:30:12 -04:00
Dax Raad
5a8847952a ci: ignore 2025-07-19 19:29:05 -04:00
Dax Raad
87d21ebf2b Revert "fix: prevent sparse spacing in hyphenated words (#1102)"
This reverts commit 2b44dbdbf1.
2025-07-19 19:25:15 -04:00
Timo Clasen
a524fc545c fix(hooks): prevent session_complete hook from firing on subagent sessions (#1149) 2025-07-19 18:20:07 -05:00
Dax Raad
4316edaf43 fix first run github copilot 2025-07-19 19:19:38 -04:00
Dax Raad
d845924e8b ci: ignore 2025-07-19 19:00:17 -04:00
Dax Raad
a29b322bdd ci: ignore 2025-07-19 18:54:46 -04:00
Dax Raad
9723ffa7a6 ignore: ci 2025-07-19 18:48:43 -04:00
Dax Raad
f06cd88773 perf: more performance improvements 2025-07-19 18:41:21 -04:00
Dax Raad
9af92b6914 perf: scroll to bottom in thread 2025-07-19 17:55:01 -04:00
Dax Raad
8f64c4b312 disable todo tools when running as task 2025-07-19 15:54:11 -04:00
Dax Raad
a32877e908 ignore: create memo abstraction 2025-07-19 15:26:26 -04:00
Dax Raad
6465c9c44a fix openrouter caching 2025-07-19 15:11:21 -04:00
Dax Raad
4699739814 shitty hack for terrible charm bubbletea performance 2025-07-19 15:00:11 -04:00
Dax Raad
c1d87c32a2 remove log level from config 2025-07-19 13:37:02 -04:00
Aiden Cline
9c5d9be33a fix: bullet display (#1148) 2025-07-19 12:36:50 -05:00
Aiden Cline
97d9c851e6 fix: escape ansi sequences (#1139) 2025-07-19 12:02:24 -05:00
Dax Raad
76bd702992 docs: fix typo 2025-07-19 12:45:33 -04:00
Yihui Khuu
50c453e577 feat(tui): collapse session header into single line when sharing is disabled (#1145) 2025-07-19 11:43:04 -05:00
Dax Raad
86d5b25d18 pass through model.options properly without having to nest it under provider name. you may have to update your configs; see https://opencode.ai/docs/models/#openrouter for an example 2025-07-19 12:41:58 -04:00
Tom
2b44dbdbf1 fix: prevent sparse spacing in hyphenated words (#1102) 2025-07-19 09:28:40 -05:00
Dax Raad
4bbbbac5f6 vercel ai gateway 2025-07-19 10:08:36 -04:00
GitHub Action
3c3a997d2a ignore: update download stats 2025-07-19 2025-07-19 12:04:11 +00:00
CodinCat
1676f8b5dd fix table heading rendering (#1138) 2025-07-18 20:17:22 -05:00
Dax Raad
c87a7469a0 ci: rollback install script 2025-07-18 18:57:58 -04:00
Michael Hanson
132e26ddbf docs: Clarify MCP config instructions (#1026) 2025-07-18 16:04:29 -04:00
Rami Chowdhury
f1da70b1de feat(provider): add Gemini tool schema sanitization (#1132) 2025-07-18 16:02:54 -04:00
Aiden Cline
5c9d1910af fix: func called before definition (#1134) 2025-07-18 15:00:32 -05:00
Timo Clasen
18abcab208 feat(config): make small model configurable (#1030) 2025-07-18 14:16:50 -04:00
opencode-agent[bot]
01e7dc2d02 Added install dir priority & user feedback (#1129)
Co-authored-by: opencode-agent[bot] <opencode-agent[bot]@users.noreply.github.com>
Co-authored-by: thdxr <thdxr@users.noreply.github.com>
2025-07-18 14:15:10 -04:00
adamdotdevin
611854e4b6 feat(tui): simpler layout, always stretched 2025-07-18 13:03:27 -05:00
Dax
d56dec4ba7 wip: optional IDs in api (#1128) 2025-07-18 13:42:50 -04:00
Dax Raad
c952e9ae3d message rendering performance improvements 2025-07-18 13:40:07 -04:00
GitHub Action
6470243095 ignore: update download stats 2025-07-18 2025-07-18 12:04:28 +00:00
GitHub Action
c8321cfbd9 ignore: update download stats 2025-07-18 2025-07-18 12:02:18 +00:00
Yihui Khuu
46c246e01f fix: \{return} should be replaced with new line on all lines (#1119) 2025-07-18 06:22:36 -05:00
adamdotdevin
9964d8e6c0 fix: model cost overrides 2025-07-18 05:08:35 -05:00
Timo Clasen
df33143396 feat(tui): parse for file attachments when exiting EDITOR (#1117) 2025-07-18 04:47:20 -05:00
Aiden Cline
571aeaaea2 tweak: remove needless resorting (#1116) 2025-07-18 04:42:43 -05:00
Aiden Cline
edfea03917 tweak: fix [object Object] in logging (#1114) 2025-07-18 04:41:23 -05:00
Tom
81c88cc742 fix(tui): ensure viewport scrolls to bottom on new messages (#1110) 2025-07-18 04:41:03 -05:00
Mike Wallio
99b9390d80 Update to a customized beast mode v3 for opencode. (#1109) 2025-07-17 20:10:06 -05:00
Dax Raad
23c30521d8 only enable ruff if it seems to be used 2025-07-17 18:07:06 -04:00
Wendell Misiedjan
e681d610de feat: support AWS_BEARER_TOKEN_BEDROCK for amazon bedrock provider autoloading (#1094) 2025-07-17 09:12:30 -05:00
Aiden Cline
a1fdeded3e tweak: allow mcp servers to include headers (#1096) 2025-07-17 09:11:48 -05:00
GitHub Action
2051312d12 ignore: update download stats 2025-07-17 2025-07-17 14:07:13 +00:00
Alexander Drottsgård
20cb7a76af feat(tui): highlight current session in sessions modal (#1093) 2025-07-17 07:40:15 -05:00
Timo Clasen
a493aec174 feat(tui): remove share commands from help if sharing is disabled (#1087) 2025-07-17 04:28:12 -05:00
Aiden Cline
3ce3ac8e61 fix: message error centering (#1085) 2025-07-17 04:27:40 -05:00
Timo Clasen
91ad64feda fix(tui): user defined ctrl+z should take precedence over suspending (#1088) 2025-07-17 04:27:02 -05:00
Timo Clasen
60b55f9d92 feat(tui): remove sharing info from session header when sharing is disabled (#1076) 2025-07-16 17:36:48 -05:00
Timo Clasen
3c6c2bf13b docs(share): add explicit manual share mode (#1074) 2025-07-16 16:08:25 -05:00
Aiden Cline
d4f9375548 fix: type 'reasoning' was provided without its required following item (#1072) 2025-07-16 15:59:40 -05:00
Jay V
28b39f547e docs: edit 2025-07-16 16:59:12 -04:00
Jay V
7520f5efa8 docs: update enterprise doc 2025-07-16 16:44:28 -04:00
Jay V
eb4cdf4b20 docs: config doc 2025-07-16 16:27:44 -04:00
Jay V
9f6fc1c3c5 docs: edits 2025-07-16 16:20:09 -04:00
Mike Wallio
dfede9ae6e Remove binary file opencode (#1069) 2025-07-16 15:10:40 -05:00
Daniel Saldarriaga López
fc45c0c944 docs: fix keybinds documentation to match actual config schema (#867) 2025-07-16 15:34:52 -04:00
adamdotdevin
9d869f784c fix(tui): expand edit calls 2025-07-16 14:33:57 -05:00
adamdotdevin
bd244f73af fix(tui): slightly faster scroll speed 2025-07-16 14:26:46 -05:00
Dax Raad
dd34556e9c only include severity 1 diagnostics from lsp in edit tool output 2025-07-16 15:25:37 -04:00
adamdotdevin
f7dd48e60d feat(tui): more ways to quit 2025-07-16 14:20:28 -05:00
Dax Raad
93c779cf48 docs: better variable examples 2025-07-16 14:56:24 -04:00
adamdotdevin
360c04c542 docs: copying text 2025-07-16 13:26:26 -05:00
adamdotdevin
529fd57e75 fix: missing dependency 2025-07-16 12:58:29 -05:00
adamdotdevin
faea3777e1 fix: missing dependency 2025-07-16 12:56:11 -05:00
Aiden Cline
a4664e2344 fix: generate title should use same options as model it uses to gen (#1064) 2025-07-16 12:46:52 -05:00
adamdotdevin
cdc1d8a94d feat(tui): layout config to render full width 2025-07-16 12:43:02 -05:00
Jay V
fdd6d6600f docs: rename workflow 2025-07-16 13:38:00 -04:00
Jay V
9f44cfd595 docs: discord releases 2025-07-16 13:17:04 -04:00
Aiden Cline
70229b150c Fix: better title generation (needs to change due to small models) (#1059) 2025-07-16 11:47:56 -05:00
John Henry Rudden
050ff943a6 Fix: Add escape sequence for @ symbols to prevent send blocking (#1029) 2025-07-16 11:18:48 -05:00
Tom
88b58fd6a0 fix: Prevent division by zero in context percentage calculation (#1055) 2025-07-16 09:35:20 -05:00
Jeremy Mack
5d67e13df5 fix: grep omitting text after a colon (#1053) 2025-07-16 09:09:05 -05:00
Adi Yeroslav
57d1a60efc feat(tui): shift+tab to cycle modes backward (#1049) 2025-07-16 07:43:48 -05:00
Nipuna Perera
add81b9739 Enhance private npm registry support (#998) 2025-07-16 08:31:38 -04:00
GitHub Action
81bdb8e269 ignore: update download stats 2025-07-16 2025-07-16 12:04:30 +00:00
adamdotdevin
a563fdd287 fix(tui): diagnostics rendering 2025-07-16 06:55:14 -05:00
adamdotdevin
7c93bf5993 fix(tui): pending tool call width 2025-07-16 06:27:32 -05:00
adamdotdevin
6a5a4247c6 fix(gh): build 2025-07-16 06:13:43 -05:00
adamdotdevin
a39136a2a0 fix(tui): render attachments in user messages in accent color 2025-07-16 06:09:27 -05:00
adamdotdevin
9f5b59f336 chore: messages cleanup 2025-07-16 06:09:27 -05:00
adamdotdevin
01c125b058 fix(tui): faster cache algo 2025-07-16 06:09:27 -05:00
adamdotdevin
d41aa2bc72 chore(tui): simplify messages component, remove navigate, add copy last message 2025-07-16 06:09:26 -05:00
Robin Moser
f45deb37f0 fix: don't sign snapshot commits (#1046) 2025-07-16 04:46:32 -05:00
Matias Insaurralde
e89972a396 perf: move ANSI regex compilations to package level (#1040)
Signed-off-by: Matías Insaurralde <matias@insaurral.de>
2025-07-16 04:20:25 -05:00
Frank
c3c647a21a wip: github actions 2025-07-16 16:20:06 +08:00
Frank
b79167ce66 sync 2025-07-16 16:12:31 +08:00
Frank
7ac0a2bc65 wip: github actions 2025-07-16 16:05:51 +08:00
Frank
cb032cff2b wip: github actions 2025-07-16 03:57:14 -04:00
Frank
867a69a751 wip: github actions 2025-07-16 03:54:20 -04:00
Frank
20b8efcc50 wip: github actions 2025-07-16 15:36:23 +08:00
Frank
a86d42149f wip: github actions 2025-07-16 14:59:53 +08:00
Frank
82a36acfe3 wip: github action 2025-07-16 14:59:53 +08:00
Dax Raad
0793c3f2a3 clean up export command 2025-07-15 21:50:43 -04:00
Dax Raad
5c860b0d69 fix share page v1 message 2025-07-15 21:35:32 -04:00
Dax Raad
05bb127a8e enable bash tool in plan mode 2025-07-15 21:28:03 -04:00
aron
1bbd84008f move spoof prompt to support anthropic with custom modes (#1031) 2025-07-15 21:16:27 -04:00
Stephen Murray
fdfd4d69d3 add support for modified gemini-cli system prompt (#1033)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-07-15 21:13:11 -04:00
Jay
7f659cce36 docs: Update README.md 2025-07-15 20:09:26 -04:00
Jay V
48fcaa83be docs: fix config 2025-07-15 19:54:51 -04:00
Jay V
70c16c4c95 docs: adding action to notify discord 2025-07-15 19:49:38 -04:00
Jay V
c1e1ef6eb5 docs: readme 2025-07-15 18:32:04 -04:00
Jay V
bb155db8b2 docs: share tweak copy button 2025-07-15 18:25:25 -04:00
John Henry Rudden
7c91f668d1 docs: share add copy button to messages in web interface (#902)
Co-authored-by: Jay <air@live.ca>
2025-07-15 17:56:33 -04:00
Jay V
1af103d29e docs: share handle non bundled langs 2025-07-15 17:47:22 -04:00
Jay V
8a3e581edc docs: share fix diff bugs 2025-07-15 17:47:22 -04:00
Jay V
749e7838a4 docs: share page task tool 2025-07-15 17:47:22 -04:00
Dax Raad
73b46c2bf9 docs: document base URL 2025-07-15 14:57:50 -04:00
Joe Schmitt
8bd250fb15 feat(tui): add /export command to export conversation to editor (#989)
Co-authored-by: opencode <noreply@opencode.ai>
2025-07-15 13:53:21 -05:00
Dax Raad
b1ab641905 add small model for title generation 2025-07-15 14:00:52 -04:00
adamdotdevin
76e256ed64 fix(tui): wider max width 2025-07-15 12:44:41 -05:00
adamdotdevin
4f955f2127 fix(tui): mouse scroll ansi parsing and perf 2025-07-15 12:03:30 -05:00
Aiden Cline
bbeb579d3a tweak: (opencode run): adjust tool call rendering, reduce number of "Unknowns" (#1012) 2025-07-15 11:22:57 -05:00
Timo Clasen
f707fb3f8d feat(tui): add keymap to remove entries from recently used models (#1019) 2025-07-15 11:20:56 -05:00
adamdotdevin
6b98acb7be chore: update stainless defs 2025-07-15 10:03:11 -05:00
adamdotdevin
2487b18f62 chore: update stainless script to kick off prod build 2025-07-15 08:15:31 -05:00
adamdotdevin
533f64fe26 fix(tui): rework lists and search dialog 2025-07-15 08:07:26 -05:00
Dax Raad
b5c85d3806 fix logic for suppressing snapshots in big directories 2025-07-15 09:07:04 -04:00
Dax Raad
bcf952bc8a upgrade ai sdk 2025-07-15 09:06:35 -04:00
GitHub Action
a6dc75a44c ignore: update download stats 2025-07-15 2025-07-15 12:04:28 +00:00
Joohoon Cha
416daca9c6 fix(tui): close completion dialog on ctrl+h (#1005) 2025-07-15 06:24:05 -05:00
Aiden Cline
636fe0fb64 Fix: failed to open session (#999) 2025-07-15 05:40:29 -05:00
Frank
95e0957d64 wip: github actions 2025-07-15 17:45:16 +08:00
Dax Raad
2eefdae6a9 ignore: fix types 2025-07-15 00:56:03 -04:00
Dax Raad
d62746ceb7 fix panic 2025-07-15 00:35:02 -04:00
Dax Raad
4b2ce14ff3 bring back task tool 2025-07-15 00:05:54 -04:00
Jase Kraft
294a11752e fix: --continue pull the latest session id consistently (#918)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-07-14 20:32:00 -04:00
Dax Raad
1cf1d1f634 docs: fix agents.md 2025-07-14 20:23:05 -04:00
Ryan Roden-Corrent
2ce694d41f Add support for job-control suspend (ctrl+z/SIGTSTP). (#944) 2025-07-14 20:13:46 -04:00
CodinCat
d6eff3b3a3 improve error handling and logging for GitHub API failures in upgrade and install script (#972) 2025-07-14 20:13:12 -04:00
Dax Raad
e63a6d45c1 docs: README 2025-07-14 20:10:43 -04:00
Dax Raad
93686519ba docs: README 2025-07-14 20:06:15 -04:00
Mike Wallio
f593792fb5 Standardize parameter description references in Edit and MultiEdit tools (#984) 2025-07-14 20:03:59 -04:00
Dax Raad
2cdb37c32b support anthropic console login flow 2025-07-14 18:07:55 -04:00
Timo Clasen
535d79b64c docs: fix typo (#982) 2025-07-14 16:40:16 -04:00
Dax Raad
b4e4c3f662 wip: snapshot 2025-07-14 15:29:08 -04:00
adamdotdevin
ba676e7ae0 fix(tui): support readline nav in new search component 2025-07-14 12:20:58 -05:00
adamdotdevin
a1c8e5af45 chore: use new search component in find dialog 2025-07-14 12:15:47 -05:00
adamdotdevin
f1e7e7c138 feat(tui): even better model selector 2025-07-14 12:15:46 -05:00
Dax Raad
80b77caec0 ignore: share page fix 2025-07-14 13:13:33 -04:00
Dorian Karter
86a2ea44b5 feat(tui): add support for readline list nav (ctrl-p/ctrl-n) (#955) 2025-07-14 10:21:09 -05:00
Dax Raad
a2002c88c6 wip: update sdk 2025-07-14 11:18:08 -04:00
opencode-agent[bot]
d8bcf4f4e7 Fix issue: Option to update username shown in conversations. (#975)
Co-authored-by: opencode-agent[bot] <opencode-agent[bot]@users.noreply.github.com>
Co-authored-by: thdxr <thdxr@users.noreply.github.com>
2025-07-14 11:03:04 -04:00
Dax Raad
31e0326f78 fix init command and escape to cancel 2025-07-14 10:48:17 -04:00
adamdotdevin
a53d2ea356 fix(tui): build and bg color 2025-07-14 09:14:02 -05:00
adamdotdevin
229a280652 fix(tui): find dialog bg color 2025-07-14 09:09:55 -05:00
Nicholas Hamilton
8d0350d923 feat: ability to create new session from session dialog (#920) 2025-07-14 09:04:43 -05:00
Almir Sarajčić
4192d7eacc Fix failing git hooks (#966) 2025-07-14 07:52:29 -05:00
Munawwar Firoz
7b8b4cf8c7 feat: ctrl+left arrow / ctrl+right arrow key support (#969) 2025-07-14 07:16:06 -05:00
Almir Sarajčić
1f4de75348 Explain usage of external references in AGENTS.md (#965) 2025-07-14 07:06:37 -05:00
GitHub Action
457755c690 ignore: update download stats 2025-07-14 2025-07-14 12:04:16 +00:00
Aiden Cline
052a1e7514 fix: file command visual bug (#959) 2025-07-14 07:03:02 -05:00
Daniel Nouri
139d6e2818 Fix clipboard on Wayland systems (#941)
Co-authored-by: Daniel Nouri <daniel@redhotcar>
2025-07-14 06:57:45 -05:00
Dax Raad
06554efdf4 get rid of cli markdown dep 2025-07-13 23:06:31 -04:00
Dax Raad
67e9bda94f ci 2025-07-13 22:58:33 -04:00
Dax Raad
53bb6b4c4f fix missing tokens 2025-07-13 22:56:29 -04:00
Dax Raad
73d54c7068 fix type error 2025-07-13 17:25:13 -04:00
Dax
90d6c4ab41 Part data model (#950) 2025-07-13 17:22:11 -04:00
opencode-agent[bot]
736396fc70 Added sharing config with auto/disabled options (#951)
Co-authored-by: opencode-agent[bot] <opencode-agent[bot]@users.noreply.github.com>
Co-authored-by: thdxr <thdxr@users.noreply.github.com>
2025-07-13 16:43:58 -04:00
Dax Raad
177bfed93e ci: github action 2025-07-13 16:22:58 -04:00
Dax Raad
91f8477ef5 wip: mcp 2025-07-13 16:22:16 -04:00
John Henry Rudden
f04a5e50ee fix: deduplicate command suggestions (#934) 2025-07-13 14:47:26 -05:00
Aiden Cline
bb28b70700 Fix: title generation (#949) 2025-07-13 14:46:36 -05:00
Frank
7361a02ef3 wip: github actions 2025-07-13 23:59:25 +08:00
GitHub Action
d465f150fc ignore: update download stats 2025-07-13 2025-07-13 12:04:11 +00:00
Dax Raad
17fa8c117b fix packages being reinstalled on every start 2025-07-12 12:41:12 -04:00
Muzammil Khan
9aa0c40a00 feat: add more ignore patterns to the ls tool (#913) 2025-07-12 12:06:58 -04:00
GitHub Action
fd4648da17 ignore: update download stats 2025-07-12 2025-07-12 12:03:59 +00:00
Dax Raad
aadca5013a fix share page timestamps 2025-07-11 21:49:20 -04:00
Dax Raad
5c3d490e59 share page hide step-finish events 2025-07-11 21:45:56 -04:00
Dax Raad
1254f48135 fix issue preventing things from working when node_modules or package.json present in ~/ 2025-07-11 21:09:39 -04:00
Dax Raad
1729c310d9 switch global config to ~/.config/opencode/opencode.json 2025-07-11 20:51:23 -04:00
Dax Raad
0130190bbd docs: add model docs 2025-07-11 20:33:06 -04:00
Aiden Cline
97a31ddffc tweak: plan interactions should match web (TUI) (#895) 2025-07-11 18:03:22 -04:00
zWing
3249420ad1 fix: avoid overwriting the provider.option.baseURL (#880) 2025-07-11 18:01:28 -04:00
Dax Raad
4bb8536d34 introduce cache version concept for auto cleanup when breaking cache changes happen 2025-07-11 17:50:49 -04:00
Jay
c73d4a137e docs: Update troubleshooting.mdx 2025-07-11 17:50:25 -04:00
Dax Raad
57ac8f2741 wip: stats 2025-07-11 17:37:41 -04:00
Jay V
2f1acee5a1 docs: share page add time footer back 2025-07-11 14:24:20 -04:00
Jay V
9ca54020ac docs: share page mobile bugs 2025-07-11 14:24:20 -04:00
Jay V
f7d44b178b docs: share fix mobile diffs 2025-07-11 14:24:20 -04:00
Sergii Kozak
b4950a157c fix(session): add fallback for undefined output token limit (#860)
Co-authored-by: opencode <noreply@opencode.ai>
2025-07-11 10:55:13 -04:00
alexz
dfbef066c7 fix: ENAMETOOLONG: name too long when adding custom mode (#881) 2025-07-11 10:54:52 -04:00
GitHub Action
26fd76fbee ignore: update download stats 2025-07-11 2025-07-11 12:04:08 +00:00
adamdotdevin
04769d8a26 fix(tui): help commands bg color 2025-07-11 06:03:21 -05:00
adamdotdevin
34b576d9b5 fix(tui): don't include /mode trigger 2025-07-11 06:01:51 -05:00
adamdotdevin
22b244f847 fix(tui): actually fix mouse ansi codes leaking 2025-07-11 06:00:20 -05:00
Aiden Cline
7e1fc275e7 fix: avoid worker exception, graceful 404 (#869) 2025-07-11 04:55:56 -05:00
Frank
3b9b391320 wip: github actions 2025-07-11 06:55:13 +08:00
Frank
766bfd025c wip: github actions 2025-07-11 05:23:24 +08:00
Jay V
c7f30e1065 docs: share page fix terminal part 2025-07-10 17:21:21 -04:00
Frank
1c4fd7f28f Api: add endpoint for getting github app token 2025-07-11 05:01:27 +08:00
adamdotdevin
85805d2c38 fix(tui): handle SIGTERM, closes #319 2025-07-10 15:59:03 -05:00
Timo Clasen
982cb3e71a fix(tui): center help dialog (#853) 2025-07-10 15:56:19 -05:00
adamdotdevin
294d0e7ee3 fix(tui): mouse wheel ansi codes leaking into editor 2025-07-10 15:49:58 -05:00
Jay V
8be1ca836c docs: fix diag styles 2025-07-10 16:38:51 -04:00
Jay V
2e5f96fa41 docs: share page attachment 2025-07-10 16:38:51 -04:00
Dax Raad
c056b0add9 add step finish part 2025-07-10 16:25:38 -04:00
Dax Raad
b00bb3c083 run: properly close session.list 2025-07-10 16:13:01 -04:00
Dax Raad
d9befd3aa6 disable filewatcher, fixes file descriptor leak 2025-07-10 15:58:45 -04:00
Dax Raad
49de703ba1 config: escape file: string content 2025-07-10 15:38:58 -04:00
Dax Raad
22988894c8 ci: slow down stats 2025-07-10 15:31:06 -04:00
adamdotdevin
34b1754f25 docs: clipboard requirements on linux 2025-07-10 13:12:37 -05:00
adamdotdevin
54fe3504ba feat(tui): accent editor border on leader key 2025-07-10 12:57:22 -05:00
Jay V
d2c862e32d docs: edit local models 2025-07-10 13:49:24 -04:00
Jay V
afc53afb35 docs: edit mode 2025-07-10 13:29:37 -04:00
Gabriel Garrett
b56e49c5dc Adds real example in docs of how to configure custom provider (#840) 2025-07-10 13:29:30 -04:00
Aiden Cline
8b2a909e1f fix: encode & decode file paths (#843) 2025-07-10 11:19:54 -05:00
Jay V
e9c954d45e docs: add modes to sidebar 2025-07-10 12:07:44 -04:00
Jay V
6f449d13af docs: add modes to sidebar 2025-07-10 12:07:18 -04:00
Dax Raad
6e375bef0d docs: modes 2025-07-10 11:53:28 -04:00
Dax Raad
67106a6967 docs: add config variable docs 2025-07-10 11:48:55 -04:00
Dax Raad
b5d690620d support env and file pointers in config 2025-07-10 11:45:31 -04:00
Dax Raad
9db3ce1d0b opencode run respects mode 2025-07-10 11:28:28 -04:00
Dax Raad
1cc55b68ef wip: scrap 2025-07-10 11:25:37 -04:00
Dax Raad
469f667774 set max output token limit to 32_000 2025-07-10 11:25:37 -04:00
adamdottv
6603d9a9f0 feat: --mode flag passed to tui 2025-07-10 10:19:25 -05:00
adamdottv
5dc1920a4c feat: mode flag in cli run command 2025-07-10 10:13:15 -05:00
adamdottv
d3e5f3f3a8 feat(tui): add token and cost info to session header 2025-07-10 10:06:51 -05:00
adamdottv
ce4cb820f7 feat(tui): modes 2025-07-10 10:06:51 -05:00
Dax Raad
ba5be6b625 make LSP lazy again 2025-07-10 09:37:40 -04:00
adamdottv
f95c3f4177 fix(tui): fouc in textarea on app load 2025-07-10 08:20:17 -05:00
adamdottv
d2b1307bff fix(tui): textarea cursor sync issues with attachments 2025-07-10 07:49:36 -05:00
adamdottv
b40ba32adc fix(tui): textarea issues 2025-07-10 07:38:57 -05:00
GitHub Action
ce0cebb7d7 ignore: update download stats 2025-07-10 2025-07-10 12:04:15 +00:00
Dax Raad
f478f89a68 temporary grok 4 patch 2025-07-10 07:57:55 -04:00
Dax Raad
85d95f0f2b disable lsp on non-git folders 2025-07-10 07:39:02 -04:00
Dax Raad
1515efc77c fix session is busy error 2025-07-10 07:27:03 -04:00
Josh Medeski
6d393759e1 feat(tui): substitute cwd home path on status bar (#808) 2025-07-10 06:12:19 -05:00
Adi Yeroslav
a1701678cd feat(tui): /editor - change the auto-send behavior to put content in input box instead (#827) 2025-07-10 05:57:52 -05:00
Timo Clasen
c411a26d6f feat(tui): hide cost if using subscription model (#828) 2025-07-10 05:56:36 -05:00
adamdottv
85dbfeb314 feat(tui): @symbol attachments 2025-07-10 05:53:00 -05:00
Dax Raad
085c0e4e2b respect go.work when spawning LSP 2025-07-09 22:54:47 -04:00
Dax Raad
8404a97c3e better detection of prettier formatter 2025-07-09 22:37:31 -04:00
Dax Raad
0ee3b1ede2 do not wait for LSP to be fully ready 2025-07-09 21:59:38 -04:00
Dax Raad
a826936702 modes concept 2025-07-09 21:59:38 -04:00
Jay V
fd4a5d5a63 docs: share doc edit 2025-07-09 20:26:31 -04:00
Jay V
69cf1d7b7e docs: share doc 2025-07-09 20:24:09 -04:00
Jay V
8e0a1d1167 docs: edit troubleshooting 2025-07-09 19:55:14 -04:00
Timo Clasen
f22021187d feat(tui): treat pasted text file paths as file references (#809) 2025-07-09 18:37:39 -05:00
Jay V
febecc348a docs: enterprise doc 2025-07-09 15:46:57 -04:00
Jay V
c5ccfc3e94 docs: share page last part fix 2025-07-09 15:46:57 -04:00
Mike Wallio
1f6efc6b94 Add gpt-4.1 beast prompt (#778)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-07-09 12:11:54 -04:00
Frank Denis
727fe6f942 LSP: fix SimpleRoots to actually search in the root directory (#795) 2025-07-09 10:35:06 -05:00
Dax Raad
a91e79382e ci: remove checked in config.schema.json 2025-07-09 11:30:42 -04:00
Dax Raad
5c626e0a2f ci: generate config schema as part of build 2025-07-09 11:25:58 -04:00
adamdottv
8e9e383219 chore: troubleshooting docs 2025-07-09 10:12:36 -05:00
Dax Raad
f383008cc1 lsp: spawn only a single tsserver in project root 2025-07-09 11:06:44 -04:00
adamdottv
303ade25ed feat: discord redirect 2025-07-09 10:01:42 -05:00
adamdottv
53f8e7850e feat: configurable log levels 2025-07-09 10:00:03 -05:00
adamdottv
ca8ce88354 feat(tui): move logging to server logs 2025-07-09 08:16:10 -05:00
adamdottv
37a86439c4 fix(tui): don't panic on missing linux clipboard tool 2025-07-09 06:51:58 -05:00
adamdottv
269b43f4de fix(tui): markdown wrapping off sometimes 2025-07-09 06:41:53 -05:00
adamdottv
3f25e5bf86 chore: internal clipboard package 2025-07-09 04:55:24 -05:00
Aiden Cline
67765fa47c tweak: keep completion options open when trigger is still present (#789) 2025-07-09 04:42:31 -05:00
adamdottv
58b1c58bc5 fix(tui): clear command priority 2025-07-08 19:26:50 -05:00
Dax Raad
d80badc50f ci: ignore chore commits 2025-07-08 20:05:33 -04:00
Dax Raad
75279e5ccf wip: symbols endpoint 2025-07-08 20:05:33 -04:00
Yihui Khuu
7893b84614 Add debounce before exit when using non-leader exit command (#759) 2025-07-08 18:53:38 -05:00
Dax Raad
cfc715bd48 wip: remove excess import 2025-07-08 19:51:09 -04:00
adamdottv
39bcba85a9 chore: vendor clipboard into go package 2025-07-08 18:48:40 -05:00
adamdottv
da3df51316 chore: remove clipboard temp 2025-07-08 18:47:59 -05:00
adamdottv
12190e4efc chore: vendor clipboard into go package 2025-07-08 18:46:42 -05:00
Aiden Cline
d2a9b2f64a fix: documentation typo (#781) 2025-07-08 18:30:46 -05:00
adamdottv
aacadd8a8a fix(tui): panic when reading/writing clipboard on linux 2025-07-08 18:29:45 -05:00
Jay V
969154a473 docs: share page image 2025-07-08 19:24:21 -04:00
Jay V
4d6ca3fab1 docs: share page many model case 2025-07-08 19:08:33 -04:00
Dax Raad
00ea5082e7 add typescript lsp timeout if it fails to start 2025-07-08 18:33:12 -04:00
Dax Raad
4a878b88c0 properly load typescript lsp in subpaths 2025-07-08 18:18:45 -04:00
Dax Raad
6de955847c big rework of LSP system 2025-07-08 18:14:49 -04:00
Jay V
3ba5d528b4 docs: share bugs 2025-07-08 18:14:36 -04:00
Jay V
f99e2b3429 docs: share error part 2025-07-08 18:00:08 -04:00
Jay V
7e4e6f6e51 docs: share page bugs 2025-07-08 17:18:38 -04:00
Jay V
0514f3f43b docs: share image model 2025-07-08 17:18:38 -04:00
Timo Clasen
1e07384364 fix: make compact command interruptible (#691)
Co-authored-by: GitHub Action <action@github.com>
2025-07-08 15:37:25 -05:00
strager
4c4739c422 fix(tool): fix ripgrep invocation on Windows (#700)
Co-authored-by: Adam <2363879+adamdotdevin@users.noreply.github.com>
2025-07-08 15:36:26 -05:00
Rami Chowdhury
2d8b90a6ff feat(storage): ensure storage directory exists and handle paths correctly (#771) 2025-07-08 15:34:11 -05:00
Robb Currall
a2fa7ffa42 fix: support cancelled task state (#775) 2025-07-08 15:33:39 -05:00
Frank Denis
f7d6175283 Add support for the Zig Language Server (ZLS) (#756) 2025-07-08 15:31:11 -05:00
Tommy
9ed187ee52 docs: add terminal requirements (#708) 2025-07-08 15:30:05 -05:00
Gal Schlezinger
14d81e574b [config json schema] declare default values and examples for in-ide documentation (#754) 2025-07-08 15:29:07 -05:00
adamdottv
6efe8cc8df fix: env has to be string 2025-07-08 14:59:03 -05:00
adamdottv
daa5fc916a fix(tui): pasting causes panic on macos 2025-07-08 14:57:17 -05:00
adamdottv
c659496b96 fix(tui): model/provider arg parsing 2025-07-08 14:11:57 -05:00
Timo Clasen
21fbf21cb6 fix(copilot): add vision request header (#773) 2025-07-08 14:01:54 -05:00
adamdottv
f31cbf2744 fix: image reading 2025-07-08 13:02:13 -05:00
Aiden Cline
8322f18e03 fix: display errors when using opencode run ... (#751) 2025-07-08 10:38:11 -05:00
adamdottv
562bdb95e2 fix: include symlinks in ripgrep searches 2025-07-08 10:02:19 -05:00
Dax
a57ce8365d Update STATS.md 2025-07-08 10:30:02 -04:00
adamdottv
0da83ae67e feat(tui): command aliases 2025-07-08 08:20:55 -05:00
adamdottv
662d022a48 feat(tui): paste images and pdfs 2025-07-08 08:09:01 -05:00
GitHub Action
9efef03919 ignore: update download stats 2025-07-08 2025-07-08 12:04:27 +00:00
GitHub Action
7a9fb3fa92 ignore: update download stats 2025-07-08 2025-07-08 10:51:06 +00:00
adamdottv
ea96ead346 feat(tui): handle --model and --prompt flags 2025-07-08 05:50:18 -05:00
Dax Raad
6100a77b85 start file watcher only for tui 2025-07-07 21:05:04 -04:00
Dax Raad
c7a59ee2b1 better handling of aborting sessions 2025-07-07 20:59:00 -04:00
Jay V
a272b58fe9 docs: intro 2025-07-07 17:41:46 -04:00
Dax Raad
9948fcf1b6 fix crash when running on new project 2025-07-07 17:39:52 -04:00
Dax Raad
0d50c867ff fix mcp tools corrupting session 2025-07-07 17:05:16 -04:00
Dax Raad
27f7e02f12 run: truncate prompt 2025-07-07 16:41:42 -04:00
Jay V
0f93ecd564 docs: canonical url 2025-07-07 16:37:00 -04:00
Dax Raad
da909d9684 append piped stdin to prompt 2025-07-07 16:33:21 -04:00
Jay V
facd851b11 docs: dynamic domain 2025-07-07 16:31:15 -04:00
Dax Raad
c51de945a5 Add stdin support to run command
Allow piping content to opencode run when no message arguments are provided, enabling standard Unix pipe patterns for better CLI integration.

🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-07-07 16:29:13 -04:00
Jay V
9253a3ca9e docs: debug 2025-07-07 16:26:23 -04:00
Dax Raad
7cfa297a78 wip: model and prompt flags for tui 2025-07-07 16:24:37 -04:00
Jay V
661b74def6 docs: debug info 2025-07-07 16:13:26 -04:00
Dax Raad
b478e5655c fix interrupt 2025-07-07 16:12:47 -04:00
Dax
f884766445 v2 message format and upgrade to ai sdk v5 (#743)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Liang-Shih Lin <liangshihlin@proton.me>
Co-authored-by: Dominik Engelhardt <dominikengelhardt@ymail.com>
Co-authored-by: Jay V <air@live.ca>
Co-authored-by: adamdottv <2363879+adamdottv@users.noreply.github.com>
2025-07-07 15:53:43 -04:00
Jay V
76b2e4539c docs: discord 2025-07-07 14:44:37 -04:00
Dominik Engelhardt
d87922c0eb Fix Elixir LSP startup (#726) 2025-07-06 23:37:46 -04:00
Liang-Shih Lin
2446483df5 fix: Skip opencode upgrade if same version (#720) 2025-07-06 23:36:59 -04:00
GitHub Action
f4c453155d Update download stats 2025-07-06 2025-07-06 12:03:56 +00:00
Dax Raad
969ad80ed2 fix openrouter caching with anthropic, should be a lot cheaper 2025-07-05 11:39:54 -04:00
GitHub Action
af064b41d7 Update download stats 2025-07-05 2025-07-05 12:03:56 +00:00
Dax Raad
ea6bfef21a use full filepath 2025-07-04 17:58:03 -04:00
Jay V
107363b1d9 docs: fix show more in share page 2025-07-04 17:57:12 -04:00
Dax Raad
85214d7c59 fix input bar not rendering capital letters 2025-07-04 17:21:51 -04:00
Timo Clasen
997cb2d945 fix(tui): optimistic rendering (#692) 2025-07-04 16:06:57 -05:00
Dax Raad
45b139390c make file attachments work good like 2025-07-04 16:21:26 -04:00
Jay V
994368de15 docs: share fix scrolling again 2025-07-04 13:53:25 -04:00
Jay V
143fd8e076 docs: share improve markdown rendering of ai responses 2025-07-04 13:53:25 -04:00
Dax Raad
06dba28bd6 wip: fix media type 2025-07-04 12:50:52 -04:00
adamdottv
b8d276a049 fix(tui): full paths for attachments 2025-07-04 11:42:22 -05:00
Dax Raad
ee01f01271 file attachments 2025-07-04 12:24:01 -04:00
adamdottv
32d5db4f0a fix(tui): markdown wrapping off sometimes 2025-07-04 11:16:38 -05:00
adamdottv
f6108b7be8 fix(tui): handle pdf and image @ files 2025-07-04 11:13:09 -05:00
adamdottv
94ef341c9d feat(tui): render attachments 2025-07-04 10:55:02 -05:00
adamdottv
f9abc7c84f feat(tui): file attachments 2025-07-04 10:55:02 -05:00
adamdottv
891ed6ebc0 fix(tui): slower startup due to file.status 2025-07-04 10:55:01 -05:00
Dax Raad
163e23a68b removed banned command concept 2025-07-04 11:32:12 -04:00
Vladimir
f13b0af491 docs: Fix invalid json in the mcp example config (#645) 2025-07-04 11:24:13 -04:00
Aiden Cline
4a0be45d3d chore: document instructions configuration option (#670) 2025-07-04 11:22:45 -04:00
Dax Raad
23788674c8 disable snapshots temporarily 2025-07-04 08:45:18 -04:00
GitHub Action
121eb24e73 Update download stats 2025-07-04 2025-07-04 12:26:16 +00:00
Dax Raad
571d60182a improve snapshotting speed further 2025-07-03 21:36:09 -04:00
Jay V
167a9dcaf3 docs: share fix scroll to anchor 2025-07-03 20:30:21 -04:00
Dax Raad
37327259cb ci: ignore 2025-07-03 20:30:02 -04:00
Dax Raad
cdb25656d5 improve snapshot speed 2025-07-03 20:16:25 -04:00
Jay V
25c876caa2 docs: share fix last message not expandable 2025-07-03 19:33:55 -04:00
Dax Raad
cf83e31f23 add elixir lsp support 2025-07-03 19:29:51 -04:00
Dax Raad
3bc238b58b wip: logs 2025-07-03 19:29:51 -04:00
Jay V
b8de69dced docs: fix share page scroll performance 2025-07-03 19:15:38 -04:00
Jay V
e7fcb692a4 docs: tweak page title 2025-07-03 16:23:08 -04:00
Timo Clasen
dae38574ab chore: add dev script (#666) 2025-07-03 14:43:25 -05:00
Dax Raad
ed4f862b49 fix /unshare 2025-07-03 15:34:04 -04:00
adamdottv
fce59db94a chore: simplify completions 2025-07-03 12:48:22 -05:00
Jay V
3e2a0c7281 docs: share handle slow loading pages 2025-07-03 13:15:21 -04:00
adamdottv
5a0910ea79 chore: better local dev with stainless script 2025-07-03 11:49:15 -05:00
adamdottv
1dffabcfda fix(tui): panic on completions failure 2025-07-03 10:53:43 -05:00
adamdottv
c389e0ed43 fix(tui): redundant tool calls in each message in collapsed mode 2025-07-03 10:42:27 -05:00
Dax Raad
204801052a flag for disabling file watcher 2025-07-03 10:37:08 -04:00
Dax Raad
2528d8cb88 increase max retries to 10 2025-07-03 10:32:55 -04:00
adamdottv
6b73ffd1c1 fix(tui): include orphaned tool calls 2025-07-03 09:32:44 -05:00
adamdottv
0eadc50a33 fix(tui): selected message visuals 2025-07-03 09:03:04 -05:00
Dax Raad
aeea84a877 fix webdomain 2025-07-03 09:58:25 -04:00
GitHub Action
a54c5c6298 Update download stats 2025-07-03 2025-07-03 12:26:51 +00:00
adamdottv
8825cd3811 feat(tui): unshare command 2025-07-03 07:09:09 -05:00
adamdottv
3d9a5d9970 fix(tui): always show status bar 2025-07-03 06:53:05 -05:00
adamdottv
1f9e195fa6 fix(tui): better highlight visuals 2025-07-03 06:49:37 -05:00
Craig Andrews
73c012c76c fix: simplify parallel map using channels (#582) 2025-07-03 05:43:10 -05:00
Lev
2ace57404b fix: properly handle utf-8 in diff highlighting (#585) 2025-07-03 05:42:40 -05:00
Dax Raad
8c4b5e088b do not install gopls if go is not installed 2025-07-02 23:59:08 -04:00
Jacob Hands
69920a73d7 fix: use correct opencode bin path when running in development mode (#483) 2025-07-02 23:37:48 -04:00
Timo Clasen
ae76a3467a fix: typescript error (#618) 2025-07-02 23:27:43 -04:00
Adi Yeroslav
701107cda4 Update prompt reference from CLAUDE.md to AGENTS.md (#623) 2025-07-02 23:27:22 -04:00
Aiden Cline
b99565959b feat: configurable instructions (#624) 2025-07-02 23:27:04 -04:00
andrewxt
67aa7ce04d fix mouse scroll events being interpreted as keyboard input (#628) 2025-07-02 23:26:09 -04:00
Dax Raad
c663fbc3ee remove need for glibc 2025-07-02 22:53:04 -04:00
Dax Raad
2090bab537 fix(tui): change messages layout toggle keybinding from <leader>m to <leader>p
🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-07-02 20:06:30 -04:00
Aiden Cline
64d5fff9a3 fix: unawaited promise causes opencode to use unenabled formatter (#625) 2025-07-02 19:19:31 -04:00
Jay V
925f695503 docs: tweak styles 2025-07-02 18:44:05 -04:00
adamdottv
f1c925795d fix: typescript error 2025-07-02 16:08:41 -05:00
adamdottv
c82a060eca feat(tui): file viewer, select messages 2025-07-02 16:08:11 -05:00
Ciaran McAleer
63e783ef79 Changed handling of OpenRouter requests to add some custom headers so that it can see the app (#613)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-07-02 14:43:59 -04:00
Dax Raad
35d6273fb3 wip: session revert/unrevert 2025-07-02 13:10:36 -04:00
Mark Huggins
b89d4a16fd fix: Copilot Premium Requests (#595) 2025-07-02 12:04:53 -04:00
Prashant Choudhary
2799a96032 fix: Ensure shared file previews use truncated content (#607)
Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com>
2025-07-02 12:04:10 -04:00
Timo Clasen
8f4b79227c fix(formatting): check for enabled formatters (#611) 2025-07-02 12:03:42 -04:00
Dax Raad
c810b6d206 wip: symbols for lsp 2025-07-02 11:35:25 -04:00
Dax Raad
fa35407572 fix lazy loading 2025-07-02 11:18:25 -04:00
Dax Raad
8bbbc07aff fix filewatcher not closing cleanly 2025-07-02 11:15:12 -04:00
GitHub Action
75a21ba3ce Update download stats 2025-07-02 2025-07-02 12:26:24 +00:00
Timo Clasen
0d6fb68a88 fix(tui): no space between agent and user message (#598) 2025-07-02 05:12:49 -05:00
Jean du Plessis
242b886434 fix: Small typo in CLI --model flag description (#577) 2025-07-02 05:10:58 -05:00
Daniel Vélez
caf465a9da chore: rename OpenCode to opencode (#579) 2025-07-02 05:09:51 -05:00
Dax Raad
bbf77c6139 improve ripgrep download 2025-07-01 22:39:17 -04:00
Dax Raad
53b7e04b86 ci: tweaks 2025-07-01 22:25:53 -04:00
Dax Raad
9e75e3ed18 ignore: read deleted files 2025-07-01 20:45:50 -04:00
Dax Raad
6389858d41 ignore: add file status command 2025-07-01 20:44:12 -04:00
Dax Raad
7e5941e14b ignore: add file status command 2025-07-01 20:39:43 -04:00
Dax Raad
c68aeed8d9 ignore: fix file read with diff 2025-07-01 20:08:42 -04:00
Aiden Cline
b199a609a8 fix: handle null case if tool args are empty for todos (#588) 2025-07-01 18:25:23 -05:00
Frank
4a5a93b3f8 Temporarily add admin unshare api 2025-07-01 18:57:08 -04:00
Dax Raad
e99bdcefac fix write tool timeout 2025-07-01 13:50:57 -04:00
Dax Raad
26dcb85de1 add file watcher 2025-07-01 13:45:25 -04:00
Dax Raad
11d042be25 snapshot functionality 2025-07-01 12:28:34 -04:00
adamdottv
33b5fe236a fix(tui): better message rendering performance 2025-07-01 07:57:45 -05:00
GitHub Action
d56991006c Update download stats 2025-07-01 2025-07-01 12:27:09 +00:00
adamdottv
739a9f71c3 fix(tui): layout issues 2025-07-01 06:41:39 -05:00
Adam Spiers
aef81fce0b docs: use correct baseUrl for astro editLink (#507)
Co-authored-by: Adam Spiers <opencode@adamspiers.org>
2025-07-01 05:31:18 -05:00
Timo Clasen
8f3d7b4038 feat: better model dialog with sorting by release date (#563) 2025-07-01 05:28:32 -05:00
Dax Raad
de15e67834 fix lsp diagnostic accuracy 2025-06-30 22:48:32 -04:00
Dax Raad
fea56d8de6 fix loading api key from env for openai compatible providers 2025-06-30 19:07:51 -04:00
Max Rabin
3d71be2b45 Add pyright lsp for Python (#551)
Co-authored-by: Max Rabin <max.rabin@mobileye.com>
2025-06-30 18:17:47 -04:00
adamdottv
58baca2a5b chore: typescript error 2025-06-30 15:46:18 -05:00
adamdottv
ef73926db6 chore: include model release date 2025-06-30 15:46:18 -05:00
Dax Raad
9ad1687f04 optimistically boot lsp servers 2025-06-30 16:45:26 -04:00
Jeremy Mack
c573270e66 chore: remove duplicate EditTool in TOOLS array (#556) 2025-06-30 15:32:15 -04:00
Dax Raad
9ebad68274 fix bash tool extra line 2025-06-30 15:31:30 -04:00
Dax Raad
03664ba588 fix formatting of bash tools 2025-06-30 15:28:59 -04:00
adamdottv
5a107b275c fix(tui): layout issues 2025-06-30 14:04:56 -05:00
Dax Raad
dd5736fe5f add back in file hierarchy in system prompt but limit to 200 items 2025-06-30 14:46:46 -04:00
adamdottv
9f3ba03965 chore: rework layout primitives 2025-06-30 12:29:29 -05:00
Timo Clasen
d090c08ef0 feat: update user and agent messages width and alignment (#515)
Co-authored-by: adamdottv <2363879+adamdottv@users.noreply.github.com>
2025-06-30 11:57:56 -05:00
Dmytro Yankovskyi
68e82e4d94 fix(#467): more granular bedrock modelID based on aws region (#482) 2025-06-30 11:12:30 -04:00
Dax Raad
a4aa0e6f8d docs: readme 2025-06-30 10:56:38 -04:00
GitHub Action
8c1ae2717c Update download stats 2025-06-30 2025-06-30 12:26:30 +00:00
Dax Raad
72d48759d7 add ruby formatter and lsp 2025-06-29 22:00:08 -04:00
Timo Clasen
986144b377 docs: how to disable mcp server (#543)
Co-authored-by: GitHub Action <action@github.com>
2025-06-29 21:33:30 -04:00
Dax Raad
1fdb326aa7 ignore: refactoring 2025-06-29 21:30:23 -04:00
Dax Raad
463257e7e4 add zig, python, clang, and kotlin formatters
Co-authored-by: Suhas-Koheda <Suhas-Koheda@users.noreply.github.com>
Co-authored-by: Polo123456789 <Polo123456789@users.noreply.github.com>
Co-authored-by: theodore-s-beers <theodore-s-beers@users.noreply.github.com>
Co-authored-by: TylerHillery <TylerHillery@users.noreply.github.com>
2025-06-29 21:27:35 -04:00
Dax Raad
0f41e60bd6 restructure formatters 2025-06-29 21:22:21 -04:00
Polo123456789
7df81f7b3e Formatters as plugins (#487) 2025-06-29 21:13:32 -04:00
Adam Spiers
dd22cb2bb0 chore: add .editorconfig (#536)
Co-authored-by: Adam Spiers <opencode@adamspiers.org>
2025-06-29 21:12:58 -04:00
Dax Raad
248325925f fix issue with costs resetting once chat is completed 2025-06-29 19:43:03 -04:00
Dax Raad
ca48a4f0fb better amazon bedrock caching with anthropic models 2025-06-29 19:27:07 -04:00
Dax
98ee5a3d87 Update STATS.md 2025-06-29 13:04:44 -04:00
GitHub Action
67480e5a1c Update download stats 2025-06-29 2025-06-29 12:23:40 +00:00
GitHub Action
2581a9b54c Update download stats 2025-06-29 2025-06-29 02:00:18 +00:00
Dax Raad
14a293e124 ci: stats 2025-06-28 21:59:14 -04:00
Dax Raad
780419ecae ci: daily stats script 2025-06-28 21:57:46 -04:00
Timo Clasen
f0962e2d9c Add Option to Disable MCP Servers (#513) 2025-06-28 21:05:31 -04:00
Dax Raad
3a9584a419 fix context display 2025-06-28 21:01:53 -04:00
adamdottv
196f42cbff fix(tui): share command and error messages 2025-06-28 17:51:28 -05:00
Dax Raad
322385f6b1 patch for scroll dumping characters into input buffer 2025-06-28 11:56:47 -04:00
Dax Raad
b7446cd7b9 ci: fix 2025-06-28 09:16:29 -04:00
Gal Schlezinger
f618e569ab optimize edit-tool rendering (#463)
Co-authored-by: opencode <noreply@opencode.ai>
Co-authored-by: Adam <2363879+adamdotdevin@users.noreply.github.com>
2025-06-28 06:01:10 -05:00
Jay V
7b394b91e2 docs: share handle slower code blocks 2025-06-27 20:21:28 -04:00
Jay V
6a7983a4ea docs: adding more share images 2025-06-27 20:03:17 -04:00
Jay V
737146fca1 docs: tweak logo 2025-06-27 19:18:54 -04:00
Jay V
688f3fd12f Merge branch 'jeremyosih-feat/scroll-to-bottom-button' into dev 2025-06-27 19:16:46 -04:00
Jay V
145df08444 docs: share page format 2025-06-27 19:16:33 -04:00
Dax Raad
8b400515ea smooth out initial onboarding flow 2025-06-27 19:10:42 -04:00
Jay V
289797f56d docs: share cleanup title 2025-06-27 19:10:42 -04:00
adamdottv
be0811ecc3 chore: rework openapi spec and use stainless sdk 2025-06-27 19:10:42 -04:00
Dax Raad
0676bcd4fd temporary patch for input lag on initial run 2025-06-27 19:10:42 -04:00
Polo123456789
d076def561 feat: Add golang file formatting (#474) 2025-06-27 19:10:42 -04:00
Wendell Misiedjan
e0807d7317 fix: bunproc stdout / stderr parsing, error handling for bun ResolveMessage (#468) 2025-06-27 19:10:42 -04:00
Jay V
fa2723f2d0 docs: update logo screenshot 2025-06-27 19:10:42 -04:00
Jay V
87d62514db docs: share page write tool bug 2025-06-27 19:10:42 -04:00
Dax Raad
2f8cf9146b ci: ignore 2025-06-27 19:10:42 -04:00
Dax Raad
8e0ec6b037 ci: aur 2025-06-27 19:10:42 -04:00
Dax Raad
6dc434cb83 ignore: cleanup 2025-06-27 19:10:42 -04:00
Dax Raad
d972c27f03 lazy load formatters 2025-06-27 19:10:42 -04:00
Ryan Winchester
9e2bb63688 feat: add elixir file formatting (#458) 2025-06-27 19:10:42 -04:00
adamdottv
49053b66a9 fix(web): remove system prompts from share page 2025-06-27 19:10:42 -04:00
TheGoddessInari
47497aef07 scripts/hooks: Change shebang to universal /bin/sh (#453) 2025-06-27 19:10:41 -04:00
adamdottv
8455029de1 fix(tui): min width on user messages 2025-06-27 19:10:41 -04:00
Dax Raad
9f07f89384 fix formatting output going into tui 2025-06-27 19:10:41 -04:00
adamdottv
d840d43e8f ignore: more metadata in app info 2025-06-27 19:10:41 -04:00
adamdottv
9ead2f3dfb fix: don't use prettier for langs it doesn't format 2025-06-27 19:10:41 -04:00
Dax Raad
f3742ddbb8 ignore: run prettier 2025-06-27 19:10:41 -04:00
Dax Raad
b61a841aa8 add auto formatting and experimental hooks feature 2025-06-27 19:10:41 -04:00
Jay V
ebcf11e574 docs: lander tweak 2025-06-27 19:10:41 -04:00
Jay V
065f0aaddf docs: tweak lander 2025-06-27 19:10:41 -04:00
Dax Raad
c0773dc7c5 smooth out initial onboarding flow 2025-06-27 16:09:59 -04:00
Jay V
1c3c74bd36 docs: share cleanup title 2025-06-27 15:31:21 -04:00
adamdottv
79bbf90b72 chore: rework openapi spec and use stainless sdk 2025-06-27 14:26:25 -05:00
Dax Raad
226a4a7f36 temporary patch for input lag on initial run 2025-06-27 14:36:03 -04:00
Polo123456789
df3b424830 feat: Add golang file formatting (#474) 2025-06-27 14:11:09 -04:00
Wendell Misiedjan
3cfd9d80bc fix: bunproc stdout / stderr parsing, error handling for bun ResolveMessage (#468) 2025-06-27 14:09:35 -04:00
Jay V
e0553b8d2c docs: update logo screenshot 2025-06-27 14:04:09 -04:00
Jay V
391c837b37 docs: share page write tool bug 2025-06-27 13:25:15 -04:00
Dax Raad
5773d9d1a3 ci: ignore 2025-06-27 12:37:57 -04:00
Dax Raad
ce611963c3 ci: aur 2025-06-27 12:29:13 -04:00
Dax Raad
f865cacfb8 ignore: cleanup 2025-06-27 11:35:57 -04:00
Dax Raad
2ec0611f42 lazy load formatters 2025-06-27 11:33:37 -04:00
Ryan Winchester
334161a30e feat: add elixir file formatting (#458) 2025-06-27 10:15:11 -04:00
adamdottv
dbb6e55226 fix(web): remove system prompts from share page 2025-06-27 06:48:44 -05:00
TheGoddessInari
d0f9260559 scripts/hooks: Change shebang to universal /bin/sh (#453) 2025-06-27 07:40:22 -04:00
adamdottv
d2176064e1 fix(tui): min width on user messages 2025-06-27 06:31:13 -05:00
Dax Raad
ed8d277e49 fix formatting output going into tui 2025-06-27 07:29:41 -04:00
adamdottv
59b3268c64 ignore: more metadata in app info 2025-06-27 06:19:27 -05:00
adamdottv
d043f67761 fix: don't use prettier for langs it doesn't format 2025-06-27 05:47:14 -05:00
Dax Raad
51bf193889 ignore: run prettier 2025-06-26 22:30:44 -04:00
Dax Raad
f8b78f08b4 add auto formatting and experimental hooks feature 2025-06-26 22:17:08 -04:00
Jay V
a4f32d602b docs: lander tweak 2025-06-26 19:47:58 -04:00
Jay V
dc3dd21cf3 docs: tweak lander 2025-06-26 19:02:44 -04:00
Jeremy Osih
b4c2fcccf5 Merge branch 'sst:dev' into feat/scroll-to-bottom-button 2025-06-27 00:41:20 +02:00
Jeremy Osih
e950ad5306 feat(web): add scroll to last message button
Add intelligent floating scroll button for long conversations that:
- Only appears when scrolling down (direction-aware)
- Auto-hides after 3 seconds of inactivity
- Stays visible on hover to prevent accidental disappearance
- Uses consistent design patterns with repo styling
- Includes proper accessibility features

🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: Jeremy Osih <osih.jeremy@gmail.com>
Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-27 00:38:14 +02:00
Dax Raad
8ca713b737 disable task tool temporarily 2025-06-26 18:27:49 -04:00
Jay V
5b54554fd5 docs: edit theme doc 2025-06-26 17:56:31 -04:00
Dax Raad
4bc651f958 fix: improve JSON formatting and add piped output support for run command
🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-26 17:32:00 -04:00
Jay V
3b6976a9c8 Merge branch 'rekram1-node-chore/update-config-docs' into dev 2025-06-26 17:24:03 -04:00
Jay V
863d5c1e8e docs: editing rules 2025-06-26 17:23:52 -04:00
adamdottv
97e19e9677 fix(tui): editor styles were off 2025-06-26 17:22:21 -04:00
adamdottv
b27851461f feat(tui): more themes 2025-06-26 17:22:21 -04:00
adamdottv
209687377a feat(tui): more themes 2025-06-26 17:22:21 -04:00
adamdottv
90face1c09 fix(tui): editor width issues 2025-06-26 17:22:21 -04:00
adamdottv
936e2ce48b feat(tui): show lsp diagnostics for edit and write tools 2025-06-26 17:22:21 -04:00
adamdottv
16ee8ee379 fix(tui): chat editor aesthetics 2025-06-26 17:22:21 -04:00
adamdottv
ac39308dad fix(tui): visual issue with modal selected items in system theme 2025-06-26 17:22:21 -04:00
adamdottv
346b49219d chore: tui agents.md 2025-06-26 17:22:21 -04:00
Jay V
d84c1f20c7 docs: social share 2025-06-26 17:22:17 -04:00
adamdottv
dfb8777555 fix(tui): editor spinner colors 2025-06-26 17:21:53 -04:00
Jay V
008af18156 docs: share page responsive diff 2025-06-26 17:21:53 -04:00
adamdottv
ab23167f80 docs: system theme 2025-06-26 17:21:53 -04:00
adamdottv
b17ec46463 fix(tui): make opencode theme default 2025-06-26 17:21:53 -04:00
Adam
2e26b58d16 feat: default system theme (#419)
Co-authored-by: adamdottv <2363879+adamdottv@users.noreply.github.com>
2025-06-26 17:21:53 -04:00
Mike Wallio
31b56e5a05 Fix undefined is not an object (evaluating 'G.title') (#395) 2025-06-26 17:21:53 -04:00
Juhani Pelli
47c401cf25 fix: guard against large output limit causing infinite summarize loop (#399) 2025-06-26 17:21:53 -04:00
Dax Raad
fab8dc9e6f more edit tool fixes 2025-06-26 17:21:53 -04:00
Dax Raad
f39a2b1f16 integrate gemini-cli strategies for edit tool 2025-06-26 17:21:53 -04:00
Dax Raad
66830ced4e make edit tool more robust 2025-06-26 17:21:53 -04:00
Dax Raad
9d3fad754d ignore: typo 2025-06-26 17:21:53 -04:00
Dax Raad
dcd3131f58 add output length errors 2025-06-26 17:21:53 -04:00
Dax Raad
3d02e07161 fix codex not working 2025-06-26 17:21:53 -04:00
Dax Raad
4dbc6a43a6 redirect uncaught errors to log file 2025-06-26 17:21:53 -04:00
adamdottv
5394b5188b fix(tui): editor styles were off 2025-06-26 15:12:26 -05:00
adamdottv
8e680b3957 feat(tui): more themes 2025-06-26 15:03:30 -05:00
adamdottv
1b8cd796d6 feat(tui): more themes 2025-06-26 14:54:32 -05:00
adamdottv
35fba793d0 fix(tui): editor width issues 2025-06-26 12:57:11 -05:00
adamdottv
5358d43b74 feat(tui): show lsp diagnostics for edit and write tools 2025-06-26 12:47:17 -05:00
adamdottv
f777347bac fix(tui): chat editor aesthetics 2025-06-26 12:44:44 -05:00
adamdottv
17c8b914df fix(tui): visual issue with modal selected items in system theme 2025-06-26 12:33:06 -05:00
adamdottv
43b467dd12 chore: tui agents.md 2025-06-26 12:28:29 -05:00
Jay V
0e0770921e docs: social share 2025-06-26 13:21:42 -04:00
adamdottv
8edbb74352 fix(tui): editor spinner colors 2025-06-26 12:21:20 -05:00
Jay V
e6bfa95758 docs: share page responsive diff 2025-06-26 13:06:41 -04:00
adamdottv
e4120b6287 docs: system theme 2025-06-26 11:33:02 -05:00
adamdottv
ccbc9e00f2 fix(tui): make opencode theme default 2025-06-26 11:32:25 -05:00
Adam
7d13baadc8 feat: default system theme (#419)
Co-authored-by: adamdottv <2363879+adamdottv@users.noreply.github.com>
2025-06-26 10:16:07 -05:00
rekram1-node
9acc83697f chore: document AGENTS.md 2025-06-26 08:28:06 -05:00
Mike Wallio
db24bf87c0 Fix undefined is not an object (evaluating 'G.title') (#395) 2025-06-25 19:40:09 -04:00
Juhani Pelli
f4c0d2d2fd fix: guard against large output limit causing infinite summarize loop (#399) 2025-06-25 19:39:51 -04:00
Dax Raad
d240f4c676 more edit tool fixes 2025-06-25 19:22:54 -04:00
Dax Raad
9c90cdbe08 integrate gemini-cli strategies for edit tool 2025-06-25 17:56:14 -04:00
Dax Raad
fc7af31fe5 make edit tool more robust 2025-06-25 17:10:48 -04:00
Dax Raad
2f8d23ec66 ignore: typo 2025-06-25 11:02:57 -04:00
Dax Raad
77ae3fb9b9 add output length errors 2025-06-25 11:02:09 -04:00
Dax Raad
4e7f6c47fd fix codex not working 2025-06-25 10:01:35 -04:00
Dax Raad
50469ed750 redirect uncaught errors to log file 2025-06-25 08:41:10 -04:00
Dax Raad
aaab785493 better error message when bad directory is specified to start in 2025-06-24 22:28:25 -04:00
Dax Raad
9751937894 Enhance auth command with environment variable display and add models command
🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-24 22:24:55 -04:00
Dax Raad
0fc8dfc77e do not print error on ctrl+c during prompts 2025-06-24 22:09:43 -04:00
Dax Raad
81b7df61ec ci: bun lock 2025-06-24 21:14:32 -04:00
Dax Raad
8217b96d4a ci: fix type issue 2025-06-24 21:12:32 -04:00
Dax Raad
7dd0918d32 remove accidental openai autoloader 2025-06-24 21:11:11 -04:00
Dax Raad
4b26b43855 added opencode serve command 2025-06-24 20:52:09 -04:00
Jay V
9d7cfda9fe docs: share page styles 2025-06-24 19:34:35 -04:00
Jay V
a3cf18c905 docs: share page bash tool output 2025-06-24 19:28:51 -04:00
Aiden Cline
0b1a8ae699 fix: file completions replaced wrong text when paths overlap (#378) 2025-06-24 18:13:15 -05:00
Dax Raad
eb70b1e5c8 docs: windows instructions 2025-06-24 18:54:59 -04:00
Dax Raad
00a3d818b6 ci: windows 2025-06-24 18:46:43 -04:00
Dax Raad
2384c7e734 ci: windows 2025-06-24 18:40:36 -04:00
Dax Raad
1bad3d9894 ci: windows 2025-06-24 18:27:57 -04:00
Dax Raad
4f715e66dc ci: windows 2025-06-24 18:13:15 -04:00
Dax
ec001ca02f windows fixes (#374)
Co-authored-by: Matthew Glazar <strager.nds@gmail.com>
2025-06-24 18:05:04 -04:00
Jay
a2d3b9f0c8 docs: Share page diff view improvements (#373) 2025-06-24 17:11:43 -04:00
Dax Raad
9cfb6ff964 ignore: revert 2025-06-24 14:59:27 -04:00
Dax Raad
6ed661c140 ci: upgrade bun 2025-06-24 14:42:25 -04:00
Dax Raad
9dc00edfc9 potential fix for failing to install provider package on first run 2025-06-24 14:33:35 -04:00
Jay V
e063bf888e docs: share code blocks in markdown 2025-06-24 13:53:59 -04:00
Adam
6f18475428 feat: delete sessions (#362)
Co-authored-by: adamdottv <2363879+adamdottv@users.noreply.github.com>
2025-06-24 11:07:41 -05:00
Dax Raad
3664b09812 remove debug code writing to /tmp/message.json 2025-06-24 11:16:17 -04:00
Dax Raad
7050cc0ac3 ignore: fix type errors 2025-06-24 11:09:36 -04:00
Dax Raad
4d3d63294d externalize github copilot code 2025-06-24 10:42:19 -04:00
Tom
6bc61cbc2d feat(tui): add debounce logic to escape key interrupt (#169)
Co-authored-by: opencode <noreply@opencode.ai>
Co-authored-by: adamdottv <2363879+adamdottv@users.noreply.github.com>
2025-06-24 06:31:02 -05:00
Dax Raad
01d351bebe add HOMEBREW_NO_AUTO_UPDATE to brew upgrades 2025-06-23 20:36:08 -04:00
Dax Raad
dbba4a97aa force use npm registry 2025-06-23 20:23:37 -04:00
GitMurf
0dc586faef fix: typescript error (any) from models (#347) 2025-06-23 18:44:57 -04:00
Dax Raad
f19c6b05f2 glob tool should respect .gitignore 2025-06-23 17:37:32 -04:00
Dax Raad
bc34f08333 bundle models.dev at build time and ignore refresh errors 2025-06-23 14:50:19 -04:00
Dax Raad
b7ee16aabd ignore: remove opencode.json 2025-06-23 14:32:57 -04:00
Lucas Grzegorczyk
ed1b0d97bf Fix project folder name starting with "-" in data (#323). Note old session data will still be in the old format in ~/.local/share/opencode/projects - you can remove the leading dash to recover them. 2025-06-23 14:31:51 -04:00
adamdottv
8d3b2fb821 feat(tui): optimistically render user messages 2025-06-23 12:30:20 -05:00
Jay V
fa991920bc fix help copy 2025-06-23 13:00:24 -04:00
adamdottv
5e79e3d7a5 fix(tui): less incorrect escaping of < and > 2025-06-23 11:32:32 -05:00
adamdottv
966015c9ae fix: overlay border color issues 2025-06-23 11:21:49 -05:00
adamdottv
61f057337a fix: markdown wrapping issue 2025-06-23 11:20:44 -05:00
adamdottv
0b261054a2 chore: unused import 2025-06-23 10:21:57 -05:00
adamdottv
e2e481cbb5 docs: disabled_providers 2025-06-23 10:21:25 -05:00
GitMurf
5140e83012 feat(copilot): edit headers for better rate limit avoidance (#321) 2025-06-23 10:44:19 -04:00
Dax Raad
100d6212be more graceful mcp failures 2025-06-22 21:10:05 -04:00
Dax Raad
f0e19a6542 aws autoload include more env vars 2025-06-22 20:16:10 -04:00
Dax Raad
00c4d4f9f8 fix double entry of github copilot in auth login 2025-06-22 19:13:25 -04:00
Martin Palma
6e6fe6e013 Add Github Copilot OAuth authentication flow (#305) 2025-06-22 19:11:37 -04:00
Dax Raad
d05b60291e docs: contributing 2025-06-22 17:55:10 -04:00
adamdottv
5162361372 fix(tui): color contrast fixes for nord 2025-06-22 15:17:18 -05:00
adamdottv
d271b9f75b fix(tui): help dialog visuals 2025-06-22 14:28:16 -05:00
Márk Magyar
333569bed3 ignore: fix typos and formatting (#294) 2025-06-22 14:26:46 -04:00
Tom
09b89fdb23 fix: resolve test failures by adding missing zod-openapi import (#301)
Co-authored-by: opencode <noreply@opencode.ai>
2025-06-22 14:25:02 -04:00
Tom
0e8c3359d1 combine stdout and stderr in bash tool output (#300)
Co-authored-by: opencode <noreply@opencode.ai>
Co-authored-by: Dax Raad <d@ironbay.co>
2025-06-22 14:24:35 -04:00
Adam
37e0a7050f fix(tui): mouse wheel escape codes leaking into input 2025-06-22 10:26:44 -05:00
adamdottv
774dcb6980 fix(tui): cleanup help dialog 2025-06-22 06:44:23 -05:00
phantomreactor
28bc49ad17 fix: invisible html tags and compact long delay (#304) 2025-06-22 06:29:04 -05:00
adamdottv
dc1947838c fix(tui): cleanup modal visuals 2025-06-22 06:09:23 -05:00
adamdottv
3ea2daaa4c fix(tui): theme dialog visuals 2025-06-22 05:34:22 -05:00
Márk Magyar
137e964131 fix: session title generation (#293) 2025-06-21 14:32:11 -05:00
tyrellshawn
8efbe497fd Created a Theme inspired by the matrix (#285) 2025-06-21 07:29:49 -05:00
Thomas Meire
119d2d966c Add error handling on the calls to the server to debug issue #132 (#137) 2025-06-21 07:24:39 -05:00
Dax Raad
194415e785 footer clarifies it's showing context usage, not input token usage 2025-06-20 22:52:51 -04:00
Dax Raad
1684042fb6 huge optimization for token usage with anthropic 2025-06-20 22:43:04 -04:00
Dax Raad
59f0004d34 Add --method option to upgrade command for manual installation method selection
🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-20 20:48:23 -04:00
Dax Raad
da35a64fa1 handle brew upgrades better 2025-06-20 20:27:23 -04:00
Dax Raad
460338ca53 make IDs more random 2025-06-20 17:39:59 -04:00
Saatvik Arya
53c18a64b4 docs: add API client generation instructions to README and AGENTS.md (#273) 2025-06-20 17:27:58 -04:00
Saatvik Arya
b8144c5654 fix: return false for missing AWS_PROFILE in amazon-bedrock provider (#277) 2025-06-20 17:27:27 -04:00
adamdottv
9081e17fcc fix(tui): visual tweaks to themes 2025-06-20 15:49:51 -05:00
adamdottv
ef3fd5900f docs: cleanup casing 2025-06-20 15:35:25 -05:00
adamdottv
453d690c11 docs: new themes docs 2025-06-20 15:31:38 -05:00
adamdottv
c45be6a645 feat(tui): one dark theme 2025-06-20 15:14:23 -05:00
adamdottv
7b9b177088 feat(tui): kanagawa theme 2025-06-20 15:14:23 -05:00
adamdottv
3cee5b0470 feat(tui): gruvbox theme 2025-06-20 15:14:23 -05:00
adamdottv
9246d1c901 feat(tui): catppuccin theme 2025-06-20 15:14:22 -05:00
adamdottv
cc12abc83e feat(tui): nord theme 2025-06-20 15:14:22 -05:00
adamdottv
4f7e4a9436 feat(tui): custom themes 2025-06-20 15:14:22 -05:00
Márk Magyar
eee396f903 feat(tui): theme switcher with preview (#264) 2025-06-20 15:14:05 -05:00
Jay V
0d2f8e175a docs: share bugs 2025-06-20 15:50:12 -04:00
Jay V
4df40e0d9b docs: share page bugs 2025-06-20 15:50:12 -04:00
Dax Raad
b72e17a8b7 fix issue with conversations hanging 2025-06-20 15:49:49 -04:00
Dax Raad
61160dc220 docs: readme 2025-06-20 15:22:41 -04:00
Dax Raad
98734ff28c Consolidate session context handling and add global config support
Refactored context file discovery by removing separate SessionContext module and integrating functionality into SystemPrompt.context(). Added support for finding AGENTS.md and CLAUDE.md files in global config directories.

🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-20 15:14:12 -04:00
Josh
9991352663 feat: forward provider options from model config (#202)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-06-20 15:03:41 -04:00
Dmytro Yankovskyi
91c4da5dbd fix(#243): claude on aws bedrock (#241)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-06-20 14:57:33 -04:00
niba
2fd0e7dd6b chore: use client_id everywhere (#260) 2025-06-20 14:56:33 -04:00
adamdottv
d50b7ad481 docs: theme schema update 2025-06-20 13:51:32 -05:00
adamdottv
df95c49401 docs: theme schema 2025-06-20 13:00:32 -05:00
adamdottv
8b73c52f00 chore(tui): rename theme colors 2025-06-20 13:00:31 -05:00
Jay V
5603098d17 docs: add config 2025-06-20 13:22:31 -04:00
Jay V
f436a50125 docs: share header 2025-06-20 13:12:35 -04:00
Jay V
e19e977591 docs: test 2025-06-20 13:02:05 -04:00
Jay V
addbe295b1 docs: test 2025-06-20 12:59:32 -04:00
Jay V
9a573dedc6 docs: test 2025-06-20 12:56:00 -04:00
adamdottv
9ea0d71e8d fix(tui): async load messages on theme/session switch 2025-06-20 11:25:21 -05:00
adamdottv
b1a3599017 fix(tui): input latency optimization 2025-06-20 11:08:08 -05:00
adamdottv
7b0329f67f fix(tui): fetch tool more defensive 2025-06-20 09:00:28 -05:00
adamdottv
311b9c74dd fix(tui): typeahead open/close perf 2025-06-20 08:20:10 -05:00
adamdottv
f7e8dd2ff8 chore: fix typescript issues 2025-06-20 07:48:42 -05:00
adamdottv
40b1dd7ef2 fix(tui): insert newline correctly positioned 2025-06-20 07:42:04 -05:00
adamdottv
261e76e0a3 fix(tui): input feels laggy 2025-06-20 07:31:45 -05:00
Dax Raad
a300bfaccb docs: remove opencode.json 2025-06-20 01:00:15 -04:00
Dax Raad
41dba0db08 config validation 2025-06-20 00:57:28 -04:00
Rohan Godha
6674c6083a fix: phantom input bug on wsl (#200) 2025-06-19 20:08:56 -05:00
Tom Watkins
f6afa2c6bb docs: fix typo in config.mdx (#218) 2025-06-19 21:08:21 -04:00
Dax Raad
b2fb0508ea fix for azure models not liking tool definitions 2025-06-19 18:28:42 -04:00
Jay V
93f4252bb1 docs: tweak lander 2025-06-19 18:19:35 -04:00
Jay
46ab9c16dd docs: Update README.md 2025-06-19 18:19:06 -04:00
Dax Raad
d869df4fee remove unused permission timeout 2025-06-19 18:00:53 -04:00
Dax Raad
b99d4650ec temporarily disable project details in system prompt 2025-06-19 17:37:23 -04:00
Frank
261bb7f110 Infra: fix DO tag 2025-06-19 17:20:13 -04:00
Dax Raad
0515fbb260 fix gopls download spewing into terminal 2025-06-19 17:08:58 -04:00
adamdottv
88211d8c5b fix(tui): upgrade notification 2025-06-19 16:03:45 -05:00
Jay V
a812f95b9d docs: share 2025-06-19 16:57:42 -04:00
adamdottv
3728a12bee fix(tui): better help on home 2025-06-19 15:56:28 -05:00
Jay V
af07e51213 docs: tweak 2025-06-19 16:40:15 -04:00
Jay V
3113788c92 docs: copy 2025-06-19 16:39:36 -04:00
Jay V
efb5fe6d4e docs: styles 2025-06-19 16:38:37 -04:00
Jay V
54dd6c644d docs: adding to config 2025-06-19 16:36:17 -04:00
Dax Raad
39ad8f2667 ignore: do migration 2025-06-19 16:32:32 -04:00
Jay V
c4a2c84e53 docs: readme 2025-06-19 16:29:20 -04:00
Jay V
44fe012812 docs: edits 2025-06-19 16:28:11 -04:00
Jay V
f5e7f079ea Copy changes 2025-06-19 16:28:03 -04:00
adamdottv
15a8936806 fix(tui): better tool titles 2025-06-19 15:11:53 -05:00
adamdottv
4e4cff49c0 feat(tui): better task tool rendering 2025-06-19 15:02:13 -05:00
adamdottv
5540503bee fix(tui): sorted tool arg maps 2025-06-19 14:07:33 -05:00
adamdottv
193718034b fix: typescript error 2025-06-19 13:57:25 -05:00
adamdottv
72108c0296 fix(tui): sorted tool arg maps 2025-06-19 13:56:09 -05:00
Dax Raad
ec1c9f8cd1 use production share url 2025-06-19 14:21:00 -04:00
Dax Raad
a85b0a370e ci: share 2025-06-19 13:26:15 -04:00
Dax Raad
e7784d2864 add schema descriptions to config fields
Enhance configuration schema with descriptive text for all fields to improve developer experience and auto-generated documentation.

🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-19 13:12:13 -04:00
Dax Raad
97c4815444 fix task agent performance issues 2025-06-19 13:00:57 -04:00
Dax Raad
7d1a1663c8 allow selecting model and continuing previous session for opencode run 2025-06-19 13:00:57 -04:00
adamdottv
24c0ce6e53 fix(tui): vscode and mac terminal colors 2025-06-19 11:46:08 -05:00
adamdottv
4cdc86612c fix(tui): overlay border backgrounds 2025-06-19 11:41:30 -05:00
Jay V
f1f3f8d12c ignore: share version 2025-06-19 12:20:30 -04:00
adamdottv
e78d3b54bf chore: cleanup logs 2025-06-19 10:52:45 -05:00
adamdottv
f8a7cd372d fix(tui): toast placement and overlay rendering 2025-06-19 10:45:10 -05:00
adamdottv
f48eac638d feat(tui): more toast messages 2025-06-19 10:41:59 -05:00
adamdottv
e1f12f93eb feat(tui): toast messages 2025-06-19 10:12:29 -05:00
Dax Raad
7ca8334a8b fix webfetch tool when returning html as text 2025-06-19 10:43:54 -04:00
Dax Raad
f1a2b2eba4 support token caching for anthropic via openrouter 2025-06-19 10:32:14 -04:00
adamdottv
4b132656df feat(tui): copy share url to clipboard 2025-06-19 09:06:25 -05:00
Dax Raad
26bab00dab remove opencode_ prefixes from tool names. unfortunately this will break
all old sessions and share links. we'll be more backwards compatible in
the future once we're more stable.
2025-06-19 09:59:44 -04:00
adamdottv
568c04753e feat(tui): expand input to fit message 2025-06-19 08:45:27 -05:00
Dax Raad
4a06e164d2 ensure session.info is synced when shared 2025-06-19 09:41:11 -04:00
adamdottv
c57b52c300 fix: include schema in converted toml config 2025-06-19 06:02:02 -05:00
Guillermo Antony Cava Nuñez
0b8f48f17f Fixes tool tip layering (#199) 2025-06-19 00:23:29 -04:00
Dax Raad
3862184ccb hooks 2025-06-19 00:20:03 -04:00
Frank
8619c50976 Update SST 2025-06-18 23:38:06 -04:00
Josh
bb6b56b72a fix: incorrect command on main screen for exiting application (#201) 2025-06-18 23:19:43 -04:00
Dax Raad
1252b65166 stop loading models.dev format from global config 2025-06-18 23:08:51 -04:00
Dax Raad
6840276dad docs: update README 2025-06-18 23:03:54 -04:00
Dax Raad
bd8c3cd0f1 BREAKING CONFIG CHANGE
We have changed the config format yet again - but this should be the
final time. You can see the readme for more details but the summary is

- got rid of global providers config
- got rid of global toml
- global config is now in `~/.config/opencode/config.json`
- it will be merged with any project level config
2025-06-18 23:01:19 -04:00
Dax Raad
e5e9b3e3c0 rework config 2025-06-18 23:01:19 -04:00
Frank
1e8a681de9 Render version 2025-06-18 22:26:51 -04:00
Jay V
a834bedc17 ignore: share copy link 2025-06-18 20:18:10 -04:00
Dax Raad
6a3392385e support global config 2025-06-18 18:56:52 -04:00
Jay V
6a00e063c4 ignore: share logo 2025-06-18 18:33:51 -04:00
Jay V
73a0ce2b7d ignore: share 2025-06-18 18:22:19 -04:00
Jay V
4d1afd01fa ignore: share 2025-06-18 18:21:44 -04:00
Jay V
801d5f47bd ignore: share favicon 2025-06-18 18:10:22 -04:00
Jay V
b6caae9708 ignore: share 2025-06-18 18:01:34 -04:00
adamdottv
183ca64ef9 feat(tui): show provider next to model 2025-06-18 16:09:49 -05:00
adamdottv
8c32cfe829 chore: tui style tweaks 2025-06-18 15:59:58 -05:00
Jay V
73dcc88da1 ignore: share 2025-06-18 16:54:33 -04:00
Jay V
14bded65dc ignore: share 2025-06-18 16:54:33 -04:00
adamdottv
87d1d3fb62 fix(tui): file completion quirks 2025-06-18 15:51:26 -05:00
Frank
e054454109 Api: only return session messages 2025-06-18 16:20:34 -04:00
Dax Raad
a6142cf975 ignore: types 2025-06-18 16:20:03 -04:00
Jay V
69332e5fa3 ignore: share 2025-06-18 16:15:22 -04:00
Jay V
20201ba3c4 ignore: share 2025-06-18 16:15:11 -04:00
Dax Raad
658067186a ignore: share page stuff 2025-06-18 16:13:33 -04:00
adamdottv
ac777b77cf fix(tui): modal visuals 2025-06-18 15:12:24 -05:00
Dax Raad
5944ae2023 share types 2025-06-18 15:34:13 -04:00
Jay V
2f10961ba8 ignore: share 2025-06-18 15:32:40 -04:00
adamdottv
fae97978a3 chore: cleanup logs 2025-06-18 14:18:46 -05:00
Dax Raad
3423415e49 docs: improve keybinds configuration format in README
Update keybinds configuration example to use proper TOML table syntax instead of dot notation for better readability and standard TOML formatting.

🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-18 15:10:46 -04:00
adamdottv
1d0bfc2b2a fix(tui): help dialog sorting 2025-06-18 14:06:20 -05:00
adamdottv
bd46cf0f86 feat(tui): configurable keybinds and mouse scroll 2025-06-18 13:56:51 -05:00
Dax Raad
d4157d9a96 ctrl+c should gracefully clean up pending sessions 2025-06-18 14:11:49 -04:00
Jay V
6e4ef585d8 ignore: share error styles 2025-06-18 14:10:14 -04:00
Dax Raad
e05c3b7a76 fix panic when invalid config 2025-06-18 14:03:16 -04:00
Dax Raad
f99904bc1c track version on session info 2025-06-18 13:40:36 -04:00
Jay V
b796d6763f ignore: share page styles 2025-06-18 12:53:48 -04:00
Dax Raad
c1250abdf8 implemented diff trimming 2025-06-18 11:20:40 -04:00
Dax Raad
ebe51534a1 allow setting options in global provider store 2025-06-18 11:06:16 -04:00
Dax Raad
b8bbee4718 fix issue with provider cache 2025-06-18 10:56:23 -04:00
Dax Raad
8f852b396f fix deploys 2025-06-18 10:47:07 -04:00
Dax Raad
ae4d089c06 remove call to npm causing noticeable delay when starting chat 2025-06-18 10:35:41 -04:00
Dax Raad
5110fbdaf9 fix issue when running opencode in empty directory 2025-06-18 10:29:09 -04:00
Dax Raad
e6ddb474fc ignore: sync 2025-06-18 08:36:25 -04:00
SBSTN
0dc71774ce Add Everforest Theme (#170) 2025-06-18 05:55:38 -05:00
Dax Raad
b470466e30 integrate cache read/write data 2025-06-17 20:51:39 -04:00
Jay V
d1f9311931 ignore: share page polish 2025-06-17 20:26:12 -04:00
Dax Raad
1c58023df9 improve anthropic oauth token caching and authentication handling
🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-17 13:23:15 -04:00
Dax Raad
4e0aa58b7e ignore: fix 2025-06-17 13:04:26 -04:00
Dax Raad
23ee34b35f state 2025-06-17 12:29:28 -04:00
Dax Raad
674c9a5220 support disabling providers from automatically being added 2025-06-17 12:23:04 -04:00
Dax Raad
54c86ed43a docs: readme 2025-06-17 12:17:45 -04:00
Dax Raad
676d75ee75 docs: update README 2025-06-17 12:14:38 -04:00
Dax Raad
70dc0a12f2 docs: readme 2025-06-17 12:12:33 -04:00
Dax Raad
d579c5e8aa support global config for providers 2025-06-17 12:10:44 -04:00
Dax Raad
ee91f31313 fix issue with tool schemas and google 2025-06-17 11:27:07 -04:00
Dax Raad
57b3051024 fix agent getting caught in summary loop 2025-06-17 10:50:03 -04:00
Dax Raad
ae5cf3cc23 ci: fix 2025-06-17 10:38:01 -04:00
Dax Raad
68e1b3c46c Fix TypeScript compilation errors and consolidate version handling
🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-17 10:27:49 -04:00
adamdottv
2d68814abc feat: better collapsed tool call visuals 2025-06-17 08:35:18 -05:00
adamdottv
a5da5127fa chore: consolidate chat page into tui.go 2025-06-17 07:09:04 -05:00
Dax Raad
b5a4439704 Add autoshare configuration and improve run command UI
Enables automatic session sharing via global config or flag, enhances UI with logo display and provider/model info positioning.

🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-17 01:45:32 -04:00
Dax Raad
9c5616521d do not autoupgrade snapshot builds 2025-06-17 01:18:32 -04:00
Dax Raad
3fe163416d autoupgrade 2025-06-17 01:05:05 -04:00
Dax
d054f88130 Improve upgrade command with installation method detection (#158) 2025-06-17 00:07:17 -04:00
Jay
b929b4f4b9 docs: Update README.md 2025-06-16 21:01:38 -04:00
Jay V
4c0c83b02d docs: readme 2025-06-16 20:10:19 -04:00
adamdottv
d6d45bdc63 feat: share and init commands 2025-06-16 15:58:52 -05:00
Dax Raad
13a83721b0 ci: fixed ci issue 2025-06-16 16:58:25 -04:00
Dax Raad
f0edffbae9 docs: readme 2025-06-16 16:53:43 -04:00
Dax Raad
8131bee49a ignore: logs 2025-06-16 16:02:45 -04:00
Dax Raad
b5f44ae13f docs: update readme 2025-06-16 15:42:35 -04:00
Miles Till
0d23f2a7fd fix: incorrect lipgloss version (#131) 2025-06-16 14:35:46 -05:00
Dax Raad
ac096d84ad remove windows builds 2025-06-16 15:11:14 -04:00
Dax Raad
fcaf0e6dbf opencode auth login: validation on provider id and better error messages 2025-06-16 15:09:49 -04:00
Dax Raad
19e259d90d docs: readme 2025-06-16 15:04:32 -04:00
Dax Raad
2c9fd1e776 BREAKING CHANGE: the config structure has changed, custom providers now have an npm field to specify which npm package to load. see examples in README.md 2025-06-16 15:02:25 -04:00
Dax Raad
63996c4189 limit to 4 system prompts cached 2025-06-16 14:51:59 -04:00
adamdottv
c7bb7ce4de fix: include cached tokens in tui 2025-06-16 12:59:38 -05:00
adamdottv
c8eb1b24c3 feat: believe it or not, even faster tui init 2025-06-16 12:34:34 -05:00
adamdottv
b9f894f1e9 feat: even faster tui init 2025-06-16 12:24:18 -05:00
adamdottv
7c0d10a4ce feat: faster tui init 2025-06-16 11:54:55 -05:00
Dax Raad
06af406146 properly track cache token counts 2025-06-16 12:43:22 -04:00
Dax Raad
0e3458b112 fix cache-control 2025-06-16 12:07:01 -04:00
adamdottv
2d15c683e0 fix: default provider and model 2025-06-16 10:51:01 -05:00
adamdottv
3c94d26570 chore: remove status service 2025-06-16 10:45:19 -05:00
Dax Raad
1a553e525f enable prompt caching for anthropic 2025-06-16 11:41:54 -04:00
adamdottv
3c4e966216 fix: spinner background color 2025-06-16 10:03:44 -05:00
Dax Raad
0721620ed8 docs: readme 2025-06-16 10:44:48 -04:00
Thomas Meire
9fc6734f32 ignore: remove log files and add them to gitignore (#138) 2025-06-16 09:30:07 -04:00
Jacob
e1733a423d fix: typo and literal wording in packages/opencode/AGENTS.md (#134) 2025-06-16 08:18:29 -05:00
Dax Raad
d42e3db7e0 docs: update README 2025-06-15 21:43:20 -04:00
Dax Raad
cdb26f6d83 docs: readme 2025-06-15 21:39:02 -04:00
Dax Raad
fe05edaa79 enhance ripgrep files function with query filtering and limit support
🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-15 21:26:32 -04:00
Dax Raad
7d174767b0 first pass making system prompt less fast 2025-06-15 20:25:04 -04:00
George Potoshin
c5eefd1752 Fix: Improve Help UI Readability (Issue #99) (#117) 2025-06-15 18:38:44 -05:00
adamdottv
77a6b3bdd6 fix: background color rendering issues 2025-06-15 15:07:05 -05:00
Pierre B.
7effff56c0 fix: spelling, grammar and typos (#121) 2025-06-15 14:23:18 -05:00
Dax Raad
e30fba0d3c Improve LSP server initialization with timeout handling and skip failed servers
🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-15 13:52:57 -04:00
Dax Raad
7fbb2ca9a6 ignore: add timer log helper 2025-06-15 13:33:24 -04:00
Dax Raad
230d0a1510 fix postinstall script for node 2025-06-15 13:11:11 -04:00
Pierre B.
46ff2c0ae0 chore: ignore intellij, vscode (#122) 2025-06-15 10:40:34 -05:00
adamdottv
b8a89dab0f fix: background color rendering issues 2025-06-15 05:57:15 -05:00
szymon
7351e12886 remove .DS_Store (#112) 2025-06-15 05:34:46 -05:00
Dax Raad
38879dee2d beginning of upgrade command 2025-06-14 22:05:41 -04:00
Dax Raad
c4ff8dd205 revert ctrl+d - conflicts with page down 2025-06-14 21:29:02 -04:00
Dax Raad
0e035b3115 fix aborting issue 2025-06-14 21:23:57 -04:00
Dax Raad
b855511d9a fix issue with follow up tool calls and cancelation 2025-06-14 21:03:44 -04:00
Dax Raad
783faf554d fix issue continuing session after aborted 2025-06-14 20:24:50 -04:00
nitishxyz
bfd4269d7d Add Ayu dark theme (#109) 2025-06-14 20:08:31 -04:00
Berr
25f78b053b fix: improve browser opening error handling in AuthLoginCommand (#111) 2025-06-14 20:07:41 -04:00
Dax Raad
87f260ee17 sync 2025-06-14 20:04:41 -04:00
Dax Raad
12931a869d ci: ignore commits 2025-06-14 18:59:05 -04:00
Dax Raad
f759e1804d docs: typo 2025-06-14 18:58:27 -04:00
Rohan Godha
c9b4564d36 tui: fix help dialog background (#110) 2025-06-14 18:57:15 -04:00
Conor O'Brien
d097c546db nit: update commands displayed on home to match commands available (#108) 2025-06-14 18:56:44 -04:00
Gal Schlezinger
adb54521b4 make ctrl+d quit too, just like shells (#105) 2025-06-14 18:56:34 -04:00
Dax Raad
2ea0399aa7 docs: use ollama example 2025-06-14 18:55:39 -04:00
Dax Raad
fa1266263d downgrade to ai sdk v4.x 2025-06-14 18:44:08 -04:00
Gal Schlezinger
fe109c921e add focus tracking for tui so cursor will hide when not in focus (#103) 2025-06-14 14:53:43 -05:00
Dax Raad
37bb8895fe docs: readme 2025-06-14 14:52:02 -04:00
Dax Raad
89b95be4de docs: provider config 2025-06-14 14:45:59 -04:00
Dax Raad
eaf295bac7 docs: faq 2025-06-14 14:39:13 -04:00
Mantena Rama Raju
27d3cec477 typo (#94) 2025-06-14 14:36:29 -04:00
Dax Raad
574d494c3c Enhance provider system with dynamic package resolution and improved logging
- Add npm registry lookup for AI SDK packages with fallback support
- Enhance error logging with cause information
- Add timing deltas to log output for performance monitoring

🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-14 14:35:33 -04:00
Albert Ilagan
0239761f31 tui: remove quit dialog (#97) 2025-06-14 12:47:34 -05:00
Dax Raad
a53f9165e9 doc: remove dev script 2025-06-14 13:05:23 -04:00
Dax Raad
ffc231bd8b docs: contributing 2025-06-14 12:45:26 -04:00
Dax Raad
3cf4ef56fb sync 2025-06-14 12:32:41 -04:00
Dax Raad
c738e26438 docs: mcp 2025-06-14 12:25:26 -04:00
Dax Raad
9c6aa82ac1 docs: config schema 2025-06-14 12:22:07 -04:00
Dax Raad
ef74d97491 ci: update publish script 2025-06-14 12:13:59 -04:00
Dax Raad
af892e5432 docs: readme 2025-06-14 12:13:46 -04:00
Dax Raad
d7aca6230d naming fixes 2025-06-14 01:54:28 -04:00
Dax Raad
0f9c2c5c27 Add flag system and auto-share functionality
- Add Flag module for environment variable configuration
- Implement OPENCODE_AUTO_SHARE flag to automatically share new sessions
- Update session creation to conditionally auto-share based on flag

🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-14 01:51:04 -04:00
Dax Raad
6a261dedb4 Improve logging and simplify fzf implementation
- Refactor fzf search to use Bun's $ syntax for cleaner command execution
- Add request/response duration logging to server middleware
- Set default service name for logging to improve log clarity

🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-14 01:51:04 -04:00
Alireza Bahrami
ec928d88b5 fix(install): check if the path export command already exists (#28) 2025-06-13 23:28:33 -04:00
Dax Raad
59a5f120c0 Clean up workflows and enhance file discovery tools to include dot files
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-13 23:24:46 -04:00
Dax Raad
ce07f80b19 sync 2025-06-13 17:42:56 -04:00
Dax Raad
168fd9b2e3 screenshot 2025-06-13 17:42:14 -04:00
Dax Raad
df13b155f9 disable autoshare 2025-06-13 17:30:17 -04:00
Dax Raad
eeed5b8718 sync 2025-06-13 17:24:45 -04:00
Dax Raad
148ef90210 sync 2025-06-13 17:23:22 -04:00
adamdottv
67023bb007 wip: refactoring tui 2025-06-13 15:56:33 -05:00
Dax Raad
a316aed4fe sync 2025-06-13 16:47:15 -04:00
Dax Raad
9f7c0bd599 sync 2025-06-13 16:46:48 -04:00
Dax Raad
c7e1068f90 sync 2025-06-13 16:45:58 -04:00
Dax Raad
e2052d790b sync 2025-06-13 16:43:53 -04:00
Dax Raad
d3b2763c14 commit and push 2025-06-13 16:42:31 -04:00
Dax Raad
c6492de7ac sync 2025-06-13 16:37:58 -04:00
Dax Raad
d8fa0fb50c sync 2025-06-13 16:29:57 -04:00
Dax Raad
18ab8faa1d reset readme 2025-06-13 16:26:34 -04:00
Dax Raad
f35ce180e2 ci 2025-06-13 16:23:38 -04:00
Dax Raad
2bee48a9bc homebrew 2025-06-13 16:17:27 -04:00
adamdottv
10ddd654cf wip: refactoring tui 2025-06-13 11:27:05 -05:00
adamdottv
61396b93ed wip: refactoring tui 2025-06-13 11:18:46 -05:00
adamdottv
62b9a30a9c wip: refactoring tui 2025-06-13 10:47:51 -05:00
adamdottv
5706c6ad3a wip: refactoring tui 2025-06-13 09:57:54 -05:00
adamdottv
e8e03c895a wip: refactoring tui 2025-06-13 09:44:09 -05:00
adamdottv
38667682a7 wip: refactoring tui 2025-06-13 09:19:51 -05:00
adamdottv
d7d5fc39fb wip: refactoring tui 2025-06-13 08:30:57 -05:00
adamdottv
0caf25adee wip: refactoring tui 2025-06-13 08:30:56 -05:00
Dax Raad
37febc6873 do not strip aur package 2025-06-13 08:27:17 -04:00
adamdottv
4169f0c412 wip: refactoring tui 2025-06-13 07:01:26 -05:00
adamdottv
b7f06bbc1f wip: refactoring tui 2025-06-13 06:56:12 -05:00
adamdottv
1b8cfe9e99 wip: refactoring tui 2025-06-13 06:49:59 -05:00
adamdottv
97837d2d23 wip: refactoring tui 2025-06-13 06:23:12 -05:00
Dax Raad
9abc2a0cf8 load API keys 2025-06-13 00:53:46 -04:00
Dax Raad
9fb47bc855 Enhance auth command with dynamic provider selection
- Add support for dynamically loading providers from ModelsDev
- Prioritize anthropic as recommended provider
- Add "other" provider option for manual entry
- Include special handling for amazon-bedrock with AWS config guidance
- Expand provider selection UI to show up to 8 providers

🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-13 00:33:54 -04:00
Dax Raad
73e9fb53d5 sync 2025-06-13 00:06:15 -04:00
Dax Raad
f03637b1fc Refactor AI SDK provider loading to use BunProc.install
Simplifies provider installation by using BunProc.install() instead of manual path construction and file system checks.

🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-12 23:50:26 -04:00
Dax Raad
2c376c5abc bedrock loader 2025-06-12 23:39:52 -04:00
Dax Raad
442e1b52ad Update provider configuration and server handling
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-12 23:10:03 -04:00
Thomas Meire
e8c3abc369 Update error message to say opencode instead of sst (#81) 2025-06-12 18:38:59 -04:00
Dax Raad
c8648baba2 ci 2025-06-12 18:30:19 -04:00
Dax Raad
7b3a799856 ci 2025-06-12 18:21:08 -04:00
Dax Raad
9356b6c35a sync 2025-06-12 18:14:04 -04:00
Dax Raad
29a6603a89 Update CLI run command and session handling
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-12 18:07:31 -04:00
Dax Raad
a454ba8895 subagent 2025-06-12 18:07:31 -04:00
Jay V
5eae7aef0e updating logo 2025-06-12 17:30:24 -04:00
adamdottv
1031bceef7 wip: refactoring tui 2025-06-12 16:04:45 -05:00
adamdottv
653965ef59 wip: refactoring tui 2025-06-12 16:00:26 -05:00
adamdottv
ca0ea3f94d wip: refactoring tui 2025-06-12 16:00:25 -05:00
adamdottv
98bd5109c2 wip: refactoring tui 2025-06-12 16:00:25 -05:00
adamdottv
78f65e4789 wip: refactoring tui 2025-06-12 16:00:25 -05:00
adamdottv
75dd2f75aa wip: refactoring tui 2025-06-12 16:00:25 -05:00
adamdottv
fe86e58bbb wip: refactoring tui 2025-06-12 16:00:24 -05:00
adamdottv
ae339015fc wip: refactoring tui 2025-06-12 16:00:24 -05:00
adamdottv
cce2e4ad75 wip: refactoring tui 2025-06-12 16:00:24 -05:00
Dax Raad
a1ce35c208 ci 2025-06-12 14:15:44 -04:00
Dax Raad
69d6709a19 sync 2025-06-12 14:11:01 -04:00
Dax Raad
52ec134b2d Update publish workflow to support snapshot releases on dontlook branch
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-12 14:10:29 -04:00
Dax Raad
db88bede05 sync 2025-06-12 14:06:06 -04:00
Dax Raad
d4d218d7d6 Update index.ts
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-12 13:59:42 -04:00
Dax Raad
3e086e3ab9 sync 2025-06-12 13:49:43 -04:00
Jay V
2f5faae34b fix share page edit 2025-06-12 13:42:10 -04:00
Dax Raad
e3ad6a0698 do not output bunproc 2025-06-12 13:39:03 -04:00
Dax Raad
b536b45536 Fix AUR SSH key path handling in publish script
Quote and trim AUR_KEY environment variable to handle paths with spaces and multiline content properly.

🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-12 13:37:12 -04:00
Dax Raad
81c245035f Simplify BunProc.which() to use process.execPath directly
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-12 13:32:31 -04:00
Dax Raad
dda7059e57 update bun integration
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-12 13:29:14 -04:00
Dax Raad
0cca75ef48 sync 2025-06-12 13:19:24 -04:00
Dax Raad
ee1f55dbe2 token 2025-06-12 13:17:06 -04:00
Dax Raad
2fa50190e5 skip nil values 2025-06-12 13:13:34 -04:00
Jay V
662b6b1258 share page handle undefined 2025-06-12 13:11:34 -04:00
Dax Raad
f0dbe40522 sync 2025-06-12 13:04:01 -04:00
Dax Raad
41c54f629c sync 2025-06-12 12:48:38 -04:00
Dax Raad
4503201b15 npm token 2025-06-12 12:09:13 -04:00
Dax Raad
120151ee38 sync 2025-06-12 11:58:41 -04:00
Dax Raad
4d2e556713 sync 2025-06-12 11:57:57 -04:00
Dax Raad
54a5d3a9eb sync 2025-06-12 11:56:52 -04:00
Dax Raad
22dc6b6ec9 sync 2025-06-12 11:56:06 -04:00
Dax Raad
b5c6ddcd04 tweak 2025-06-12 11:55:07 -04:00
Dax Raad
e03ad6c42e sync 2025-06-12 11:18:17 -04:00
Dax Raad
33457d8472 sync 2025-06-12 11:02:51 -04:00
Dax Raad
888105e60f sync 2025-06-12 11:00:37 -04:00
Dax Raad
b7b490f67c Add postinstall script and update session/release configuration
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-12 00:50:49 -04:00
Dax Raad
f6ed59bf45 Refactor external tools organization and add file search API endpoint
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-11 23:59:51 -04:00
Dax Raad
83991bee88 Add search function to fzf and move version constant to namespace level
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-11 23:58:13 -04:00
Dax Raad
29142eb940 add title temp 2025-06-11 19:09:08 -04:00
Dax Raad
aab47714c9 sync 2025-06-11 19:00:09 -04:00
Dax Raad
9b2b610920 sync 2025-06-11 18:45:18 -04:00
Dax Raad
468cec545a sync 2025-06-11 18:19:21 -04:00
adamdottv
3c82fb6818 wip: refactoring tui 2025-06-11 12:05:54 -05:00
adamdottv
8a2f370eda wip: refactoring tui 2025-06-11 11:58:25 -05:00
Dax Raad
636133e6cb sync 2025-06-11 12:58:06 -04:00
Dax Raad
6cf8784ecf sync 2025-06-11 12:44:21 -04:00
adamdottv
95d5e1f231 wip: refactoring tui 2025-06-11 11:43:28 -05:00
Dax Raad
979bad3e64 sync 2025-06-11 12:35:20 -04:00
Dax Raad
300d0474a3 sync 2025-06-11 11:41:10 -04:00
Dax Raad
d4379c8c93 turn on tool call streaming 2025-06-11 11:36:12 -04:00
Jay V
a9b230f419 share page scroll anchor 2025-06-11 09:43:52 -05:00
Dax Raad
07cffebc8f fix bash 2025-06-11 00:37:43 -04:00
Dax Raad
f1de1634d6 Standardize styling in auth commands by replacing Bun.color with UI.Style constants
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-11 00:34:55 -04:00
Dax Raad
8d8663399d sync 2025-06-11 00:27:46 -04:00
Dax Raad
83eb61fd5f Refactor authentication system to consolidate auth flow and remove provider-based commands
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-11 00:21:46 -04:00
Dax Raad
b8e7d06356 sync 2025-06-10 22:18:27 -04:00
Dax Raad
4543765e3a fix rendering 2025-06-10 19:37:25 -04:00
Dax Raad
28f5cbbfe9 Fix shutdown handling, error management, and process lifecycle issues
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-10 18:58:47 -04:00
Dax Raad
ca3c22dc12 fix bunfile bug 2025-06-10 18:23:19 -04:00
Dax Raad
49110f7412 sync 2025-06-10 18:10:30 -04:00
Dax Raad
14dcf43246 add abort signals 2025-06-10 17:56:05 -04:00
Dax Raad
84e4afc0bd webfetch spoof browser 2025-06-10 16:39:06 -04:00
Dax Raad
1cc8e9a36d kill 2025-06-10 16:35:02 -04:00
Dax Raad
43b429db93 lsp 2025-06-10 16:29:35 -04:00
Dax Raad
6248c1e720 merge deep config 2025-06-10 16:19:02 -04:00
Frank
772e1851c0 Share: render url on frontend 2025-06-10 15:50:42 -04:00
Dax Raad
5ab2ff9589 onboarding progress 2025-06-10 15:43:14 -04:00
Dax Raad
a0062d4661 fix share link 2025-06-10 13:36:47 -04:00
Dax Raad
ef7f1f0761 sync 2025-06-10 13:30:13 -04:00
Dax Raad
96b5a079ff Update LSP client/server and CLI scrap command functionality
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-10 13:30:13 -04:00
Jay V
0e58f488df moving share urls 2025-06-10 11:52:02 -05:00
Dax Raad
fa7416687b Enhance ripgrep error handling and utility functions
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-10 11:06:01 -04:00
Jay V
c3ab370344 share page fix 2025-06-10 09:33:28 -05:00
Dax Raad
bb60aa3060 sync 2025-06-10 10:14:03 -04:00
Dax Raad
34fa8cadd6 Improve ripgrep error handling with structured error types
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-10 10:14:03 -04:00
Frank
edd459ec00 Share: og image 2025-06-09 23:37:32 -04:00
Frank
177875f624 ssr sync 2025-06-09 23:07:29 -04:00
Dax Raad
fdaa7f287c aborted toolcalls should be cleaned up 2025-06-09 20:48:50 -04:00
Dax Raad
fed659c582 properly support codex mini 2025-06-09 20:24:18 -04:00
Dax Raad
bffc612a4e notice 2025-06-09 19:58:08 -04:00
Jay V
b97b15e0fe share og image 2025-06-09 16:42:13 -05:00
Jay V
fcb972de19 edits 2025-06-09 16:09:15 -05:00
Dax Raad
4478195ea8 file permissions for anthropic credentials 2025-06-09 17:07:52 -04:00
Jay V
54c4a783b3 share page show lsp diag 2025-06-09 15:41:19 -05:00
Jay V
c091cbb624 share page fix ai text 2025-06-09 15:02:46 -05:00
Jay V
d4f8fd867a fix duplicates models 2025-06-09 15:02:46 -05:00
Dax Raad
a2884b08cc improve AGENTS.md 2025-06-09 15:28:06 -04:00
Dax Raad
60faa26a15 sync 2025-06-09 15:00:48 -04:00
Dax Raad
d8510ab452 Refactor logging system to centralize initialization and remove printLogs parameter
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-09 14:52:30 -04:00
Dax Raad
3c23b92bea optional 2025-06-09 14:02:58 -04:00
Dax Raad
021fd3fcb5 sync 2025-06-09 14:01:11 -04:00
Dax Raad
fa3253d1b6 Add TypeScript server initialization config to LSP server
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-08 13:22:07 -04:00
Dax Raad
a1cc8f6cdb fix ts lsp 2025-06-08 12:57:33 -04:00
Dax Raad
1bcc02442a sync 2025-06-08 12:46:32 -04:00
Frank
1d782dc19a Share: load server data on page load 2025-06-08 01:17:54 -04:00
Frank
879d02f86c Remove unused package 2025-06-07 23:47:45 -04:00
Frank
028d589ea0 Infra: use Astro component 2025-06-07 23:46:56 -04:00
Dax Raad
cdbdb96218 sync 2025-06-07 17:15:40 -04:00
Dax Raad
f22c93ba1b logs 2025-06-06 23:58:09 -04:00
Dax Raad
b34d5c959b add go version 2025-06-06 23:26:11 -04:00
Dax Raad
32e6a552c0 autodownload lsp 2025-06-06 23:21:57 -04:00
Dax Raad
d6afebf22a make lsp better 2025-06-06 17:20:08 -04:00
Jay V
b32cb2b932 share page styles 2025-06-06 12:18:02 -04:00
Dax Raad
265f427d2a lsp progress 2025-06-05 23:42:04 -04:00
Dax Raad
16520261f4 update auth and provider configuration
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-05 20:11:56 -04:00
Jay V
65b2cf73d7 share page markdown 2025-06-05 19:14:35 -04:00
Jay V
95069af03f share write tool 2025-06-05 16:41:44 -04:00
Jay V
3e4ebb6e5d share page 2025-06-05 16:35:23 -04:00
Dax Raad
9147108675 fix webfetch 2025-06-05 15:59:17 -04:00
Jay V
ea9dd4e9e2 share ssr 2025-06-05 15:37:23 -04:00
Dax Raad
4a6e36a404 fix cost 2025-06-05 15:06:42 -04:00
Dax Raad
db2bb32bcf integrate with models.dev 2025-06-05 14:59:16 -04:00
Jay V
1384a5e3e6 share page fetch 2025-06-05 14:17:43 -04:00
adamdottv
167aea6aaf wip: refactoring tui 2025-06-05 13:10:20 -05:00
Jay V
142056e9af share page todos 2025-06-05 13:52:44 -04:00
Dax Raad
241c366164 sync 2025-06-05 13:51:18 -04:00
Dax Raad
3cd7ae0807 gopls 2025-06-05 13:51:18 -04:00
Dax Raad
299a74061a sync 2025-06-05 13:51:18 -04:00
adamdottv
e3d5af2855 wip: refactoring tui 2025-06-05 12:38:25 -05:00
adamdottv
bbfa72552a feat: opus 4 2025-06-05 11:39:13 -05:00
Dax Raad
8dfdd3927e sync 2025-06-05 12:06:21 -04:00
Dax Raad
02e326f87f fixed 2025-06-05 11:58:04 -04:00
Dax Raad
35b03e4cb3 claude oauth support 2025-06-05 11:51:06 -04:00
adamdottv
b3555cda30 wip: refactoring tui 2025-06-05 10:46:07 -05:00
adamdottv
04bd98cf4d wip: refactoring tui 2025-06-05 05:43:33 -05:00
Dax Raad
f3e31130ba fix cost 2025-06-05 02:23:11 -04:00
Dax Raad
671e91f201 Add session share URL display to CLI output
🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-04 21:01:36 -04:00
Dax Raad
d334ead84a sync 2025-06-04 20:49:33 -04:00
Jay V
160428d2d4 share page glob 2025-06-04 20:14:14 -04:00
Jay V
0a1f9accd7 share page glob 2025-06-04 19:34:15 -04:00
Jay V
a74f27e59a read tool share page 2025-06-04 18:39:06 -04:00
Dax Raad
f76cdfff9b sync 2025-06-04 18:32:10 -04:00
Dax Raad
05974ea109 rework 2025-06-04 18:03:04 -04:00
Dax Raad
6cfce1e4da track errors 2025-06-04 17:55:14 -04:00
Jay V
e20093678f share page tool args 2025-06-04 17:48:42 -04:00
Dax Raad
7f8f46f9fe Refactor session module structure and improve error handling
- Rename session.ts to index.ts for cleaner module imports
- Update all imports to use new session module structure
- Add error metadata tracking to message schema
- Improve error handling in session stream processing

🤖 Generated with [OpenCode](https://opencode.ai)

Co-Authored-By: OpenCode <noreply@opencode.ai>
2025-06-04 17:38:54 -04:00
Jay V
2dbdecb0f7 styles share 2025-06-04 17:01:04 -04:00
Dax Raad
53b0a25085 implemented todo tool 2025-06-04 15:09:50 -04:00
adamdottv
093e64eb54 wip: refactoring tui 2025-06-04 13:52:23 -05:00
adamdottv
f34a3b6f67 wip: refactoring tui 2025-06-04 12:57:17 -05:00
Jay V
484c90ed00 share collapse system prompt 2025-06-04 13:37:07 -04:00
Dax Raad
f49694a543 sync 2025-06-04 13:33:53 -04:00
Dax Raad
fb88705bdc more tools 2025-06-04 13:33:25 -04:00
Dax Raad
90d85e6393 sync 2025-06-04 13:12:55 -04:00
Dax Raad
d13822d26e tool updates 2025-06-04 13:12:48 -04:00
adamdottv
a890288900 wip: refactoring tui 2025-06-04 11:48:01 -05:00
adamdottv
31d6e303a6 wip: refactoring tui 2025-06-04 11:48:01 -05:00
Jay V
199c42f726 styles share 2025-06-04 12:44:36 -04:00
Jay V
3211594821 styles 2025-06-04 12:32:25 -04:00
adamdottv
01050a430f wip: refactoring tui 2025-06-04 09:20:48 -05:00
Dax Raad
0b565b18c4 sync 2025-06-04 10:20:29 -04:00
Dax Raad
0791b077d7 fix tokens 2025-06-04 10:14:42 -04:00
Jay V
2fb59fee8e share page durations 2025-06-03 18:08:46 -04:00
Jay V
8206da4d9e share page bugs 2025-06-03 17:20:30 -04:00
adamdottv
0c6bda8255 wip: refactoring tui 2025-06-03 15:49:41 -05:00
Dax Raad
1e063e7937 fix port issue 2025-06-03 16:35:37 -04:00
Dax Raad
37c34fd39c mcp support 2025-06-03 15:57:48 -04:00
Jay V
3a60ae98f3 styles 2025-06-03 15:57:00 -04:00
Jay V
feeb49a42b Styling share 2025-06-03 15:55:34 -04:00
Jay V
ad84355ebc lock 2025-06-03 15:55:34 -04:00
Dax Raad
e2397a343d Fix bash tool stream handling by ensuring consistent stdout/stderr pipe configuration
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-03 14:48:05 -04:00
Dax Raad
8e769dcac0 Replace env-paths with xdg-basedir for better XDG compliance and cross-platform directory handling
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-03 14:46:28 -04:00
Dax Raad
9ad0477af6 change file name again 2025-06-03 14:28:50 -04:00
Dax Raad
c5eafd5722 sync 2025-06-03 14:25:25 -04:00
Dax Raad
caf9fdc893 initialized 2025-06-03 14:24:45 -04:00
Dax Raad
be4155a838 max output tokens 2025-06-03 13:46:39 -04:00
adamdottv
b00326a75a wip: refactoring tui 2025-06-03 12:45:28 -05:00
Dax Raad
4cf0aebb2e do not list files if not git 2025-06-03 13:19:44 -04:00
Dax Raad
a5a39dada7 include env 2025-06-03 13:18:42 -04:00
Dax Raad
866f22b077 sync 2025-06-03 13:08:47 -04:00
Dax Raad
ec7ab16ce4 sync 2025-06-03 13:00:27 -04:00
Dax Raad
e5b06a2d95 initialize 2025-06-03 12:38:48 -04:00
Dax Raad
fae49aaf88 sync 2025-06-03 12:08:07 -04:00
Dax Raad
274c8baa34 added app 2025-06-03 12:02:21 -04:00
Dax Raad
bfb36a8566 Refactor app context system to use Zod schemas and sync access pattern
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-03 11:59:03 -04:00
Dax Raad
8487346d3f check in generated files 2025-06-02 22:14:12 -04:00
Dax Raad
54b99cd88a scope filetimes to session 2025-06-02 20:24:32 -04:00
Dax Raad
786db364d2 add permission system 2025-06-02 19:51:37 -04:00
adamdottv
863e7a093e wip: refactoring tui 2025-06-02 14:14:13 -05:00
adamdottv
ca87b2806f wip: refactoring tui 2025-06-02 13:33:05 -05:00
adamdottv
2958c6b53c wip: refactoring tui 2025-06-02 12:30:03 -05:00
adamdottv
8b5394e031 wip: refactoring tui 2025-06-02 12:00:21 -05:00
adamdottv
da92ee5f09 wip: refactoring tui 2025-06-02 11:33:01 -05:00
Dax Raad
80de5d489f spawn from anywhere in dev mode 2025-06-02 11:40:28 -04:00
Dax Raad
ab17578516 remove json 2025-06-01 15:03:25 -04:00
Dax Raad
e764525578 sync 2025-06-01 15:01:57 -04:00
Dax Raad
a1c4f345a8 Fix TypeScript errors and clean up unused imports
- Remove unused fs import from app.ts
- Remove unused LLM import and missing module reference from server.ts
- Fix parameter naming inconsistency in fetch.ts execute function
- Add missing metadata property to fetch.ts return statement
- Update test file to use correct API signatures and parameter names
- Remove unused parameters from example.ts

🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-01 14:46:04 -04:00
Dax Raad
526a8ea19a Refactor application path handling and data storage architecture
Replace simple directory-based path system with git-aware data management that uses global data directories and proper workspace detection.

🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-06-01 14:40:44 -04:00
Dax Raad
4be9f7ab9c fix tool name change 2025-05-31 20:33:21 -04:00
Dax Raad
8e40160934 sync 2025-05-31 18:45:36 -04:00
Dax Raad
e97ed735d9 sync 2025-05-31 18:42:43 -04:00
Dax Raad
6d21525e71 sync 2025-05-31 18:10:49 -04:00
Dax Raad
b4f809559e tool rework 2025-05-31 17:12:16 -04:00
Dax Raad
33109bac4d fix generate 2025-05-31 16:06:51 -04:00
Dax Raad
f072ab3276 lazy load LLMs even harder 2025-05-31 16:05:12 -04:00
Dax Raad
3b746162d2 run formatter 2025-05-31 14:41:00 -04:00
Dax Raad
6df19f1828 Standardize code style to no semicolons and remove unused @types/node dependency
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-30 22:14:09 -04:00
Dax Raad
fba56d6871 Update package dependencies and lock file
Synchronized package.json and bun.lock with latest dependency versions and optimized package structure.

🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-30 22:02:22 -04:00
Dax Raad
1472efcbfe Optimize package management with catalog and exact versions
- Add catalog feature to centralize common dependency versions
- Convert all package versions to exact (remove ^ prefixes)
- Move prettier to root package only to reduce duplication
- Configure bunfig.toml for exact dependency installs
- Improve version consistency across workspace packages

🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-30 21:56:37 -04:00
Dax Raad
56a5d58945 sync 2025-05-30 21:42:03 -04:00
Dax Raad
f50a57041f embed go 2025-05-30 21:26:53 -04:00
Dax Raad
f3da73553c sync 2025-05-30 20:48:36 -04:00
Jay V
9a26b3058f listing themes explicitly 2025-05-30 17:06:03 -04:00
Jay V
a09be7cf74 tweaking styles 2025-05-30 17:00:18 -04:00
Dax Raad
91a9e455e2 sync 2025-05-30 16:43:42 -04:00
Jay V
c391c6d3f3 tweaking share edit 2025-05-30 16:42:59 -04:00
adamdottv
ca562266b7 wip: refactoring tui 2025-05-30 15:37:04 -05:00
adamdottv
c69c9327da wip: refactoring tui 2025-05-30 15:34:25 -05:00
Dax Raad
f5e2c596d4 sync 2025-05-30 14:45:10 -04:00
Dax Raad
e9bad39a7e sync 2025-05-30 14:40:59 -04:00
Dax Raad
42c7880858 sync 2025-05-30 14:40:59 -04:00
Jay V
017a440a70 refactor 2025-05-30 14:08:21 -04:00
Jay V
1ab9547bb2 import 2025-05-30 14:02:15 -04:00
Jay V
a4e46e6e18 share page diff 2025-05-30 13:58:34 -04:00
Jay V
680d52016c adding diff 2025-05-30 13:58:34 -04:00
adamdottv
6ebbcb3179 wip: refactoring tui 2025-05-30 12:29:27 -05:00
adamdottv
437de4ee36 wip: refactoring tui 2025-05-30 10:37:09 -05:00
adamdottv
a5b28b5cef wip: refactoring tui 2025-05-30 08:35:59 -05:00
adamdottv
9bf024f8be wip: refactoring tui 2025-05-30 08:29:37 -05:00
adamdottv
189d0e5fb2 wip: refactoring tui 2025-05-30 06:47:44 -05:00
adamdottv
b1b402faa7 wip: refactoring tui 2025-05-30 06:03:34 -05:00
adamdottv
c5413c8c8d wip: refactoring tui 2025-05-30 05:32:42 -05:00
adamdottv
da1e8484a9 wip: refactoring tui 2025-05-29 15:42:18 -05:00
adamdottv
4818bc5426 wip: refactoring tui 2025-05-29 15:41:13 -05:00
adamdottv
8a8c6b14af wip: refactoring tui 2025-05-29 15:40:37 -05:00
adamdottv
0e31bbcd93 wip: refactoring tui 2025-05-29 15:37:06 -05:00
adamdottv
913b3434d8 wip: refactoring tui 2025-05-29 15:30:39 -05:00
adamdottv
1c01ee4834 wip: refactoring tui 2025-05-29 15:22:25 -05:00
adamdottv
005d6e0bde wip: refactoring tui 2025-05-29 15:18:47 -05:00
adamdottv
37c0c1f358 wip: refactoring tui 2025-05-29 15:10:44 -05:00
adamdottv
2a132f86d6 wip: refactoring tui 2025-05-29 15:02:22 -05:00
adamdottv
50ba0b380b wip: refactoring tui 2025-05-29 14:39:37 -05:00
adamdottv
6cccbdccd3 wip: refactoring tui 2025-05-29 14:20:55 -05:00
Dax Raad
d0ad09d798 Improve async share handling and enhance diagnostic output with filenames
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-29 15:16:42 -04:00
Dax Raad
4fa4246c10 Fix working indicator to only show when message is incomplete
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-29 15:10:23 -04:00
Dax Raad
0fe72864f2 remove log 2025-05-29 15:05:44 -04:00
adamdottv
ce5b3126d3 wip: refactoring tui 2025-05-29 14:04:49 -05:00
adamdottv
26606ccbf7 wip: refactoring tui 2025-05-29 14:04:49 -05:00
adamdottv
fce9e79d38 wip: refactoring tui 2025-05-29 14:04:48 -05:00
adamdottv
6759674c0f wip: refactoring tui 2025-05-29 14:04:48 -05:00
Dax Raad
a9799136fe support continuation 2025-05-29 14:29:08 -04:00
Jay V
7a29af4e30 styling tool calls 2025-05-29 14:24:25 -04:00
Dax Raad
d398001f96 Remove React/Ink dependencies and add context window management
- Remove unused React and Ink CLI dependencies to simplify package
- Update provider schema to use maxOutputTokens for clarity
- Add automatic summarization when approaching context window limits
- Simplify message event handling and add cost/token metadata display

🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-29 14:16:15 -04:00
Dax Raad
e68747a64a add summarize 2025-05-29 13:17:56 -04:00
Dax Raad
d62ce482da fix race 2025-05-29 13:15:58 -04:00
Dax Raad
f9f41e205d add summarize 2025-05-29 13:02:48 -04:00
Dax Raad
80597cd3fd type error fix 2025-05-29 11:58:40 -04:00
Dax Raad
48f81fe4d3 sync 2025-05-29 11:38:55 -04:00
Dax Raad
a96c2ce65c sync 2025-05-29 11:35:56 -04:00
Dax Raad
6f604bd0f9 remove secondary codegen 2025-05-29 11:32:55 -04:00
Dax Raad
42c1cd6a85 event 2025-05-29 11:17:34 -04:00
Dax Raad
33a831d2be rework types 2025-05-29 10:22:07 -04:00
adamdottv
d70201cd93 wip: refactoring tui 2025-05-29 06:45:34 -05:00
adamdottv
9f1a75e938 wip: refactoring tui 2025-05-29 06:42:58 -05:00
adamdottv
2b77a7f714 wip: refactoring tui 2025-05-29 06:32:44 -05:00
Jay V
5974a53071 styling share 2025-05-28 20:04:58 -04:00
Jay V
3d61cc5d2b styling share 2025-05-28 18:32:40 -04:00
Dax Raad
a22a2f0f37 autoshare 2025-05-28 17:34:37 -04:00
Dax Raad
be65ed6f88 sync 2025-05-28 17:25:00 -04:00
Dax Raad
e88264075a sync 2025-05-28 17:24:21 -04:00
Jay V
041a080a13 refactor share 2025-05-28 16:42:44 -04:00
adamdottv
9d7c5efb9b wip: refactoring tui 2025-05-28 15:36:36 -05:00
adamdottv
8863a499a9 wip: refactoring tui 2025-05-28 15:36:35 -05:00
adamdottv
15d21bf04a wip: refactoring tui 2025-05-28 15:36:35 -05:00
adamdottv
5e738ce7d3 wip: refactoring tui 2025-05-28 15:36:35 -05:00
adamdottv
641e9ff664 chore: cleanup unused imports 2025-05-28 15:36:35 -05:00
adamdottv
d249766777 chore: remove dead code 2025-05-28 15:36:35 -05:00
Dax Raad
6cf4b7f00b sync 2025-05-28 15:40:35 -04:00
Dax Raad
6183398543 sync 2025-05-28 15:39:51 -04:00
Dax Raad
ff786d9139 abort 2025-05-28 15:07:51 -04:00
Dax Raad
4767276a0e more 2025-05-28 14:17:02 -04:00
Jay V
71bab45065 styling share 2025-05-28 14:10:20 -04:00
Dax Raad
cb48813c95 sync 2025-05-28 13:58:55 -04:00
Dax Raad
520cd02dd5 added cost field 2025-05-28 13:57:24 -04:00
Dax Raad
afe741b63e add cost 2025-05-28 13:57:02 -04:00
Dax Raad
f3b224090c better discriminator 2025-05-28 13:35:05 -04:00
Dax Raad
3b7b7f4bea add runtime 2025-05-28 13:24:35 -04:00
Dax Raad
3a4d3b249f generate message type 2025-05-28 13:22:48 -04:00
Dax Raad
55a6fcdd3f add provider_list 2025-05-28 12:53:22 -04:00
Dax Raad
4132fcc1b2 return tool errors properly 2025-05-27 21:53:21 -04:00
Jay V
37082b2176 styling share 2025-05-27 20:38:29 -04:00
Dax Raad
b9f009c529 title 2025-05-27 17:22:29 -04:00
Dax Raad
601f610eb7 sync 2025-05-27 16:56:12 -04:00
Dax Raad
2e2bdd46b4 sync 2025-05-27 15:41:44 -04:00
Dax Raad
3a28ce9b0a sync 2025-05-27 15:37:46 -04:00
Dax Raad
bb6fc2a1fd regen client 2025-05-27 15:35:48 -04:00
Dax Raad
ad76fa8616 fix 2025-05-27 15:35:31 -04:00
Dax Raad
bdac7d10dd add session list 2025-05-27 15:34:46 -04:00
Dax Raad
0ecfdd7501 fix type errors 2025-05-27 15:28:58 -04:00
Dax Raad
a9758e0db5 vomit 2025-05-27 15:26:49 -04:00
Dax Raad
e98f915fd5 stripped 2025-05-27 15:20:43 -04:00
Dax Raad
07f0fea4bf trust sharp 2025-05-27 14:36:23 -04:00
Dax Raad
6a43afc4e7 sync 2025-05-27 14:31:53 -04:00
Dax Raad
c01eefc729 Add manual trigger option to deployment workflow
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-27 14:30:52 -04:00
Dax Raad
5d4ccc8883 Add GitHub Actions deployment workflow to automate deployments
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-27 14:29:36 -04:00
Dax Raad
98b5390a22 Refactor grep tool output generation and fix ls directory traversal bug
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-27 02:26:53 -04:00
Dax Raad
c040baae11 Refactor LSP tools and add hover functionality
- Split diagnostics tool into separate lsp-diagnostics.ts file
- Add new lsp-hover.ts tool for LSP hover information
- Update tool exports and session integration
- Remove old diagnostics.ts file

🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-27 02:17:35 -04:00
Dax Raad
754cc66741 Add automatic CONTEXT.md loading and improve share sync reliability
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-26 23:04:40 -04:00
Dax Raad
6ef0b991ec Fix race condition in share sync by implementing request queue
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-26 22:56:58 -04:00
Dax Raad
f6ca06b8ea track tokens progressively 2025-05-26 22:35:30 -04:00
Dax Raad
4c198940d5 Remove debug console.log statements and improve LSP diagnostic handling
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-26 22:25:17 -04:00
Dax Raad
2e938d9da1 Standardize API parameters to camelCase and improve LSP client reliability
- Convert tool parameters from snake_case to camelCase for consistency
- Add file existence check in LSP client before opening files
- Fix version increment timing in LSP textDocument operations
- Optimize session token tracking using onStepFinish callback
- Add debugging logs for diagnostics troubleshooting

🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-26 22:08:50 -04:00
Dax Raad
b840a40759 sync 2025-05-26 21:44:55 -04:00
Dax Raad
a1d40f8f28 add context.md 2025-05-26 21:38:17 -04:00
Dax Raad
575d76fa06 Fix session update to use proper getter and correct operation order
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-26 21:25:20 -04:00
Dax Raad
b75456f5dd Clean up code formatting and remove unused imports for better consistency
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
2025-05-26 21:22:38 -04:00
Dax Raad
eb69cc3943 sync 2025-05-26 21:13:46 -04:00
Dax Raad
e524209352 title 2025-05-26 21:08:22 -04:00
Jay V
e1c897c1ae styles 2025-05-26 20:33:21 -04:00
Jay V
39f54e83e1 styles 2025-05-26 19:05:17 -04:00
Jay V
d34c974996 adding styles 2025-05-26 19:01:50 -04:00
Frank
c203891b84 Share: sync 2025-05-26 18:54:11 -04:00
Dax Raad
591bd2a4e3 sync 2025-05-26 18:14:36 -04:00
Dax Raad
94f35130f7 sync 2025-05-26 18:10:10 -04:00
Dax Raad
f26873f5de sync 2025-05-26 18:06:47 -04:00
Jay V
66b18959eb Merging docs and share app 2025-05-26 17:25:12 -04:00
Jay V
deacf5991a Adding share page 2025-05-26 17:25:12 -04:00
Dax Raad
25623d1f84 sync 2025-05-26 17:22:31 -04:00
Dax Raad
de9f144858 sync 2025-05-26 17:15:47 -04:00
Dax Raad
0ad8738933 sync 2025-05-26 17:15:47 -04:00
Frank
db5744bbc4 Share: sync 2025-05-26 16:35:03 -04:00
Dax Raad
b87ba57819 shutdown 2025-05-26 14:52:38 -04:00
Dax Raad
802389a90e fixed id generation 2025-05-26 14:36:45 -04:00
Dax Raad
4880b08b8a sync 2025-05-26 14:18:07 -04:00
Dax Raad
80555f13e0 more tools 2025-05-26 14:09:17 -04:00
Dax Raad
113c49457f ls tool 2025-05-26 13:44:59 -04:00
Dax Raad
e1ec815d1b sync 2025-05-26 13:39:16 -04:00
Dax Raad
2ed17f4877 exit properly 2025-05-26 13:21:15 -04:00
Frank
80118212da Share: sync 2025-05-26 12:40:17 -04:00
Frank
9b3760247a Share: sync 2025-05-26 12:40:17 -04:00
Dax Raad
a2d652b13d sync 2025-05-26 12:40:17 -04:00
Dax Raad
5c491758f5 sync 2025-05-26 12:40:17 -04:00
Dax Raad
5f750b7368 share 2025-05-26 12:40:17 -04:00
Frank
e2dc5a8faf Share: sync 2025-05-26 12:40:17 -04:00
Frank
72d10a0823 Share: fix types 2025-05-26 12:40:17 -04:00
Frank
7623b33f31 Share: sync 2025-05-26 12:40:17 -04:00
Frank
9b331a917e Share: sync 2025-05-26 12:40:17 -04:00
Frank
d51b4263ab Share: sync 2025-05-26 12:40:17 -04:00
Frank
34a2dcb80a Share: sync 2025-05-26 12:40:17 -04:00
Frank
8cbd59296e Share: sync 2025-05-26 12:40:17 -04:00
Frank
83974e0c95 Share: sync 2025-05-26 12:40:17 -04:00
Dax Raad
59d43fa5da add diagnostics meta 2025-05-26 12:40:17 -04:00
Dax Raad
e01afb407c add tool tests 2025-05-26 12:40:17 -04:00
Dax Raad
f0f55bc75f sync 2025-05-26 12:40:17 -04:00
Dax Raad
2860a2bb1a sync 2025-05-26 12:40:17 -04:00
Dax Raad
9b564f0b73 tool meta 2025-05-26 12:40:17 -04:00
Dax Raad
2437ce3f8b toolz 2025-05-26 12:40:17 -04:00
Dax Raad
fa8a46326a codegen 2025-05-26 12:40:17 -04:00
Dax Raad
652429377b sync 2025-05-26 12:40:17 -04:00
Dax Raad
99af6146d5 openapi 2025-05-26 12:40:17 -04:00
Dax Raad
020e0ca039 sync 2025-05-26 12:40:17 -04:00
Dax Raad
0439072420 fix 2025-05-26 12:40:17 -04:00
Dax Raad
49ad2efef6 sync 2025-05-26 12:40:17 -04:00
Dax Raad
0e303e6508 sync 2025-05-26 12:40:17 -04:00
Dax Raad
bcd2fd68b7 sync 2025-05-26 12:40:17 -04:00
Dax Raad
d0d67029f4 process 2025-05-26 12:40:17 -04:00
Dax Raad
a34d020bc6 sync 2025-05-26 12:40:17 -04:00
Martin Palma
96fbc37f01 feat: ayu theme (#60) 2025-05-26 06:45:06 -05:00
Ed Zynda
89e3a72ae1 feat: add support for piped input to CLI (#51) 2025-05-23 06:54:09 -05:00
Ed Zynda
b9ebcea82c really disable history nav when completions dialog is open (#50) 2025-05-23 06:43:30 -05:00
Jay
f31f92119d Merge pull request #52 from sst/docs
Adding website
2025-05-22 15:23:09 -04:00
Jay V
da9b2a18b9 editing docs 2025-05-22 15:10:08 -04:00
Dax Raad
2b258b1473 switch default to claude sonnet 4 2025-05-22 13:17:32 -04:00
Jay V
6f894950a6 adding other chapters 2025-05-21 19:34:50 -04:00
Jay V
9049295cc9 Merge branch 'dev' into docs 2025-05-21 15:01:25 -04:00
Jay V
4526b14b17 Moving readme to docs 2025-05-21 15:01:12 -04:00
Jay V
f768313c4f Moving readme to docs 2025-05-21 14:59:09 -04:00
Jay V
d9c1b2cc90 adding docs site 2025-05-21 14:58:30 -04:00
Jay V
6cfcf51752 adding docs site 2025-05-21 14:24:53 -04:00
Alvin Johansson
dff8e77eb6 docs: add clearer bedrock instructions (#40) 2025-05-20 08:23:51 -05:00
Ed Zynda
6e854a4df4 fix: disable history navigation when filepicker is open (#39) 2025-05-18 18:41:34 -05:00
phantomreactor
2f8984fadb Codex mini (#34) 2025-05-18 08:29:53 -05:00
Ed Zynda
c84918cb47 feat: Add message history navigation with arrow keys (#30) 2025-05-17 13:35:49 -05:00
Pierre B.
05bb065d00 fix: log pane text wrapping (#32) 2025-05-17 13:33:47 -05:00
Pierre Berube
3742997889 fix: typo 2025-05-17 08:50:30 -05:00
Pierre Berube
daf0305203 fix: split out bedrock models 2025-05-17 08:48:51 -05:00
Ed Zynda
307982a099 feat: Add tool restriction flags for non-interactive mode (#29) 2025-05-17 08:23:13 -05:00
phantomreactor
ba416e787b paste images with ctrl+v (#26) 2025-05-16 14:31:50 -05:00
Ed Zynda
b71cae63f1 feat: Add tools dialog accessible via F9 (#24)
* Add tools dialog

* Remove sorting and double items

* Update key handling
2025-05-16 10:57:35 -05:00
Ed Zynda
c92f7c6630 fix: Show correct file paths in permission window (#25)
* Fix paths in permission window to show relative paths

* Fix paths in permission window to show actual file paths
2025-05-16 10:56:39 -05:00
Ed Zynda
4a444e9c9b feat: Make shell configurable via config file (#23) 2025-05-16 06:27:28 -05:00
Ed Zynda
623d132772 feat: Add non-interactive mode (#18) 2025-05-16 06:06:28 -05:00
adamdottv
d127a1c4eb feat: 0-255 color support in custom themes 2025-05-15 19:02:27 -05:00
adamdottv
c9cca48d08 fix: layout 2025-05-15 15:57:15 -05:00
adamdottv
3944930fc0 chore: cleanup 2025-05-15 15:45:22 -05:00
adamdottv
825c0b64af fix: build 2025-05-15 14:49:30 -05:00
adamdottv
d7af7dd3fe fix: redundant status msg 2025-05-15 14:42:10 -05:00
adamdottv
b112216241 fix: build 2025-05-15 13:36:58 -05:00
mineo
87237b6462 feat: support VertexAI provider (#153)
* support: vertexai

fix

fix

set default for vertexai

added comment

fix

fix

* create schema

* fix README.md

* fix order

* added popularity

* set tools if tools exist

restore commentout

* fix comment

* set summarizer model
2025-05-15 13:35:06 -05:00
phantomreactor
5f5f9dad87 add support for previewing webp images and pasting file paths 2025-05-15 12:55:36 -05:00
adamdottv
aa8b3ce1ee fix: init ordering 2025-05-15 12:52:42 -05:00
adamdottv
a65e593ab4 feat: batch tool 2025-05-15 12:44:16 -05:00
adamdottv
5d9058eb74 fix: tui height 2025-05-15 12:18:28 -05:00
adamdottv
a850320fad fix: don't limit session logs 2025-05-15 12:05:26 -05:00
adamdottv
ddbb217d0d feat: better status bar 2025-05-15 12:04:15 -05:00
Ed Zynda
ab150be7c3 feat: Support named arguments in custom commands (#158)
* Allow multiple named args

* fix: Fix styling in multi-arguments dialog

* Remove old unused modal

* Focus on only one input at a time
2025-05-15 09:43:33 -05:00
Adictya
a203fb8ccc fix(complete-module): change completion start key to slash 2025-05-15 08:29:54 -05:00
Adictya
acc084c9ea chore(complete-module): lint 2025-05-15 08:29:54 -05:00
Adictya
3ee213081e fix(complete-module): logging 2025-05-15 08:29:54 -05:00
Adictya
15bf40bc10 feat(complete-module): add completions logic, dialog and providers 2025-05-15 08:29:54 -05:00
adamdottv
a33e3e25b6 chore: cleanup dead code 2025-05-15 07:19:17 -05:00
adamdottv
658faab2bf chore: update icon in readme 2025-05-15 07:03:58 -05:00
rekram1-node
797045ee29 feat: add configuration persistence for model selections 2025-05-14 16:32:05 -05:00
adamdottv
c8f8d67a88 feat: codeAction tool 2025-05-14 14:57:47 -05:00
Dax Raad
182e32e4f7 add screenshot 2025-05-14 15:50:25 -04:00
adamdottv
5ea989fb74 feat: docSymbols and workspaceSymbols tools 2025-05-14 14:40:45 -05:00
adamdottv
45c778b90d feat: definition and references tools 2025-05-14 14:25:15 -05:00
Dax Raad
47cbb650a0 ci 2025-05-14 14:30:47 -04:00
Dax Raad
e91371c6a5 ci 2025-05-14 14:27:18 -04:00
adamdottv
9d17314309 fix: tweak 2025-05-14 13:15:39 -05:00
adamdottv
3982be4310 feat: session specific logs 2025-05-14 13:06:09 -05:00
adamdottv
4c998d4f4f chore: remove sourcegraph tool 2025-05-14 10:42:16 -05:00
adamdottv
f7849c2d59 fix: log display 2025-05-14 07:47:15 -05:00
adamdottv
463002185b fix: log perf 2025-05-14 07:38:21 -05:00
adamdottv
53a80eac1e chore: cleanup 2025-05-14 06:00:34 -05:00
adamdottv
01b6bf5bb7 chore: refactor db 2025-05-13 13:08:43 -05:00
adamdottv
d8f3b60625 chore: refactoring 2025-05-13 11:34:48 -05:00
adamdottv
cf8e16018d chore: refactoring 2025-05-13 11:15:14 -05:00
adamdottv
674797bd48 chore: refactoring 2025-05-13 11:07:34 -05:00
adamdottv
1f9610e266 chore: refactoring 2025-05-13 10:45:58 -05:00
adamdottv
ae86ef519c chore: refactoring 2025-05-13 10:27:09 -05:00
adamdottv
2391e338b4 chore: rename 2025-05-13 10:02:39 -05:00
adamdottv
1e9399fbee chore: cleanup 2025-05-13 09:26:54 -05:00
adamdottv
e9f74b867f chore: cleanup 2025-05-13 07:15:58 -05:00
adamdottv
5079556896 chore: cleanup 2025-05-13 07:04:27 -05:00
adamdottv
7f0e68b933 chore: cleanup 2025-05-13 06:55:18 -05:00
adamdottv
0c21ca5318 chore: cleanup 2025-05-13 06:51:28 -05:00
adamdottv
0117c72a2c chore: refactor diff 2025-05-12 15:17:50 -05:00
adamdottv
e3eb9e5435 chore: refactor diff 2025-05-12 14:48:13 -05:00
adamdottv
d941be3f1f chore: refactor agent.go 2025-05-12 14:43:12 -05:00
adamdottv
36e5ae804e chore: rename coder -> primary 2025-05-12 14:32:27 -05:00
adamdottv
c9b90dd184 fix: show context % 2025-05-12 14:08:57 -05:00
adamdottv
8270a1e4b1 chore: cleanup 2025-05-12 13:01:59 -05:00
adamdottv
7f9c992993 fix: log status messages 2025-05-12 12:53:20 -05:00
adamdottv
b6524c0982 chore: cleanup 2025-05-12 11:38:07 -05:00
adamdottv
425c0f1bab fix: timestamp formatting 2025-05-12 11:33:52 -05:00
adamdottv
d20d0c5a95 chore: cleanup 2025-05-12 11:22:19 -05:00
adamdottv
5af3c05d41 chore: cleanup 2025-05-12 10:53:13 -05:00
adamdottv
df4a9295c0 chore: cleanup 2025-05-12 10:46:14 -05:00
adamdottv
8cbfc581b5 chore: cleanup 2025-05-12 10:45:03 -05:00
Nicholas Hamilton
4bb350a09b Fix filepicker manual input (#146)
* fix: allow typing i while manually inputting a filepath

* fix: file selection in filepicker focus mode

* remove duplicate code
2025-05-12 10:01:56 -05:00
adamdottv
17c5b9c12c fix: build 2025-05-12 09:59:57 -05:00
Ed Zynda
1f8580553c feat: custom commands (#133)
* Implement custom commands

* Add User: prefix

* Reuse var

* Check if the agent is busy and if so report a warning

* Update README

* fix typo

* Implement user and project scoped custom commands

* Allow for $ARGUMENTS

* UI tweaks

* Update internal/tui/components/dialog/arguments.go

Co-authored-by: Kujtim Hoxha <kujtimii.h@gmail.com>

* Also search in $HOME/.opencode/commands

---------

Co-authored-by: Kujtim Hoxha <kujtimii.h@gmail.com>
2025-05-12 09:58:59 -05:00
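For context on how the custom commands from #133 fit together, here is a minimal sketch of the lookup and `$ARGUMENTS` substitution. Only `$ARGUMENTS` and the `$HOME/.opencode/commands` location come from the commit message; the project-level directory, the `.md` extension, and all function names are illustrative assumptions, not the actual opencode code.

```go
package commands

import (
	"os"
	"path/filepath"
	"strings"
)

// commandDirs lists where command files are searched: project scope first,
// then the user scope under $HOME/.opencode/commands.
func commandDirs(projectDir string) []string {
	home, _ := os.UserHomeDir()
	return []string{
		filepath.Join(projectDir, ".opencode", "commands"), // project-scoped (assumed layout)
		filepath.Join(home, ".opencode", "commands"),       // user-scoped, per the commit
	}
}

// loadCommand reads the first matching command file and replaces the
// $ARGUMENTS placeholder with whatever the user typed after the command.
func loadCommand(projectDir, name, args string) (string, bool) {
	for _, dir := range commandDirs(projectDir) {
		data, err := os.ReadFile(filepath.Join(dir, name+".md")) // extension is an assumption
		if err != nil {
			continue
		}
		return strings.ReplaceAll(string(data), "$ARGUMENTS", args), true
	}
	return "", false
}
```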
mineo
f92b2b76dc replace github.com/google/generative-ai-go with github.com/googleapis/go-genai (#138)
* replace to github.com/googleapis/go-genai

* fix history logic

* small fixes

---------

Co-authored-by: Kujtim Hoxha <kujtimii.h@gmail.com>
2025-05-12 09:56:30 -05:00
adamdottv
1d1a1ddcbf fix: visual tweaks 2025-05-12 09:54:20 -05:00
adamdottv
dfe5fd8d97 wip: refactoring 2025-05-12 09:44:56 -05:00
adamdottv
ed9fba99c9 wip: refactoring 2025-05-12 08:43:34 -05:00
adamdottv
f100777199 wip: logging improvements 2025-05-09 13:37:13 -05:00
adamdottv
f41b7bbd0a chore: refactoring status updates 2025-05-08 12:03:59 -05:00
adamdottv
e35ea2d448 fix: log page nav 2025-05-08 07:59:15 -05:00
adamdottv
bab17d7520 feat: session manager 2025-05-08 07:58:37 -05:00
adamdottv
051d7d7936 chore: logging token usage 2025-05-06 14:40:00 -05:00
adamdottv
b638dafe5f feat: better logs page 2025-05-06 14:22:37 -05:00
adamdottv
e387b1f16c fix: openrouter require_parameters 2025-05-06 11:17:32 -05:00
adamdottv
71a68dd56d feat: add qwen3 models 2025-05-06 10:19:07 -05:00
adamdottv
3ee8ebd3d3 fix: auto-compact logic 2025-05-06 10:03:21 -05:00
adamdottv
ef298b2f18 fix: hide empty messages 2025-05-05 14:38:33 -05:00
adamdottv
3cc08494a5 fix: pubsub leak and shutdown seq 2025-05-05 14:23:29 -05:00
adamdottv
afcdabd095 fix: anthropic non-empty blocks 2025-05-05 12:00:09 -05:00
adamdottv
efaba6c5b8 feat: hide tool calls 2025-05-05 11:25:34 -05:00
adamdottv
874715838a feat: show sender name and timestamp 2025-05-05 11:02:02 -05:00
adamdottv
167eb9ddfa fix: diagnostics visual in status bar 2025-05-05 07:04:33 -05:00
Joshua LaMorey-Salzmann
fba344718f Config fix correcting loose viper string check, default model now set correctly (#147) 2025-05-05 06:56:10 -05:00
adamdottv
cdd906e32e fix: bedrock supports attachments 2025-05-02 15:35:24 -05:00
phantomreactor
ff0ef3bb43 feat: add support for images 2025-05-02 15:29:46 -05:00
adamdottv
0095832be3 chore: cleanup and logging 2025-05-02 15:24:47 -05:00
adamdottv
406ccf9b87 fix: diagnostics tool init 2025-05-02 15:24:47 -05:00
adamdottv
f90d6238ed fix: bedrock context window 2025-05-02 15:24:47 -05:00
adamdottv
f004a0b8c3 fix: anthropic non-empty blocks 2025-05-02 15:24:47 -05:00
adamdottv
49423da081 feat: compact command with auto-compact 2025-05-02 15:24:47 -05:00
adamdottv
364cf5b429 feat: write to context.md by default 2025-05-02 15:24:47 -05:00
adamdottv
b2f24e38ed feat: better diagnostic visuals in status bar 2025-05-02 15:24:47 -05:00
adamdottv
49037e7b28 feat: better logs page 2025-05-02 15:24:47 -05:00
adamdottv
c66832d299 fix: wording 2025-05-01 11:54:35 -05:00
adamdottv
7398b4ce70 fix: don't truncate task tool output 2025-05-01 11:35:54 -05:00
Kujtim Hoxha
a61b2026eb add xai support (#135) 2025-05-01 11:08:26 -05:00
Aiden Cline
69ade34c2c fix: tweak the logic in config to ensure that env vs file configurations merge properly (#115) 2025-05-01 11:08:17 -05:00
Garrett Ladley
fbca5441f6 feat: test for getContextFromPaths (#105)
* feat: test for getContextFromPaths

* fix: use testify
2025-05-01 11:08:06 -05:00
Kujtim Hoxha
e4680caebb some small fixes 2025-05-01 11:07:47 -05:00
adamdottv
e760d28c5a feat: show hunk headers 2025-05-01 09:02:14 -05:00
adamdottv
7d5f0f9d18 fix: pass input to EDITOR 2025-05-01 07:58:27 -05:00
adamdottv
515f4e8642 fix: visual tweaks 2025-05-01 07:32:04 -05:00
adamdottv
f2b36b9234 fix: remove lsp tool 2025-05-01 07:28:37 -05:00
adamdottv
f224978bbc fix: remove manual lsp definition 2025-05-01 07:24:53 -05:00
adamdottv
8819a37a05 fix: logo 2025-05-01 06:43:59 -05:00
adamdottv
769dff00ba fix: don't mark as init 2025-05-01 06:43:30 -05:00
adamdottv
d1be7a984e fix: logo 2025-05-01 06:36:30 -05:00
adamdottv
3e30607a6d fix: minor prompt fix 2025-05-01 06:35:47 -05:00
adamdottv
d08e58279d feat: lsp discovery 2025-05-01 06:26:20 -05:00
adamdottv
7bc542abff fix: better diagnostics visual 2025-04-30 15:23:19 -05:00
adamdottv
ed50c36789 fix: lsp issues with tmp and deleted files 2025-04-30 12:20:51 -05:00
adamdottv
98cf65b425 fix: more intuitive keybinds 2025-04-30 11:34:21 -05:00
adamdottv
5406083850 fix: minor icon tweak 2025-04-30 11:33:30 -05:00
adamdottv
91ae9b33d3 feat: custom themes 2025-04-30 11:05:59 -05:00
adamdottv
a42175c067 fix: info and hint icons 2025-04-30 07:47:14 -05:00
adamdottv
8497145db2 fix: status sizing 2025-04-30 07:47:10 -05:00
adamdottv
89544fad61 feat: tron theme 2025-04-30 07:46:35 -05:00
adamdottv
1151accf4b feat: tokyonight theme 2025-04-30 07:46:35 -05:00
adamdottv
1ae3f1830b feat: dracula theme 2025-04-30 07:46:35 -05:00
adamdottv
1e958b62ad feat: opencode theme (default) 2025-04-30 07:46:35 -05:00
adamdottv
fdf5367f4f feat: monokai pro theme 2025-04-30 07:46:34 -05:00
adamdottv
0e8842a007 feat: onedark theme 2025-04-30 07:46:34 -05:00
adamdottv
060994f393 feat: flexoki theme 2025-04-30 07:46:34 -05:00
adamdottv
61b605e724 feat: themes 2025-04-30 07:46:34 -05:00
Adam
61d9dc9511 fix: allow text selection (#127) 2025-04-30 12:52:30 +02:00
Hunter Casten
76275e533e fix(openrouter): set api key from env (#129) 2025-04-30 12:50:57 +02:00
Isaac Scarrott
98e2910e82 feat: Add support for OpenRouter (#92)
* Add support for OpenRouter as a new model provider

- Introduced `ProviderOpenRouter` in the `models` package.
- Added OpenRouter-specific models, including `GPT41`, `GPT41Mini`, `GPT4o`, and others, with their configurations and costs.
- Updated `generateSchema` to include OpenRouter as a provider.
- Added OpenRouter-specific environment variable handling (`OPENROUTER_API_KEY`) in `config.go`.
- Implemented default model settings for OpenRouter agents in `setDefaultModelForAgent`.
- Updated `getProviderAPIKey` to retrieve the OpenRouter API key.
- Extended `SupportedModels` to include OpenRouter models.
- Added OpenRouter client initialization in the `provider` package.
- Modified `processGeneration` to handle `FinishReasonUnknown` in addition to `FinishReasonToolUse`.

* [feature/openrouter-provider] Add new models and provider to schema

- Added "deepseek-chat-free" and "deepseek-r1-free" to the list of supported models in `opencode-schema.json`.

* [feature/openrouter-provider] Add OpenRouter provider support and integrate new models

- Updated README.md to include OpenRouter as a supported provider and its configuration details.
- Added `OPENROUTER_API_KEY` to environment variable configuration.
- Introduced OpenRouter-specific models in `internal/llm/models/openrouter.go` with mappings to existing cost and token configurations.
- Updated `internal/config/config.go` to set default models for OpenRouter agents.
- Extended `opencode-schema.json` to include OpenRouter models in the schema definitions.
- Refactored model IDs and names to align with OpenRouter naming conventions.

* [feature/openrouter-provider] Refactor finish reason handling and tool call logic in agent and OpenAI provider

- Simplified finish reason check in `agent.go` by removing redundant variable assignment.
- Updated `openai.go` to override the finish reason to `FinishReasonToolUse` when tool calls are present.
- Ensured consistent finish reason handling in both `send` and `stream` methods of the OpenAI provider.

* **[feature/openrouter-provider] Add support for custom headers in OpenAI client configuration**

- Introduced a new `extraHeaders` field in the `openaiOptions` struct to allow specifying additional HTTP headers.
- Added logic in `newOpenAIClient` to apply `extraHeaders` to the OpenAI client configuration.
- Implemented a new option function `WithOpenAIExtraHeaders` to set custom headers in `openaiOptions`.
- Updated the OpenRouter provider configuration in `NewProvider` to include default headers (`HTTP-Referer` and `X-Title`) for OpenRouter API requests.

* Update OpenRouter model config and remove unsupported models

* [feature/openrouter-provider] Update OpenRouter models and default configurations

- Added new OpenRouter models: `claude-3.5-sonnet`, `claude-3-haiku`, `claude-3.7-sonnet`, `claude-3.5-haiku`, and `claude-3-opus` in `openrouter.go`.
- Updated default agent models in `config.go`:
  - `agents.coder.model` now uses `claude-3.7-sonnet`.
  - `agents.task.model` now uses `claude-3.7-sonnet`.
  - `agents.title.model` now uses `claude-3.5-haiku`.
- Updated `opencode-schema.json` to include the new models in the allowed list for schema validation.
- Adjusted logic in `setDefaultModelForAgent` to reflect the new default models.

* [feature/openrouter-provider] Remove unused ProviderEvent emission in stream function

The changes remove the emission of a `ProviderEvent` with type `EventContentStop` in the `stream` function of the `openaiClient` implementation. This event was sent upon successful stream completion but is no longer used.
2025-04-29 13:56:49 +02:00
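The OpenRouter change above hinges on two small pieces: an `extraHeaders` option on the OpenAI-compatible client (so OpenRouter's `HTTP-Referer` and `X-Title` headers can be sent) and overriding the finish reason to tool-use when tool calls are present. Below is a minimal sketch of the header plumbing, assuming a functional-options style; the names `openaiOptions`, `extraHeaders`, and `WithOpenAIExtraHeaders` come from the commit message, while the HTTP transport wrapper is purely illustrative.

```go
package provider

import "net/http"

type openaiOptions struct {
	extraHeaders map[string]string
}

type OpenAIOption func(*openaiOptions)

// WithOpenAIExtraHeaders sets additional HTTP headers to send with every
// request, e.g. OpenRouter's HTTP-Referer and X-Title.
func WithOpenAIExtraHeaders(h map[string]string) OpenAIOption {
	return func(o *openaiOptions) { o.extraHeaders = h }
}

// headerTransport is one way to attach the extra headers: wrap the HTTP
// transport handed to the OpenAI-compatible client.
type headerTransport struct {
	base    http.RoundTripper
	headers map[string]string
}

func (t headerTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	req = req.Clone(req.Context())
	for k, v := range t.headers {
		req.Header.Set(k, v)
	}
	return t.base.RoundTrip(req)
}

func newHTTPClient(opts ...OpenAIOption) *http.Client {
	cfg := openaiOptions{}
	for _, o := range opts {
		o(&cfg)
	}
	return &http.Client{
		Transport: headerTransport{base: http.DefaultTransport, headers: cfg.extraHeaders},
	}
}
```

The finish-reason part is simpler: when a response carries tool calls, the provider reports `FinishReasonToolUse` regardless of what the upstream API returned, so the agent loop treats OpenRouter responses the same as native OpenAI ones.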
Kujtim Hoxha
2941137416 fix diagnostics for deleted files 2025-04-28 19:37:42 +02:00
Aiden Cline
b3c0285db3 feat: model selection for given provider (#57)
* feat: model selection for given provider

* tweak: adjust cfg validation func, remove duplicated logic, consolidate agent updating into agent.go

* tweak: make the model dialog scrollable, adjust padding slightly for modal

* feat: add provider selection, add hints, simplify some logic, add horizontal scrolling support, additional scroll indicators

* remove nav help

* update docs

* increase number of visible models, make horizontal scroll "wrap"

* add provider popularity rankings
2025-04-28 19:25:06 +02:00
YJG
805aeff83c feat: add azure openai models (#74) 2025-04-28 15:42:57 +02:00
Kujtim Hoxha
bce2ec5c10 fix duplicate context 2025-04-27 20:43:27 +02:00
Kujtim Hoxha
292e9d90ca remove unnecessary var 2025-04-27 20:34:20 +02:00
Kujtim Hoxha
2b4441a0d1 fix context 2025-04-27 20:31:53 +02:00
Garrett Ladley
8f3a94df92 feat: configure context paths (#86) 2025-04-27 20:11:09 +02:00
Kujtim Hoxha
4415220555 fix minor issue 2025-04-27 19:24:46 +02:00
Kujtim Hoxha
a3a04d8a54 fix gemini provider 2025-04-27 19:12:02 +02:00
Lukáš Loukota
792e2b164b fix: gemini tool calling 2025-04-27 19:12:02 +02:00
Kujtim Hoxha
5859dcdc00 small glob fixes 2025-04-27 18:01:31 +02:00
isaac-scarrott
3c2b0f4dd0 [feature/ripgrep-glob] Add ripgrep-based file globbing to improve performance
- Introduced `globWithRipgrep` function to perform file globbing using the `rg` (ripgrep) command.
- Updated `globFiles` to prioritize ripgrep-based globbing and fall back to doublestar-based globbing if ripgrep fails.
- Added logic to handle ripgrep command execution, output parsing, and filtering of hidden files.
- Ensured results are sorted by path length and limited to the specified maximum number of matches.
- Modified imports to include `os/exec` and `bytes` for ripgrep integration.
2025-04-27 18:01:31 +02:00
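A minimal sketch of the globbing fallback described in the commit above: ask ripgrep for the file list first and drop back to doublestar if `rg` is missing or errors. The function names `globWithRipgrep` and `globFiles` come from the commit; the exact `rg` flags, hidden-file filter, and limit handling are assumptions.

```go
package fileutil

import (
	"os/exec"
	"path/filepath"
	"sort"
	"strings"

	"github.com/bmatcuk/doublestar/v4"
)

// globWithRipgrep lists files via `rg --files --glob <pattern>` rooted at dir.
func globWithRipgrep(pattern, dir string, limit int) ([]string, error) {
	out, err := exec.Command("rg", "--files", "--glob", pattern, dir).Output()
	if err != nil {
		return nil, err // caller falls back to doublestar
	}
	var matches []string
	for _, line := range strings.Split(strings.TrimSpace(string(out)), "\n") {
		if line == "" || strings.HasPrefix(filepath.Base(line), ".") {
			continue // skip empty lines and hidden files (basename check only)
		}
		matches = append(matches, line)
	}
	// Sort by path length and cap the result, as the commit describes.
	sort.Slice(matches, func(i, j int) bool { return len(matches[i]) < len(matches[j]) })
	if limit > 0 && len(matches) > limit {
		matches = matches[:limit]
	}
	return matches, nil
}

// globFiles prefers ripgrep and falls back to pure-Go doublestar matching.
func globFiles(pattern, dir string, limit int) ([]string, error) {
	if matches, err := globWithRipgrep(pattern, dir, limit); err == nil {
		return matches, nil
	}
	return doublestar.FilepathGlob(filepath.Join(dir, pattern))
}
```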
Kujtim Hoxha
9738886620 fix provider config 2025-04-27 14:44:40 +02:00
Sam Ottenhoff
f3dccad54b Handle new Cursor rules format
1. Check if a path ends with a slash (/)
2. If it does, treat it as a directory and read all files within it
3. For directories like .cursor/rules/, it will scan all files and include their content in the prompt
4. Each file from a directory will be prefixed with "# From filename" for clarity
2025-04-27 14:17:06 +02:00
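A minimal sketch of the directory handling described above: a context path ending in `/` is expanded to every file inside it, each prefixed with a `# From <filename>` marker so the prompt records where the text came from. Function and package names are illustrative, not the actual opencode code.

```go
package prompt

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// loadContextPath returns the prompt text for a single configured context
// path. A trailing slash marks a directory such as .cursor/rules/.
func loadContextPath(path string) (string, error) {
	if !strings.HasSuffix(path, "/") {
		data, err := os.ReadFile(path)
		if err != nil {
			return "", err
		}
		return string(data), nil
	}
	entries, err := os.ReadDir(path)
	if err != nil {
		return "", err
	}
	var b strings.Builder
	for _, e := range entries {
		if e.IsDir() {
			continue // only plain files inside the rules directory
		}
		data, err := os.ReadFile(filepath.Join(path, e.Name()))
		if err != nil {
			continue
		}
		// Prefix each file with "# From <filename>" for clarity, per the commit.
		fmt.Fprintf(&b, "# From %s\n%s\n", e.Name(), data)
	}
	return b.String(), nil
}
```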
Kujtim Hoxha
b3a8dbd0d9 fix retry warning 2025-04-27 14:08:09 +02:00
Garrett Mitchell Ladley
d93694a979 feat: simpler diff implementation 2025-04-27 13:56:57 +02:00
Fuad
8a4d4152ce use workingDir if shellInstance is nil otherwise use cwd if shellInstance is not nil 2025-04-27 13:46:59 +02:00
Fuad
f12386e558 use provided working dir 2025-04-27 13:46:59 +02:00
Fuad
94aeb7b7fe Fix nil pointer dereference in GetPersistentShell
Added nil check in GetPersistentShell before accessing shellInstance.isAlive to prevent panic when newPersistentShell returns nil due to shell startup errors. This resolves the "invalid memory address or nil pointer dereference" error that was occurring in the shell tool.
2025-04-27 13:46:59 +02:00
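The three shell fixes above amount to a guard in `GetPersistentShell`: never touch `shellInstance.isAlive` when the instance is nil, and choose between the provided working directory and the existing shell's cwd depending on whether an instance already exists. A rough sketch under those assumptions; only the names `GetPersistentShell`, `newPersistentShell`, `shellInstance`, and `isAlive` come from the commits.

```go
package shell

type persistentShell struct {
	cwd     string
	isAlive bool
}

var shellInstance *persistentShell

// newPersistentShell stands in for the real constructor, which can return nil
// on shell startup errors (the startup logic is elided here).
func newPersistentShell(dir string) *persistentShell {
	return &persistentShell{cwd: dir, isAlive: true}
}

// GetPersistentShell checks for nil before reading isAlive, and picks the
// directory as described: workingDir when there is no instance yet, the
// existing cwd when a dead instance is being replaced.
func GetPersistentShell(workingDir string) *persistentShell {
	if shellInstance == nil {
		shellInstance = newPersistentShell(workingDir)
	} else if !shellInstance.isAlive {
		shellInstance = newPersistentShell(shellInstance.cwd)
	}
	return shellInstance
}
```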
Kujtim Hoxha
a35466cdb3 fix acc error 2025-04-25 21:58:14 +02:00
Kujtim Hoxha
170c7ad67a small fixes 2025-04-25 14:42:47 +02:00
Hunter Casten
7a62ab7675 feat(groq): add support for Groq using the OpenAI provider 2025-04-25 11:11:52 +02:00
Kujtim Hoxha
1586d757dc remove tool timeout 2025-04-24 22:35:17 +02:00
Dax Raad
d043526200 add more installation options 2025-04-24 16:34:57 -04:00
Kujtim Hoxha
aaf0bc14ba try fix 2025-04-24 22:27:51 +02:00
Kujtim Hoxha
f2d9bb7ee3 try fix 2025-04-24 22:27:51 +02:00
Kujtim Hoxha
de41703e20 change db driver 2025-04-24 22:27:51 +02:00
Kujtim Hoxha
2c24bfb7b3 fix kitty issues 2025-04-24 19:57:04 +02:00
Dax Raad
47a37b7dd6 back to disabling cgo 2025-04-24 12:46:11 -04:00
Dax Raad
bdbf31f0b9 force CGO 2025-04-24 12:34:55 -04:00
Dax Raad
4e6560efb9 turn on cgo 2025-04-24 12:30:33 -04:00
Dax Raad
f2f6efdd35 fix version ldflags 2025-04-24 12:26:42 -04:00
Kujtim Hoxha
b106787a50 change package name 2025-04-24 18:26:16 +02:00
Kujtim Hoxha
e1b2ce483f change additions/removals 2025-04-24 16:40:36 +02:00
Kujtim Hoxha
c42d94c465 small fixes 2025-04-24 16:40:36 +02:00
Kujtim Hoxha
f879a94c95 update maintainer email 2025-04-24 16:40:36 +02:00
Kujtim Hoxha
6d05d5a7c3 small changes 2025-04-24 16:40:36 +02:00
Kujtim Hoxha
2c5003e3fc remove edit/normal mode 2025-04-24 16:40:36 +02:00
537 changed files with 69203 additions and 35316 deletions

9
.editorconfig Normal file

@@ -0,0 +1,9 @@
root = true
[*]
charset = utf-8
insert_final_newline = true
end_of_line = lf
indent_style = space
indent_size = 2
max_line_length = 80


@@ -1,37 +0,0 @@
name: build
on:
workflow_dispatch:
push:
branches:
- main
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
packages: write
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- run: git fetch --force --tags
- uses: actions/setup-go@v5
with:
go-version: ">=1.23.2"
cache: true
cache-dependency-path: go.sum
- run: go mod download
- uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser
version: latest
args: build --snapshot --clean

26
.github/workflows/deploy.yml vendored Normal file

@@ -0,0 +1,26 @@
name: deploy
on:
push:
branches:
- dev
- production
workflow_dispatch:
concurrency: ${{ github.workflow }}-${{ github.ref }}
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: oven-sh/setup-bun@v1
with:
bun-version: 1.2.17
- run: bun install
- run: bun sst deploy --stage=${{ github.ref_name }}
env:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}

14
.github/workflows/notify-discord.yml vendored Normal file

@@ -0,0 +1,14 @@
name: discord
on:
release:
types: [published] # fires only when a release is published
jobs:
notify:
runs-on: ubuntu-latest
steps:
- name: Send nicely-formatted embed to Discord
uses: SethCohen/github-releases-to-discord@v1
with:
webhook_url: ${{ secrets.DISCORD_WEBHOOK }}

24
.github/workflows/opencode.yml vendored Normal file

@@ -0,0 +1,24 @@
name: opencode
on:
issue_comment:
types: [created]
jobs:
opencode:
if: startsWith(github.event.comment.body, 'hey opencode')
runs-on: ubuntu-latest
permissions:
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run opencode
uses: sst/opencode/sdks/github@github-v1
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
with:
model: anthropic/claude-sonnet-4-20250514


@@ -0,0 +1,30 @@
name: publish-github-action
on:
workflow_dispatch:
push:
tags:
- "github-v*.*.*"
- "!github-v1"
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- run: git fetch --force --tags
- name: Publish
run: |
git config --global user.email "opencode@sst.dev"
git config --global user.name "opencode"
./script/publish
working-directory: ./sdks/github

36
.github/workflows/publish-vscode.yml vendored Normal file

@@ -0,0 +1,36 @@
name: publish-vscode
on:
workflow_dispatch:
push:
tags:
- "vscode-v*.*.*"
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: oven-sh/setup-bun@v2
with:
bun-version: 1.2.17
- run: git fetch --force --tags
- run: bun install -g @vscode/vsce
- name: Publish
run: |
bun install
./script/publish
working-directory: ./sdks/vscode
env:
VSCE_PAT: ${{ secrets.VSCE_PAT }}
OPENVSX_TOKEN: ${{ secrets.OPENVSX_TOKEN }}

65
.github/workflows/publish.yml vendored Normal file

@@ -0,0 +1,65 @@
name: publish
on:
workflow_dispatch:
push:
branches:
- dev
tags:
- "*"
- "!vscode-v*"
- "!github-v*"
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
packages: write
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- run: git fetch --force --tags
- uses: actions/setup-go@v5
with:
go-version: ">=1.24.0"
cache: true
cache-dependency-path: go.sum
- uses: oven-sh/setup-bun@v2
with:
bun-version: 1.2.17
- name: Install makepkg
run: |
sudo apt-get update
sudo apt-get install -y pacman-package-manager
- name: Setup SSH for AUR
run: |
mkdir -p ~/.ssh
echo "${{ secrets.AUR_KEY }}" > ~/.ssh/id_rsa
chmod 600 ~/.ssh/id_rsa
ssh-keyscan -H aur.archlinux.org >> ~/.ssh/known_hosts
git config --global user.email "opencode@sst.dev"
git config --global user.name "opencode"
- name: Publish
run: |
bun install
if [ "${{ startsWith(github.ref, 'refs/tags/') }}" = "true" ]; then
./script/publish.ts
else
./script/publish.ts --snapshot
fi
working-directory: ./packages/opencode
env:
GITHUB_TOKEN: ${{ secrets.SST_GITHUB_TOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
NPM_CONFIG_TOKEN: ${{ secrets.NPM_TOKEN }}


@@ -1,40 +0,0 @@
name: release
on:
workflow_dispatch:
push:
tags:
- "*"
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
packages: write
jobs:
goreleaser:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- run: git fetch --force --tags
- uses: actions/setup-go@v5
with:
go-version: ">=1.23.2"
cache: true
cache-dependency-path: go.sum
- run: go mod download
- uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.HOMEBREW_GITHUB_TOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}

32
.github/workflows/stats.yml vendored Normal file

@@ -0,0 +1,32 @@
name: stats
on:
schedule:
- cron: "0 12 * * *" # Run daily at 12:00 UTC
workflow_dispatch: # Allow manual trigger
jobs:
stats:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
bun-version: latest
- name: Run stats script
run: bun scripts/stats.ts
- name: Commit stats
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add STATS.md
git diff --staged --quiet || git commit -m "ignore: update download stats $(date -I)"
git push

49
.gitignore vendored

@@ -1,45 +1,8 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/
# Go workspace file
go.work
# IDE specific files
.idea/
.vscode/
*.swp
*.swo
# OS specific files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
*.log
# Binary output directory
/bin/
/dist/
# Local environment variables
node_modules
.opencode
.sst
.env
.env.local
.opencode/
.idea
.vscode
openapi.json


@@ -1,8 +0,0 @@
{
"$schema": "./opencode-schema.json",
"lsp": {
"gopls": {
"command": "gopls"
}
}
}

15
AGENTS.md Normal file

@@ -0,0 +1,15 @@
# TUI Agent Guidelines
## Style
- prefer single word variable/function names
- avoid try catch where possible - prefer to let exceptions bubble up
- avoid else statements where possible
- do not make useless helper functions - inline functionality unless the
function is reusable or composable
- prefer Bun apis
## Workflow
- you can regenerate the golang sdk by calling ./scripts/stainless.ts
- we use bun for everything


@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2025 Kujtim Hoxha
Copyright (c) 2025 opencode
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

411
README.md

@@ -1,377 +1,110 @@
# OpenCode
<p align="center">
<a href="https://opencode.ai">
<picture>
<source srcset="packages/web/src/assets/logo-ornate-dark.svg" media="(prefers-color-scheme: dark)">
<source srcset="packages/web/src/assets/logo-ornate-light.svg" media="(prefers-color-scheme: light)">
<img src="packages/web/src/assets/logo-ornate-light.svg" alt="opencode logo">
</picture>
</a>
</p>
<p align="center">AI coding agent, built for the terminal.</p>
<p align="center">
<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
<a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
<a href="https://github.com/sst/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/sst/opencode/publish.yml?style=flat-square&branch=dev" /></a>
</p>
> **⚠️ Early Development Notice:** This project is in early development and is not yet ready for production use. Features may change, break, or be incomplete. Use at your own risk.
[![opencode Terminal UI](packages/web/src/assets/lander/screenshot.png)](https://opencode.ai)
A powerful terminal-based AI assistant for developers, providing intelligent coding assistance directly in your terminal.
---
## Overview
OpenCode is a Go-based CLI application that brings AI assistance to your terminal. It provides a TUI (Terminal User Interface) for interacting with various AI models to help with coding tasks, debugging, and more.
## Features
- **Interactive TUI**: Built with [Bubble Tea](https://github.com/charmbracelet/bubbletea) for a smooth terminal experience
- **Multiple AI Providers**: Support for OpenAI, Anthropic Claude, Google Gemini, AWS Bedrock, and Groq
- **Session Management**: Save and manage multiple conversation sessions
- **Tool Integration**: AI can execute commands, search files, and modify code
- **Vim-like Editor**: Integrated editor with text input capabilities
- **Persistent Storage**: SQLite database for storing conversations and sessions
- **LSP Integration**: Language Server Protocol support for code intelligence
- **File Change Tracking**: Track and visualize file changes during sessions
- **External Editor Support**: Open your preferred editor for composing messages
## Installation
### Installation
```bash
# Coming soon
go install github.com/kujtimiihoxha/opencode@latest
# YOLO
curl -fsSL https://opencode.ai/install | bash
# Package managers
npm i -g opencode-ai@latest # or bun/pnpm/yarn
brew install sst/tap/opencode # macOS
paru -S opencode-bin # Arch Linux
```
## Configuration
> [!TIP]
> Remove versions older than 0.1.x before installing.
OpenCode looks for configuration in the following locations:
#### Installation Directory
- `$HOME/.opencode.json`
- `$XDG_CONFIG_HOME/opencode/.opencode.json`
- `./.opencode.json` (local directory)
The install script respects the following priority order for the installation path:
### Environment Variables
You can configure OpenCode using environment variables:
| Environment Variable | Purpose |
| ----------------------- | ------------------------ |
| `ANTHROPIC_API_KEY` | For Claude models |
| `OPENAI_API_KEY` | For OpenAI models |
| `GEMINI_API_KEY` | For Google Gemini models |
| `GROQ_API_KEY` | For Groq models |
| `AWS_ACCESS_KEY_ID` | For AWS Bedrock (Claude) |
| `AWS_SECRET_ACCESS_KEY` | For AWS Bedrock (Claude) |
| `AWS_REGION` | For AWS Bedrock (Claude) |
### Configuration File Structure
```json
{
"data": {
"directory": ".opencode"
},
"providers": {
"openai": {
"apiKey": "your-api-key",
"disabled": false
},
"anthropic": {
"apiKey": "your-api-key",
"disabled": false
}
},
"agents": {
"coder": {
"model": "claude-3.7-sonnet",
"maxTokens": 5000
},
"task": {
"model": "claude-3.7-sonnet",
"maxTokens": 5000
},
"title": {
"model": "claude-3.7-sonnet",
"maxTokens": 80
}
},
"mcpServers": {
"example": {
"type": "stdio",
"command": "path/to/mcp-server",
"env": [],
"args": []
}
},
"lsp": {
"go": {
"disabled": false,
"command": "gopls"
}
},
"debug": false,
"debugLSP": false
}
```
## Supported AI Models
OpenCode supports a variety of AI models from different providers:
### OpenAI
- GPT-4.1 family (gpt-4.1, gpt-4.1-mini, gpt-4.1-nano)
- GPT-4.5 Preview
- GPT-4o family (gpt-4o, gpt-4o-mini)
- O1 family (o1, o1-pro, o1-mini)
- O3 family (o3, o3-mini)
- O4 Mini
### Anthropic
- Claude 3.5 Sonnet
- Claude 3.5 Haiku
- Claude 3.7 Sonnet
- Claude 3 Haiku
- Claude 3 Opus
### Google
- Gemini 2.5
- Gemini 2.5 Flash
- Gemini 2.0 Flash
- Gemini 2.0 Flash Lite
### AWS Bedrock
- Claude 3.7 Sonnet
## Usage
1. `$OPENCODE_INSTALL_DIR` - Custom installation directory
2. `$XDG_BIN_DIR` - XDG Base Directory Specification compliant path
3. `$HOME/bin` - Standard user binary directory (if exists or can be created)
4. `$HOME/.opencode/bin` - Default fallback
```bash
# Start OpenCode
opencode
# Start with debug logging
opencode -d
# Start with a specific working directory
opencode -c /path/to/project
# Examples
OPENCODE_INSTALL_DIR=/usr/local/bin curl -fsSL https://opencode.ai/install | bash
XDG_BIN_DIR=$HOME/.local/bin curl -fsSL https://opencode.ai/install | bash
```
## Command-line Flags
### Documentation
| Flag | Short | Description |
| --------- | ----- | ----------------------------- |
| `--help` | `-h` | Display help information |
| `--debug` | `-d` | Enable debug mode |
| `--cwd` | `-c` | Set current working directory |
For more info on how to configure opencode [**head over to our docs**](https://opencode.ai/docs).
## Keyboard Shortcuts
### Contributing
### Global Shortcuts
opencode is an opinionated tool so any fundamental feature needs to go through a
design process with the core team.
| Shortcut | Action |
| -------- | ------------------------------------------------------- |
| `Ctrl+C` | Quit application |
| `Ctrl+?` | Toggle help dialog |
| `?` | Toggle help dialog (when not in editing mode) |
| `Ctrl+L` | View logs |
| `Ctrl+A` | Switch session |
| `Ctrl+K` | Command dialog |
| `Esc` | Close current overlay/dialog or return to previous mode |
> [!IMPORTANT]
> We do not accept PRs for core features.
### Chat Page Shortcuts
However we still merge a ton of PRs - you can contribute:
| Shortcut | Action |
| -------- | --------------------------------------- |
| `Ctrl+N` | Create new session |
| `Ctrl+X` | Cancel current operation/generation |
| `i` | Focus editor (when not in writing mode) |
| `Esc` | Exit writing mode and focus messages |
- Bug fixes
- Improvements to LLM performance
- Support for new providers
- Fixes for env specific quirks
- Missing standard behavior
- Documentation
### Editor Shortcuts
Take a look at the git history to see what kind of PRs we end up merging.
| Shortcut | Action |
| ------------------- | ----------------------------------------- |
| `Ctrl+S` | Send message (when editor is focused) |
| `Enter` or `Ctrl+S` | Send message (when editor is not focused) |
| `Ctrl+E` | Open external editor |
| `Esc` | Blur editor and focus messages |
> [!NOTE]
> If you do not follow the above guidelines we might close your PR.
### Session Dialog Shortcuts
To run opencode locally you need:
| Shortcut | Action |
| ---------- | ---------------- |
| `↑` or `k` | Previous session |
| `↓` or `j` | Next session |
| `Enter` | Select session |
| `Esc` | Close dialog |
- Bun
- Golang 1.24.x
### Permission Dialog Shortcuts
| Shortcut | Action |
| ----------------------- | ---------------------------- |
| `←` or `left` | Switch options left |
| `→` or `right` or `tab` | Switch options right |
| `Enter` or `space` | Confirm selection |
| `a` | Allow permission |
| `A` | Allow permission for session |
| `d` | Deny permission |
### Logs Page Shortcuts
| Shortcut | Action |
| ------------------ | ------------------- |
| `Backspace` or `q` | Return to chat page |
## AI Assistant Tools
OpenCode's AI assistant has access to various tools to help with coding tasks:
### File and Code Tools
| Tool | Description | Parameters |
| ------------- | --------------------------- | ---------------------------------------------------------------------------------------- |
| `glob` | Find files by pattern | `pattern` (required), `path` (optional) |
| `grep` | Search file contents | `pattern` (required), `path` (optional), `include` (optional), `literal_text` (optional) |
| `ls` | List directory contents | `path` (optional), `ignore` (optional array of patterns) |
| `view` | View file contents | `file_path` (required), `offset` (optional), `limit` (optional) |
| `write` | Write to files | `file_path` (required), `content` (required) |
| `edit` | Edit files | Various parameters for file editing |
| `patch` | Apply patches to files | `file_path` (required), `diff` (required) |
| `diagnostics` | Get diagnostics information | `file_path` (optional) |
### Other Tools
| Tool | Description | Parameters |
| ------------- | -------------------------------------- | ----------------------------------------------------------------------------------------- |
| `bash` | Execute shell commands | `command` (required), `timeout` (optional) |
| `fetch` | Fetch data from URLs | `url` (required), `format` (required), `timeout` (optional) |
| `sourcegraph` | Search code across public repositories | `query` (required), `count` (optional), `context_window` (optional), `timeout` (optional) |
| `agent` | Run sub-tasks with the AI agent | `prompt` (required) |
## Architecture
OpenCode is built with a modular architecture:
- **cmd**: Command-line interface using Cobra
- **internal/app**: Core application services
- **internal/config**: Configuration management
- **internal/db**: Database operations and migrations
- **internal/llm**: LLM providers and tools integration
- **internal/tui**: Terminal UI components and layouts
- **internal/logging**: Logging infrastructure
- **internal/message**: Message handling
- **internal/session**: Session management
- **internal/lsp**: Language Server Protocol integration
## MCP (Model Context Protocol)
OpenCode implements the Model Context Protocol (MCP) to extend its capabilities through external tools. MCP provides a standardized way for the AI assistant to interact with external services and tools.
### MCP Features
- **External Tool Integration**: Connect to external tools and services via a standardized protocol
- **Tool Discovery**: Automatically discover available tools from MCP servers
- **Multiple Connection Types**:
- **Stdio**: Communicate with tools via standard input/output
- **SSE**: Communicate with tools via Server-Sent Events
- **Security**: Permission system for controlling access to MCP tools
### Configuring MCP Servers
MCP servers are defined in the configuration file under the `mcpServers` section:
```json
{
"mcpServers": {
"example": {
"type": "stdio",
"command": "path/to/mcp-server",
"env": [],
"args": []
},
"web-example": {
"type": "sse",
"url": "https://example.com/mcp",
"headers": {
"Authorization": "Bearer token"
}
}
}
}
```
### MCP Tool Usage
Once configured, MCP tools are automatically available to the AI assistant alongside built-in tools. They follow the same permission model as other tools, requiring user approval before execution.
## LSP (Language Server Protocol)
OpenCode integrates with Language Server Protocol to provide code intelligence features across multiple programming languages.
### LSP Features
- **Multi-language Support**: Connect to language servers for different programming languages
- **Diagnostics**: Receive error checking and linting information
- **File Watching**: Automatically notify language servers of file changes
### Configuring LSP
Language servers are configured in the configuration file under the `lsp` section:
```json
{
"lsp": {
"go": {
"disabled": false,
"command": "gopls"
},
"typescript": {
"disabled": false,
"command": "typescript-language-server",
"args": ["--stdio"]
}
}
}
```
### LSP Integration with AI
The AI assistant can access LSP features through the `diagnostics` tool, allowing it to:
- Check for errors in your code
- Suggest fixes based on diagnostics
While the LSP client implementation supports the full LSP protocol (including completions, hover, definition, etc.), currently only diagnostics are exposed to the AI assistant.
## Development
### Prerequisites
- Go 1.24.0 or higher
### Building from Source
And run.
```bash
# Clone the repository
git clone https://github.com/kujtimiihoxha/opencode.git
cd opencode
# Build
go build -o opencode
# Run
./opencode
$ bun install
$ bun run packages/opencode/src/index.ts
```
## Acknowledgments
#### Development Notes
OpenCode gratefully acknowledges the contributions and support from these key individuals:
**API Client**: After making changes to the TypeScript API endpoints in `packages/opencode/src/server/server.ts`, you will need the opencode team to generate a new stainless sdk for the clients.
- [@isaacphi](https://github.com/isaacphi) - For the [mcp-language-server](https://github.com/isaacphi/mcp-language-server) project which provided the foundation for our LSP client implementation
- [@adamdottv](https://github.com/adamdottv) - For the design direction and UI/UX architecture
### FAQ
Special thanks to the broader open source community whose tools and libraries have made this project possible.
#### How is this different than Claude Code?
## License
It's very similar to Claude Code in terms of capability. Here are the key differences:
OpenCode is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
- 100% open source
- Not coupled to any provider. Although Anthropic is recommended, opencode can be used with OpenAI, Google or even local models. As models evolve the gaps between them will close and pricing will drop so being provider agnostic is important.
- A focus on TUI. opencode is built by neovim users and the creators of [terminal.shop](https://terminal.shop); we are going to push the limits of what's possible in the terminal.
- A client/server architecture. This for example can allow opencode to run on your computer, while you can drive it remotely from a mobile app. Meaning that the TUI frontend is just one of the possible clients.
## Contributing
#### What's the other repo?
Contributions are welcome! Here's how you can contribute:
The other confusingly named repo has no relation to this one. You can [read the story behind it here](https://x.com/thdxr/status/1933561254481666466).
1. Fork the repository
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
3. Commit your changes (`git commit -m 'Add some amazing feature'`)
4. Push to the branch (`git push origin feature/amazing-feature`)
5. Open a Pull Request
---
Please make sure to update tests as appropriate and follow the existing code style.
**Join our community** [Discord](https://discord.gg/opencode) | [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)

25
STATS.md Normal file

@@ -0,0 +1,25 @@
# Download Stats
| Date | GitHub Downloads | npm Downloads | Total |
| ---------- | ---------------- | ---------------- | ----------------- |
| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
| 2025-07-10 | 43,796 (+5,744) | 71,402 (+6,934) | 115,198 (+12,678) |
| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |
| 2025-07-12 | 49,302 (+2,320) | 82,177 (+4,715) | 131,479 (+7,035) |
| 2025-07-13 | 50,803 (+1,501) | 86,394 (+4,217) | 137,197 (+5,718) |
| 2025-07-14 | 53,283 (+2,480) | 87,860 (+1,466) | 141,143 (+3,946) |
| 2025-07-15 | 57,590 (+4,307) | 91,036 (+3,176) | 148,626 (+7,483) |
| 2025-07-16 | 62,313 (+4,723) | 95,258 (+4,222) | 157,571 (+8,945) |
| 2025-07-17 | 66,684 (+4,371) | 100,048 (+4,790) | 166,732 (+9,161) |
| 2025-07-18 | 70,379 (+3,695) | 102,587 (+2,539) | 172,966 (+6,234) |
| 2025-07-18 | 70,380 (+1) | 102,587 (+0) | 172,967 (+1) |
| 2025-07-19 | 73,497 (+3,117) | 105,904 (+3,317) | 179,401 (+6,434) |
| 2025-07-20 | 76,453 (+2,956) | 109,044 (+3,140) | 185,497 (+6,096) |

1852
bun.lock Normal file

File diff suppressed because it is too large

2
bunfig.toml Normal file

@@ -0,0 +1,2 @@
[install]
exact = true


@@ -1,252 +0,0 @@
package cmd
import (
"context"
"fmt"
"os"
"sync"
"time"
tea "github.com/charmbracelet/bubbletea"
"github.com/kujtimiihoxha/opencode/internal/app"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/db"
"github.com/kujtimiihoxha/opencode/internal/llm/agent"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/pubsub"
"github.com/kujtimiihoxha/opencode/internal/tui"
zone "github.com/lrstanley/bubblezone"
"github.com/spf13/cobra"
)
var rootCmd = &cobra.Command{
Use: "OpenCode",
Short: "A terminal AI assistant for software development",
Long: `OpenCode is a powerful terminal-based AI assistant that helps with software development tasks.
It provides an interactive chat interface with AI capabilities, code analysis, and LSP integration
to assist developers in writing, debugging, and understanding code directly from the terminal.`,
RunE: func(cmd *cobra.Command, args []string) error {
// If the help flag is set, show the help message
if cmd.Flag("help").Changed {
cmd.Help()
return nil
}
// Load the config
debug, _ := cmd.Flags().GetBool("debug")
cwd, _ := cmd.Flags().GetString("cwd")
if cwd != "" {
err := os.Chdir(cwd)
if err != nil {
return fmt.Errorf("failed to change directory: %v", err)
}
}
if cwd == "" {
c, err := os.Getwd()
if err != nil {
return fmt.Errorf("failed to get current working directory: %v", err)
}
cwd = c
}
_, err := config.Load(cwd, debug)
if err != nil {
return err
}
// Connect DB, this will also run migrations
conn, err := db.Connect()
if err != nil {
return err
}
// Create main context for the application
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
app, err := app.New(ctx, conn)
if err != nil {
logging.Error("Failed to create app: %v", err)
return err
}
// Set up the TUI
zone.NewGlobal()
program := tea.NewProgram(
tui.New(app),
tea.WithAltScreen(),
tea.WithMouseCellMotion(),
)
// Initialize MCP tools in the background
initMCPTools(ctx, app)
// Setup the subscriptions, this will send services events to the TUI
ch, cancelSubs := setupSubscriptions(app, ctx)
// Create a context for the TUI message handler
tuiCtx, tuiCancel := context.WithCancel(ctx)
var tuiWg sync.WaitGroup
tuiWg.Add(1)
// Set up message handling for the TUI
go func() {
defer tuiWg.Done()
defer logging.RecoverPanic("TUI-message-handler", func() {
attemptTUIRecovery(program)
})
for {
select {
case <-tuiCtx.Done():
logging.Info("TUI message handler shutting down")
return
case msg, ok := <-ch:
if !ok {
logging.Info("TUI message channel closed")
return
}
program.Send(msg)
}
}
}()
// Cleanup function for when the program exits
cleanup := func() {
// Shutdown the app
app.Shutdown()
// Cancel subscriptions first
cancelSubs()
// Then cancel TUI message handler
tuiCancel()
// Wait for TUI message handler to finish
tuiWg.Wait()
logging.Info("All goroutines cleaned up")
}
// Run the TUI
result, err := program.Run()
cleanup()
if err != nil {
logging.Error("TUI error: %v", err)
return fmt.Errorf("TUI error: %v", err)
}
logging.Info("TUI exited with result: %v", result)
return nil
},
}
// attemptTUIRecovery tries to recover the TUI after a panic
func attemptTUIRecovery(program *tea.Program) {
logging.Info("Attempting to recover TUI after panic")
// We could try to restart the TUI or gracefully exit
// For now, we'll just quit the program to avoid further issues
program.Quit()
}
func initMCPTools(ctx context.Context, app *app.App) {
go func() {
defer logging.RecoverPanic("MCP-goroutine", nil)
// Create a context with timeout for the initial MCP tools fetch
ctxWithTimeout, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
// Set this up once with proper error handling
agent.GetMcpTools(ctxWithTimeout, app.Permissions)
logging.Info("MCP message handling goroutine exiting")
}()
}
func setupSubscriber[T any](
ctx context.Context,
wg *sync.WaitGroup,
name string,
subscriber func(context.Context) <-chan pubsub.Event[T],
outputCh chan<- tea.Msg,
) {
wg.Add(1)
go func() {
defer wg.Done()
defer logging.RecoverPanic(fmt.Sprintf("subscription-%s", name), nil)
subCh := subscriber(ctx)
for {
select {
case event, ok := <-subCh:
if !ok {
logging.Info("subscription channel closed", "name", name)
return
}
var msg tea.Msg = event
select {
case outputCh <- msg:
case <-time.After(2 * time.Second):
logging.Warn("message dropped due to slow consumer", "name", name)
case <-ctx.Done():
logging.Info("subscription cancelled", "name", name)
return
}
case <-ctx.Done():
logging.Info("subscription cancelled", "name", name)
return
}
}
}()
}
func setupSubscriptions(app *app.App, parentCtx context.Context) (chan tea.Msg, func()) {
ch := make(chan tea.Msg, 100)
wg := sync.WaitGroup{}
ctx, cancel := context.WithCancel(parentCtx) // Inherit from parent context
setupSubscriber(ctx, &wg, "logging", logging.Subscribe, ch)
setupSubscriber(ctx, &wg, "sessions", app.Sessions.Subscribe, ch)
setupSubscriber(ctx, &wg, "messages", app.Messages.Subscribe, ch)
setupSubscriber(ctx, &wg, "permissions", app.Permissions.Subscribe, ch)
cleanupFunc := func() {
logging.Info("Cancelling all subscriptions")
cancel() // Signal all goroutines to stop
waitCh := make(chan struct{})
go func() {
defer logging.RecoverPanic("subscription-cleanup", nil)
wg.Wait()
close(waitCh)
}()
select {
case <-waitCh:
logging.Info("All subscription goroutines completed successfully")
close(ch) // Only close after all writers are confirmed done
case <-time.After(5 * time.Second):
logging.Warn("Timed out waiting for some subscription goroutines to complete")
close(ch)
}
}
return ch, cleanupFunc
}
func Execute() {
err := rootCmd.Execute()
if err != nil {
os.Exit(1)
}
}
func init() {
rootCmd.Flags().BoolP("help", "h", false, "Help")
rootCmd.Flags().BoolP("debug", "d", false, "Debug")
rootCmd.Flags().StringP("cwd", "c", "", "Current working directory")
}


@@ -1,64 +0,0 @@
# OpenCode Configuration Schema Generator
This tool generates a JSON Schema for the OpenCode configuration file. The schema can be used to validate configuration files and provide autocompletion in editors that support JSON Schema.
## Usage
```bash
go run cmd/schema/main.go > opencode-schema.json
```
This will generate a JSON Schema file that can be used to validate configuration files.
## Schema Features
The generated schema includes:
- All configuration options with descriptions
- Default values where applicable
- Validation for enum values (e.g., model IDs, provider types)
- Required fields
- Type checking
## Using the Schema
You can use the generated schema in several ways:
1. **Editor Integration**: Many editors (VS Code, JetBrains IDEs, etc.) support JSON Schema for validation and autocompletion. You can configure your editor to use the generated schema for `.opencode.json` files.
2. **Validation Tools**: You can use tools like [jsonschema](https://github.com/Julian/jsonschema) to validate your configuration files against the schema.
3. **Documentation**: The schema serves as documentation for the configuration options.
## Example Configuration
Here's an example configuration that conforms to the schema:
```json
{
"data": {
"directory": ".opencode"
},
"debug": false,
"providers": {
"anthropic": {
"apiKey": "your-api-key"
}
},
"agents": {
"coder": {
"model": "claude-3.7-sonnet",
"maxTokens": 5000,
"reasoningEffort": "medium"
},
"task": {
"model": "claude-3.7-sonnet",
"maxTokens": 5000
},
"title": {
"model": "claude-3.7-sonnet",
"maxTokens": 80
}
}
}
```


@@ -1,262 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"os"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/models"
)
// JSONSchemaType represents a JSON Schema type
type JSONSchemaType struct {
Type string `json:"type,omitempty"`
Description string `json:"description,omitempty"`
Properties map[string]any `json:"properties,omitempty"`
Required []string `json:"required,omitempty"`
AdditionalProperties any `json:"additionalProperties,omitempty"`
Enum []any `json:"enum,omitempty"`
Items map[string]any `json:"items,omitempty"`
OneOf []map[string]any `json:"oneOf,omitempty"`
AnyOf []map[string]any `json:"anyOf,omitempty"`
Default any `json:"default,omitempty"`
}
func main() {
schema := generateSchema()
// Pretty print the schema
encoder := json.NewEncoder(os.Stdout)
encoder.SetIndent("", " ")
if err := encoder.Encode(schema); err != nil {
fmt.Fprintf(os.Stderr, "Error encoding schema: %v\n", err)
os.Exit(1)
}
}
func generateSchema() map[string]any {
schema := map[string]any{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "OpenCode Configuration",
"description": "Configuration schema for the OpenCode application",
"type": "object",
"properties": map[string]any{},
}
// Add Data configuration
schema["properties"].(map[string]any)["data"] = map[string]any{
"type": "object",
"description": "Storage configuration",
"properties": map[string]any{
"directory": map[string]any{
"type": "string",
"description": "Directory where application data is stored",
"default": ".opencode",
},
},
"required": []string{"directory"},
}
// Add working directory
schema["properties"].(map[string]any)["wd"] = map[string]any{
"type": "string",
"description": "Working directory for the application",
}
// Add debug flags
schema["properties"].(map[string]any)["debug"] = map[string]any{
"type": "boolean",
"description": "Enable debug mode",
"default": false,
}
schema["properties"].(map[string]any)["debugLSP"] = map[string]any{
"type": "boolean",
"description": "Enable LSP debug mode",
"default": false,
}
// Add MCP servers
schema["properties"].(map[string]any)["mcpServers"] = map[string]any{
"type": "object",
"description": "Model Control Protocol server configurations",
"additionalProperties": map[string]any{
"type": "object",
"description": "MCP server configuration",
"properties": map[string]any{
"command": map[string]any{
"type": "string",
"description": "Command to execute for the MCP server",
},
"env": map[string]any{
"type": "array",
"description": "Environment variables for the MCP server",
"items": map[string]any{
"type": "string",
},
},
"args": map[string]any{
"type": "array",
"description": "Command arguments for the MCP server",
"items": map[string]any{
"type": "string",
},
},
"type": map[string]any{
"type": "string",
"description": "Type of MCP server",
"enum": []string{"stdio", "sse"},
"default": "stdio",
},
"url": map[string]any{
"type": "string",
"description": "URL for SSE type MCP servers",
},
"headers": map[string]any{
"type": "object",
"description": "HTTP headers for SSE type MCP servers",
"additionalProperties": map[string]any{
"type": "string",
},
},
},
"required": []string{"command"},
},
}
// Add providers
providerSchema := map[string]any{
"type": "object",
"description": "LLM provider configurations",
"additionalProperties": map[string]any{
"type": "object",
"description": "Provider configuration",
"properties": map[string]any{
"apiKey": map[string]any{
"type": "string",
"description": "API key for the provider",
},
"disabled": map[string]any{
"type": "boolean",
"description": "Whether the provider is disabled",
"default": false,
},
},
},
}
// Add known providers
knownProviders := []string{
string(models.ProviderAnthropic),
string(models.ProviderOpenAI),
string(models.ProviderGemini),
string(models.ProviderGROQ),
string(models.ProviderBedrock),
}
providerSchema["additionalProperties"].(map[string]any)["properties"].(map[string]any)["provider"] = map[string]any{
"type": "string",
"description": "Provider type",
"enum": knownProviders,
}
schema["properties"].(map[string]any)["providers"] = providerSchema
// Add agents
agentSchema := map[string]any{
"type": "object",
"description": "Agent configurations",
"additionalProperties": map[string]any{
"type": "object",
"description": "Agent configuration",
"properties": map[string]any{
"model": map[string]any{
"type": "string",
"description": "Model ID for the agent",
},
"maxTokens": map[string]any{
"type": "integer",
"description": "Maximum tokens for the agent",
"minimum": 1,
},
"reasoningEffort": map[string]any{
"type": "string",
"description": "Reasoning effort for models that support it (OpenAI, Anthropic)",
"enum": []string{"low", "medium", "high"},
},
},
"required": []string{"model"},
},
}
// Add model enum
modelEnum := []string{}
for modelID := range models.SupportedModels {
modelEnum = append(modelEnum, string(modelID))
}
agentSchema["additionalProperties"].(map[string]any)["properties"].(map[string]any)["model"].(map[string]any)["enum"] = modelEnum
// Add specific agent properties
agentProperties := map[string]any{}
knownAgents := []string{
string(config.AgentCoder),
string(config.AgentTask),
string(config.AgentTitle),
}
for _, agentName := range knownAgents {
agentProperties[agentName] = map[string]any{
"$ref": "#/definitions/agent",
}
}
// Create a combined schema that allows both specific agents and additional ones
combinedAgentSchema := map[string]any{
"type": "object",
"description": "Agent configurations",
"properties": agentProperties,
"additionalProperties": agentSchema["additionalProperties"],
}
schema["properties"].(map[string]any)["agents"] = combinedAgentSchema
schema["definitions"] = map[string]any{
"agent": agentSchema["additionalProperties"],
}
// Add LSP configuration
schema["properties"].(map[string]any)["lsp"] = map[string]any{
"type": "object",
"description": "Language Server Protocol configurations",
"additionalProperties": map[string]any{
"type": "object",
"description": "LSP configuration for a language",
"properties": map[string]any{
"disabled": map[string]any{
"type": "boolean",
"description": "Whether the LSP is disabled",
"default": false,
},
"command": map[string]any{
"type": "string",
"description": "Command to execute for the LSP server",
},
"args": map[string]any{
"type": "array",
"description": "Command arguments for the LSP server",
"items": map[string]any{
"type": "string",
},
},
"options": map[string]any{
"type": "object",
"description": "Additional options for the LSP server",
},
},
"required": []string{"command"},
},
}
return schema
}

143
go.mod

@@ -1,143 +0,0 @@
module github.com/kujtimiihoxha/opencode
go 1.24.0
toolchain go1.24.2
require (
github.com/JohannesKaufmann/html-to-markdown v1.6.0
github.com/PuerkitoBio/goquery v1.9.2
github.com/alecthomas/chroma/v2 v2.15.0
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.2
github.com/bmatcuk/doublestar/v4 v4.8.1
github.com/catppuccin/go v0.3.0
github.com/charmbracelet/bubbles v0.20.0
github.com/charmbracelet/bubbletea v1.3.4
github.com/charmbracelet/glamour v0.9.1
github.com/charmbracelet/huh v0.6.0
github.com/charmbracelet/lipgloss v1.1.0
github.com/charmbracelet/x/ansi v0.8.0
github.com/fsnotify/fsnotify v1.8.0
github.com/go-git/go-git/v5 v5.15.0
github.com/go-logfmt/logfmt v0.6.0
github.com/golang-migrate/migrate/v4 v4.18.2
github.com/google/generative-ai-go v0.19.0
github.com/google/uuid v1.6.0
github.com/lrstanley/bubblezone v0.0.0-20250315020633-c249a3fe1231
github.com/mark3labs/mcp-go v0.17.0
github.com/mattn/go-runewidth v0.0.16
github.com/mattn/go-sqlite3 v1.14.24
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6
github.com/muesli/reflow v0.3.0
github.com/muesli/termenv v0.16.0
github.com/openai/openai-go v0.1.0-beta.2
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3
github.com/spf13/cobra v1.9.1
github.com/spf13/viper v1.20.0
github.com/stretchr/testify v1.10.0
google.golang.org/api v0.215.0
)
require (
cloud.google.com/go v0.116.0 // indirect
cloud.google.com/go/ai v0.8.0 // indirect
cloud.google.com/go/auth v0.13.0 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.6 // indirect
cloud.google.com/go/compute/metadata v0.6.0 // indirect
cloud.google.com/go/longrunning v0.5.7 // indirect
dario.cat/mergo v1.0.0 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/ProtonMail/go-crypto v1.1.6 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/atotto/clipboard v0.1.4 // indirect
github.com/aws/aws-sdk-go-v2 v1.30.3 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 // indirect
github.com/aws/aws-sdk-go-v2/config v1.27.27 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.17.27 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.22.4 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.30.3 // indirect
github.com/aws/smithy-go v1.20.3 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/cloudflare/circl v1.6.1 // indirect
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.11.4 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.6.2 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/google/s2a-go v0.1.8 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
github.com/googleapis/gax-go/v2 v2.14.1 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/pjbgf/sha1cd v0.3.2 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/sagikazarmark/locafero v0.7.0 // indirect
github.com/skeema/knownhosts v1.3.1 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.12.0 // indirect
github.com/spf13/cast v1.7.1 // indirect
github.com/spf13/pflag v1.0.6 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
github.com/yuin/goldmark v1.7.8 // indirect
github.com/yuin/goldmark-emoji v1.0.5 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect
go.opentelemetry.io/otel v1.29.0 // indirect
go.opentelemetry.io/otel/metric v1.29.0 // indirect
go.opentelemetry.io/otel/trace v1.29.0 // indirect
go.uber.org/atomic v1.9.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/crypto v0.37.0 // indirect
golang.org/x/net v0.39.0 // indirect
golang.org/x/oauth2 v0.25.0 // indirect
golang.org/x/sync v0.13.0 // indirect
golang.org/x/sys v0.32.0 // indirect
golang.org/x/term v0.31.0 // indirect
golang.org/x/text v0.24.0 // indirect
golang.org/x/time v0.8.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8 // indirect
google.golang.org/grpc v1.67.3 // indirect
google.golang.org/protobuf v1.36.1 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

396
go.sum

@@ -1,396 +0,0 @@
cloud.google.com/go v0.116.0 h1:B3fRrSDkLRt5qSHWe40ERJvhvnQwdZiHu0bJOpldweE=
cloud.google.com/go v0.116.0/go.mod h1:cEPSRWPzZEswwdr9BxE6ChEn01dWlTaF05LiC2Xs70U=
cloud.google.com/go/ai v0.8.0 h1:rXUEz8Wp2OlrM8r1bfmpF2+VKqc1VJpafE3HgzRnD/w=
cloud.google.com/go/ai v0.8.0/go.mod h1:t3Dfk4cM61sytiggo2UyGsDVW3RF1qGZaUKDrZFyqkE=
cloud.google.com/go/auth v0.13.0 h1:8Fu8TZy167JkW8Tj3q7dIkr2v4cndv41ouecJx0PAHs=
cloud.google.com/go/auth v0.13.0/go.mod h1:COOjD9gwfKNKz+IIduatIhYJQIc0mG3H102r/EMxX6Q=
cloud.google.com/go/auth/oauth2adapt v0.2.6 h1:V6a6XDu2lTwPZWOawrAa9HUK+DB2zfJyTuciBG5hFkU=
cloud.google.com/go/auth/oauth2adapt v0.2.6/go.mod h1:AlmsELtlEBnaNTL7jCj8VQFLy6mbZv0s4Q7NGBeQ5E8=
cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I=
cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg=
cloud.google.com/go/longrunning v0.5.7 h1:WLbHekDbjK1fVFD3ibpFFVoyizlLRl73I7YKuAKilhU=
cloud.google.com/go/longrunning v0.5.7/go.mod h1:8GClkudohy1Fxm3owmBGid8W0pSgodEMwEAztp38Xng=
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
github.com/JohannesKaufmann/html-to-markdown v1.6.0 h1:04VXMiE50YYfCfLboJCLcgqF5x+rHJnb1ssNmqpLH/k=
github.com/JohannesKaufmann/html-to-markdown v1.6.0/go.mod h1:NUI78lGg/a7vpEJTz/0uOcYMaibytE4BUOQS8k78yPQ=
github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw=
github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE=
github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk=
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/chroma/v2 v2.15.0 h1:LxXTQHFoYrstG2nnV9y2X5O94sOBzf0CIUpSTbpxvMc=
github.com/alecthomas/chroma/v2 v2.15.0/go.mod h1:gUhVLrPDXPtp/f+L1jo9xepo9gL4eLwRuGAunSZMkio=
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.2 h1:h7qxtumNjKPWFv1QM/HJy60MteeW23iKeEtBoY7bYZk=
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.2/go.mod h1:AapDW22irxK2PSumZiQXYUFvsdQgkwIWlpESweWZI/c=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY=
github.com/aws/aws-sdk-go-v2 v1.30.3/go.mod h1:nIQjQVp5sfpQcTc9mPSr1B0PaWK5ByX9MOoDadSN4lc=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 h1:tW1/Rkad38LA15X4UQtjXZXNKsCgkshC3EbmcUmghTg=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3/go.mod h1:UbnqO+zjqk3uIt9yCACHJ9IVNhyhOCnYk8yA19SAWrM=
github.com/aws/aws-sdk-go-v2/config v1.27.27 h1:HdqgGt1OAP0HkEDDShEl0oSYa9ZZBSOmKpdpsDMdO90=
github.com/aws/aws-sdk-go-v2/config v1.27.27/go.mod h1:MVYamCg76dFNINkZFu4n4RjDixhVr51HLj4ErWzrVwg=
github.com/aws/aws-sdk-go-v2/credentials v1.17.27 h1:2raNba6gr2IfA0eqqiP2XiQ0UVOpGPgDSi0I9iAP+UI=
github.com/aws/aws-sdk-go-v2/credentials v1.17.27/go.mod h1:gniiwbGahQByxan6YjQUMcW4Aov6bLC3m+evgcoN4r4=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11 h1:KreluoV8FZDEtI6Co2xuNk/UqI9iwMrOx/87PBNIKqw=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11/go.mod h1:SeSUYBLsMYFoRvHE0Tjvn7kbxaUhl75CJi1sbfhMxkU=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15 h1:SoNJ4RlFEQEbtDcCEt+QG56MY4fm4W8rYirAmq+/DdU=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15/go.mod h1:U9ke74k1n2bf+RIgoX1SXFed1HLs51OgUSs+Ph0KJP8=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15 h1:C6WHdGnTDIYETAm5iErQUiVNsclNx9qbJVPIt03B6bI=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15/go.mod h1:ZQLZqhcu+JhSrA9/NXRm8SkDvsycE+JkV3WGY41e+IM=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 h1:dT3MqvGhSoaIhRseqw2I0yH81l7wiR2vjs57O51EAm8=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3/go.mod h1:GlAeCkHwugxdHaueRr4nhPuY+WW+gR8UjlcqzPr1SPI=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17 h1:HGErhhrxZlQ044RiM+WdoZxp0p+EGM62y3L6pwA4olE=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17/go.mod h1:RkZEx4l0EHYDJpWppMJ3nD9wZJAa8/0lq9aVC+r2UII=
github.com/aws/aws-sdk-go-v2/service/sso v1.22.4 h1:BXx0ZIxvrJdSgSvKTZ+yRBeSqqgPM89VPlulEcl37tM=
github.com/aws/aws-sdk-go-v2/service/sso v1.22.4/go.mod h1:ooyCOXjvJEsUw7x+ZDHeISPMhtwI3ZCB7ggFMcFfWLU=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4 h1:yiwVzJW2ZxZTurVbYWA7QOrAaCYQR72t0wrSBfoesUE=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4/go.mod h1:0oxfLkpz3rQ/CHlx5hB7H69YUpFiI1tql6Q6Ne+1bCw=
github.com/aws/aws-sdk-go-v2/service/sts v1.30.3 h1:ZsDKRLXGWHk8WdtyYMoGNO7bTudrvuKpDKgMVRlepGE=
github.com/aws/aws-sdk-go-v2/service/sts v1.30.3/go.mod h1:zwySh8fpFyXp9yOr/KVzxOl8SRqgf/IDw5aUt9UKFcQ=
github.com/aws/smithy-go v1.20.3 h1:ryHwveWzPV5BIof6fyDvor6V3iUL7nTfiTKXHiW05nE=
github.com/aws/smithy-go v1.20.3/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8=
github.com/aymanbagabas/go-udiff v0.2.0/go.mod h1:RE4Ex0qsGkTAJoQdQQCA0uG+nAzJO/pI/QwceO5fgrA=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38=
github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/catppuccin/go v0.3.0 h1:d+0/YicIq+hSTo5oPuRi5kOpqkVA5tAsU6dNhvRu+aY=
github.com/catppuccin/go v0.3.0/go.mod h1:8IHJuMGaUUjQM82qBrGNBv7LFq6JI3NnQCF6MOlZjpc=
github.com/charmbracelet/bubbles v0.20.0 h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE=
github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU=
github.com/charmbracelet/bubbletea v1.3.4 h1:kCg7B+jSCFPLYRA52SDZjr51kG/fMUEoPoZrkaDHyoI=
github.com/charmbracelet/bubbletea v1.3.4/go.mod h1:dtcUCyCGEX3g9tosuYiut3MXgY/Jsv9nKVdibKKRRXo=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
github.com/charmbracelet/glamour v0.9.1 h1:11dEfiGP8q1BEqvGoIjivuc2rBk+5qEXdPtaQ2WoiCM=
github.com/charmbracelet/glamour v0.9.1/go.mod h1:+SHvIS8qnwhgTpVMiXwn7OfGomSqff1cHBCI8jLOetk=
github.com/charmbracelet/huh v0.6.0 h1:mZM8VvZGuE0hoDXq6XLxRtgfWyTI3b2jZNKh0xWmax8=
github.com/charmbracelet/huh v0.6.0/go.mod h1:GGNKeWCeNzKpEOh/OJD8WBwTQjV3prFAtQPpLv+AVwU=
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q=
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8=
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
github.com/charmbracelet/x/exp/golden v0.0.0-20240815200342-61de596daa2b h1:MnAMdlwSltxJyULnrYbkZpp4k58Co7Tah3ciKhSNo0Q=
github.com/charmbracelet/x/exp/golden v0.0.0-20240815200342-61de596daa2b/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 h1:qko3AQ4gK1MTS/de7F5hPGx6/k1u0w4TeYmBFwzYVP4=
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0/go.mod h1:pBhA0ybfXv6hDjQUZ7hk1lVxBiUbupdw5R31yPUViVQ=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo=
github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o=
github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M=
github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM=
github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
github.com/go-git/go-git/v5 v5.15.0 h1:f5Qn0W0F7ry1iN0ZwIU5m/n7/BKB4hiZfc+zlZx7ly0=
github.com/go-git/go-git/v5 v5.15.0/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8=
github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/golang-migrate/migrate/v4 v4.18.2 h1:2VSCMz7x7mjyTXx3m2zPokOY82LTRgxK1yQYKo6wWQ8=
github.com/golang-migrate/migrate/v4 v4.18.2/go.mod h1:2CM6tJvn2kqPXwnXO/d3rAQYiyoIm180VsO8PRX6Rpk=
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
github.com/google/generative-ai-go v0.19.0 h1:R71szggh8wHMCUlEMsW2A/3T+5LdEIkiaHSYgSpUgdg=
github.com/google/generative-ai-go v0.19.0/go.mod h1:JYolL13VG7j79kM5BtHz4qwONHkeJQzOCkKXnpqtS/E=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM=
github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.4 h1:XYIDZApgAnrN1c855gTgghdIA6Stxb52D5RnLI1SLyw=
github.com/googleapis/enterprise-certificate-proxy v0.3.4/go.mod h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA=
github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q=
github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lrstanley/bubblezone v0.0.0-20250315020633-c249a3fe1231 h1:9rjt7AfnrXKNSZhp36A3/4QAZAwGGCGD/p8Bse26zms=
github.com/lrstanley/bubblezone v0.0.0-20250315020633-c249a3fe1231/go.mod h1:S5etECMx+sZnW0Gm100Ma9J1PgVCTgNyFaqGu2b08b4=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mark3labs/mcp-go v0.17.0 h1:5Ps6T7qXr7De/2QTqs9h6BKeZ/qdeUeGrgM5lPzi930=
github.com/mark3labs/mcp-go v0.17.0/go.mod h1:KmJndYv7GIgcPVwEKJjNcbhVQ+hJGJhrCCB/9xITzpE=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4=
github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k=
github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY=
github.com/openai/openai-go v0.1.0-beta.2 h1:Ra5nCFkbEl9w+UJwAciC4kqnIBUCcJazhmMA0/YN894=
github.com/openai/openai-go v0.1.0-beta.2/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo=
github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k=
github.com/sebdah/goldie/v2 v2.5.3 h1:9ES/mNN+HNUbNWpVAlrzuZ7jE+Nrczbj8uFRjM7624Y=
github.com/sebdah/goldie/v2 v2.5.3/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvKI/NNtssI=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.20.0 h1:zrxIyR3RQIOsarIrgL8+sAvALXul9jeEPa06Y0Ph6vY=
github.com/spf13/viper v1.20.0/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
github.com/yuin/goldmark v1.7.8 h1:iERMLn0/QJeHFhxSt3p6PeN9mGnvIKSpG9YYorDMnic=
github.com/yuin/goldmark v1.7.8/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
github.com/yuin/goldmark-emoji v1.0.5 h1:EMVWyCGPlXJfUXBXpuMu+ii3TIaxbVBnEX9uaDC4cIk=
github.com/yuin/goldmark-emoji v1.0.5/go.mod h1:tTkZEbwu5wkPmgTcitqddVxY9osFZiavD+r4AzQrh1U=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 h1:r6I7RJCN86bpD/FQwedZ0vSixDpwuWREjW9oRMsmqDc=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0/go.mod h1:B9yO6b04uB80CzjedvewuqDhxJxi11s7/GtiGa8bAjI=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8=
go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw=
go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8=
go.opentelemetry.io/otel/metric v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc=
go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8=
go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4=
go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ=
go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610=
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o=
golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.215.0 h1:jdYF4qnyczlEz2ReWIsosNLDuzXyvFHJtI5gcr0J7t0=
google.golang.org/api v0.215.0/go.mod h1:fta3CVtuJYOEdugLNWm6WodzOS8KdFckABwN4I40hzY=
google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 h1:CkkIfIt50+lT6NHAVoRYEyAvQGFM7xEwXUUywFvEb3Q=
google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576/go.mod h1:1R3kvZ1dtP3+4p4d3G8uJ8rFk/fWlScl38vanWACI08=
google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8 h1:TqExAhdPaB60Ux47Cn0oLV07rGnxZzIsaRhQaqS666A=
google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8/go.mod h1:lcTa1sDdWEIHMWlITnIczmw5w60CF9ffkb8Z+DVmmjA=
google.golang.org/grpc v1.67.3 h1:OgPcDAFKHnH8X3O4WcO4XUc8GRDeKsKReqbQtiCj7N8=
google.golang.org/grpc v1.67.3/go.mod h1:YGaHCc6Oap+FzBJTZLBzkGSYt/cvGPFTPxkn7QfSU8s=
google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/gk=
google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

48
infra/app.ts Normal file

@@ -0,0 +1,48 @@
export const domain = (() => {
if ($app.stage === "production") return "opencode.ai"
if ($app.stage === "dev") return "dev.opencode.ai"
return `${$app.stage}.dev.opencode.ai`
})()
const GITHUB_APP_ID = new sst.Secret("GITHUB_APP_ID")
const GITHUB_APP_PRIVATE_KEY = new sst.Secret("GITHUB_APP_PRIVATE_KEY")
const bucket = new sst.cloudflare.Bucket("Bucket")
export const api = new sst.cloudflare.Worker("Api", {
domain: `api.${domain}`,
handler: "packages/function/src/api.ts",
environment: {
WEB_DOMAIN: domain,
},
url: true,
link: [bucket, GITHUB_APP_ID, GITHUB_APP_PRIVATE_KEY],
transform: {
worker: (args) => {
args.logpush = true
args.bindings = $resolve(args.bindings).apply((bindings) => [
...bindings,
{
name: "SYNC_SERVER",
type: "durable_object_namespace",
className: "SyncServer",
},
])
args.migrations = {
// Note: when releasing the next tag, make sure all stages use tag v2
oldTag: $app.stage === "production" ? "" : "v1",
newTag: $app.stage === "production" ? "" : "v1",
//newSqliteClasses: ["SyncServer"],
}
},
},
})
new sst.cloudflare.x.Astro("Web", {
domain,
path: "packages/web",
environment: {
// For astro config
SST_STAGE: $app.stage,
VITE_API_URL: api.url,
},
})

34
install

@@ -12,23 +12,28 @@ requested_version=${VERSION:-}
os=$(uname -s | tr '[:upper:]' '[:lower:]')
if [[ "$os" == "darwin" ]]; then
os="mac"
os="darwin"
fi
arch=$(uname -m)
if [[ "$arch" == "aarch64" ]]; then
arch="arm64"
elif [[ "$arch" == "x86_64" ]]; then
arch="x64"
fi
filename="$APP-$os-$arch.tar.gz"
filename="$APP-$os-$arch.zip"
case "$filename" in
*"-linux-"*)
[[ "$arch" == "x86_64" || "$arch" == "arm64" || "$arch" == "i386" ]] || exit 1
[[ "$arch" == "x64" || "$arch" == "arm64" ]] || exit 1
;;
*"-mac-"*)
[[ "$arch" == "x86_64" || "$arch" == "arm64" ]] || exit 1
*"-darwin-"*)
[[ "$arch" == "x64" || "$arch" == "arm64" ]] || exit 1
;;
*"-windows-"*)
[[ "$arch" == "x64" ]] || exit 1
;;
*)
echo "${RED}Unsupported OS/Arch: $os/$arch${NC}"
@@ -40,15 +45,15 @@ INSTALL_DIR=$HOME/.opencode/bin
mkdir -p "$INSTALL_DIR"
if [ -z "$requested_version" ]; then
url="https://github.com/opencode-ai/opencode/releases/latest/download/$filename"
specific_version=$(curl -s https://api.github.com/repos/opencode-ai/opencode/releases/latest | awk -F'"' '/"tag_name": "/ {gsub(/^v/, "", $4); print $4}')
url="https://github.com/sst/opencode/releases/latest/download/$filename"
specific_version=$(curl -s https://api.github.com/repos/sst/opencode/releases/latest | awk -F'"' '/"tag_name": "/ {gsub(/^v/, "", $4); print $4}')
if [[ $? -ne 0 ]]; then
if [[ $? -ne 0 || -z "$specific_version" ]]; then
echo "${RED}Failed to fetch version information${NC}"
exit 1
fi
else
url="https://github.com/opencode-ai/opencode/releases/download/v${requested_version}/$filename"
url="https://github.com/sst/opencode/releases/download/v${requested_version}/$filename"
specific_version=$requested_version
fi
@@ -88,8 +93,9 @@ check_version() {
download_and_install() {
print_message info "Downloading ${ORANGE}opencode ${GREEN}version: ${YELLOW}$specific_version ${GREEN}..."
mkdir -p opencodetmp && cd opencodetmp
curl -# -L $url | tar xz
mv opencode $INSTALL_DIR
curl -# -L -o "$filename" "$url"
unzip -q "$filename"
mv opencode "$INSTALL_DIR"
cd .. && rm -rf opencodetmp
}
@@ -101,7 +107,9 @@ add_to_path() {
local config_file=$1
local command=$2
if [[ -w $config_file ]]; then
if grep -Fxq "$command" "$config_file"; then
print_message info "Command already exists in $config_file, skipping write."
elif [[ -w $config_file ]]; then
echo -e "\n# opencode" >> "$config_file"
echo "$command" >> "$config_file"
print_message info "Successfully added ${ORANGE}opencode ${GREEN}to \$PATH in $config_file"
@@ -167,6 +175,7 @@ if [[ ":$PATH:" != *":$INSTALL_DIR:"* ]]; then
add_to_path "$config_file" "export PATH=$INSTALL_DIR:\$PATH"
;;
*)
export PATH=$INSTALL_DIR:$PATH
print_message warning "Manually add the directory to $config_file (or similar):"
print_message info " export PATH=$INSTALL_DIR:\$PATH"
;;
@@ -177,4 +186,3 @@ if [ -n "${GITHUB_ACTIONS-}" ] && [ "${GITHUB_ACTIONS}" == "true" ]; then
echo "$INSTALL_DIR" >> $GITHUB_PATH
print_message info "Added $INSTALL_DIR to \$GITHUB_PATH"
fi


@@ -1,99 +0,0 @@
package app
import (
"context"
"database/sql"
"maps"
"sync"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/db"
"github.com/kujtimiihoxha/opencode/internal/history"
"github.com/kujtimiihoxha/opencode/internal/llm/agent"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/lsp"
"github.com/kujtimiihoxha/opencode/internal/message"
"github.com/kujtimiihoxha/opencode/internal/permission"
"github.com/kujtimiihoxha/opencode/internal/session"
)
type App struct {
Sessions session.Service
Messages message.Service
History history.Service
Permissions permission.Service
CoderAgent agent.Service
LSPClients map[string]*lsp.Client
clientsMutex sync.RWMutex
watcherCancelFuncs []context.CancelFunc
cancelFuncsMutex sync.Mutex
watcherWG sync.WaitGroup
}
func New(ctx context.Context, conn *sql.DB) (*App, error) {
q := db.New(conn)
sessions := session.NewService(q)
messages := message.NewService(q)
files := history.NewService(q, conn)
app := &App{
Sessions: sessions,
Messages: messages,
History: files,
Permissions: permission.NewPermissionService(),
LSPClients: make(map[string]*lsp.Client),
}
// Initialize LSP clients in the background
go app.initLSPClients(ctx)
var err error
app.CoderAgent, err = agent.NewAgent(
config.AgentCoder,
app.Sessions,
app.Messages,
agent.CoderAgentTools(
app.Permissions,
app.Sessions,
app.Messages,
app.History,
app.LSPClients,
),
)
if err != nil {
logging.Error("Failed to create coder agent", err)
return nil, err
}
return app, nil
}
// Shutdown performs a clean shutdown of the application
func (app *App) Shutdown() {
// Cancel all watcher goroutines
app.cancelFuncsMutex.Lock()
for _, cancel := range app.watcherCancelFuncs {
cancel()
}
app.cancelFuncsMutex.Unlock()
app.watcherWG.Wait()
// Perform additional cleanup for LSP clients
app.clientsMutex.RLock()
clients := make(map[string]*lsp.Client, len(app.LSPClients))
maps.Copy(clients, app.LSPClients)
app.clientsMutex.RUnlock()
for name, client := range clients {
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
if err := client.Shutdown(shutdownCtx); err != nil {
logging.Error("Failed to shutdown LSP client", "name", name, "error", err)
}
cancel()
}
}
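A hedged usage sketch of the App type above, not taken from the diff: it assumes a SQLite connection (go-sqlite3 appears in the go.mod shown earlier), a hypothetical database path, and that the TUI or CLI entrypoint lives elsewhere.

package main

import (
	"context"
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3"

	"github.com/kujtimiihoxha/opencode/internal/app"
)

func main() {
	ctx := context.Background()

	// Hypothetical database path; in practice this would be derived from the
	// data.directory setting in the config package.
	conn, err := sql.Open("sqlite3", ".opencode/opencode.db")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	a, err := app.New(ctx, conn)
	if err != nil {
		log.Fatal(err)
	}
	// Shutdown cancels the watcher goroutines and shuts down LSP clients.
	defer a.Shutdown()

	// ... hand the app to the TUI or other entrypoint here ...
}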


@@ -1,126 +0,0 @@
package app
import (
"context"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/lsp"
"github.com/kujtimiihoxha/opencode/internal/lsp/watcher"
)
func (app *App) initLSPClients(ctx context.Context) {
cfg := config.Get()
// Initialize LSP clients
for name, clientConfig := range cfg.LSP {
// Start each client initialization in its own goroutine
go app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
}
logging.Info("LSP clients initialization started in background")
}
// createAndStartLSPClient creates a new LSP client, initializes it, and starts its workspace watcher
func (app *App) createAndStartLSPClient(ctx context.Context, name string, command string, args ...string) {
// Create a specific context for initialization with a timeout
logging.Info("Creating LSP client", "name", name, "command", command, "args", args)
// Create the LSP client
lspClient, err := lsp.NewClient(ctx, command, args...)
if err != nil {
logging.Error("Failed to create LSP client for", name, err)
return
}
// Create a longer timeout for initialization (some servers take time to start)
initCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
// Initialize with the initialization context
_, err = lspClient.InitializeLSPClient(initCtx, config.WorkingDirectory())
if err != nil {
logging.Error("Initialize failed", "name", name, "error", err)
// Clean up the client to prevent resource leaks
lspClient.Close()
return
}
// Wait for the server to be ready
if err := lspClient.WaitForServerReady(initCtx); err != nil {
logging.Error("Server failed to become ready", "name", name, "error", err)
// We'll continue anyway, as some functionality might still work
lspClient.SetServerState(lsp.StateError)
} else {
logging.Info("LSP server is ready", "name", name)
lspClient.SetServerState(lsp.StateReady)
}
logging.Info("LSP client initialized", "name", name)
// Create a child context that can be canceled when the app is shutting down
watchCtx, cancelFunc := context.WithCancel(ctx)
// Create a context with the server name for better identification
watchCtx = context.WithValue(watchCtx, "serverName", name)
// Create the workspace watcher
workspaceWatcher := watcher.NewWorkspaceWatcher(lspClient)
// Store the cancel function to be called during cleanup
app.cancelFuncsMutex.Lock()
app.watcherCancelFuncs = append(app.watcherCancelFuncs, cancelFunc)
app.cancelFuncsMutex.Unlock()
// Add the watcher to a WaitGroup to track active goroutines
app.watcherWG.Add(1)
// Add to map with mutex protection before starting goroutine
app.clientsMutex.Lock()
app.LSPClients[name] = lspClient
app.clientsMutex.Unlock()
go app.runWorkspaceWatcher(watchCtx, name, workspaceWatcher)
}
// runWorkspaceWatcher executes the workspace watcher for an LSP client
func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceWatcher *watcher.WorkspaceWatcher) {
defer app.watcherWG.Done()
defer logging.RecoverPanic("LSP-"+name, func() {
// Try to restart the client
app.restartLSPClient(ctx, name)
})
workspaceWatcher.WatchWorkspace(ctx, config.WorkingDirectory())
logging.Info("Workspace watcher stopped", "client", name)
}
// restartLSPClient attempts to restart a crashed or failed LSP client
func (app *App) restartLSPClient(ctx context.Context, name string) {
// Get the original configuration
cfg := config.Get()
clientConfig, exists := cfg.LSP[name]
if !exists {
logging.Error("Cannot restart client, configuration not found", "client", name)
return
}
// Clean up the old client if it exists
app.clientsMutex.Lock()
oldClient, exists := app.LSPClients[name]
if exists {
delete(app.LSPClients, name) // Remove from map before potentially slow shutdown
}
app.clientsMutex.Unlock()
if exists && oldClient != nil {
// Try to shut it down gracefully, but don't block on errors
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
_ = oldClient.Shutdown(shutdownCtx)
cancel()
}
// Create a new client using the shared function
app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
logging.Info("Successfully restarted LSP client", "client", name)
}
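To make the flow above concrete, a small illustrative sketch of the kind of map that initLSPClients ranges over. The "go"/gopls entry is purely an example and not defined anywhere in this diff; the field names come from the LSPConfig struct in the config package shown below.

package main

import (
	"fmt"

	"github.com/kujtimiihoxha/opencode/internal/config"
)

func main() {
	// Purely illustrative contents for cfg.LSP.
	lspServers := map[string]config.LSPConfig{
		"go": {Command: "gopls", Args: []string{"serve"}},
	}

	// initLSPClients ranges over a map like this one, starting one
	// createAndStartLSPClient goroutine per entry.
	for name, clientConfig := range lspServers {
		fmt.Printf("would start %q: %s %v\n", name, clientConfig.Command, clientConfig.Args)
	}
}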


@@ -1,594 +0,0 @@
// Package config manages application configuration from various sources.
package config
import (
"fmt"
"log/slog"
"os"
"strings"
"github.com/kujtimiihoxha/opencode/internal/llm/models"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/spf13/viper"
)
// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string
// Supported MCP types
const (
MCPStdio MCPType = "stdio"
MCPSse MCPType = "sse"
)
// MCPServer defines the configuration for a Model Context Protocol server.
type MCPServer struct {
Command string `json:"command"`
Env []string `json:"env"`
Args []string `json:"args"`
Type MCPType `json:"type"`
URL string `json:"url"`
Headers map[string]string `json:"headers"`
}
type AgentName string
const (
AgentCoder AgentName = "coder"
AgentTask AgentName = "task"
AgentTitle AgentName = "title"
)
// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
Model models.ModelID `json:"model"`
MaxTokens int64 `json:"maxTokens"`
ReasoningEffort string `json:"reasoningEffort"` // For OpenAI models: low, medium, high
}
// Provider defines configuration for an LLM provider.
type Provider struct {
APIKey string `json:"apiKey"`
Disabled bool `json:"disabled"`
}
// Data defines storage configuration.
type Data struct {
Directory string `json:"directory"`
}
// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
Disabled bool `json:"disabled"`
Command string `json:"command"`
Args []string `json:"args"`
Options any `json:"options"`
}
// Config is the main configuration structure for the application.
type Config struct {
Data Data `json:"data"`
WorkingDir string `json:"wd,omitempty"`
MCPServers map[string]MCPServer `json:"mcpServers,omitempty"`
Providers map[models.ModelProvider]Provider `json:"providers,omitempty"`
LSP map[string]LSPConfig `json:"lsp,omitempty"`
Agents map[AgentName]Agent `json:"agents"`
Debug bool `json:"debug,omitempty"`
DebugLSP bool `json:"debugLSP,omitempty"`
}
// Application constants
const (
defaultDataDirectory = ".opencode"
defaultLogLevel = "info"
appName = "opencode"
)
// Global configuration instance
var cfg *Config
// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
func Load(workingDir string, debug bool) (*Config, error) {
if cfg != nil {
return cfg, nil
}
cfg = &Config{
WorkingDir: workingDir,
MCPServers: make(map[string]MCPServer),
Providers: make(map[models.ModelProvider]Provider),
LSP: make(map[string]LSPConfig),
}
configureViper()
setDefaults(debug)
setProviderDefaults()
// Read global config
if err := readConfig(viper.ReadInConfig()); err != nil {
return cfg, err
}
// Load and merge local config
mergeLocalConfig(workingDir)
// Apply configuration to the struct
if err := viper.Unmarshal(cfg); err != nil {
return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
}
applyDefaultValues()
defaultLevel := slog.LevelInfo
if cfg.Debug {
defaultLevel = slog.LevelDebug
}
if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
loggingFile := fmt.Sprintf("%s/%s", cfg.Data.Directory, "debug.log")
// if file does not exist create it
if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
return cfg, fmt.Errorf("failed to create directory: %w", err)
}
if _, err := os.Create(loggingFile); err != nil {
return cfg, fmt.Errorf("failed to create log file: %w", err)
}
}
sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
if err != nil {
return cfg, fmt.Errorf("failed to open log file: %w", err)
}
// Configure logger
logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
Level: defaultLevel,
}))
slog.SetDefault(logger)
} else {
// Configure logger
logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
Level: defaultLevel,
}))
slog.SetDefault(logger)
}
// Validate configuration
if err := Validate(); err != nil {
return cfg, fmt.Errorf("config validation failed: %w", err)
}
if cfg.Agents == nil {
cfg.Agents = make(map[AgentName]Agent)
}
// Override the max tokens for title agent
cfg.Agents[AgentTitle] = Agent{
Model: cfg.Agents[AgentTitle].Model,
MaxTokens: 80,
}
return cfg, nil
}
// configureViper sets up viper's configuration paths and environment variables.
func configureViper() {
viper.SetConfigName(fmt.Sprintf(".%s", appName))
viper.SetConfigType("json")
viper.AddConfigPath("$HOME")
viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
viper.SetEnvPrefix(strings.ToUpper(appName))
viper.AutomaticEnv()
}
// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
viper.SetDefault("data.directory", defaultDataDirectory)
if debug {
viper.SetDefault("debug", true)
viper.Set("log.level", "debug")
} else {
viper.SetDefault("debug", false)
viper.SetDefault("log.level", defaultLogLevel)
}
}
// setProviderDefaults configures LLM provider defaults based on environment variables.
// the default model priority is:
// 1. Anthropic
// 2. OpenAI
// 3. Google Gemini
// 4. Groq
// 5. AWS Bedrock
func setProviderDefaults() {
// Anthropic configuration
if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
viper.SetDefault("providers.anthropic.apiKey", apiKey)
viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
viper.SetDefault("agents.task.model", models.Claude37Sonnet)
viper.SetDefault("agents.title.model", models.Claude37Sonnet)
return
}
// OpenAI configuration
if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
viper.SetDefault("providers.openai.apiKey", apiKey)
viper.SetDefault("agents.coder.model", models.GPT41)
viper.SetDefault("agents.task.model", models.GPT41Mini)
viper.SetDefault("agents.title.model", models.GPT41Mini)
return
}
// Google Gemini configuration
if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
viper.SetDefault("providers.gemini.apiKey", apiKey)
viper.SetDefault("agents.coder.model", models.Gemini25)
viper.SetDefault("agents.task.model", models.Gemini25Flash)
viper.SetDefault("agents.title.model", models.Gemini25Flash)
return
}
// Groq configuration
if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
viper.SetDefault("providers.groq.apiKey", apiKey)
viper.SetDefault("agents.coder.model", models.QWENQwq)
viper.SetDefault("agents.task.model", models.QWENQwq)
viper.SetDefault("agents.title.model", models.QWENQwq)
return
}
// AWS Bedrock configuration
if hasAWSCredentials() {
viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
return
}
}
// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
// Check for explicit AWS credentials
if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
return true
}
// Check for AWS profile
if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
return true
}
// Check for AWS region
if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
return true
}
// Check for container credential endpoints (e.g. ECS/EKS task roles)
if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
return true
}
return false
}
// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
if err == nil {
return nil
}
// It's okay if the config file doesn't exist
if _, ok := err.(viper.ConfigFileNotFoundError); ok {
return nil
}
return fmt.Errorf("failed to read config: %w", err)
}
// mergeLocalConfig loads and merges configuration from the local directory.
func mergeLocalConfig(workingDir string) {
local := viper.New()
local.SetConfigName(fmt.Sprintf(".%s", appName))
local.SetConfigType("json")
local.AddConfigPath(workingDir)
// Merge local config if it exists
if err := local.ReadInConfig(); err == nil {
viper.MergeConfigMap(local.AllSettings())
}
}
// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
// Set default MCP type if not specified
for k, v := range cfg.MCPServers {
if v.Type == "" {
v.Type = MCPStdio
cfg.MCPServers[k] = v
}
}
}
// Validate checks if the configuration is valid and applies defaults where needed.
// It validates model IDs and providers, ensuring they are supported.
func Validate() error {
if cfg == nil {
return fmt.Errorf("config not loaded")
}
// Validate agent models
for name, agent := range cfg.Agents {
// Check if model exists
model, modelExists := models.SupportedModels[agent.Model]
if !modelExists {
logging.Warn("unsupported model configured, reverting to default",
"agent", name,
"configured_model", agent.Model)
// Set default model based on available providers
if setDefaultModelForAgent(name) {
logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
} else {
return fmt.Errorf("no valid provider available for agent %s", name)
}
continue
}
// Check if provider for the model is configured
provider := model.Provider
providerCfg, providerExists := cfg.Providers[provider]
if !providerExists {
// Provider not configured, check if we have environment variables
apiKey := getProviderAPIKey(provider)
if apiKey == "" {
logging.Warn("provider not configured for model, reverting to default",
"agent", name,
"model", agent.Model,
"provider", provider)
// Set default model based on available providers
if setDefaultModelForAgent(name) {
logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
} else {
return fmt.Errorf("no valid provider available for agent %s", name)
}
} else {
// Add provider with API key from environment
cfg.Providers[provider] = Provider{
APIKey: apiKey,
}
logging.Info("added provider from environment", "provider", provider)
}
} else if providerCfg.Disabled || providerCfg.APIKey == "" {
// Provider is disabled or has no API key
logging.Warn("provider is disabled or has no API key, reverting to default",
"agent", name,
"model", agent.Model,
"provider", provider)
// Set default model based on available providers
if setDefaultModelForAgent(name) {
logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
} else {
return fmt.Errorf("no valid provider available for agent %s", name)
}
}
// Validate max tokens
if agent.MaxTokens <= 0 {
logging.Warn("invalid max tokens, setting to default",
"agent", name,
"model", agent.Model,
"max_tokens", agent.MaxTokens)
// Update the agent with default max tokens
updatedAgent := cfg.Agents[name]
if model.DefaultMaxTokens > 0 {
updatedAgent.MaxTokens = model.DefaultMaxTokens
} else {
updatedAgent.MaxTokens = 4096 // Fallback default
}
cfg.Agents[name] = updatedAgent
} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
// Ensure max tokens doesn't exceed half the context window (reasonable limit)
logging.Warn("max tokens exceeds half the context window, adjusting",
"agent", name,
"model", agent.Model,
"max_tokens", agent.MaxTokens,
"context_window", model.ContextWindow)
// Update the agent with adjusted max tokens
updatedAgent := cfg.Agents[name]
updatedAgent.MaxTokens = model.ContextWindow / 2
cfg.Agents[name] = updatedAgent
}
// Validate reasoning effort for models that support reasoning
if model.CanReason && provider == models.ProviderOpenAI {
if agent.ReasoningEffort == "" {
// Set default reasoning effort for models that support it
logging.Info("setting default reasoning effort for model that supports reasoning",
"agent", name,
"model", agent.Model)
// Update the agent with default reasoning effort
updatedAgent := cfg.Agents[name]
updatedAgent.ReasoningEffort = "medium"
cfg.Agents[name] = updatedAgent
} else {
// Check if reasoning effort is valid (low, medium, high)
effort := strings.ToLower(agent.ReasoningEffort)
if effort != "low" && effort != "medium" && effort != "high" {
logging.Warn("invalid reasoning effort, setting to medium",
"agent", name,
"model", agent.Model,
"reasoning_effort", agent.ReasoningEffort)
// Update the agent with valid reasoning effort
updatedAgent := cfg.Agents[name]
updatedAgent.ReasoningEffort = "medium"
cfg.Agents[name] = updatedAgent
}
}
} else if !model.CanReason && agent.ReasoningEffort != "" {
// Model doesn't support reasoning but reasoning effort is set
logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
"agent", name,
"model", agent.Model,
"reasoning_effort", agent.ReasoningEffort)
// Update the agent to remove reasoning effort
updatedAgent := cfg.Agents[name]
updatedAgent.ReasoningEffort = ""
cfg.Agents[name] = updatedAgent
}
}
// Validate providers
for provider, providerCfg := range cfg.Providers {
if providerCfg.APIKey == "" && !providerCfg.Disabled {
logging.Warn("provider has no API key, marking as disabled", "provider", provider)
providerCfg.Disabled = true
cfg.Providers[provider] = providerCfg
}
}
// Validate LSP configurations
for language, lspConfig := range cfg.LSP {
if lspConfig.Command == "" && !lspConfig.Disabled {
logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
lspConfig.Disabled = true
cfg.LSP[language] = lspConfig
}
}
return nil
}
// getProviderAPIKey gets the API key for a provider from environment variables
func getProviderAPIKey(provider models.ModelProvider) string {
switch provider {
case models.ProviderAnthropic:
return os.Getenv("ANTHROPIC_API_KEY")
case models.ProviderOpenAI:
return os.Getenv("OPENAI_API_KEY")
case models.ProviderGemini:
return os.Getenv("GEMINI_API_KEY")
case models.ProviderGROQ:
return os.Getenv("GROQ_API_KEY")
case models.ProviderBedrock:
if hasAWSCredentials() {
return "aws-credentials-available"
}
}
return ""
}
// setDefaultModelForAgent sets a default model for an agent based on available providers
func setDefaultModelForAgent(agent AgentName) bool {
// Check providers in order of preference
if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
maxTokens := int64(5000)
if agent == AgentTitle {
maxTokens = 80
}
cfg.Agents[agent] = Agent{
Model: models.Claude37Sonnet,
MaxTokens: maxTokens,
}
return true
}
if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
var model models.ModelID
maxTokens := int64(5000)
reasoningEffort := ""
switch agent {
case AgentTitle:
model = models.GPT41Mini
maxTokens = 80
case AgentTask:
model = models.GPT41Mini
default:
model = models.GPT41
}
// Check if model supports reasoning
if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
reasoningEffort = "medium"
}
cfg.Agents[agent] = Agent{
Model: model,
MaxTokens: maxTokens,
ReasoningEffort: reasoningEffort,
}
return true
}
if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
var model models.ModelID
maxTokens := int64(5000)
if agent == AgentTitle {
model = models.Gemini25Flash
maxTokens = 80
} else {
model = models.Gemini25
}
cfg.Agents[agent] = Agent{
Model: model,
MaxTokens: maxTokens,
}
return true
}
if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
maxTokens := int64(5000)
if agent == AgentTitle {
maxTokens = 80
}
cfg.Agents[agent] = Agent{
Model: models.QWENQwq,
MaxTokens: maxTokens,
}
return true
}
if hasAWSCredentials() {
maxTokens := int64(5000)
if agent == AgentTitle {
maxTokens = 80
}
cfg.Agents[agent] = Agent{
Model: models.BedrockClaude37Sonnet,
MaxTokens: maxTokens,
ReasoningEffort: "medium", // Claude models support reasoning
}
return true
}
return false
}
// Get returns the current configuration.
// It's safe to call this function multiple times.
func Get() *Config {
return cfg
}
// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
if cfg == nil {
panic("config not loaded")
}
return cfg.WorkingDir
}
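For reference, a minimal sketch of the .opencode.json file this Config struct unmarshals. The key names follow the json tags above; the concrete values (model IDs, commands, the placeholder API key) are illustrative assumptions only:

{
  "data": { "directory": ".opencode" },
  "providers": {
    "anthropic": { "apiKey": "<your-api-key>" }
  },
  "agents": {
    "coder": { "model": "claude-3.7-sonnet", "maxTokens": 5000 },
    "title": { "model": "claude-3.7-sonnet", "maxTokens": 80 }
  },
  "mcpServers": {
    "example": { "type": "stdio", "command": "example-mcp-server", "args": [] }
  },
  "lsp": {
    "go": { "command": "gopls" }
  }
}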

View File

@@ -1,61 +0,0 @@
package config
import (
"fmt"
"os"
"path/filepath"
)
const (
// InitFlagFilename is the name of the file that indicates whether the project has been initialized
InitFlagFilename = "init"
)
// ProjectInitFlag represents the initialization status for a project directory
type ProjectInitFlag struct {
Initialized bool `json:"initialized"`
}
// ShouldShowInitDialog checks if the initialization dialog should be shown for the current directory
func ShouldShowInitDialog() (bool, error) {
if cfg == nil {
return false, fmt.Errorf("config not loaded")
}
// Create the flag file path
flagFilePath := filepath.Join(cfg.Data.Directory, InitFlagFilename)
// Check if the flag file exists
_, err := os.Stat(flagFilePath)
if err == nil {
// File exists, don't show the dialog
return false, nil
}
// If the error is not "file not found", return the error
if !os.IsNotExist(err) {
return false, fmt.Errorf("failed to check init flag file: %w", err)
}
// File doesn't exist, show the dialog
return true, nil
}
// MarkProjectInitialized marks the current project as initialized
func MarkProjectInitialized() error {
if cfg == nil {
return fmt.Errorf("config not loaded")
}
// Create the flag file path
flagFilePath := filepath.Join(cfg.Data.Directory, InitFlagFilename)
// Create an empty file to mark the project as initialized
file, err := os.Create(flagFilePath)
if err != nil {
return fmt.Errorf("failed to create init flag file: %w", err)
}
defer file.Close()
return nil
}
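A rough usage sketch of how these two helpers fit together; the wrapper function name is hypothetical and it assumes config.Load has already been called:

package main

import (
	"log"

	"github.com/kujtimiihoxha/opencode/internal/config"
)

// maybeRunInitDialog is a hypothetical caller: it checks the init flag,
// would show the dialog if needed, then records that initialization ran.
func maybeRunInitDialog() {
	show, err := config.ShouldShowInitDialog()
	if err != nil {
		log.Printf("init flag check failed: %v", err)
		return
	}
	if !show {
		return
	}
	// ... present the initialization dialog to the user here ...
	if err := config.MarkProjectInitialized(); err != nil {
		log.Printf("failed to mark project initialized: %v", err)
	}
}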

View File

@@ -1,91 +0,0 @@
package db
import (
"database/sql"
"fmt"
"os"
"path/filepath"
"github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/source/iofs"
"github.com/golang-migrate/migrate/v4/database/sqlite3"
_ "github.com/mattn/go-sqlite3"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/logging"
)
func Connect() (*sql.DB, error) {
dataDir := config.Get().Data.Directory
if dataDir == "" {
return nil, fmt.Errorf("data.dir is not set")
}
if err := os.MkdirAll(dataDir, 0o700); err != nil {
return nil, fmt.Errorf("failed to create data directory: %w", err)
}
dbPath := filepath.Join(dataDir, "opencode.db")
// Open the SQLite database
db, err := sql.Open("sqlite3", dbPath)
if err != nil {
return nil, fmt.Errorf("failed to open database: %w", err)
}
// Verify connection
if err = db.Ping(); err != nil {
db.Close()
return nil, fmt.Errorf("failed to connect to database: %w", err)
}
// Set pragmas for better performance
pragmas := []string{
"PRAGMA foreign_keys = ON;",
"PRAGMA journal_mode = WAL;",
"PRAGMA page_size = 4096;",
"PRAGMA cache_size = -8000;",
"PRAGMA synchronous = NORMAL;",
}
for _, pragma := range pragmas {
if _, err = db.Exec(pragma); err != nil {
logging.Error("Failed to set pragma", pragma, err)
} else {
logging.Debug("Set pragma", "pragma", pragma)
}
}
// Initialize schema from embedded file
d, err := iofs.New(FS, "migrations")
if err != nil {
logging.Error("Failed to open embedded migrations", "error", err)
db.Close()
return nil, fmt.Errorf("failed to open embedded migrations: %w", err)
}
driver, err := sqlite3.WithInstance(db, &sqlite3.Config{})
if err != nil {
logging.Error("Failed to create SQLite driver", "error", err)
db.Close()
return nil, fmt.Errorf("failed to create SQLite driver: %w", err)
}
m, err := migrate.NewWithInstance("iofs", d, "ql", driver)
if err != nil {
logging.Error("Failed to create migration instance", "error", err)
db.Close()
return nil, fmt.Errorf("failed to create migration instance: %w", err)
}
err = m.Up()
if err != nil && err != migrate.ErrNoChange {
logging.Error("Migration failed", "error", err)
db.Close()
return nil, fmt.Errorf("failed to apply schema: %w", err)
} else if err == migrate.ErrNoChange {
logging.Info("No schema changes to apply")
} else {
logging.Info("Schema migration applied successfully")
}
return db, nil
}
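A minimal sketch, assuming config.Load has already run, of how Connect is presumably consumed together with the sqlc-generated wrapper defined in the next file; the function name here is hypothetical:

package main

import (
	"context"
	"log"

	"github.com/kujtimiihoxha/opencode/internal/db"
)

// listSessionTitles opens the SQLite database, wraps it with the sqlc
// Queries type, and prints the title of every top-level session.
func listSessionTitles(ctx context.Context) {
	conn, err := db.Connect()
	if err != nil {
		log.Fatalf("connect: %v", err)
	}
	defer conn.Close()

	queries := db.New(conn)
	sessions, err := queries.ListSessions(ctx)
	if err != nil {
		log.Fatalf("list sessions: %v", err)
	}
	for _, s := range sessions {
		log.Printf("%s: %s", s.ID, s.Title)
	}
}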

View File

@@ -1,288 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package db
import (
"context"
"database/sql"
"fmt"
)
type DBTX interface {
ExecContext(context.Context, string, ...interface{}) (sql.Result, error)
PrepareContext(context.Context, string) (*sql.Stmt, error)
QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error)
QueryRowContext(context.Context, string, ...interface{}) *sql.Row
}
func New(db DBTX) *Queries {
return &Queries{db: db}
}
func Prepare(ctx context.Context, db DBTX) (*Queries, error) {
q := Queries{db: db}
var err error
if q.createFileStmt, err = db.PrepareContext(ctx, createFile); err != nil {
return nil, fmt.Errorf("error preparing query CreateFile: %w", err)
}
if q.createMessageStmt, err = db.PrepareContext(ctx, createMessage); err != nil {
return nil, fmt.Errorf("error preparing query CreateMessage: %w", err)
}
if q.createSessionStmt, err = db.PrepareContext(ctx, createSession); err != nil {
return nil, fmt.Errorf("error preparing query CreateSession: %w", err)
}
if q.deleteFileStmt, err = db.PrepareContext(ctx, deleteFile); err != nil {
return nil, fmt.Errorf("error preparing query DeleteFile: %w", err)
}
if q.deleteMessageStmt, err = db.PrepareContext(ctx, deleteMessage); err != nil {
return nil, fmt.Errorf("error preparing query DeleteMessage: %w", err)
}
if q.deleteSessionStmt, err = db.PrepareContext(ctx, deleteSession); err != nil {
return nil, fmt.Errorf("error preparing query DeleteSession: %w", err)
}
if q.deleteSessionFilesStmt, err = db.PrepareContext(ctx, deleteSessionFiles); err != nil {
return nil, fmt.Errorf("error preparing query DeleteSessionFiles: %w", err)
}
if q.deleteSessionMessagesStmt, err = db.PrepareContext(ctx, deleteSessionMessages); err != nil {
return nil, fmt.Errorf("error preparing query DeleteSessionMessages: %w", err)
}
if q.getFileStmt, err = db.PrepareContext(ctx, getFile); err != nil {
return nil, fmt.Errorf("error preparing query GetFile: %w", err)
}
if q.getFileByPathAndSessionStmt, err = db.PrepareContext(ctx, getFileByPathAndSession); err != nil {
return nil, fmt.Errorf("error preparing query GetFileByPathAndSession: %w", err)
}
if q.getMessageStmt, err = db.PrepareContext(ctx, getMessage); err != nil {
return nil, fmt.Errorf("error preparing query GetMessage: %w", err)
}
if q.getSessionByIDStmt, err = db.PrepareContext(ctx, getSessionByID); err != nil {
return nil, fmt.Errorf("error preparing query GetSessionByID: %w", err)
}
if q.listFilesByPathStmt, err = db.PrepareContext(ctx, listFilesByPath); err != nil {
return nil, fmt.Errorf("error preparing query ListFilesByPath: %w", err)
}
if q.listFilesBySessionStmt, err = db.PrepareContext(ctx, listFilesBySession); err != nil {
return nil, fmt.Errorf("error preparing query ListFilesBySession: %w", err)
}
if q.listLatestSessionFilesStmt, err = db.PrepareContext(ctx, listLatestSessionFiles); err != nil {
return nil, fmt.Errorf("error preparing query ListLatestSessionFiles: %w", err)
}
if q.listMessagesBySessionStmt, err = db.PrepareContext(ctx, listMessagesBySession); err != nil {
return nil, fmt.Errorf("error preparing query ListMessagesBySession: %w", err)
}
if q.listNewFilesStmt, err = db.PrepareContext(ctx, listNewFiles); err != nil {
return nil, fmt.Errorf("error preparing query ListNewFiles: %w", err)
}
if q.listSessionsStmt, err = db.PrepareContext(ctx, listSessions); err != nil {
return nil, fmt.Errorf("error preparing query ListSessions: %w", err)
}
if q.updateFileStmt, err = db.PrepareContext(ctx, updateFile); err != nil {
return nil, fmt.Errorf("error preparing query UpdateFile: %w", err)
}
if q.updateMessageStmt, err = db.PrepareContext(ctx, updateMessage); err != nil {
return nil, fmt.Errorf("error preparing query UpdateMessage: %w", err)
}
if q.updateSessionStmt, err = db.PrepareContext(ctx, updateSession); err != nil {
return nil, fmt.Errorf("error preparing query UpdateSession: %w", err)
}
return &q, nil
}
func (q *Queries) Close() error {
var err error
if q.createFileStmt != nil {
if cerr := q.createFileStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing createFileStmt: %w", cerr)
}
}
if q.createMessageStmt != nil {
if cerr := q.createMessageStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing createMessageStmt: %w", cerr)
}
}
if q.createSessionStmt != nil {
if cerr := q.createSessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing createSessionStmt: %w", cerr)
}
}
if q.deleteFileStmt != nil {
if cerr := q.deleteFileStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteFileStmt: %w", cerr)
}
}
if q.deleteMessageStmt != nil {
if cerr := q.deleteMessageStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteMessageStmt: %w", cerr)
}
}
if q.deleteSessionStmt != nil {
if cerr := q.deleteSessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteSessionStmt: %w", cerr)
}
}
if q.deleteSessionFilesStmt != nil {
if cerr := q.deleteSessionFilesStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteSessionFilesStmt: %w", cerr)
}
}
if q.deleteSessionMessagesStmt != nil {
if cerr := q.deleteSessionMessagesStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteSessionMessagesStmt: %w", cerr)
}
}
if q.getFileStmt != nil {
if cerr := q.getFileStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing getFileStmt: %w", cerr)
}
}
if q.getFileByPathAndSessionStmt != nil {
if cerr := q.getFileByPathAndSessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing getFileByPathAndSessionStmt: %w", cerr)
}
}
if q.getMessageStmt != nil {
if cerr := q.getMessageStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing getMessageStmt: %w", cerr)
}
}
if q.getSessionByIDStmt != nil {
if cerr := q.getSessionByIDStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing getSessionByIDStmt: %w", cerr)
}
}
if q.listFilesByPathStmt != nil {
if cerr := q.listFilesByPathStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listFilesByPathStmt: %w", cerr)
}
}
if q.listFilesBySessionStmt != nil {
if cerr := q.listFilesBySessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listFilesBySessionStmt: %w", cerr)
}
}
if q.listLatestSessionFilesStmt != nil {
if cerr := q.listLatestSessionFilesStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listLatestSessionFilesStmt: %w", cerr)
}
}
if q.listMessagesBySessionStmt != nil {
if cerr := q.listMessagesBySessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listMessagesBySessionStmt: %w", cerr)
}
}
if q.listNewFilesStmt != nil {
if cerr := q.listNewFilesStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listNewFilesStmt: %w", cerr)
}
}
if q.listSessionsStmt != nil {
if cerr := q.listSessionsStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listSessionsStmt: %w", cerr)
}
}
if q.updateFileStmt != nil {
if cerr := q.updateFileStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing updateFileStmt: %w", cerr)
}
}
if q.updateMessageStmt != nil {
if cerr := q.updateMessageStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing updateMessageStmt: %w", cerr)
}
}
if q.updateSessionStmt != nil {
if cerr := q.updateSessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing updateSessionStmt: %w", cerr)
}
}
return err
}
func (q *Queries) exec(ctx context.Context, stmt *sql.Stmt, query string, args ...interface{}) (sql.Result, error) {
switch {
case stmt != nil && q.tx != nil:
return q.tx.StmtContext(ctx, stmt).ExecContext(ctx, args...)
case stmt != nil:
return stmt.ExecContext(ctx, args...)
default:
return q.db.ExecContext(ctx, query, args...)
}
}
func (q *Queries) query(ctx context.Context, stmt *sql.Stmt, query string, args ...interface{}) (*sql.Rows, error) {
switch {
case stmt != nil && q.tx != nil:
return q.tx.StmtContext(ctx, stmt).QueryContext(ctx, args...)
case stmt != nil:
return stmt.QueryContext(ctx, args...)
default:
return q.db.QueryContext(ctx, query, args...)
}
}
func (q *Queries) queryRow(ctx context.Context, stmt *sql.Stmt, query string, args ...interface{}) *sql.Row {
switch {
case stmt != nil && q.tx != nil:
return q.tx.StmtContext(ctx, stmt).QueryRowContext(ctx, args...)
case stmt != nil:
return stmt.QueryRowContext(ctx, args...)
default:
return q.db.QueryRowContext(ctx, query, args...)
}
}
type Queries struct {
db DBTX
tx *sql.Tx
createFileStmt *sql.Stmt
createMessageStmt *sql.Stmt
createSessionStmt *sql.Stmt
deleteFileStmt *sql.Stmt
deleteMessageStmt *sql.Stmt
deleteSessionStmt *sql.Stmt
deleteSessionFilesStmt *sql.Stmt
deleteSessionMessagesStmt *sql.Stmt
getFileStmt *sql.Stmt
getFileByPathAndSessionStmt *sql.Stmt
getMessageStmt *sql.Stmt
getSessionByIDStmt *sql.Stmt
listFilesByPathStmt *sql.Stmt
listFilesBySessionStmt *sql.Stmt
listLatestSessionFilesStmt *sql.Stmt
listMessagesBySessionStmt *sql.Stmt
listNewFilesStmt *sql.Stmt
listSessionsStmt *sql.Stmt
updateFileStmt *sql.Stmt
updateMessageStmt *sql.Stmt
updateSessionStmt *sql.Stmt
}
func (q *Queries) WithTx(tx *sql.Tx) *Queries {
return &Queries{
db: tx,
tx: tx,
createFileStmt: q.createFileStmt,
createMessageStmt: q.createMessageStmt,
createSessionStmt: q.createSessionStmt,
deleteFileStmt: q.deleteFileStmt,
deleteMessageStmt: q.deleteMessageStmt,
deleteSessionStmt: q.deleteSessionStmt,
deleteSessionFilesStmt: q.deleteSessionFilesStmt,
deleteSessionMessagesStmt: q.deleteSessionMessagesStmt,
getFileStmt: q.getFileStmt,
getFileByPathAndSessionStmt: q.getFileByPathAndSessionStmt,
getMessageStmt: q.getMessageStmt,
getSessionByIDStmt: q.getSessionByIDStmt,
listFilesByPathStmt: q.listFilesByPathStmt,
listFilesBySessionStmt: q.listFilesBySessionStmt,
listLatestSessionFilesStmt: q.listLatestSessionFilesStmt,
listMessagesBySessionStmt: q.listMessagesBySessionStmt,
listNewFilesStmt: q.listNewFilesStmt,
listSessionsStmt: q.listSessionsStmt,
updateFileStmt: q.updateFileStmt,
updateMessageStmt: q.updateMessageStmt,
updateSessionStmt: q.updateSessionStmt,
}
}
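A short sketch of the usual sqlc transaction pattern built on WithTx: derive transactional Queries from a *sql.Tx and commit only if every step succeeds. The helper name is hypothetical:

package main

import (
	"context"
	"database/sql"

	"github.com/kujtimiihoxha/opencode/internal/db"
)

// deleteSessionTx removes a session together with its messages and files
// atomically: all three deletes run on the same transaction.
func deleteSessionTx(ctx context.Context, conn *sql.DB, q *db.Queries, sessionID string) error {
	tx, err := conn.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer tx.Rollback() // harmless no-op once Commit has succeeded

	qtx := q.WithTx(tx)
	if err := qtx.DeleteSessionMessages(ctx, sessionID); err != nil {
		return err
	}
	if err := qtx.DeleteSessionFiles(ctx, sessionID); err != nil {
		return err
	}
	if err := qtx.DeleteSession(ctx, sessionID); err != nil {
		return err
	}
	return tx.Commit()
}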

View File

@@ -1,6 +0,0 @@
package db
import "embed"
//go:embed migrations/*.sql
var FS embed.FS

View File

@@ -1,311 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
// source: files.sql
package db
import (
"context"
)
const createFile = `-- name: CreateFile :one
INSERT INTO files (
id,
session_id,
path,
content,
version,
created_at,
updated_at
) VALUES (
?, ?, ?, ?, ?, strftime('%s', 'now'), strftime('%s', 'now')
)
RETURNING id, session_id, path, content, version, created_at, updated_at
`
type CreateFileParams struct {
ID string `json:"id"`
SessionID string `json:"session_id"`
Path string `json:"path"`
Content string `json:"content"`
Version string `json:"version"`
}
func (q *Queries) CreateFile(ctx context.Context, arg CreateFileParams) (File, error) {
row := q.queryRow(ctx, q.createFileStmt, createFile,
arg.ID,
arg.SessionID,
arg.Path,
arg.Content,
arg.Version,
)
var i File
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
)
return i, err
}
const deleteFile = `-- name: DeleteFile :exec
DELETE FROM files
WHERE id = ?
`
func (q *Queries) DeleteFile(ctx context.Context, id string) error {
_, err := q.exec(ctx, q.deleteFileStmt, deleteFile, id)
return err
}
const deleteSessionFiles = `-- name: DeleteSessionFiles :exec
DELETE FROM files
WHERE session_id = ?
`
func (q *Queries) DeleteSessionFiles(ctx context.Context, sessionID string) error {
_, err := q.exec(ctx, q.deleteSessionFilesStmt, deleteSessionFiles, sessionID)
return err
}
const getFile = `-- name: GetFile :one
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE id = ? LIMIT 1
`
func (q *Queries) GetFile(ctx context.Context, id string) (File, error) {
row := q.queryRow(ctx, q.getFileStmt, getFile, id)
var i File
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
)
return i, err
}
const getFileByPathAndSession = `-- name: GetFileByPathAndSession :one
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE path = ? AND session_id = ?
ORDER BY created_at DESC
LIMIT 1
`
type GetFileByPathAndSessionParams struct {
Path string `json:"path"`
SessionID string `json:"session_id"`
}
func (q *Queries) GetFileByPathAndSession(ctx context.Context, arg GetFileByPathAndSessionParams) (File, error) {
row := q.queryRow(ctx, q.getFileByPathAndSessionStmt, getFileByPathAndSession, arg.Path, arg.SessionID)
var i File
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
)
return i, err
}
const listFilesByPath = `-- name: ListFilesByPath :many
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE path = ?
ORDER BY created_at DESC
`
func (q *Queries) ListFilesByPath(ctx context.Context, path string) ([]File, error) {
rows, err := q.query(ctx, q.listFilesByPathStmt, listFilesByPath, path)
if err != nil {
return nil, err
}
defer rows.Close()
items := []File{}
for rows.Next() {
var i File
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const listFilesBySession = `-- name: ListFilesBySession :many
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE session_id = ?
ORDER BY created_at ASC
`
func (q *Queries) ListFilesBySession(ctx context.Context, sessionID string) ([]File, error) {
rows, err := q.query(ctx, q.listFilesBySessionStmt, listFilesBySession, sessionID)
if err != nil {
return nil, err
}
defer rows.Close()
items := []File{}
for rows.Next() {
var i File
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const listLatestSessionFiles = `-- name: ListLatestSessionFiles :many
SELECT f.id, f.session_id, f.path, f.content, f.version, f.created_at, f.updated_at
FROM files f
INNER JOIN (
SELECT path, MAX(created_at) as max_created_at
FROM files
GROUP BY path
) latest ON f.path = latest.path AND f.created_at = latest.max_created_at
WHERE f.session_id = ?
ORDER BY f.path
`
func (q *Queries) ListLatestSessionFiles(ctx context.Context, sessionID string) ([]File, error) {
rows, err := q.query(ctx, q.listLatestSessionFilesStmt, listLatestSessionFiles, sessionID)
if err != nil {
return nil, err
}
defer rows.Close()
items := []File{}
for rows.Next() {
var i File
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const listNewFiles = `-- name: ListNewFiles :many
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE is_new = 1
ORDER BY created_at DESC
`
func (q *Queries) ListNewFiles(ctx context.Context) ([]File, error) {
rows, err := q.query(ctx, q.listNewFilesStmt, listNewFiles)
if err != nil {
return nil, err
}
defer rows.Close()
items := []File{}
for rows.Next() {
var i File
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const updateFile = `-- name: UpdateFile :one
UPDATE files
SET
content = ?,
version = ?,
updated_at = strftime('%s', 'now')
WHERE id = ?
RETURNING id, session_id, path, content, version, created_at, updated_at
`
type UpdateFileParams struct {
Content string `json:"content"`
Version string `json:"version"`
ID string `json:"id"`
}
func (q *Queries) UpdateFile(ctx context.Context, arg UpdateFileParams) (File, error) {
row := q.queryRow(ctx, q.updateFileStmt, updateFile, arg.Content, arg.Version, arg.ID)
var i File
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
)
return i, err
}

View File

@@ -1,157 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
// source: messages.sql
package db
import (
"context"
"database/sql"
)
const createMessage = `-- name: CreateMessage :one
INSERT INTO messages (
id,
session_id,
role,
parts,
model,
created_at,
updated_at
) VALUES (
?, ?, ?, ?, ?, strftime('%s', 'now'), strftime('%s', 'now')
)
RETURNING id, session_id, role, parts, model, created_at, updated_at, finished_at
`
type CreateMessageParams struct {
ID string `json:"id"`
SessionID string `json:"session_id"`
Role string `json:"role"`
Parts string `json:"parts"`
Model sql.NullString `json:"model"`
}
func (q *Queries) CreateMessage(ctx context.Context, arg CreateMessageParams) (Message, error) {
row := q.queryRow(ctx, q.createMessageStmt, createMessage,
arg.ID,
arg.SessionID,
arg.Role,
arg.Parts,
arg.Model,
)
var i Message
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Role,
&i.Parts,
&i.Model,
&i.CreatedAt,
&i.UpdatedAt,
&i.FinishedAt,
)
return i, err
}
const deleteMessage = `-- name: DeleteMessage :exec
DELETE FROM messages
WHERE id = ?
`
func (q *Queries) DeleteMessage(ctx context.Context, id string) error {
_, err := q.exec(ctx, q.deleteMessageStmt, deleteMessage, id)
return err
}
const deleteSessionMessages = `-- name: DeleteSessionMessages :exec
DELETE FROM messages
WHERE session_id = ?
`
func (q *Queries) DeleteSessionMessages(ctx context.Context, sessionID string) error {
_, err := q.exec(ctx, q.deleteSessionMessagesStmt, deleteSessionMessages, sessionID)
return err
}
const getMessage = `-- name: GetMessage :one
SELECT id, session_id, role, parts, model, created_at, updated_at, finished_at
FROM messages
WHERE id = ? LIMIT 1
`
func (q *Queries) GetMessage(ctx context.Context, id string) (Message, error) {
row := q.queryRow(ctx, q.getMessageStmt, getMessage, id)
var i Message
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Role,
&i.Parts,
&i.Model,
&i.CreatedAt,
&i.UpdatedAt,
&i.FinishedAt,
)
return i, err
}
const listMessagesBySession = `-- name: ListMessagesBySession :many
SELECT id, session_id, role, parts, model, created_at, updated_at, finished_at
FROM messages
WHERE session_id = ?
ORDER BY created_at ASC
`
func (q *Queries) ListMessagesBySession(ctx context.Context, sessionID string) ([]Message, error) {
rows, err := q.query(ctx, q.listMessagesBySessionStmt, listMessagesBySession, sessionID)
if err != nil {
return nil, err
}
defer rows.Close()
items := []Message{}
for rows.Next() {
var i Message
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Role,
&i.Parts,
&i.Model,
&i.CreatedAt,
&i.UpdatedAt,
&i.FinishedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const updateMessage = `-- name: UpdateMessage :exec
UPDATE messages
SET
parts = ?,
finished_at = ?,
updated_at = strftime('%s', 'now')
WHERE id = ?
`
type UpdateMessageParams struct {
Parts string `json:"parts"`
FinishedAt sql.NullInt64 `json:"finished_at"`
ID string `json:"id"`
}
func (q *Queries) UpdateMessage(ctx context.Context, arg UpdateMessageParams) error {
_, err := q.exec(ctx, q.updateMessageStmt, updateMessage, arg.Parts, arg.FinishedAt, arg.ID)
return err
}

View File

@@ -1,10 +0,0 @@
DROP TRIGGER IF EXISTS update_sessions_updated_at;
DROP TRIGGER IF EXISTS update_messages_updated_at;
DROP TRIGGER IF EXISTS update_files_updated_at;
DROP TRIGGER IF EXISTS update_session_message_count_on_delete;
DROP TRIGGER IF EXISTS update_session_message_count_on_insert;
DROP TABLE IF EXISTS sessions;
DROP TABLE IF EXISTS messages;
DROP TABLE IF EXISTS files;

View File

@@ -1,80 +0,0 @@
-- Sessions
CREATE TABLE IF NOT EXISTS sessions (
id TEXT PRIMARY KEY,
parent_session_id TEXT,
title TEXT NOT NULL,
message_count INTEGER NOT NULL DEFAULT 0 CHECK (message_count >= 0),
prompt_tokens INTEGER NOT NULL DEFAULT 0 CHECK (prompt_tokens >= 0),
completion_tokens INTEGER NOT NULL DEFAULT 0 CHECK (completion_tokens >= 0),
cost REAL NOT NULL DEFAULT 0.0 CHECK (cost >= 0.0),
updated_at INTEGER NOT NULL, -- Unix timestamp in seconds
created_at INTEGER NOT NULL -- Unix timestamp in seconds
);
CREATE TRIGGER IF NOT EXISTS update_sessions_updated_at
AFTER UPDATE ON sessions
BEGIN
UPDATE sessions SET updated_at = strftime('%s', 'now')
WHERE id = new.id;
END;
-- Files
CREATE TABLE IF NOT EXISTS files (
id TEXT PRIMARY KEY,
session_id TEXT NOT NULL,
path TEXT NOT NULL,
content TEXT NOT NULL,
version TEXT NOT NULL,
created_at INTEGER NOT NULL, -- Unix timestamp in seconds
updated_at INTEGER NOT NULL, -- Unix timestamp in seconds
FOREIGN KEY (session_id) REFERENCES sessions (id) ON DELETE CASCADE,
UNIQUE(path, session_id, version)
);
CREATE INDEX IF NOT EXISTS idx_files_session_id ON files (session_id);
CREATE INDEX IF NOT EXISTS idx_files_path ON files (path);
CREATE TRIGGER IF NOT EXISTS update_files_updated_at
AFTER UPDATE ON files
BEGIN
UPDATE files SET updated_at = strftime('%s', 'now')
WHERE id = new.id;
END;
-- Messages
CREATE TABLE IF NOT EXISTS messages (
id TEXT PRIMARY KEY,
session_id TEXT NOT NULL,
role TEXT NOT NULL,
parts TEXT NOT NULL default '[]',
model TEXT,
created_at INTEGER NOT NULL, -- Unix timestamp in seconds
updated_at INTEGER NOT NULL, -- Unix timestamp in seconds
finished_at INTEGER, -- Unix timestamp in milliseconds
FOREIGN KEY (session_id) REFERENCES sessions (id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_messages_session_id ON messages (session_id);
CREATE TRIGGER IF NOT EXISTS update_messages_updated_at
AFTER UPDATE ON messages
BEGIN
UPDATE messages SET updated_at = strftime('%s', 'now')
WHERE id = new.id;
END;
CREATE TRIGGER IF NOT EXISTS update_session_message_count_on_insert
AFTER INSERT ON messages
BEGIN
UPDATE sessions SET
message_count = message_count + 1
WHERE id = new.session_id;
END;
CREATE TRIGGER IF NOT EXISTS update_session_message_count_on_delete
AFTER DELETE ON messages
BEGIN
UPDATE sessions SET
message_count = message_count - 1
WHERE id = old.session_id;
END;

View File

@@ -1,42 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package db
import (
"database/sql"
)
type File struct {
ID string `json:"id"`
SessionID string `json:"session_id"`
Path string `json:"path"`
Content string `json:"content"`
Version string `json:"version"`
CreatedAt int64 `json:"created_at"`
UpdatedAt int64 `json:"updated_at"`
}
type Message struct {
ID string `json:"id"`
SessionID string `json:"session_id"`
Role string `json:"role"`
Parts string `json:"parts"`
Model sql.NullString `json:"model"`
CreatedAt int64 `json:"created_at"`
UpdatedAt int64 `json:"updated_at"`
FinishedAt sql.NullInt64 `json:"finished_at"`
}
type Session struct {
ID string `json:"id"`
ParentSessionID sql.NullString `json:"parent_session_id"`
Title string `json:"title"`
MessageCount int64 `json:"message_count"`
PromptTokens int64 `json:"prompt_tokens"`
CompletionTokens int64 `json:"completion_tokens"`
Cost float64 `json:"cost"`
UpdatedAt int64 `json:"updated_at"`
CreatedAt int64 `json:"created_at"`
}

View File

@@ -1,35 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package db
import (
"context"
)
type Querier interface {
CreateFile(ctx context.Context, arg CreateFileParams) (File, error)
CreateMessage(ctx context.Context, arg CreateMessageParams) (Message, error)
CreateSession(ctx context.Context, arg CreateSessionParams) (Session, error)
DeleteFile(ctx context.Context, id string) error
DeleteMessage(ctx context.Context, id string) error
DeleteSession(ctx context.Context, id string) error
DeleteSessionFiles(ctx context.Context, sessionID string) error
DeleteSessionMessages(ctx context.Context, sessionID string) error
GetFile(ctx context.Context, id string) (File, error)
GetFileByPathAndSession(ctx context.Context, arg GetFileByPathAndSessionParams) (File, error)
GetMessage(ctx context.Context, id string) (Message, error)
GetSessionByID(ctx context.Context, id string) (Session, error)
ListFilesByPath(ctx context.Context, path string) ([]File, error)
ListFilesBySession(ctx context.Context, sessionID string) ([]File, error)
ListLatestSessionFiles(ctx context.Context, sessionID string) ([]File, error)
ListMessagesBySession(ctx context.Context, sessionID string) ([]Message, error)
ListNewFiles(ctx context.Context) ([]File, error)
ListSessions(ctx context.Context) ([]Session, error)
UpdateFile(ctx context.Context, arg UpdateFileParams) (File, error)
UpdateMessage(ctx context.Context, arg UpdateMessageParams) error
UpdateSession(ctx context.Context, arg UpdateSessionParams) (Session, error)
}
var _ Querier = (*Queries)(nil)

View File

@@ -1,185 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
// source: sessions.sql
package db
import (
"context"
"database/sql"
)
const createSession = `-- name: CreateSession :one
INSERT INTO sessions (
id,
parent_session_id,
title,
message_count,
prompt_tokens,
completion_tokens,
cost,
updated_at,
created_at
) VALUES (
?,
?,
?,
?,
?,
?,
?,
strftime('%s', 'now'),
strftime('%s', 'now')
) RETURNING id, parent_session_id, title, message_count, prompt_tokens, completion_tokens, cost, updated_at, created_at
`
type CreateSessionParams struct {
ID string `json:"id"`
ParentSessionID sql.NullString `json:"parent_session_id"`
Title string `json:"title"`
MessageCount int64 `json:"message_count"`
PromptTokens int64 `json:"prompt_tokens"`
CompletionTokens int64 `json:"completion_tokens"`
Cost float64 `json:"cost"`
}
func (q *Queries) CreateSession(ctx context.Context, arg CreateSessionParams) (Session, error) {
row := q.queryRow(ctx, q.createSessionStmt, createSession,
arg.ID,
arg.ParentSessionID,
arg.Title,
arg.MessageCount,
arg.PromptTokens,
arg.CompletionTokens,
arg.Cost,
)
var i Session
err := row.Scan(
&i.ID,
&i.ParentSessionID,
&i.Title,
&i.MessageCount,
&i.PromptTokens,
&i.CompletionTokens,
&i.Cost,
&i.UpdatedAt,
&i.CreatedAt,
)
return i, err
}
const deleteSession = `-- name: DeleteSession :exec
DELETE FROM sessions
WHERE id = ?
`
func (q *Queries) DeleteSession(ctx context.Context, id string) error {
_, err := q.exec(ctx, q.deleteSessionStmt, deleteSession, id)
return err
}
const getSessionByID = `-- name: GetSessionByID :one
SELECT id, parent_session_id, title, message_count, prompt_tokens, completion_tokens, cost, updated_at, created_at
FROM sessions
WHERE id = ? LIMIT 1
`
func (q *Queries) GetSessionByID(ctx context.Context, id string) (Session, error) {
row := q.queryRow(ctx, q.getSessionByIDStmt, getSessionByID, id)
var i Session
err := row.Scan(
&i.ID,
&i.ParentSessionID,
&i.Title,
&i.MessageCount,
&i.PromptTokens,
&i.CompletionTokens,
&i.Cost,
&i.UpdatedAt,
&i.CreatedAt,
)
return i, err
}
const listSessions = `-- name: ListSessions :many
SELECT id, parent_session_id, title, message_count, prompt_tokens, completion_tokens, cost, updated_at, created_at
FROM sessions
WHERE parent_session_id is NULL
ORDER BY created_at DESC
`
func (q *Queries) ListSessions(ctx context.Context) ([]Session, error) {
rows, err := q.query(ctx, q.listSessionsStmt, listSessions)
if err != nil {
return nil, err
}
defer rows.Close()
items := []Session{}
for rows.Next() {
var i Session
if err := rows.Scan(
&i.ID,
&i.ParentSessionID,
&i.Title,
&i.MessageCount,
&i.PromptTokens,
&i.CompletionTokens,
&i.Cost,
&i.UpdatedAt,
&i.CreatedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const updateSession = `-- name: UpdateSession :one
UPDATE sessions
SET
title = ?,
prompt_tokens = ?,
completion_tokens = ?,
cost = ?
WHERE id = ?
RETURNING id, parent_session_id, title, message_count, prompt_tokens, completion_tokens, cost, updated_at, created_at
`
type UpdateSessionParams struct {
Title string `json:"title"`
PromptTokens int64 `json:"prompt_tokens"`
CompletionTokens int64 `json:"completion_tokens"`
Cost float64 `json:"cost"`
ID string `json:"id"`
}
func (q *Queries) UpdateSession(ctx context.Context, arg UpdateSessionParams) (Session, error) {
row := q.queryRow(ctx, q.updateSessionStmt, updateSession,
arg.Title,
arg.PromptTokens,
arg.CompletionTokens,
arg.Cost,
arg.ID,
)
var i Session
err := row.Scan(
&i.ID,
&i.ParentSessionID,
&i.Title,
&i.MessageCount,
&i.PromptTokens,
&i.CompletionTokens,
&i.Cost,
&i.UpdatedAt,
&i.CreatedAt,
)
return i, err
}

View File

@@ -1,71 +0,0 @@
-- name: GetFile :one
SELECT *
FROM files
WHERE id = ? LIMIT 1;
-- name: GetFileByPathAndSession :one
SELECT *
FROM files
WHERE path = ? AND session_id = ?
ORDER BY created_at DESC
LIMIT 1;
-- name: ListFilesBySession :many
SELECT *
FROM files
WHERE session_id = ?
ORDER BY created_at ASC;
-- name: ListFilesByPath :many
SELECT *
FROM files
WHERE path = ?
ORDER BY created_at DESC;
-- name: CreateFile :one
INSERT INTO files (
id,
session_id,
path,
content,
version,
created_at,
updated_at
) VALUES (
?, ?, ?, ?, ?, strftime('%s', 'now'), strftime('%s', 'now')
)
RETURNING *;
-- name: UpdateFile :one
UPDATE files
SET
content = ?,
version = ?,
updated_at = strftime('%s', 'now')
WHERE id = ?
RETURNING *;
-- name: DeleteFile :exec
DELETE FROM files
WHERE id = ?;
-- name: DeleteSessionFiles :exec
DELETE FROM files
WHERE session_id = ?;
-- name: ListLatestSessionFiles :many
SELECT f.*
FROM files f
INNER JOIN (
SELECT path, MAX(created_at) as max_created_at
FROM files
GROUP BY path
) latest ON f.path = latest.path AND f.created_at = latest.max_created_at
WHERE f.session_id = ?
ORDER BY f.path;
-- name: ListNewFiles :many
SELECT *
FROM files
WHERE is_new = 1
ORDER BY created_at DESC;

View File

@@ -1,41 +0,0 @@
-- name: GetMessage :one
SELECT *
FROM messages
WHERE id = ? LIMIT 1;
-- name: ListMessagesBySession :many
SELECT *
FROM messages
WHERE session_id = ?
ORDER BY created_at ASC;
-- name: CreateMessage :one
INSERT INTO messages (
id,
session_id,
role,
parts,
model,
created_at,
updated_at
) VALUES (
?, ?, ?, ?, ?, strftime('%s', 'now'), strftime('%s', 'now')
)
RETURNING *;
-- name: UpdateMessage :exec
UPDATE messages
SET
parts = ?,
finished_at = ?,
updated_at = strftime('%s', 'now')
WHERE id = ?;
-- name: DeleteMessage :exec
DELETE FROM messages
WHERE id = ?;
-- name: DeleteSessionMessages :exec
DELETE FROM messages
WHERE session_id = ?;

View File

@@ -1,48 +0,0 @@
-- name: CreateSession :one
INSERT INTO sessions (
id,
parent_session_id,
title,
message_count,
prompt_tokens,
completion_tokens,
cost,
updated_at,
created_at
) VALUES (
?,
?,
?,
?,
?,
?,
?,
strftime('%s', 'now'),
strftime('%s', 'now')
) RETURNING *;
-- name: GetSessionByID :one
SELECT *
FROM sessions
WHERE id = ? LIMIT 1;
-- name: ListSessions :many
SELECT *
FROM sessions
WHERE parent_session_id is NULL
ORDER BY created_at DESC;
-- name: UpdateSession :one
UPDATE sessions
SET
title = ?,
prompt_tokens = ?,
completion_tokens = ?,
cost = ?
WHERE id = ?
RETURNING *;
-- name: DeleteSession :exec
DELETE FROM sessions
WHERE id = ?;

File diff suppressed because it is too large

View File

@@ -1,740 +0,0 @@
package diff
import (
"errors"
"fmt"
"os"
"path/filepath"
"strings"
)
type ActionType string
const (
ActionAdd ActionType = "add"
ActionDelete ActionType = "delete"
ActionUpdate ActionType = "update"
)
type FileChange struct {
Type ActionType
OldContent *string
NewContent *string
MovePath *string
}
type Commit struct {
Changes map[string]FileChange
}
type Chunk struct {
OrigIndex int // line index of the first line in the original file
DelLines []string // lines to delete
InsLines []string // lines to insert
}
type PatchAction struct {
Type ActionType
NewFile *string
Chunks []Chunk
MovePath *string
}
type Patch struct {
Actions map[string]PatchAction
}
type DiffError struct {
message string
}
func (e DiffError) Error() string {
return e.message
}
// Helper functions for error handling
func NewDiffError(message string) DiffError {
return DiffError{message: message}
}
func fileError(action, reason, path string) DiffError {
return NewDiffError(fmt.Sprintf("%s File Error: %s: %s", action, reason, path))
}
func contextError(index int, context string, isEOF bool) DiffError {
prefix := "Invalid Context"
if isEOF {
prefix = "Invalid EOF Context"
}
return NewDiffError(fmt.Sprintf("%s %d:\n%s", prefix, index, context))
}
type Parser struct {
currentFiles map[string]string
lines []string
index int
patch Patch
fuzz int
}
func NewParser(currentFiles map[string]string, lines []string) *Parser {
return &Parser{
currentFiles: currentFiles,
lines: lines,
index: 0,
patch: Patch{Actions: make(map[string]PatchAction, len(currentFiles))},
fuzz: 0,
}
}
func (p *Parser) isDone(prefixes []string) bool {
if p.index >= len(p.lines) {
return true
}
for _, prefix := range prefixes {
if strings.HasPrefix(p.lines[p.index], prefix) {
return true
}
}
return false
}
func (p *Parser) startsWith(prefix any) bool {
var prefixes []string
switch v := prefix.(type) {
case string:
prefixes = []string{v}
case []string:
prefixes = v
}
for _, pfx := range prefixes {
if strings.HasPrefix(p.lines[p.index], pfx) {
return true
}
}
return false
}
func (p *Parser) readStr(prefix string, returnEverything bool) string {
if p.index >= len(p.lines) {
return "" // Changed from panic to return empty string for safer operation
}
if strings.HasPrefix(p.lines[p.index], prefix) {
var text string
if returnEverything {
text = p.lines[p.index]
} else {
text = p.lines[p.index][len(prefix):]
}
p.index++
return text
}
return ""
}
func (p *Parser) Parse() error {
endPatchPrefixes := []string{"*** End Patch"}
for !p.isDone(endPatchPrefixes) {
path := p.readStr("*** Update File: ", false)
if path != "" {
if _, exists := p.patch.Actions[path]; exists {
return fileError("Update", "Duplicate Path", path)
}
moveTo := p.readStr("*** Move to: ", false)
if _, exists := p.currentFiles[path]; !exists {
return fileError("Update", "Missing File", path)
}
text := p.currentFiles[path]
action, err := p.parseUpdateFile(text)
if err != nil {
return err
}
if moveTo != "" {
action.MovePath = &moveTo
}
p.patch.Actions[path] = action
continue
}
path = p.readStr("*** Delete File: ", false)
if path != "" {
if _, exists := p.patch.Actions[path]; exists {
return fileError("Delete", "Duplicate Path", path)
}
if _, exists := p.currentFiles[path]; !exists {
return fileError("Delete", "Missing File", path)
}
p.patch.Actions[path] = PatchAction{Type: ActionDelete, Chunks: []Chunk{}}
continue
}
path = p.readStr("*** Add File: ", false)
if path != "" {
if _, exists := p.patch.Actions[path]; exists {
return fileError("Add", "Duplicate Path", path)
}
if _, exists := p.currentFiles[path]; exists {
return fileError("Add", "File already exists", path)
}
action, err := p.parseAddFile()
if err != nil {
return err
}
p.patch.Actions[path] = action
continue
}
return NewDiffError(fmt.Sprintf("Unknown Line: %s", p.lines[p.index]))
}
if !p.startsWith("*** End Patch") {
return NewDiffError("Missing End Patch")
}
p.index++
return nil
}
func (p *Parser) parseUpdateFile(text string) (PatchAction, error) {
action := PatchAction{Type: ActionUpdate, Chunks: []Chunk{}}
fileLines := strings.Split(text, "\n")
index := 0
endPrefixes := []string{
"*** End Patch",
"*** Update File:",
"*** Delete File:",
"*** Add File:",
"*** End of File",
}
for !p.isDone(endPrefixes) {
defStr := p.readStr("@@ ", false)
sectionStr := ""
if defStr == "" && p.index < len(p.lines) && p.lines[p.index] == "@@" {
sectionStr = p.lines[p.index]
p.index++
}
if defStr == "" && sectionStr == "" && index != 0 {
return action, NewDiffError(fmt.Sprintf("Invalid Line:\n%s", p.lines[p.index]))
}
if strings.TrimSpace(defStr) != "" {
found := false
for i := range fileLines[:index] {
if fileLines[i] == defStr {
found = true
break
}
}
if !found {
for i := index; i < len(fileLines); i++ {
if fileLines[i] == defStr {
index = i + 1
found = true
break
}
}
}
if !found {
for i := range fileLines[:index] {
if strings.TrimSpace(fileLines[i]) == strings.TrimSpace(defStr) {
found = true
break
}
}
}
if !found {
for i := index; i < len(fileLines); i++ {
if strings.TrimSpace(fileLines[i]) == strings.TrimSpace(defStr) {
index = i + 1
p.fuzz++
found = true
break
}
}
}
}
nextChunkContext, chunks, endPatchIndex, eof := peekNextSection(p.lines, p.index)
newIndex, fuzz := findContext(fileLines, nextChunkContext, index, eof)
if newIndex == -1 {
ctxText := strings.Join(nextChunkContext, "\n")
return action, contextError(index, ctxText, eof)
}
p.fuzz += fuzz
for _, ch := range chunks {
ch.OrigIndex += newIndex
action.Chunks = append(action.Chunks, ch)
}
index = newIndex + len(nextChunkContext)
p.index = endPatchIndex
}
return action, nil
}
func (p *Parser) parseAddFile() (PatchAction, error) {
lines := make([]string, 0, 16) // Preallocate space for better performance
endPrefixes := []string{
"*** End Patch",
"*** Update File:",
"*** Delete File:",
"*** Add File:",
}
for !p.isDone(endPrefixes) {
s := p.readStr("", true)
if !strings.HasPrefix(s, "+") {
return PatchAction{}, NewDiffError(fmt.Sprintf("Invalid Add File Line: %s", s))
}
lines = append(lines, s[1:])
}
newFile := strings.Join(lines, "\n")
return PatchAction{
Type: ActionAdd,
NewFile: &newFile,
Chunks: []Chunk{},
}, nil
}
// Refactored to use a matcher function for each comparison type
func findContextCore(lines []string, context []string, start int) (int, int) {
if len(context) == 0 {
return start, 0
}
// Try exact match
if idx, fuzz := tryFindMatch(lines, context, start, func(a, b string) bool {
return a == b
}); idx >= 0 {
return idx, fuzz
}
// Try trimming right whitespace
if idx, fuzz := tryFindMatch(lines, context, start, func(a, b string) bool {
return strings.TrimRight(a, " \t") == strings.TrimRight(b, " \t")
}); idx >= 0 {
return idx, fuzz
}
// Try trimming all whitespace
if idx, fuzz := tryFindMatch(lines, context, start, func(a, b string) bool {
return strings.TrimSpace(a) == strings.TrimSpace(b)
}); idx >= 0 {
return idx, fuzz
}
return -1, 0
}
// Helper function to DRY up the match logic
func tryFindMatch(lines []string, context []string, start int,
compareFunc func(string, string) bool,
) (int, int) {
for i := start; i < len(lines); i++ {
if i+len(context) <= len(lines) {
match := true
for j := range context {
if !compareFunc(lines[i+j], context[j]) {
match = false
break
}
}
if match {
// Return fuzz level: 0 for exact, 1 for trimRight, 100 for trimSpace.
// Probe the matcher: one that also ignores leading whitespace is the
// TrimSpace matcher; one that only ignores trailing whitespace is TrimRight.
var fuzz int
if compareFunc(" a", "a") {
fuzz = 100
} else if compareFunc("a ", "a") {
fuzz = 1
}
return i, fuzz
}
}
}
return -1, 0
}
func findContext(lines []string, context []string, start int, eof bool) (int, int) {
if eof {
newIndex, fuzz := findContextCore(lines, context, len(lines)-len(context))
if newIndex != -1 {
return newIndex, fuzz
}
newIndex, fuzz = findContextCore(lines, context, start)
return newIndex, fuzz + 10000
}
return findContextCore(lines, context, start)
}
func peekNextSection(lines []string, initialIndex int) ([]string, []Chunk, int, bool) {
index := initialIndex
old := make([]string, 0, 32) // Preallocate for better performance
delLines := make([]string, 0, 8)
insLines := make([]string, 0, 8)
chunks := make([]Chunk, 0, 4)
mode := "keep"
// End conditions for the section
endSectionConditions := func(s string) bool {
return strings.HasPrefix(s, "@@") ||
strings.HasPrefix(s, "*** End Patch") ||
strings.HasPrefix(s, "*** Update File:") ||
strings.HasPrefix(s, "*** Delete File:") ||
strings.HasPrefix(s, "*** Add File:") ||
strings.HasPrefix(s, "*** End of File") ||
s == "***" ||
strings.HasPrefix(s, "***")
}
for index < len(lines) {
s := lines[index]
if endSectionConditions(s) {
break
}
index++
lastMode := mode
line := s
if len(line) > 0 {
switch line[0] {
case '+':
mode = "add"
case '-':
mode = "delete"
case ' ':
mode = "keep"
default:
mode = "keep"
line = " " + line
}
} else {
mode = "keep"
line = " "
}
line = line[1:]
if mode == "keep" && lastMode != mode {
if len(insLines) > 0 || len(delLines) > 0 {
chunks = append(chunks, Chunk{
OrigIndex: len(old) - len(delLines),
DelLines: delLines,
InsLines: insLines,
})
}
delLines = make([]string, 0, 8)
insLines = make([]string, 0, 8)
}
switch mode {
case "delete":
delLines = append(delLines, line)
old = append(old, line)
case "add":
insLines = append(insLines, line)
default:
old = append(old, line)
}
}
if len(insLines) > 0 || len(delLines) > 0 {
chunks = append(chunks, Chunk{
OrigIndex: len(old) - len(delLines),
DelLines: delLines,
InsLines: insLines,
})
}
if index < len(lines) && lines[index] == "*** End of File" {
index++
return old, chunks, index, true
}
return old, chunks, index, false
}
func TextToPatch(text string, orig map[string]string) (Patch, int, error) {
text = strings.TrimSpace(text)
lines := strings.Split(text, "\n")
if len(lines) < 2 || !strings.HasPrefix(lines[0], "*** Begin Patch") || lines[len(lines)-1] != "*** End Patch" {
return Patch{}, 0, NewDiffError("Invalid patch text")
}
parser := NewParser(orig, lines)
parser.index = 1
if err := parser.Parse(); err != nil {
return Patch{}, 0, err
}
return parser.patch, parser.fuzz, nil
}
func IdentifyFilesNeeded(text string) []string {
text = strings.TrimSpace(text)
lines := strings.Split(text, "\n")
result := make(map[string]bool)
for _, line := range lines {
if strings.HasPrefix(line, "*** Update File: ") {
result[line[len("*** Update File: "):]] = true
}
if strings.HasPrefix(line, "*** Delete File: ") {
result[line[len("*** Delete File: "):]] = true
}
}
files := make([]string, 0, len(result))
for file := range result {
files = append(files, file)
}
return files
}
func IdentifyFilesAdded(text string) []string {
text = strings.TrimSpace(text)
lines := strings.Split(text, "\n")
result := make(map[string]bool)
for _, line := range lines {
if strings.HasPrefix(line, "*** Add File: ") {
result[line[len("*** Add File: "):]] = true
}
}
files := make([]string, 0, len(result))
for file := range result {
files = append(files, file)
}
return files
}
func getUpdatedFile(text string, action PatchAction, path string) (string, error) {
if action.Type != ActionUpdate {
return "", errors.New("expected UPDATE action")
}
origLines := strings.Split(text, "\n")
destLines := make([]string, 0, len(origLines)) // Preallocate with capacity
origIndex := 0
for _, chunk := range action.Chunks {
if chunk.OrigIndex > len(origLines) {
return "", NewDiffError(fmt.Sprintf("%s: chunk.orig_index %d > len(lines) %d", path, chunk.OrigIndex, len(origLines)))
}
if origIndex > chunk.OrigIndex {
return "", NewDiffError(fmt.Sprintf("%s: orig_index %d > chunk.orig_index %d", path, origIndex, chunk.OrigIndex))
}
destLines = append(destLines, origLines[origIndex:chunk.OrigIndex]...)
delta := chunk.OrigIndex - origIndex
origIndex += delta
if len(chunk.InsLines) > 0 {
destLines = append(destLines, chunk.InsLines...)
}
origIndex += len(chunk.DelLines)
}
destLines = append(destLines, origLines[origIndex:]...)
return strings.Join(destLines, "\n"), nil
}
func PatchToCommit(patch Patch, orig map[string]string) (Commit, error) {
commit := Commit{Changes: make(map[string]FileChange, len(patch.Actions))}
for pathKey, action := range patch.Actions {
switch action.Type {
case ActionDelete:
oldContent := orig[pathKey]
commit.Changes[pathKey] = FileChange{
Type: ActionDelete,
OldContent: &oldContent,
}
case ActionAdd:
commit.Changes[pathKey] = FileChange{
Type: ActionAdd,
NewContent: action.NewFile,
}
case ActionUpdate:
newContent, err := getUpdatedFile(orig[pathKey], action, pathKey)
if err != nil {
return Commit{}, err
}
oldContent := orig[pathKey]
fileChange := FileChange{
Type: ActionUpdate,
OldContent: &oldContent,
NewContent: &newContent,
}
if action.MovePath != nil {
fileChange.MovePath = action.MovePath
}
commit.Changes[pathKey] = fileChange
}
}
return commit, nil
}
func AssembleChanges(orig map[string]string, updatedFiles map[string]string) Commit {
commit := Commit{Changes: make(map[string]FileChange, len(updatedFiles))}
for p, newContent := range updatedFiles {
oldContent, exists := orig[p]
if exists && oldContent == newContent {
continue
}
if exists && newContent != "" {
commit.Changes[p] = FileChange{
Type: ActionUpdate,
OldContent: &oldContent,
NewContent: &newContent,
}
} else if newContent != "" {
commit.Changes[p] = FileChange{
Type: ActionAdd,
NewContent: &newContent,
}
} else if exists {
commit.Changes[p] = FileChange{
Type: ActionDelete,
OldContent: &oldContent,
}
} else {
return commit // Changed from panic to simply return current commit
}
}
return commit
}
func LoadFiles(paths []string, openFn func(string) (string, error)) (map[string]string, error) {
orig := make(map[string]string, len(paths))
for _, p := range paths {
content, err := openFn(p)
if err != nil {
return nil, fileError("Open", "File not found", p)
}
orig[p] = content
}
return orig, nil
}
func ApplyCommit(commit Commit, writeFn func(string, string) error, removeFn func(string) error) error {
for p, change := range commit.Changes {
switch change.Type {
case ActionDelete:
if err := removeFn(p); err != nil {
return err
}
case ActionAdd:
if change.NewContent == nil {
return NewDiffError(fmt.Sprintf("Add action for %s has nil new_content", p))
}
if err := writeFn(p, *change.NewContent); err != nil {
return err
}
case ActionUpdate:
if change.NewContent == nil {
return NewDiffError(fmt.Sprintf("Update action for %s has nil new_content", p))
}
if change.MovePath != nil {
if err := writeFn(*change.MovePath, *change.NewContent); err != nil {
return err
}
if err := removeFn(p); err != nil {
return err
}
} else {
if err := writeFn(p, *change.NewContent); err != nil {
return err
}
}
}
}
return nil
}
func ProcessPatch(text string, openFn func(string) (string, error), writeFn func(string, string) error, removeFn func(string) error) (string, error) {
if !strings.HasPrefix(text, "*** Begin Patch") {
return "", NewDiffError("Patch must start with *** Begin Patch")
}
paths := IdentifyFilesNeeded(text)
orig, err := LoadFiles(paths, openFn)
if err != nil {
return "", err
}
patch, fuzz, err := TextToPatch(text, orig)
if err != nil {
return "", err
}
if fuzz > 0 {
return "", NewDiffError(fmt.Sprintf("Patch contains fuzzy matches (fuzz level: %d)", fuzz))
}
commit, err := PatchToCommit(patch, orig)
if err != nil {
return "", err
}
if err := ApplyCommit(commit, writeFn, removeFn); err != nil {
return "", err
}
return "Patch applied successfully", nil
}
func OpenFile(p string) (string, error) {
data, err := os.ReadFile(p)
if err != nil {
return "", err
}
return string(data), nil
}
func WriteFile(p string, content string) error {
if filepath.IsAbs(p) {
return NewDiffError("We do not support absolute paths.")
}
dir := filepath.Dir(p)
if dir != "." {
if err := os.MkdirAll(dir, 0o755); err != nil {
return err
}
}
return os.WriteFile(p, []byte(content), 0o644)
}
func RemoveFile(p string) error {
return os.Remove(p)
}
func ValidatePatch(patchText string, files map[string]string) (bool, string, error) {
if !strings.HasPrefix(patchText, "*** Begin Patch") {
return false, "Patch must start with *** Begin Patch", nil
}
neededFiles := IdentifyFilesNeeded(patchText)
for _, filePath := range neededFiles {
if _, exists := files[filePath]; !exists {
return false, fmt.Sprintf("File not found: %s", filePath), nil
}
}
patch, fuzz, err := TextToPatch(patchText, files)
if err != nil {
return false, err.Error(), nil
}
if fuzz > 0 {
return false, fmt.Sprintf("Patch contains fuzzy matches (fuzz level: %d)", fuzz), nil
}
_, err = PatchToCommit(patch, files)
if err != nil {
return false, err.Error(), nil
}
return true, "Patch is valid", nil
}
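For orientation, here is a minimal usage sketch of the entry points above. It is a sketch only: it assumes this code lives in a package imported as "patch", and the import path shown is hypothetical.

package main

import (
	"fmt"
	"log"
	"strings"

	// Hypothetical import path for the package shown above.
	patch "example.com/opencode/internal/patch"
)

func main() {
	// A small patch in the envelope format parsed by TextToPatch.
	patchText := strings.Join([]string{
		"*** Begin Patch",
		"*** Add File: hello.txt",
		"+hello world",
		"*** End Patch",
	}, "\n")
	// Dry-run validation against an in-memory view of the workspace.
	if ok, reason, err := patch.ValidatePatch(patchText, map[string]string{}); err != nil || !ok {
		log.Fatalf("invalid patch: %s (%v)", reason, err)
	}
	// Apply it for real using the filesystem helpers defined above.
	result, err := patch.ProcessPatch(patchText, patch.OpenFile, patch.WriteFile, patch.RemoveFile)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result) // "Patch applied successfully"
}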

View File

@@ -1,252 +0,0 @@
package history
import (
"context"
"database/sql"
"fmt"
"strconv"
"strings"
"time"
"github.com/google/uuid"
"github.com/kujtimiihoxha/opencode/internal/db"
"github.com/kujtimiihoxha/opencode/internal/pubsub"
)
const (
InitialVersion = "initial"
)
type File struct {
ID string
SessionID string
Path string
Content string
Version string
CreatedAt int64
UpdatedAt int64
}
type Service interface {
pubsub.Suscriber[File]
Create(ctx context.Context, sessionID, path, content string) (File, error)
CreateVersion(ctx context.Context, sessionID, path, content string) (File, error)
Get(ctx context.Context, id string) (File, error)
GetByPathAndSession(ctx context.Context, path, sessionID string) (File, error)
ListBySession(ctx context.Context, sessionID string) ([]File, error)
ListLatestSessionFiles(ctx context.Context, sessionID string) ([]File, error)
Update(ctx context.Context, file File) (File, error)
Delete(ctx context.Context, id string) error
DeleteSessionFiles(ctx context.Context, sessionID string) error
}
type service struct {
*pubsub.Broker[File]
db *sql.DB
q *db.Queries
}
func NewService(q *db.Queries, db *sql.DB) Service {
return &service{
Broker: pubsub.NewBroker[File](),
q: q,
db: db,
}
}
func (s *service) Create(ctx context.Context, sessionID, path, content string) (File, error) {
return s.createWithVersion(ctx, sessionID, path, content, InitialVersion)
}
func (s *service) CreateVersion(ctx context.Context, sessionID, path, content string) (File, error) {
// Get the latest version for this path
files, err := s.q.ListFilesByPath(ctx, path)
if err != nil {
return File{}, err
}
if len(files) == 0 {
// No previous versions, create initial
return s.Create(ctx, sessionID, path, content)
}
// Get the latest version
latestFile := files[0] // Files are ordered by created_at DESC
latestVersion := latestFile.Version
// Generate the next version
var nextVersion string
if latestVersion == InitialVersion {
nextVersion = "v1"
} else if strings.HasPrefix(latestVersion, "v") {
versionNum, err := strconv.Atoi(latestVersion[1:])
if err != nil {
// If we can't parse the version, just use a timestamp-based version
nextVersion = fmt.Sprintf("v%d", latestFile.CreatedAt)
} else {
nextVersion = fmt.Sprintf("v%d", versionNum+1)
}
} else {
// If the version format is unexpected, use a timestamp-based version
nextVersion = fmt.Sprintf("v%d", latestFile.CreatedAt)
}
return s.createWithVersion(ctx, sessionID, path, content, nextVersion)
}
func (s *service) createWithVersion(ctx context.Context, sessionID, path, content, version string) (File, error) {
// Maximum number of retries for transaction conflicts
const maxRetries = 3
var file File
var err error
// Retry loop for transaction conflicts
for attempt := range maxRetries {
// Start a transaction
tx, txErr := s.db.Begin()
if txErr != nil {
return File{}, fmt.Errorf("failed to begin transaction: %w", txErr)
}
// Create a new queries instance with the transaction
qtx := s.q.WithTx(tx)
// Try to create the file within the transaction
dbFile, txErr := qtx.CreateFile(ctx, db.CreateFileParams{
ID: uuid.New().String(),
SessionID: sessionID,
Path: path,
Content: content,
Version: version,
})
if txErr != nil {
// Rollback the transaction
tx.Rollback()
// Check if this is a uniqueness constraint violation
if strings.Contains(txErr.Error(), "UNIQUE constraint failed") {
if attempt < maxRetries-1 {
// If we have retries left, generate a new version and try again
if strings.HasPrefix(version, "v") {
versionNum, parseErr := strconv.Atoi(version[1:])
if parseErr == nil {
version = fmt.Sprintf("v%d", versionNum+1)
continue
}
}
// If we can't parse the version, use a timestamp-based version
version = fmt.Sprintf("v%d", time.Now().Unix())
continue
}
}
return File{}, txErr
}
// Commit the transaction
if txErr = tx.Commit(); txErr != nil {
return File{}, fmt.Errorf("failed to commit transaction: %w", txErr)
}
file = s.fromDBItem(dbFile)
s.Publish(pubsub.CreatedEvent, file)
return file, nil
}
return file, err
}
func (s *service) Get(ctx context.Context, id string) (File, error) {
dbFile, err := s.q.GetFile(ctx, id)
if err != nil {
return File{}, err
}
return s.fromDBItem(dbFile), nil
}
func (s *service) GetByPathAndSession(ctx context.Context, path, sessionID string) (File, error) {
dbFile, err := s.q.GetFileByPathAndSession(ctx, db.GetFileByPathAndSessionParams{
Path: path,
SessionID: sessionID,
})
if err != nil {
return File{}, err
}
return s.fromDBItem(dbFile), nil
}
func (s *service) ListBySession(ctx context.Context, sessionID string) ([]File, error) {
dbFiles, err := s.q.ListFilesBySession(ctx, sessionID)
if err != nil {
return nil, err
}
files := make([]File, len(dbFiles))
for i, dbFile := range dbFiles {
files[i] = s.fromDBItem(dbFile)
}
return files, nil
}
func (s *service) ListLatestSessionFiles(ctx context.Context, sessionID string) ([]File, error) {
dbFiles, err := s.q.ListLatestSessionFiles(ctx, sessionID)
if err != nil {
return nil, err
}
files := make([]File, len(dbFiles))
for i, dbFile := range dbFiles {
files[i] = s.fromDBItem(dbFile)
}
return files, nil
}
func (s *service) Update(ctx context.Context, file File) (File, error) {
dbFile, err := s.q.UpdateFile(ctx, db.UpdateFileParams{
ID: file.ID,
Content: file.Content,
Version: file.Version,
})
if err != nil {
return File{}, err
}
updatedFile := s.fromDBItem(dbFile)
s.Publish(pubsub.UpdatedEvent, updatedFile)
return updatedFile, nil
}
func (s *service) Delete(ctx context.Context, id string) error {
file, err := s.Get(ctx, id)
if err != nil {
return err
}
err = s.q.DeleteFile(ctx, id)
if err != nil {
return err
}
s.Publish(pubsub.DeletedEvent, file)
return nil
}
func (s *service) DeleteSessionFiles(ctx context.Context, sessionID string) error {
files, err := s.ListBySession(ctx, sessionID)
if err != nil {
return err
}
for _, file := range files {
err = s.Delete(ctx, file.ID)
if err != nil {
return err
}
}
return nil
}
func (s *service) fromDBItem(item db.File) File {
return File{
ID: item.ID,
SessionID: item.SessionID,
Path: item.Path,
Content: item.Content,
Version: item.Version,
CreatedAt: item.CreatedAt,
UpdatedAt: item.UpdatedAt,
}
}
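As a worked illustration of the version-naming rule in CreateVersion and createWithVersion above, the standalone helper below mirrors the progression; it is a sketch, not part of the service.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// nextVersion mirrors the rule above: "initial" -> "v1" -> "v2" -> ...,
// falling back to a timestamp-derived version (e.g. "v1713541200") when the
// current version string cannot be parsed.
func nextVersion(latest string, createdAt int64) string {
	if latest == "initial" {
		return "v1"
	}
	if strings.HasPrefix(latest, "v") {
		if n, err := strconv.Atoi(latest[1:]); err == nil {
			return fmt.Sprintf("v%d", n+1)
		}
	}
	return fmt.Sprintf("v%d", createdAt)
}

func main() {
	fmt.Println(nextVersion("initial", 0))        // v1
	fmt.Println(nextVersion("v7", 0))             // v8
	fmt.Println(nextVersion("draft", 1713541200)) // v1713541200
}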

View File

@@ -1,111 +0,0 @@
package agent
import (
"context"
"encoding/json"
"fmt"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
"github.com/kujtimiihoxha/opencode/internal/lsp"
"github.com/kujtimiihoxha/opencode/internal/message"
"github.com/kujtimiihoxha/opencode/internal/session"
)
type agentTool struct {
sessions session.Service
messages message.Service
lspClients map[string]*lsp.Client
}
const (
AgentToolName = "agent"
)
type AgentParams struct {
Prompt string `json:"prompt"`
}
func (b *agentTool) Info() tools.ToolInfo {
return tools.ToolInfo{
Name: AgentToolName,
Description: "Launch a new agent that has access to the following tools: GlobTool, GrepTool, LS, View. When you are searching for a keyword or file and are not confident that you will find the right match on the first try, use the Agent tool to perform the search for you. For example:\n\n- If you are searching for a keyword like \"config\" or \"logger\", or for questions like \"which file does X?\", the Agent tool is strongly recommended\n- If you want to read a specific file path, use the View or GlobTool tool instead of the Agent tool, to find the match more quickly\n- If you are searching for a specific class definition like \"class Foo\", use the GlobTool tool instead, to find the match more quickly\n\nUsage notes:\n1. Launch multiple agents concurrently whenever possible, to maximize performance; to do that, use a single message with multiple tool uses\n2. When the agent is done, it will return a single message back to you. The result returned by the agent is not visible to the user. To show the user the result, you should send a text message back to the user with a concise summary of the result.\n3. Each agent invocation is stateless. You will not be able to send additional messages to the agent, nor will the agent be able to communicate with you outside of its final report. Therefore, your prompt should contain a highly detailed task description for the agent to perform autonomously and you should specify exactly what information the agent should return back to you in its final and only message to you.\n4. The agent's outputs should generally be trusted\n5. IMPORTANT: The agent can not use Bash, Replace, Edit, so can not modify files. If you want to use these tools, use them directly instead of going through the agent.",
Parameters: map[string]any{
"prompt": map[string]any{
"type": "string",
"description": "The task for the agent to perform",
},
},
Required: []string{"prompt"},
}
}
func (b *agentTool) Run(ctx context.Context, call tools.ToolCall) (tools.ToolResponse, error) {
var params AgentParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return tools.NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
if params.Prompt == "" {
return tools.NewTextErrorResponse("prompt is required"), nil
}
sessionID, messageID := tools.GetContextValues(ctx)
if sessionID == "" || messageID == "" {
return tools.ToolResponse{}, fmt.Errorf("session_id and message_id are required")
}
agent, err := NewAgent(config.AgentTask, b.sessions, b.messages, TaskAgentTools(b.lspClients))
if err != nil {
return tools.ToolResponse{}, fmt.Errorf("error creating agent: %s", err)
}
session, err := b.sessions.CreateTaskSession(ctx, call.ID, sessionID, "New Agent Session")
if err != nil {
return tools.ToolResponse{}, fmt.Errorf("error creating session: %s", err)
}
done, err := agent.Run(ctx, session.ID, params.Prompt)
if err != nil {
return tools.ToolResponse{}, fmt.Errorf("error generating agent: %s", err)
}
result := <-done
if result.Err() != nil {
return tools.ToolResponse{}, fmt.Errorf("error generating agent: %s", result.Err())
}
response := result.Response()
if response.Role != message.Assistant {
return tools.NewTextErrorResponse("no response"), nil
}
updatedSession, err := b.sessions.Get(ctx, session.ID)
if err != nil {
return tools.ToolResponse{}, fmt.Errorf("error getting session: %s", err)
}
parentSession, err := b.sessions.Get(ctx, sessionID)
if err != nil {
return tools.ToolResponse{}, fmt.Errorf("error getting parent session: %s", err)
}
parentSession.Cost += updatedSession.Cost
parentSession.PromptTokens += updatedSession.PromptTokens
parentSession.CompletionTokens += updatedSession.CompletionTokens
_, err = b.sessions.Save(ctx, parentSession)
if err != nil {
return tools.ToolResponse{}, fmt.Errorf("error saving parent session: %s", err)
}
return tools.NewTextResponse(response.Content().String()), nil
}
func NewAgentTool(
Sessions session.Service,
Messages message.Service,
LspClients map[string]*lsp.Client,
) tools.BaseTool {
return &agentTool{
sessions: Sessions,
messages: Messages,
lspClients: LspClients,
}
}

View File

@@ -1,491 +0,0 @@
package agent
import (
"context"
"errors"
"fmt"
"strings"
"sync"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/models"
"github.com/kujtimiihoxha/opencode/internal/llm/prompt"
"github.com/kujtimiihoxha/opencode/internal/llm/provider"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/message"
"github.com/kujtimiihoxha/opencode/internal/permission"
"github.com/kujtimiihoxha/opencode/internal/session"
)
// Common errors
var (
ErrRequestCancelled = errors.New("request cancelled by user")
ErrSessionBusy = errors.New("session is currently processing another request")
)
type AgentEvent struct {
message message.Message
err error
}
func (e *AgentEvent) Err() error {
return e.err
}
func (e *AgentEvent) Response() message.Message {
return e.message
}
type Service interface {
Run(ctx context.Context, sessionID string, content string) (<-chan AgentEvent, error)
Cancel(sessionID string)
IsSessionBusy(sessionID string) bool
IsBusy() bool
}
type agent struct {
sessions session.Service
messages message.Service
tools []tools.BaseTool
provider provider.Provider
titleProvider provider.Provider
activeRequests sync.Map
}
func NewAgent(
agentName config.AgentName,
sessions session.Service,
messages message.Service,
agentTools []tools.BaseTool,
) (Service, error) {
agentProvider, err := createAgentProvider(agentName)
if err != nil {
return nil, err
}
var titleProvider provider.Provider
// Only generate titles for the coder agent
if agentName == config.AgentCoder {
titleProvider, err = createAgentProvider(config.AgentTitle)
if err != nil {
return nil, err
}
}
agent := &agent{
provider: agentProvider,
messages: messages,
sessions: sessions,
tools: agentTools,
titleProvider: titleProvider,
activeRequests: sync.Map{},
}
return agent, nil
}
func (a *agent) Cancel(sessionID string) {
if cancelFunc, exists := a.activeRequests.LoadAndDelete(sessionID); exists {
if cancel, ok := cancelFunc.(context.CancelFunc); ok {
logging.InfoPersist(fmt.Sprintf("Request cancellation initiated for session: %s", sessionID))
cancel()
}
}
}
func (a *agent) IsBusy() bool {
busy := false
a.activeRequests.Range(func(key, value interface{}) bool {
if cancelFunc, ok := value.(context.CancelFunc); ok {
if cancelFunc != nil {
busy = true
return false // Stop iterating
}
}
return true // Continue iterating
})
return busy
}
func (a *agent) IsSessionBusy(sessionID string) bool {
_, busy := a.activeRequests.Load(sessionID)
return busy
}
func (a *agent) generateTitle(ctx context.Context, sessionID string, content string) error {
if a.titleProvider == nil {
return nil
}
session, err := a.sessions.Get(ctx, sessionID)
if err != nil {
return err
}
response, err := a.titleProvider.SendMessages(
ctx,
[]message.Message{
{
Role: message.User,
Parts: []message.ContentPart{
message.TextContent{
Text: content,
},
},
},
},
make([]tools.BaseTool, 0),
)
if err != nil {
return err
}
title := strings.TrimSpace(strings.ReplaceAll(response.Content, "\n", " "))
if title == "" {
return nil
}
session.Title = title
_, err = a.sessions.Save(ctx, session)
return err
}
func (a *agent) err(err error) AgentEvent {
return AgentEvent{
err: err,
}
}
func (a *agent) Run(ctx context.Context, sessionID string, content string) (<-chan AgentEvent, error) {
events := make(chan AgentEvent)
if a.IsSessionBusy(sessionID) {
return nil, ErrSessionBusy
}
genCtx, cancel := context.WithCancel(ctx)
a.activeRequests.Store(sessionID, cancel)
go func() {
logging.Debug("Request started", "sessionID", sessionID)
defer logging.RecoverPanic("agent.Run", func() {
events <- a.err(fmt.Errorf("panic while running the agent"))
})
result := a.processGeneration(genCtx, sessionID, content)
if result.Err() != nil && !errors.Is(result.Err(), ErrRequestCancelled) && !errors.Is(result.Err(), context.Canceled) {
logging.ErrorPersist(fmt.Sprintf("Generation error for session %s: %v", sessionID, result))
}
logging.Debug("Request completed", "sessionID", sessionID)
a.activeRequests.Delete(sessionID)
cancel()
events <- result
close(events)
}()
return events, nil
}
func (a *agent) processGeneration(ctx context.Context, sessionID, content string) AgentEvent {
// List existing messages; if none, start title generation asynchronously.
msgs, err := a.messages.List(ctx, sessionID)
if err != nil {
return a.err(fmt.Errorf("failed to list messages: %w", err))
}
if len(msgs) == 0 {
go func() {
defer logging.RecoverPanic("agent.Run", func() {
logging.ErrorPersist("panic while generating title")
})
titleErr := a.generateTitle(context.Background(), sessionID, content)
if titleErr != nil {
logging.ErrorPersist(fmt.Sprintf("failed to generate title: %v", titleErr))
}
}()
}
userMsg, err := a.createUserMessage(ctx, sessionID, content)
if err != nil {
return a.err(fmt.Errorf("failed to create user message: %w", err))
}
// Append the new user message to the conversation history.
msgHistory := append(msgs, userMsg)
for {
// Check for cancellation before each iteration
select {
case <-ctx.Done():
return a.err(ctx.Err())
default:
// Continue processing
}
agentMessage, toolResults, err := a.streamAndHandleEvents(ctx, sessionID, msgHistory)
if err != nil {
if errors.Is(err, context.Canceled) {
agentMessage.AddFinish(message.FinishReasonCanceled)
a.messages.Update(context.Background(), agentMessage)
return a.err(ErrRequestCancelled)
}
return a.err(fmt.Errorf("failed to process events: %w", err))
}
logging.Info("Result", "message", agentMessage.FinishReason(), "toolResults", toolResults)
if (agentMessage.FinishReason() == message.FinishReasonToolUse) && toolResults != nil {
// We are not done, we need to respond with the tool response
msgHistory = append(msgHistory, agentMessage, *toolResults)
continue
}
return AgentEvent{
message: agentMessage,
}
}
}
func (a *agent) createUserMessage(ctx context.Context, sessionID, content string) (message.Message, error) {
return a.messages.Create(ctx, sessionID, message.CreateMessageParams{
Role: message.User,
Parts: []message.ContentPart{
message.TextContent{Text: content},
},
})
}
func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msgHistory []message.Message) (message.Message, *message.Message, error) {
eventChan := a.provider.StreamResponse(ctx, msgHistory, a.tools)
assistantMsg, err := a.messages.Create(ctx, sessionID, message.CreateMessageParams{
Role: message.Assistant,
Parts: []message.ContentPart{},
Model: a.provider.Model().ID,
})
if err != nil {
return assistantMsg, nil, fmt.Errorf("failed to create assistant message: %w", err)
}
// Add the session and message ID into the context if needed by tools.
ctx = context.WithValue(ctx, tools.MessageIDContextKey, assistantMsg.ID)
ctx = context.WithValue(ctx, tools.SessionIDContextKey, sessionID)
// Process each event in the stream.
for event := range eventChan {
if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
a.finishMessage(ctx, &assistantMsg, message.FinishReasonCanceled)
return assistantMsg, nil, processErr
}
if ctx.Err() != nil {
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled)
return assistantMsg, nil, ctx.Err()
}
}
toolResults := make([]message.ToolResult, len(assistantMsg.ToolCalls()))
toolCalls := assistantMsg.ToolCalls()
for i, toolCall := range toolCalls {
select {
case <-ctx.Done():
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled)
// Make all future tool calls cancelled
for j := i; j < len(toolCalls); j++ {
toolResults[j] = message.ToolResult{
ToolCallID: toolCalls[j].ID,
Content: "Tool execution canceled by user",
IsError: true,
}
}
goto out
default:
// Continue processing
var tool tools.BaseTool
for _, availableTools := range a.tools {
if availableTools.Info().Name == toolCall.Name {
tool = availableTools
}
}
// Tool not found
if tool == nil {
toolResults[i] = message.ToolResult{
ToolCallID: toolCall.ID,
Content: fmt.Sprintf("Tool not found: %s", toolCall.Name),
IsError: true,
}
continue
}
toolResult, toolErr := tool.Run(ctx, tools.ToolCall{
ID: toolCall.ID,
Name: toolCall.Name,
Input: toolCall.Input,
})
if toolErr != nil {
if errors.Is(toolErr, permission.ErrorPermissionDenied) {
toolResults[i] = message.ToolResult{
ToolCallID: toolCall.ID,
Content: "Permission denied",
IsError: true,
}
for j := i + 1; j < len(toolCalls); j++ {
toolResults[j] = message.ToolResult{
ToolCallID: toolCalls[j].ID,
Content: "Tool execution canceled by user",
IsError: true,
}
}
a.finishMessage(ctx, &assistantMsg, message.FinishReasonPermissionDenied)
break
}
}
toolResults[i] = message.ToolResult{
ToolCallID: toolCall.ID,
Content: toolResult.Content,
Metadata: toolResult.Metadata,
IsError: toolResult.IsError,
}
}
}
out:
if len(toolResults) == 0 {
return assistantMsg, nil, nil
}
parts := make([]message.ContentPart, 0)
for _, tr := range toolResults {
parts = append(parts, tr)
}
msg, err := a.messages.Create(context.Background(), assistantMsg.SessionID, message.CreateMessageParams{
Role: message.Tool,
Parts: parts,
})
if err != nil {
return assistantMsg, nil, fmt.Errorf("failed to create cancelled tool message: %w", err)
}
return assistantMsg, &msg, err
}
func (a *agent) finishMessage(ctx context.Context, msg *message.Message, finishReason message.FinishReason) {
msg.AddFinish(finishReason)
_ = a.messages.Update(ctx, *msg)
}
func (a *agent) processEvent(ctx context.Context, sessionID string, assistantMsg *message.Message, event provider.ProviderEvent) error {
select {
case <-ctx.Done():
return ctx.Err()
default:
// Continue processing.
}
switch event.Type {
case provider.EventThinkingDelta:
assistantMsg.AppendReasoningContent(event.Content)
return a.messages.Update(ctx, *assistantMsg)
case provider.EventContentDelta:
assistantMsg.AppendContent(event.Content)
return a.messages.Update(ctx, *assistantMsg)
case provider.EventToolUseStart:
assistantMsg.AddToolCall(*event.ToolCall)
return a.messages.Update(ctx, *assistantMsg)
// TODO: see how to handle this
// case provider.EventToolUseDelta:
// tm := time.Unix(assistantMsg.UpdatedAt, 0)
// assistantMsg.AppendToolCallInput(event.ToolCall.ID, event.ToolCall.Input)
// if time.Since(tm) > 1000*time.Millisecond {
// err := a.messages.Update(ctx, *assistantMsg)
// assistantMsg.UpdatedAt = time.Now().Unix()
// return err
// }
case provider.EventToolUseStop:
assistantMsg.FinishToolCall(event.ToolCall.ID)
return a.messages.Update(ctx, *assistantMsg)
case provider.EventError:
if errors.Is(event.Error, context.Canceled) {
logging.InfoPersist(fmt.Sprintf("Event processing canceled for session: %s", sessionID))
return context.Canceled
}
logging.ErrorPersist(event.Error.Error())
return event.Error
case provider.EventComplete:
assistantMsg.SetToolCalls(event.Response.ToolCalls)
assistantMsg.AddFinish(event.Response.FinishReason)
if err := a.messages.Update(ctx, *assistantMsg); err != nil {
return fmt.Errorf("failed to update message: %w", err)
}
return a.TrackUsage(ctx, sessionID, a.provider.Model(), event.Response.Usage)
}
return nil
}
func (a *agent) TrackUsage(ctx context.Context, sessionID string, model models.Model, usage provider.TokenUsage) error {
sess, err := a.sessions.Get(ctx, sessionID)
if err != nil {
return fmt.Errorf("failed to get session: %w", err)
}
cost := model.CostPer1MInCached/1e6*float64(usage.CacheCreationTokens) +
model.CostPer1MOutCached/1e6*float64(usage.CacheReadTokens) +
model.CostPer1MIn/1e6*float64(usage.InputTokens) +
model.CostPer1MOut/1e6*float64(usage.OutputTokens)
sess.Cost += cost
sess.CompletionTokens += usage.OutputTokens
sess.PromptTokens += usage.InputTokens
_, err = a.sessions.Save(ctx, sess)
if err != nil {
return fmt.Errorf("failed to save session: %w", err)
}
return nil
}
func createAgentProvider(agentName config.AgentName) (provider.Provider, error) {
cfg := config.Get()
agentConfig, ok := cfg.Agents[agentName]
if !ok {
return nil, fmt.Errorf("agent %s not found", agentName)
}
model, ok := models.SupportedModels[agentConfig.Model]
if !ok {
return nil, fmt.Errorf("model %s not supported", agentConfig.Model)
}
providerCfg, ok := cfg.Providers[model.Provider]
if !ok {
return nil, fmt.Errorf("provider %s not supported", model.Provider)
}
if providerCfg.Disabled {
return nil, fmt.Errorf("provider %s is not enabled", model.Provider)
}
maxTokens := model.DefaultMaxTokens
if agentConfig.MaxTokens > 0 {
maxTokens = agentConfig.MaxTokens
}
opts := []provider.ProviderClientOption{
provider.WithAPIKey(providerCfg.APIKey),
provider.WithModel(model),
provider.WithSystemMessage(prompt.GetAgentPrompt(agentName, model.Provider)),
provider.WithMaxTokens(maxTokens),
}
if model.Provider == models.ProviderOpenAI && model.CanReason {
opts = append(
opts,
provider.WithOpenAIOptions(
provider.WithReasoningEffort(agentConfig.ReasoningEffort),
),
)
} else if model.Provider == models.ProviderAnthropic && model.CanReason && agentName == config.AgentCoder {
opts = append(
opts,
provider.WithAnthropicOptions(
provider.WithAnthropicShouldThinkFn(provider.DefaultShouldThinkFn),
),
)
}
agentProvider, err := provider.NewProvider(
model.Provider,
opts...,
)
if err != nil {
return nil, fmt.Errorf("could not create provider: %v", err)
}
return agentProvider, nil
}
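The sketch below shows how a caller typically drives the Service above; it is hypothetical (not part of the original file) and assumes the agent, its tools, and the session/message services were already wired via NewAgent.

// Hypothetical helper in package agent: consume the single terminal event
// that Run delivers before closing the channel.
func runOnce(ctx context.Context, a Service, sessionID, prompt string) error {
	events, err := a.Run(ctx, sessionID, prompt)
	if err != nil {
		return err // e.g. ErrSessionBusy when a request is already in flight
	}
	for ev := range events {
		if ev.Err() != nil {
			return ev.Err()
		}
		fmt.Printf("finish reason: %v\n", ev.Response().FinishReason())
	}
	return nil
}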

View File

@@ -1,197 +0,0 @@
package agent
import (
"context"
"encoding/json"
"fmt"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/permission"
"github.com/kujtimiihoxha/opencode/internal/version"
"github.com/mark3labs/mcp-go/client"
"github.com/mark3labs/mcp-go/mcp"
)
type mcpTool struct {
mcpName string
tool mcp.Tool
mcpConfig config.MCPServer
permissions permission.Service
}
type MCPClient interface {
Initialize(
ctx context.Context,
request mcp.InitializeRequest,
) (*mcp.InitializeResult, error)
ListTools(ctx context.Context, request mcp.ListToolsRequest) (*mcp.ListToolsResult, error)
CallTool(ctx context.Context, request mcp.CallToolRequest) (*mcp.CallToolResult, error)
Close() error
}
func (b *mcpTool) Info() tools.ToolInfo {
return tools.ToolInfo{
Name: fmt.Sprintf("%s_%s", b.mcpName, b.tool.Name),
Description: b.tool.Description,
Parameters: b.tool.InputSchema.Properties,
Required: b.tool.InputSchema.Required,
}
}
func runTool(ctx context.Context, c MCPClient, toolName string, input string) (tools.ToolResponse, error) {
defer c.Close()
initRequest := mcp.InitializeRequest{}
initRequest.Params.ProtocolVersion = mcp.LATEST_PROTOCOL_VERSION
initRequest.Params.ClientInfo = mcp.Implementation{
Name: "OpenCode",
Version: version.Version,
}
_, err := c.Initialize(ctx, initRequest)
if err != nil {
return tools.NewTextErrorResponse(err.Error()), nil
}
toolRequest := mcp.CallToolRequest{}
toolRequest.Params.Name = toolName
var args map[string]any
if err = json.Unmarshal([]byte(input), &args); err != nil {
return tools.NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
toolRequest.Params.Arguments = args
result, err := c.CallTool(ctx, toolRequest)
if err != nil {
return tools.NewTextErrorResponse(err.Error()), nil
}
output := ""
for _, v := range result.Content {
if v, ok := v.(mcp.TextContent); ok {
output = v.Text
} else {
output = fmt.Sprintf("%v", v)
}
}
return tools.NewTextResponse(output), nil
}
func (b *mcpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolResponse, error) {
sessionID, messageID := tools.GetContextValues(ctx)
if sessionID == "" || messageID == "" {
return tools.ToolResponse{}, fmt.Errorf("session ID and message ID are required to run an MCP tool")
}
permissionDescription := fmt.Sprintf("execute %s with the following parameters: %s", b.Info().Name, params.Input)
p := b.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: config.WorkingDirectory(),
ToolName: b.Info().Name,
Action: "execute",
Description: permissionDescription,
Params: params.Input,
},
)
if !p {
return tools.NewTextErrorResponse("permission denied"), nil
}
switch b.mcpConfig.Type {
case config.MCPStdio:
c, err := client.NewStdioMCPClient(
b.mcpConfig.Command,
b.mcpConfig.Env,
b.mcpConfig.Args...,
)
if err != nil {
return tools.NewTextErrorResponse(err.Error()), nil
}
return runTool(ctx, c, b.tool.Name, params.Input)
case config.MCPSse:
c, err := client.NewSSEMCPClient(
b.mcpConfig.URL,
client.WithHeaders(b.mcpConfig.Headers),
)
if err != nil {
return tools.NewTextErrorResponse(err.Error()), nil
}
return runTool(ctx, c, b.tool.Name, params.Input)
}
return tools.NewTextErrorResponse("invalid mcp type"), nil
}
func NewMcpTool(name string, tool mcp.Tool, permissions permission.Service, mcpConfig config.MCPServer) tools.BaseTool {
return &mcpTool{
mcpName: name,
tool: tool,
mcpConfig: mcpConfig,
permissions: permissions,
}
}
var mcpTools []tools.BaseTool
func getTools(ctx context.Context, name string, m config.MCPServer, permissions permission.Service, c MCPClient) []tools.BaseTool {
var stdioTools []tools.BaseTool
initRequest := mcp.InitializeRequest{}
initRequest.Params.ProtocolVersion = mcp.LATEST_PROTOCOL_VERSION
initRequest.Params.ClientInfo = mcp.Implementation{
Name: "OpenCode",
Version: version.Version,
}
_, err := c.Initialize(ctx, initRequest)
if err != nil {
logging.Error("error initializing mcp client", "error", err)
return stdioTools
}
toolsRequest := mcp.ListToolsRequest{}
tools, err := c.ListTools(ctx, toolsRequest)
if err != nil {
logging.Error("error listing tools", "error", err)
return stdioTools
}
for _, t := range tools.Tools {
stdioTools = append(stdioTools, NewMcpTool(name, t, permissions, m))
}
defer c.Close()
return stdioTools
}
func GetMcpTools(ctx context.Context, permissions permission.Service) []tools.BaseTool {
if len(mcpTools) > 0 {
return mcpTools
}
for name, m := range config.Get().MCPServers {
switch m.Type {
case config.MCPStdio:
c, err := client.NewStdioMCPClient(
m.Command,
m.Env,
m.Args...,
)
if err != nil {
logging.Error("error creating mcp client", "error", err)
continue
}
mcpTools = append(mcpTools, getTools(ctx, name, m, permissions, c)...)
case config.MCPSse:
c, err := client.NewSSEMCPClient(
m.URL,
client.WithHeaders(m.Headers),
)
if err != nil {
logging.Error("error creating mcp client", "error", err)
continue
}
mcpTools = append(mcpTools, getTools(ctx, name, m, permissions, c)...)
}
}
return mcpTools
}

View File

@@ -1,51 +0,0 @@
package agent
import (
"context"
"github.com/kujtimiihoxha/opencode/internal/history"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
"github.com/kujtimiihoxha/opencode/internal/lsp"
"github.com/kujtimiihoxha/opencode/internal/message"
"github.com/kujtimiihoxha/opencode/internal/permission"
"github.com/kujtimiihoxha/opencode/internal/session"
)
func CoderAgentTools(
permissions permission.Service,
sessions session.Service,
messages message.Service,
history history.Service,
lspClients map[string]*lsp.Client,
) []tools.BaseTool {
ctx := context.Background()
otherTools := GetMcpTools(ctx, permissions)
if len(lspClients) > 0 {
otherTools = append(otherTools, tools.NewDiagnosticsTool(lspClients))
}
return append(
[]tools.BaseTool{
tools.NewBashTool(permissions),
tools.NewEditTool(lspClients, permissions, history),
tools.NewFetchTool(permissions),
tools.NewGlobTool(),
tools.NewGrepTool(),
tools.NewLsTool(),
tools.NewSourcegraphTool(),
tools.NewViewTool(lspClients),
tools.NewPatchTool(lspClients, permissions, history),
tools.NewWriteTool(lspClients, permissions, history),
NewAgentTool(sessions, messages, lspClients),
}, otherTools...,
)
}
func TaskAgentTools(lspClients map[string]*lsp.Client) []tools.BaseTool {
return []tools.BaseTool{
tools.NewGlobTool(),
tools.NewGrepTool(),
tools.NewLsTool(),
tools.NewSourcegraphTool(),
tools.NewViewTool(lspClients),
}
}

View File

@@ -1,77 +0,0 @@
package models
const (
ProviderAnthropic ModelProvider = "anthropic"
// Models
Claude35Sonnet ModelID = "claude-3.5-sonnet"
Claude3Haiku ModelID = "claude-3-haiku"
Claude37Sonnet ModelID = "claude-3.7-sonnet"
Claude35Haiku ModelID = "claude-3.5-haiku"
Claude3Opus ModelID = "claude-3-opus"
)
var AnthropicModels = map[ModelID]Model{
// Anthropic
Claude35Sonnet: {
ID: Claude35Sonnet,
Name: "Claude 3.5 Sonnet",
Provider: ProviderAnthropic,
APIModel: "claude-3-5-sonnet-latest",
CostPer1MIn: 3.0,
CostPer1MInCached: 3.75,
CostPer1MOutCached: 0.30,
CostPer1MOut: 15.0,
ContextWindow: 200000,
DefaultMaxTokens: 5000,
},
Claude3Haiku: {
ID: Claude3Haiku,
Name: "Claude 3 Haiku",
Provider: ProviderAnthropic,
APIModel: "claude-3-haiku-latest",
CostPer1MIn: 0.25,
CostPer1MInCached: 0.30,
CostPer1MOutCached: 0.03,
CostPer1MOut: 1.25,
ContextWindow: 200000,
DefaultMaxTokens: 5000,
},
Claude37Sonnet: {
ID: Claude37Sonnet,
Name: "Claude 3.7 Sonnet",
Provider: ProviderAnthropic,
APIModel: "claude-3-7-sonnet-latest",
CostPer1MIn: 3.0,
CostPer1MInCached: 3.75,
CostPer1MOutCached: 0.30,
CostPer1MOut: 15.0,
ContextWindow: 200000,
DefaultMaxTokens: 50000,
CanReason: true,
},
Claude35Haiku: {
ID: Claude35Haiku,
Name: "Claude 3.5 Haiku",
Provider: ProviderAnthropic,
APIModel: "claude-3-5-haiku-latest",
CostPer1MIn: 0.80,
CostPer1MInCached: 1.0,
CostPer1MOutCached: 0.08,
CostPer1MOut: 4.0,
ContextWindow: 200000,
DefaultMaxTokens: 4096,
},
Claude3Opus: {
ID: Claude3Opus,
Name: "Claude 3 Opus",
Provider: ProviderAnthropic,
APIModel: "claude-3-opus-latest",
CostPer1MIn: 15.0,
CostPer1MInCached: 18.75,
CostPer1MOutCached: 1.50,
CostPer1MOut: 75.0,
ContextWindow: 200000,
DefaultMaxTokens: 4096,
},
}
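For a rough sense of scale, plugging the Claude 3.7 Sonnet entry above into the cost formula in TrackUsage earlier: a hypothetical turn with 10,000 prompt tokens, 2,000 completion tokens, and no cache traffic costs about 3.0/1e6 * 10,000 + 15.0/1e6 * 2,000 = $0.03 + $0.03 = $0.06, with cache writes ($3.75 per million) and cache reads ($0.30 per million) added on top when present.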

View File

@@ -1,63 +0,0 @@
package models
const (
ProviderGemini ModelProvider = "gemini"
// Models
Gemini25Flash ModelID = "gemini-2.5-flash"
Gemini25 ModelID = "gemini-2.5"
Gemini20Flash ModelID = "gemini-2.0-flash"
Gemini20FlashLite ModelID = "gemini-2.0-flash-lite"
)
var GeminiModels = map[ModelID]Model{
Gemini25Flash: {
ID: Gemini25Flash,
Name: "Gemini 2.5 Flash",
Provider: ProviderGemini,
APIModel: "gemini-2.5-flash-preview-04-17",
CostPer1MIn: 0.15,
CostPer1MInCached: 0,
CostPer1MOutCached: 0,
CostPer1MOut: 0.60,
ContextWindow: 1000000,
DefaultMaxTokens: 50000,
},
Gemini25: {
ID: Gemini25,
Name: "Gemini 2.5 Pro",
Provider: ProviderGemini,
APIModel: "gemini-2.5-pro-preview-03-25",
CostPer1MIn: 1.25,
CostPer1MInCached: 0,
CostPer1MOutCached: 0,
CostPer1MOut: 10,
ContextWindow: 1000000,
DefaultMaxTokens: 50000,
},
Gemini20Flash: {
ID: Gemini20Flash,
Name: "Gemini 2.0 Flash",
Provider: ProviderGemini,
APIModel: "gemini-2.0-flash",
CostPer1MIn: 0.10,
CostPer1MInCached: 0,
CostPer1MOutCached: 0,
CostPer1MOut: 0.40,
ContextWindow: 1000000,
DefaultMaxTokens: 6000,
},
Gemini20FlashLite: {
ID: Gemini20FlashLite,
Name: "Gemini 2.0 Flash Lite",
Provider: ProviderGemini,
APIModel: "gemini-2.0-flash-lite",
CostPer1MIn: 0.05,
CostPer1MInCached: 0,
CostPer1MOutCached: 0,
CostPer1MOut: 0.30,
ContextWindow: 1000000,
DefaultMaxTokens: 6000,
},
}

View File

@@ -1,95 +0,0 @@
package models
import "maps"
type (
ModelID string
ModelProvider string
)
type Model struct {
ID ModelID `json:"id"`
Name string `json:"name"`
Provider ModelProvider `json:"provider"`
APIModel string `json:"api_model"`
CostPer1MIn float64 `json:"cost_per_1m_in"`
CostPer1MOut float64 `json:"cost_per_1m_out"`
CostPer1MInCached float64 `json:"cost_per_1m_in_cached"`
CostPer1MOutCached float64 `json:"cost_per_1m_out_cached"`
ContextWindow int64 `json:"context_window"`
DefaultMaxTokens int64 `json:"default_max_tokens"`
CanReason bool `json:"can_reason"`
}
// Model IDs
const ( // GEMINI
// GROQ
QWENQwq ModelID = "qwen-qwq"
// Bedrock
BedrockClaude37Sonnet ModelID = "bedrock.claude-3.7-sonnet"
)
const (
ProviderBedrock ModelProvider = "bedrock"
ProviderGROQ ModelProvider = "groq"
// ForTests
ProviderMock ModelProvider = "__mock"
)
var SupportedModels = map[ModelID]Model{
//
// // GEMINI
// GEMINI25: {
// ID: GEMINI25,
// Name: "Gemini 2.5 Pro",
// Provider: ProviderGemini,
// APIModel: "gemini-2.5-pro-exp-03-25",
// CostPer1MIn: 0,
// CostPer1MInCached: 0,
// CostPer1MOutCached: 0,
// CostPer1MOut: 0,
// },
//
// GRMINI20Flash: {
// ID: GRMINI20Flash,
// Name: "Gemini 2.0 Flash",
// Provider: ProviderGemini,
// APIModel: "gemini-2.0-flash",
// CostPer1MIn: 0.1,
// CostPer1MInCached: 0,
// CostPer1MOutCached: 0.025,
// CostPer1MOut: 0.4,
// },
//
// // GROQ
// QWENQwq: {
// ID: QWENQwq,
// Name: "Qwen Qwq",
// Provider: ProviderGROQ,
// APIModel: "qwen-qwq-32b",
// CostPer1MIn: 0,
// CostPer1MInCached: 0,
// CostPer1MOutCached: 0,
// CostPer1MOut: 0,
// },
//
// // Bedrock
BedrockClaude37Sonnet: {
ID: BedrockClaude37Sonnet,
Name: "Bedrock: Claude 3.7 Sonnet",
Provider: ProviderBedrock,
APIModel: "anthropic.claude-3-7-sonnet-20250219-v1:0",
CostPer1MIn: 3.0,
CostPer1MInCached: 3.75,
CostPer1MOutCached: 0.30,
CostPer1MOut: 15.0,
},
}
func init() {
maps.Copy(SupportedModels, AnthropicModels)
maps.Copy(SupportedModels, OpenAIModels)
maps.Copy(SupportedModels, GeminiModels)
}
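A small caller-side sketch of the merged registry that init() builds above; the Example function is hypothetical and assumes an added "fmt" import.

// Hypothetical example in package models: after init(), the Anthropic,
// OpenAI, and Gemini maps are all merged into SupportedModels, so lookups
// like the one in createAgentProvider resolve against a single map.
func ExampleSupportedModels() {
	m := SupportedModels[Claude37Sonnet]
	fmt.Println(m.Name, m.ContextWindow)
	// Output: Claude 3.7 Sonnet 200000
}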

View File

@@ -1,169 +0,0 @@
package models
const (
ProviderOpenAI ModelProvider = "openai"
GPT41 ModelID = "gpt-4.1"
GPT41Mini ModelID = "gpt-4.1-mini"
GPT41Nano ModelID = "gpt-4.1-nano"
GPT45Preview ModelID = "gpt-4.5-preview"
GPT4o ModelID = "gpt-4o"
GPT4oMini ModelID = "gpt-4o-mini"
O1 ModelID = "o1"
O1Pro ModelID = "o1-pro"
O1Mini ModelID = "o1-mini"
O3 ModelID = "o3"
O3Mini ModelID = "o3-mini"
O4Mini ModelID = "o4-mini"
)
var OpenAIModels = map[ModelID]Model{
GPT41: {
ID: GPT41,
Name: "GPT 4.1",
Provider: ProviderOpenAI,
APIModel: "gpt-4.1",
CostPer1MIn: 2.00,
CostPer1MInCached: 0.50,
CostPer1MOutCached: 0.0,
CostPer1MOut: 8.00,
ContextWindow: 1_047_576,
DefaultMaxTokens: 20000,
},
GPT41Mini: {
ID: GPT41Mini,
Name: "GPT 4.1 mini",
Provider: ProviderOpenAI,
APIModel: "gpt-4.1",
CostPer1MIn: 0.40,
CostPer1MInCached: 0.10,
CostPer1MOutCached: 0.0,
CostPer1MOut: 1.60,
ContextWindow: 200_000,
DefaultMaxTokens: 20000,
},
GPT41Nano: {
ID: GPT41Nano,
Name: "GPT 4.1 nano",
Provider: ProviderOpenAI,
APIModel: "gpt-4.1-nano",
CostPer1MIn: 0.10,
CostPer1MInCached: 0.025,
CostPer1MOutCached: 0.0,
CostPer1MOut: 0.40,
ContextWindow: 1_047_576,
DefaultMaxTokens: 20000,
},
GPT45Preview: {
ID: GPT45Preview,
Name: "GPT 4.5 preview",
Provider: ProviderOpenAI,
APIModel: "gpt-4.5-preview",
CostPer1MIn: 75.00,
CostPer1MInCached: 37.50,
CostPer1MOutCached: 0.0,
CostPer1MOut: 150.00,
ContextWindow: 128_000,
DefaultMaxTokens: 15000,
},
GPT4o: {
ID: GPT4o,
Name: "GPT 4o",
Provider: ProviderOpenAI,
APIModel: "gpt-4o",
CostPer1MIn: 2.50,
CostPer1MInCached: 1.25,
CostPer1MOutCached: 0.0,
CostPer1MOut: 10.00,
ContextWindow: 128_000,
DefaultMaxTokens: 4096,
},
GPT4oMini: {
ID: GPT4oMini,
Name: "GPT 4o mini",
Provider: ProviderOpenAI,
APIModel: "gpt-4o-mini",
CostPer1MIn: 0.15,
CostPer1MInCached: 0.075,
CostPer1MOutCached: 0.0,
CostPer1MOut: 0.60,
ContextWindow: 128_000,
},
O1: {
ID: O1,
Name: "O1",
Provider: ProviderOpenAI,
APIModel: "o1",
CostPer1MIn: 15.00,
CostPer1MInCached: 7.50,
CostPer1MOutCached: 0.0,
CostPer1MOut: 60.00,
ContextWindow: 200_000,
DefaultMaxTokens: 50000,
CanReason: true,
},
O1Pro: {
ID: O1Pro,
Name: "o1 pro",
Provider: ProviderOpenAI,
APIModel: "o1-pro",
CostPer1MIn: 150.00,
CostPer1MInCached: 0.0,
CostPer1MOutCached: 0.0,
CostPer1MOut: 600.00,
ContextWindow: 200_000,
DefaultMaxTokens: 50000,
CanReason: true,
},
O1Mini: {
ID: O1Mini,
Name: "o1 mini",
Provider: ProviderOpenAI,
APIModel: "o1-mini",
CostPer1MIn: 1.10,
CostPer1MInCached: 0.55,
CostPer1MOutCached: 0.0,
CostPer1MOut: 4.40,
ContextWindow: 128_000,
DefaultMaxTokens: 50000,
CanReason: true,
},
O3: {
ID: O3,
Name: "o3",
Provider: ProviderOpenAI,
APIModel: "o3",
CostPer1MIn: 10.00,
CostPer1MInCached: 2.50,
CostPer1MOutCached: 0.0,
CostPer1MOut: 40.00,
ContextWindow: 200_000,
CanReason: true,
},
O3Mini: {
ID: O3Mini,
Name: "o3 mini",
Provider: ProviderOpenAI,
APIModel: "o3-mini",
CostPer1MIn: 1.10,
CostPer1MInCached: 0.55,
CostPer1MOutCached: 0.0,
CostPer1MOut: 4.40,
ContextWindow: 200_000,
DefaultMaxTokens: 50000,
CanReason: true,
},
O4Mini: {
ID: O4Mini,
Name: "o4 mini",
Provider: ProviderOpenAI,
APIModel: "o4-mini",
CostPer1MIn: 1.10,
CostPer1MInCached: 0.275,
CostPer1MOutCached: 0.0,
CostPer1MOut: 4.40,
ContextWindow: 128_000,
DefaultMaxTokens: 50000,
CanReason: true,
},
}

View File

@@ -1,222 +0,0 @@
package prompt
import (
"context"
"fmt"
"os"
"path/filepath"
"runtime"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/models"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
)
func CoderPrompt(provider models.ModelProvider) string {
basePrompt := baseAnthropicCoderPrompt
switch provider {
case models.ProviderOpenAI:
basePrompt = baseOpenAICoderPrompt
}
envInfo := getEnvironmentInfo()
return fmt.Sprintf("%s\n\n%s\n%s", basePrompt, envInfo, lspInformation())
}
const baseOpenAICoderPrompt = `
You are operating as and within the OpenCode CLI, a terminal-based agentic coding assistant built by OpenAI. It wraps OpenAI models to enable natural language interaction with a local codebase. You are expected to be precise, safe, and helpful.
You can:
- Receive user prompts, project context, and files.
- Stream responses and emit function calls (e.g., shell commands, code edits).
- Apply patches, run commands, and manage user approvals based on policy.
- Work inside a sandboxed, git-backed workspace with rollback support.
- Log telemetry so sessions can be replayed or inspected later.
- More details on your functionality are available at "opencode --help"
You are an agent - please keep going until the user's query is completely resolved, before ending your turn and yielding back to the user. Only terminate your turn when you are sure that the problem is solved. If you are not sure about file content or codebase structure pertaining to the user's request, use your tools to read files and gather the relevant information: do NOT guess or make up an answer.
Please resolve the user's task by editing and testing the code files in your current code execution session. You are a deployed coding agent. Your session allows you to modify and run code. The repo(s) are already cloned in your working directory, and you must fully solve the problem for your answer to be considered correct.
You MUST adhere to the following criteria when executing the task:
- Working on the repo(s) in the current environment is allowed, even if they are proprietary.
- Analyzing code for vulnerabilities is allowed.
- Showing user code and tool call details is allowed.
- User instructions may overwrite the *CODING GUIDELINES* section in this developer message.
- If completing the user's task requires writing or modifying files:
- Your code and final answer should follow these *CODING GUIDELINES*:
- Fix the problem at the root cause rather than applying surface-level patches, when possible.
- Avoid unneeded complexity in your solution.
- Ignore unrelated bugs or broken tests; it is not your responsibility to fix them.
- Update documentation as necessary.
- Keep changes consistent with the style of the existing codebase. Changes should be minimal and focused on the task.
- Use "git log" and "git blame" to search the history of the codebase if additional context is required; internet access is disabled.
- NEVER add copyright or license headers unless specifically requested.
- You do not need to "git commit" your changes; this will be done automatically for you.
- Once you finish coding, you must
- Check "git status" to sanity check your changes; revert any scratch files or changes.
- Remove all inline comments you added as much as possible, even if they look normal. Check using "git diff". Inline comments must be generally avoided, unless active maintainers of the repo, after long careful study of the code and the issue, will still misinterpret the code without the comments.
- Check if you accidentally add copyright or license headers. If so, remove them.
- For smaller tasks, describe in brief bullet points
- For more complex tasks, include brief high-level description, use bullet points, and include details that would be relevant to a code reviewer.
- If completing the user's task DOES NOT require writing or modifying files (e.g., the user asks a question about the code base):
- Respond in a friendly tone as a remote teammate who is knowledgeable, capable, and eager to help with coding.
- When your task involves writing or modifying files:
- Do NOT tell the user to "save the file" or "copy the code into a file" if you already created or modified the file using "apply_patch". Instead, reference the file as already saved.
- Do NOT show the full contents of large files you have already written, unless the user explicitly asks for them.
- When doing things with paths, always use the full path; if the working directory is /abc/xyz and you want to edit the file abc.go in the working dir, refer to it as /abc/xyz/abc.go.
- If you send a path not including the working dir, the working dir will be prepended to it.
- Remember the user does not see the full output of tools
`
const baseAnthropicCoderPrompt = `You are OpenCode, an interactive CLI tool that helps users with software engineering tasks. Use the instructions below and the tools available to you to assist the user.
IMPORTANT: Before you begin work, think about what the code you're editing is supposed to do based on the filenames and directory structure.
# Memory
If the current working directory contains a file called OpenCode.md, it will be automatically added to your context. This file serves multiple purposes:
1. Storing frequently used bash commands (build, test, lint, etc.) so you can use them without searching each time
2. Recording the user's code style preferences (naming conventions, preferred libraries, etc.)
3. Maintaining useful information about the codebase structure and organization
When you spend time searching for commands to typecheck, lint, build, or test, you should ask the user if it's okay to add those commands to OpenCode.md. Similarly, when learning about code style preferences or important codebase information, ask if it's okay to add that to OpenCode.md so you can remember it for next time.
# Tone and style
You should be concise, direct, and to the point. When you run a non-trivial bash command, you should explain what the command does and why you are running it, to make sure the user understands what you are doing (this is especially important when you are running a command that will make changes to the user's system).
Remember that your output will be displayed on a command line interface. Your responses can use Github-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
Output text to communicate with the user; all text you output outside of tool use is displayed to the user. Only use tools to complete tasks. Never use tools like Bash or code comments as means to communicate with the user during the session.
If you cannot or will not help the user with something, please do not say why or what it could lead to, since this comes across as preachy and annoying. Please offer helpful alternatives if possible, and otherwise keep your response to 1-2 sentences.
IMPORTANT: You should minimize output tokens as much as possible while maintaining helpfulness, quality, and accuracy. Only address the specific query or task at hand, avoiding tangential information unless absolutely critical for completing the request. If you can answer in 1-3 sentences or a short paragraph, please do.
IMPORTANT: You should NOT answer with unnecessary preamble or postamble (such as explaining your code or summarizing your action), unless the user asks you to.
IMPORTANT: Keep your responses short, since they will be displayed on a command line interface. You MUST answer concisely with fewer than 4 lines (not including tool use or code generation), unless user asks for detail. Answer the user's question directly, without elaboration, explanation, or details. One word answers are best. Avoid introductions, conclusions, and explanations. You MUST avoid text before/after your response, such as "The answer is <answer>.", "Here is the content of the file..." or "Based on the information provided, the answer is..." or "Here is what I will do next...". Here are some examples to demonstrate appropriate verbosity:
<example>
user: 2 + 2
assistant: 4
</example>
<example>
user: what is 2+2?
assistant: 4
</example>
<example>
user: is 11 a prime number?
assistant: true
</example>
<example>
user: what command should I run to list files in the current directory?
assistant: ls
</example>
<example>
user: what command should I run to watch files in the current directory?
assistant: [use the ls tool to list the files in the current directory, then read docs/commands in the relevant file to find out how to watch files]
npm run dev
</example>
<example>
user: How many golf balls fit inside a jetta?
assistant: 150000
</example>
<example>
user: what files are in the directory src/?
assistant: [runs ls and sees foo.c, bar.c, baz.c]
user: which file contains the implementation of foo?
assistant: src/foo.c
</example>
<example>
user: write tests for new feature
assistant: [uses grep and glob search tools to find where similar tests are defined, uses concurrent read file tool use blocks in one tool call to read relevant files at the same time, uses edit/patch file tool to write new tests]
</example>
# Proactiveness
You are allowed to be proactive, but only when the user asks you to do something. You should strive to strike a balance between:
1. Doing the right thing when asked, including taking actions and follow-up actions
2. Not surprising the user with actions you take without asking
For example, if the user asks you how to approach something, you should do your best to answer their question first, and not immediately jump into taking actions.
3. Do not add additional code explanation summary unless requested by the user. After working on a file, just stop, rather than providing an explanation of what you did.
# Following conventions
When making changes to files, first understand the file's code conventions. Mimic code style, use existing libraries and utilities, and follow existing patterns.
- NEVER assume that a given library is available, even if it is well known. Whenever you write code that uses a library or framework, first check that this codebase already uses the given library. For example, you might look at neighboring files, or check the package.json (or cargo.toml, and so on depending on the language).
- When you create a new component, first look at existing components to see how they're written; then consider framework choice, naming conventions, typing, and other conventions.
- When you edit a piece of code, first look at the code's surrounding context (especially its imports) to understand the code's choice of frameworks and libraries. Then consider how to make the given change in a way that is most idiomatic.
- Always follow security best practices. Never introduce code that exposes or logs secrets and keys. Never commit secrets or keys to the repository.
# Code style
- Do not add comments to the code you write, unless the user asks you to, or the code is complex and requires additional context.
# Doing tasks
The user will primarily request you perform software engineering tasks. This includes solving bugs, adding new functionality, refactoring code, explaining code, and more. For these tasks the following steps are recommended:
1. Use the available search tools to understand the codebase and the user's query. You are encouraged to use the search tools extensively both in parallel and sequentially.
2. Implement the solution using all tools available to you
3. Verify the solution with tests if possible. NEVER assume a specific test framework or test script. Check the README or search the codebase to determine the testing approach.
4. VERY IMPORTANT: When you have completed a task, you MUST run the lint and typecheck commands (e.g. npm run lint, npm run typecheck, ruff, etc.) if they were provided to you to ensure your code is correct. If you are unable to find the correct command, ask the user for the command to run, and if they supply it, proactively suggest writing it to opencode.md so that you will know to run it next time.
NEVER commit changes unless the user explicitly asks you to. It is VERY IMPORTANT to only commit when explicitly asked, otherwise the user will feel that you are being too proactive.
# Tool usage policy
- When doing file search, prefer to use the Agent tool in order to reduce context usage.
- If you intend to call multiple tools and there are no dependencies between the calls, make all of the independent calls in the same function_calls block.
- IMPORTANT: The user does not see the full output of the tool responses, so if you need the output of the tool for the response make sure to summarize it for the user.
You MUST answer concisely with fewer than 4 lines of text (not including tool use or code generation), unless the user asks for detail.`
func getEnvironmentInfo() string {
cwd := config.WorkingDirectory()
isGit := isGitRepo(cwd)
platform := runtime.GOOS
date := time.Now().Format("1/2/2006")
ls := tools.NewLsTool()
r, _ := ls.Run(context.Background(), tools.ToolCall{
Input: `{"path":"."}`,
})
return fmt.Sprintf(`Here is useful information about the environment you are running in:
<env>
Working directory: %s
Is directory a git repo: %s
Platform: %s
Today's date: %s
</env>
<project>
%s
</project>
`, cwd, boolToYesNo(isGit), platform, date, r.Content)
}
func isGitRepo(dir string) bool {
_, err := os.Stat(filepath.Join(dir, ".git"))
return err == nil
}
func lspInformation() string {
cfg := config.Get()
hasLSP := false
for _, v := range cfg.LSP {
if !v.Disabled {
hasLSP = true
break
}
}
if !hasLSP {
return ""
}
return `# LSP Information
Tools that support it will also include useful diagnostics such as linting and typechecking.
- These diagnostics will be automatically enabled when you run the tool, and will be displayed in the output at the bottom within the <file_diagnostics></file_diagnostics> and <project_diagnostics></project_diagnostics> tags.
- Take necessary actions to fix the issues.
- You should ignore diagnostics of files that you did not change or are not related or caused by your changes unless the user explicitly asks you to fix them.
`
}
func boolToYesNo(b bool) string {
if b {
return "Yes"
}
return "No"
}
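For orientation, here is a minimal sketch (not part of the original file) of how the helpers above could be stitched into a full system prompt. buildSystemPrompt is a hypothetical name; the concatenation mirrors the fmt.Sprintf pattern used by TaskPrompt further below in this diff.
// Hypothetical helper, shown only to illustrate how the pieces above compose.
func buildSystemPrompt(basePrompt string) string {
out := fmt.Sprintf("%s\n%s\n", basePrompt, getEnvironmentInfo())
if lsp := lspInformation(); lsp != "" {
out += lsp
}
return out
}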

View File

@@ -1,63 +0,0 @@
package prompt
import (
"fmt"
"os"
"path/filepath"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/models"
)
// contextFiles is a list of potential context files to check for
var contextFiles = []string{
".github/copilot-instructions.md",
".cursorrules",
"CLAUDE.md",
"CLAUDE.local.md",
"opencode.md",
"opencode.local.md",
"OpenCode.md",
"OpenCode.local.md",
"OPENCODE.md",
"OPENCODE.local.md",
}
func GetAgentPrompt(agentName config.AgentName, provider models.ModelProvider) string {
basePrompt := ""
switch agentName {
case config.AgentCoder:
basePrompt = CoderPrompt(provider)
case config.AgentTitle:
basePrompt = TitlePrompt(provider)
case config.AgentTask:
basePrompt = TaskPrompt(provider)
default:
basePrompt = "You are a helpful assistant"
}
if agentName == config.AgentCoder || agentName == config.AgentTask {
// Add context from project-specific instruction files if they exist
contextContent := getContextFromFiles()
if contextContent != "" {
return fmt.Sprintf("%s\n\n# Project-Specific Context\n%s", basePrompt, contextContent)
}
}
return basePrompt
}
// getContextFromFiles checks for the existence of context files and returns their content
func getContextFromFiles() string {
workDir := config.WorkingDirectory()
var contextContent string
for _, file := range contextFiles {
filePath := filepath.Join(workDir, file)
content, err := os.ReadFile(filePath)
if err == nil {
contextContent += fmt.Sprintf("\n%s\n", string(content))
}
}
return contextContent
}
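As a usage sketch from a hypothetical caller (imports and error handling omitted; newCoderProvider is an illustrative name), the generated prompt is intended to be passed to a provider as its system message via the options defined in provider.go later in this diff:
func newCoderProvider(apiKey string, m models.Model) (provider.Provider, error) {
// Build the coder prompt, including any project-specific context files found above.
systemPrompt := prompt.GetAgentPrompt(config.AgentCoder, models.ProviderAnthropic)
return provider.NewProvider(
models.ProviderAnthropic,
provider.WithAPIKey(apiKey),
provider.WithModel(m),
provider.WithMaxTokens(8192),
provider.WithSystemMessage(systemPrompt),
)
}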

View File

@@ -1,17 +0,0 @@
package prompt
import (
"fmt"
"github.com/kujtimiihoxha/opencode/internal/llm/models"
)
func TaskPrompt(_ models.ModelProvider) string {
agentPrompt := `You are an agent for OpenCode. Given the user's prompt, you should use the tools available to you to answer the user's question.
Notes:
1. IMPORTANT: You should be concise, direct, and to the point, since your responses will be displayed on a command line interface. Answer the user's question directly, without elaboration, explanation, or details. One word answers are best. Avoid introductions, conclusions, and explanations. You MUST avoid text before/after your response, such as "The answer is <answer>.", "Here is the content of the file..." or "Based on the information provided, the answer is..." or "Here is what I will do next...".
2. When relevant, share file names and code snippets relevant to the query
3. Any file paths you return in your final response MUST be absolute. DO NOT use relative paths.`
return fmt.Sprintf("%s\n%s\n", agentPrompt, getEnvironmentInfo())
}

View File

@@ -1,12 +0,0 @@
package prompt
import "github.com/kujtimiihoxha/opencode/internal/llm/models"
func TitlePrompt(_ models.ModelProvider) string {
return `you will generate a short title based on the first message a user begins a conversation with
- ensure it is not more than 50 characters long
- the title should be a summary of the user's message
- it should be one line long
- do not use quotes or colons
- the entire text you return will be used as the title`
}

View File

@@ -1,455 +0,0 @@
package provider
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"strings"
"time"
"github.com/anthropics/anthropic-sdk-go"
"github.com/anthropics/anthropic-sdk-go/bedrock"
"github.com/anthropics/anthropic-sdk-go/option"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/message"
)
type anthropicOptions struct {
useBedrock bool
disableCache bool
shouldThink func(userMessage string) bool
}
type AnthropicOption func(*anthropicOptions)
type anthropicClient struct {
providerOptions providerClientOptions
options anthropicOptions
client anthropic.Client
}
type AnthropicClient ProviderClient
func newAnthropicClient(opts providerClientOptions) AnthropicClient {
anthropicOpts := anthropicOptions{}
for _, o := range opts.anthropicOptions {
o(&anthropicOpts)
}
anthropicClientOptions := []option.RequestOption{}
if opts.apiKey != "" {
anthropicClientOptions = append(anthropicClientOptions, option.WithAPIKey(opts.apiKey))
}
if anthropicOpts.useBedrock {
anthropicClientOptions = append(anthropicClientOptions, bedrock.WithLoadDefaultConfig(context.Background()))
}
client := anthropic.NewClient(anthropicClientOptions...)
return &anthropicClient{
providerOptions: opts,
options: anthropicOpts,
client: client,
}
}
func (a *anthropicClient) convertMessages(messages []message.Message) (anthropicMessages []anthropic.MessageParam) {
for i, msg := range messages {
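// Mark only the last two messages in the conversation for ephemeral prompt caching.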
cache := false
if i > len(messages)-3 {
cache = true
}
switch msg.Role {
case message.User:
content := anthropic.NewTextBlock(msg.Content().String())
if cache && !a.options.disableCache {
content.OfRequestTextBlock.CacheControl = anthropic.CacheControlEphemeralParam{
Type: "ephemeral",
}
}
anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(content))
case message.Assistant:
blocks := []anthropic.ContentBlockParamUnion{}
if msg.Content().String() != "" {
content := anthropic.NewTextBlock(msg.Content().String())
if cache && !a.options.disableCache {
content.OfRequestTextBlock.CacheControl = anthropic.CacheControlEphemeralParam{
Type: "ephemeral",
}
}
blocks = append(blocks, content)
}
for _, toolCall := range msg.ToolCalls() {
var inputMap map[string]any
err := json.Unmarshal([]byte(toolCall.Input), &inputMap)
if err != nil {
continue
}
blocks = append(blocks, anthropic.ContentBlockParamOfRequestToolUseBlock(toolCall.ID, inputMap, toolCall.Name))
}
if len(blocks) == 0 {
logging.Warn("There is a message without content, investigate, this should not happen")
continue
}
anthropicMessages = append(anthropicMessages, anthropic.NewAssistantMessage(blocks...))
case message.Tool:
results := make([]anthropic.ContentBlockParamUnion, len(msg.ToolResults()))
for i, toolResult := range msg.ToolResults() {
results[i] = anthropic.NewToolResultBlock(toolResult.ToolCallID, toolResult.Content, toolResult.IsError)
}
anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(results...))
}
}
return
}
func (a *anthropicClient) convertTools(tools []tools.BaseTool) []anthropic.ToolUnionParam {
anthropicTools := make([]anthropic.ToolUnionParam, len(tools))
for i, tool := range tools {
info := tool.Info()
toolParam := anthropic.ToolParam{
Name: info.Name,
Description: anthropic.String(info.Description),
InputSchema: anthropic.ToolInputSchemaParam{
Properties: info.Parameters,
// TODO: figure out how we can tell claude the required fields?
},
}
if i == len(tools)-1 && !a.options.disableCache {
toolParam.CacheControl = anthropic.CacheControlEphemeralParam{
Type: "ephemeral",
}
}
anthropicTools[i] = anthropic.ToolUnionParam{OfTool: &toolParam}
}
return anthropicTools
}
func (a *anthropicClient) finishReason(reason string) message.FinishReason {
switch reason {
case "end_turn":
return message.FinishReasonEndTurn
case "max_tokens":
return message.FinishReasonMaxTokens
case "tool_use":
return message.FinishReasonToolUse
case "stop_sequence":
return message.FinishReasonEndTurn
default:
return message.FinishReasonUnknown
}
}
func (a *anthropicClient) preparedMessages(messages []anthropic.MessageParam, tools []anthropic.ToolUnionParam) anthropic.MessageNewParams {
var thinkingParam anthropic.ThinkingConfigParamUnion
lastMessage := messages[len(messages)-1]
isUser := lastMessage.Role == anthropic.MessageParamRoleUser
messageContent := ""
temperature := anthropic.Float(0)
if isUser {
for _, m := range lastMessage.Content {
if m.OfRequestTextBlock != nil && m.OfRequestTextBlock.Text != "" {
messageContent = m.OfRequestTextBlock.Text
}
}
if messageContent != "" && a.options.shouldThink != nil && a.options.shouldThink(messageContent) {
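// Extended thinking is given 80% of the max-token budget; temperature is set to 1 while thinking is enabled.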
thinkingParam = anthropic.ThinkingConfigParamUnion{
OfThinkingConfigEnabled: &anthropic.ThinkingConfigEnabledParam{
BudgetTokens: int64(float64(a.providerOptions.maxTokens) * 0.8),
Type: "enabled",
},
}
temperature = anthropic.Float(1)
}
}
return anthropic.MessageNewParams{
Model: anthropic.Model(a.providerOptions.model.APIModel),
MaxTokens: a.providerOptions.maxTokens,
Temperature: temperature,
Messages: messages,
Tools: tools,
Thinking: thinkingParam,
System: []anthropic.TextBlockParam{
{
Text: a.providerOptions.systemMessage,
CacheControl: anthropic.CacheControlEphemeralParam{
Type: "ephemeral",
},
},
},
}
}
func (a *anthropicClient) send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (response *ProviderResponse, err error) {
preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
cfg := config.Get()
if cfg.Debug {
// jsonData, _ := json.Marshal(preparedMessages)
// logging.Debug("Prepared messages", "messages", string(jsonData))
}
attempts := 0
for {
attempts++
anthropicResponse, err := a.client.Messages.New(
ctx,
preparedMessages,
)
// If there is an error we are going to see if we can retry the call
if err != nil {
retry, after, retryErr := a.shouldRetry(attempts, err)
if retryErr != nil {
return nil, retryErr
}
if retry {
logging.WarnPersist("Retrying due to rate limit... attempt %d of %d", logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
select {
case <-ctx.Done():
return nil, ctx.Err()
case <-time.After(time.Duration(after) * time.Millisecond):
continue
}
}
return nil, retryErr
}
content := ""
for _, block := range anthropicResponse.Content {
if text, ok := block.AsAny().(anthropic.TextBlock); ok {
content += text.Text
}
}
return &ProviderResponse{
Content: content,
ToolCalls: a.toolCalls(*anthropicResponse),
Usage: a.usage(*anthropicResponse),
}, nil
}
}
func (a *anthropicClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
cfg := config.Get()
if cfg.Debug {
// jsonData, _ := json.Marshal(preparedMessages)
// logging.Debug("Prepared messages", "messages", string(jsonData))
}
attempts := 0
eventChan := make(chan ProviderEvent)
go func() {
for {
attempts++
anthropicStream := a.client.Messages.NewStreaming(
ctx,
preparedMessages,
)
accumulatedMessage := anthropic.Message{}
currentToolCallID := ""
for anthropicStream.Next() {
event := anthropicStream.Current()
err := accumulatedMessage.Accumulate(event)
if err != nil {
eventChan <- ProviderEvent{Type: EventError, Error: err}
continue
}
switch event := event.AsAny().(type) {
case anthropic.ContentBlockStartEvent:
if event.ContentBlock.Type == "text" {
eventChan <- ProviderEvent{Type: EventContentStart}
} else if event.ContentBlock.Type == "tool_use" {
currentToolCallID = event.ContentBlock.ID
eventChan <- ProviderEvent{
Type: EventToolUseStart,
ToolCall: &message.ToolCall{
ID: event.ContentBlock.ID,
Name: event.ContentBlock.Name,
Finished: false,
},
}
}
case anthropic.ContentBlockDeltaEvent:
if event.Delta.Type == "thinking_delta" && event.Delta.Thinking != "" {
eventChan <- ProviderEvent{
Type: EventThinkingDelta,
Thinking: event.Delta.Thinking,
}
} else if event.Delta.Type == "text_delta" && event.Delta.Text != "" {
eventChan <- ProviderEvent{
Type: EventContentDelta,
Content: event.Delta.Text,
}
} else if event.Delta.Type == "input_json_delta" {
if currentToolCallID != "" {
eventChan <- ProviderEvent{
Type: EventToolUseDelta,
ToolCall: &message.ToolCall{
ID: currentToolCallID,
Finished: false,
Input: event.Delta.JSON.PartialJSON.Raw(),
},
}
}
}
case anthropic.ContentBlockStopEvent:
if currentToolCallID != "" {
eventChan <- ProviderEvent{
Type: EventToolUseStop,
ToolCall: &message.ToolCall{
ID: currentToolCallID,
},
}
currentToolCallID = ""
} else {
eventChan <- ProviderEvent{Type: EventContentStop}
}
case anthropic.MessageStopEvent:
content := ""
for _, block := range accumulatedMessage.Content {
if text, ok := block.AsAny().(anthropic.TextBlock); ok {
content += text.Text
}
}
eventChan <- ProviderEvent{
Type: EventComplete,
Response: &ProviderResponse{
Content: content,
ToolCalls: a.toolCalls(accumulatedMessage),
Usage: a.usage(accumulatedMessage),
FinishReason: a.finishReason(string(accumulatedMessage.StopReason)),
},
}
}
}
err := anthropicStream.Err()
if err == nil || errors.Is(err, io.EOF) {
close(eventChan)
return
}
// If there is an error we are going to see if we can retry the call
retry, after, retryErr := a.shouldRetry(attempts, err)
if retryErr != nil {
eventChan <- ProviderEvent{Type: EventError, Error: retryErr}
close(eventChan)
return
}
if retry {
logging.WarnPersist("Retrying due to rate limit... attempt %d of %d", logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
select {
case <-ctx.Done():
// context cancelled
if ctx.Err() != nil {
eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
}
close(eventChan)
return
case <-time.After(time.Duration(after) * time.Millisecond):
continue
}
}
if ctx.Err() != nil {
eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
}
close(eventChan)
return
}
}()
return eventChan
}
func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, error) {
var apierr *anthropic.Error
if !errors.As(err, &apierr) {
return false, 0, err
}
if apierr.StatusCode != 429 && apierr.StatusCode != 529 {
return false, 0, err
}
if attempts > maxRetries {
return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
}
retryMs := 0
retryAfterValues := apierr.Response.Header.Values("Retry-After")
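// Exponential backoff: 2s, 4s, 8s, ... doubled each attempt plus a fixed 20% margin; a parseable Retry-After header (in seconds) overrides the computed delay.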
backoffMs := 2000 * (1 << (attempts - 1))
jitterMs := int(float64(backoffMs) * 0.2)
retryMs = backoffMs + jitterMs
if len(retryAfterValues) > 0 {
if _, err := fmt.Sscanf(retryAfterValues[0], "%d", &retryMs); err == nil {
retryMs = retryMs * 1000
}
}
return true, int64(retryMs), nil
}
func (a *anthropicClient) toolCalls(msg anthropic.Message) []message.ToolCall {
var toolCalls []message.ToolCall
for _, block := range msg.Content {
switch variant := block.AsAny().(type) {
case anthropic.ToolUseBlock:
toolCall := message.ToolCall{
ID: variant.ID,
Name: variant.Name,
Input: string(variant.Input),
Type: string(variant.Type),
Finished: true,
}
toolCalls = append(toolCalls, toolCall)
}
}
return toolCalls
}
func (a *anthropicClient) usage(msg anthropic.Message) TokenUsage {
return TokenUsage{
InputTokens: msg.Usage.InputTokens,
OutputTokens: msg.Usage.OutputTokens,
CacheCreationTokens: msg.Usage.CacheCreationInputTokens,
CacheReadTokens: msg.Usage.CacheReadInputTokens,
}
}
func WithAnthropicBedrock(useBedrock bool) AnthropicOption {
return func(options *anthropicOptions) {
options.useBedrock = useBedrock
}
}
func WithAnthropicDisableCache() AnthropicOption {
return func(options *anthropicOptions) {
options.disableCache = true
}
}
func DefaultShouldThinkFn(s string) bool {
return strings.Contains(strings.ToLower(s), "think")
}
func WithAnthropicShouldThinkFn(fn func(string) bool) AnthropicOption {
return func(options *anthropicOptions) {
options.shouldThink = fn
}
}

View File

@@ -1,100 +0,0 @@
package provider
import (
"context"
"errors"
"fmt"
"os"
"strings"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
"github.com/kujtimiihoxha/opencode/internal/message"
)
type bedrockOptions struct {
// Bedrock specific options can be added here
}
type BedrockOption func(*bedrockOptions)
type bedrockClient struct {
providerOptions providerClientOptions
options bedrockOptions
childProvider ProviderClient
}
type BedrockClient ProviderClient
func newBedrockClient(opts providerClientOptions) BedrockClient {
bedrockOpts := bedrockOptions{}
// Apply bedrock specific options if they are added in the future
// Get AWS region from environment
region := os.Getenv("AWS_REGION")
if region == "" {
region = os.Getenv("AWS_DEFAULT_REGION")
}
if region == "" {
region = "us-east-1" // default region
}
if len(region) < 2 {
return &bedrockClient{
providerOptions: opts,
options: bedrockOpts,
childProvider: nil, // Will cause an error when used
}
}
// Prefix the model name with region
regionPrefix := region[:2]
modelName := opts.model.APIModel
opts.model.APIModel = fmt.Sprintf("%s.%s", regionPrefix, modelName)
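// e.g. region "us-east-1" yields prefix "us", turning "anthropic.claude-..." into "us.anthropic.claude-...", the form Bedrock inference profiles expect.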
// Determine which provider to use based on the model
if strings.Contains(string(opts.model.APIModel), "anthropic") {
// Create Anthropic client with Bedrock configuration
anthropicOpts := opts
anthropicOpts.anthropicOptions = append(anthropicOpts.anthropicOptions,
WithAnthropicBedrock(true),
WithAnthropicDisableCache(),
)
return &bedrockClient{
providerOptions: opts,
options: bedrockOpts,
childProvider: newAnthropicClient(anthropicOpts),
}
}
// Return client with nil childProvider if model is not supported
// This will cause an error when used
return &bedrockClient{
providerOptions: opts,
options: bedrockOpts,
childProvider: nil,
}
}
func (b *bedrockClient) send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error) {
if b.childProvider == nil {
return nil, errors.New("unsupported model for bedrock provider")
}
return b.childProvider.send(ctx, messages, tools)
}
func (b *bedrockClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
eventChan := make(chan ProviderEvent)
if b.childProvider == nil {
go func() {
eventChan <- ProviderEvent{
Type: EventError,
Error: errors.New("unsupported model for bedrock provider"),
}
close(eventChan)
}()
return eventChan
}
return b.childProvider.stream(ctx, messages, tools)
}

View File

@@ -1,569 +0,0 @@
package provider
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"strings"
"time"
"github.com/google/generative-ai-go/genai"
"github.com/google/uuid"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/message"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
)
type geminiOptions struct {
disableCache bool
}
type GeminiOption func(*geminiOptions)
type geminiClient struct {
providerOptions providerClientOptions
options geminiOptions
client *genai.Client
}
type GeminiClient ProviderClient
func newGeminiClient(opts providerClientOptions) GeminiClient {
geminiOpts := geminiOptions{}
for _, o := range opts.geminiOptions {
o(&geminiOpts)
}
client, err := genai.NewClient(context.Background(), option.WithAPIKey(opts.apiKey))
if err != nil {
logging.Error("Failed to create Gemini client", "error", err)
return nil
}
return &geminiClient{
providerOptions: opts,
options: geminiOpts,
client: client,
}
}
func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Content {
var history []*genai.Content
// Add system message first
history = append(history, &genai.Content{
Parts: []genai.Part{genai.Text(g.providerOptions.systemMessage)},
Role: "user",
})
// Add a system response to acknowledge the system message
history = append(history, &genai.Content{
Parts: []genai.Part{genai.Text("I'll help you with that.")},
Role: "model",
})
for _, msg := range messages {
switch msg.Role {
case message.User:
history = append(history, &genai.Content{
Parts: []genai.Part{genai.Text(msg.Content().String())},
Role: "user",
})
case message.Assistant:
content := &genai.Content{
Role: "model",
Parts: []genai.Part{},
}
if msg.Content().String() != "" {
content.Parts = append(content.Parts, genai.Text(msg.Content().String()))
}
if len(msg.ToolCalls()) > 0 {
for _, call := range msg.ToolCalls() {
args, _ := parseJsonToMap(call.Input)
content.Parts = append(content.Parts, genai.FunctionCall{
Name: call.Name,
Args: args,
})
}
}
history = append(history, content)
case message.Tool:
for _, result := range msg.ToolResults() {
response := map[string]interface{}{"result": result.Content}
parsed, err := parseJsonToMap(result.Content)
if err == nil {
response = parsed
}
var toolCall message.ToolCall
for _, m := range messages {
if m.Role == message.Assistant {
for _, call := range m.ToolCalls() {
if call.ID == result.ToolCallID {
toolCall = call
break
}
}
}
}
history = append(history, &genai.Content{
Parts: []genai.Part{genai.FunctionResponse{
Name: toolCall.Name,
Response: response,
}},
Role: "function",
})
}
}
}
return history
}
func (g *geminiClient) convertTools(tools []tools.BaseTool) []*genai.Tool {
geminiTools := make([]*genai.Tool, 0, len(tools))
for _, tool := range tools {
info := tool.Info()
declaration := &genai.FunctionDeclaration{
Name: info.Name,
Description: info.Description,
Parameters: &genai.Schema{
Type: genai.TypeObject,
Properties: convertSchemaProperties(info.Parameters),
Required: info.Required,
},
}
geminiTools = append(geminiTools, &genai.Tool{
FunctionDeclarations: []*genai.FunctionDeclaration{declaration},
})
}
return geminiTools
}
func (g *geminiClient) finishReason(reason genai.FinishReason) message.FinishReason {
reasonStr := reason.String()
switch {
case reasonStr == "STOP":
return message.FinishReasonEndTurn
case reasonStr == "MAX_TOKENS":
return message.FinishReasonMaxTokens
case strings.Contains(reasonStr, "FUNCTION") || strings.Contains(reasonStr, "TOOL"):
return message.FinishReasonToolUse
default:
return message.FinishReasonUnknown
}
}
func (g *geminiClient) send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error) {
model := g.client.GenerativeModel(g.providerOptions.model.APIModel)
model.SetMaxOutputTokens(int32(g.providerOptions.maxTokens))
// Convert tools
if len(tools) > 0 {
model.Tools = g.convertTools(tools)
}
// Convert messages
geminiMessages := g.convertMessages(messages)
cfg := config.Get()
if cfg.Debug {
jsonData, _ := json.Marshal(geminiMessages)
logging.Debug("Prepared messages", "messages", string(jsonData))
}
attempts := 0
for {
attempts++
chat := model.StartChat()
chat.History = geminiMessages[:len(geminiMessages)-1] // All but last message
lastMsg := geminiMessages[len(geminiMessages)-1]
var lastText string
for _, part := range lastMsg.Parts {
if text, ok := part.(genai.Text); ok {
lastText = string(text)
break
}
}
resp, err := chat.SendMessage(ctx, genai.Text(lastText))
// If there is an error we are going to see if we can retry the call
if err != nil {
retry, after, retryErr := g.shouldRetry(attempts, err)
if retryErr != nil {
return nil, retryErr
}
if retry {
logging.WarnPersist("Retrying due to rate limit... attempt %d of %d", logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
select {
case <-ctx.Done():
return nil, ctx.Err()
case <-time.After(time.Duration(after) * time.Millisecond):
continue
}
}
return nil, retryErr
}
content := ""
var toolCalls []message.ToolCall
if len(resp.Candidates) > 0 && resp.Candidates[0].Content != nil {
for _, part := range resp.Candidates[0].Content.Parts {
switch p := part.(type) {
case genai.Text:
content = string(p)
case genai.FunctionCall:
id := "call_" + uuid.New().String()
args, _ := json.Marshal(p.Args)
toolCalls = append(toolCalls, message.ToolCall{
ID: id,
Name: p.Name,
Input: string(args),
Type: "function",
})
}
}
}
return &ProviderResponse{
Content: content,
ToolCalls: toolCalls,
Usage: g.usage(resp),
FinishReason: g.finishReason(resp.Candidates[0].FinishReason),
}, nil
}
}
func (g *geminiClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
model := g.client.GenerativeModel(g.providerOptions.model.APIModel)
model.SetMaxOutputTokens(int32(g.providerOptions.maxTokens))
// Convert tools
if len(tools) > 0 {
model.Tools = g.convertTools(tools)
}
// Convert messages
geminiMessages := g.convertMessages(messages)
cfg := config.Get()
if cfg.Debug {
jsonData, _ := json.Marshal(geminiMessages)
logging.Debug("Prepared messages", "messages", string(jsonData))
}
attempts := 0
eventChan := make(chan ProviderEvent)
go func() {
defer close(eventChan)
retryLoop:
for {
attempts++
chat := model.StartChat()
chat.History = geminiMessages[:len(geminiMessages)-1] // All but last message
lastMsg := geminiMessages[len(geminiMessages)-1]
var lastText string
for _, part := range lastMsg.Parts {
if text, ok := part.(genai.Text); ok {
lastText = string(text)
break
}
}
iter := chat.SendMessageStream(ctx, genai.Text(lastText))
currentContent := ""
toolCalls := []message.ToolCall{}
var finalResp *genai.GenerateContentResponse
eventChan <- ProviderEvent{Type: EventContentStart}
for {
resp, err := iter.Next()
if err == iterator.Done {
break
}
if err != nil {
retry, after, retryErr := g.shouldRetry(attempts, err)
if retryErr != nil {
eventChan <- ProviderEvent{Type: EventError, Error: retryErr}
return
}
if retry {
logging.WarnPersist("Retrying due to rate limit... attempt %d of %d", logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
select {
case <-ctx.Done():
if ctx.Err() != nil {
eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
}
return
case <-time.After(time.Duration(after) * time.Millisecond):
// Restart the whole streaming attempt from the top of the retry loop instead of
// falling through with a nil response.
continue retryLoop
}
} else {
eventChan <- ProviderEvent{Type: EventError, Error: err}
return
}
}
finalResp = resp
if len(resp.Candidates) > 0 && resp.Candidates[0].Content != nil {
for _, part := range resp.Candidates[0].Content.Parts {
switch p := part.(type) {
case genai.Text:
newText := string(p)
delta := newText[len(currentContent):]
if delta != "" {
eventChan <- ProviderEvent{
Type: EventContentDelta,
Content: delta,
}
currentContent = newText
}
case genai.FunctionCall:
id := "call_" + uuid.New().String()
args, _ := json.Marshal(p.Args)
newCall := message.ToolCall{
ID: id,
Name: p.Name,
Input: string(args),
Type: "function",
}
isNew := true
for _, existing := range toolCalls {
if existing.Name == newCall.Name && existing.Input == newCall.Input {
isNew = false
break
}
}
if isNew {
toolCalls = append(toolCalls, newCall)
}
}
}
}
}
eventChan <- ProviderEvent{Type: EventContentStop}
if finalResp != nil {
eventChan <- ProviderEvent{
Type: EventComplete,
Response: &ProviderResponse{
Content: currentContent,
ToolCalls: toolCalls,
Usage: g.usage(finalResp),
FinishReason: g.finishReason(finalResp.Candidates[0].FinishReason),
},
}
return
}
// If we get here, we need to retry
if attempts > maxRetries {
eventChan <- ProviderEvent{
Type: EventError,
Error: fmt.Errorf("maximum retry attempts reached: %d retries", maxRetries),
}
return
}
// Wait before retrying
select {
case <-ctx.Done():
if ctx.Err() != nil {
eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
}
return
case <-time.After(time.Duration(2000*(1<<(attempts-1))) * time.Millisecond):
continue
}
}
}()
return eventChan
}
func (g *geminiClient) shouldRetry(attempts int, err error) (bool, int64, error) {
// Check if error is a rate limit error
if attempts > maxRetries {
return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
}
// Gemini doesn't have a standard error type we can check against
// So we'll check the error message for rate limit indicators
if errors.Is(err, io.EOF) {
return false, 0, err
}
errMsg := err.Error()
isRateLimit := false
// Check for common rate limit error messages
if contains(errMsg, "rate limit", "quota exceeded", "too many requests") {
isRateLimit = true
}
if !isRateLimit {
return false, 0, err
}
// Calculate backoff with jitter
backoffMs := 2000 * (1 << (attempts - 1))
jitterMs := int(float64(backoffMs) * 0.2)
retryMs := backoffMs + jitterMs
return true, int64(retryMs), nil
}
func (g *geminiClient) toolCalls(resp *genai.GenerateContentResponse) []message.ToolCall {
var toolCalls []message.ToolCall
if len(resp.Candidates) > 0 && resp.Candidates[0].Content != nil {
for _, part := range resp.Candidates[0].Content.Parts {
if funcCall, ok := part.(genai.FunctionCall); ok {
id := "call_" + uuid.New().String()
args, _ := json.Marshal(funcCall.Args)
toolCalls = append(toolCalls, message.ToolCall{
ID: id,
Name: funcCall.Name,
Input: string(args),
Type: "function",
})
}
}
}
return toolCalls
}
func (g *geminiClient) usage(resp *genai.GenerateContentResponse) TokenUsage {
if resp == nil || resp.UsageMetadata == nil {
return TokenUsage{}
}
return TokenUsage{
InputTokens: int64(resp.UsageMetadata.PromptTokenCount),
OutputTokens: int64(resp.UsageMetadata.CandidatesTokenCount),
CacheCreationTokens: 0, // Not directly provided by Gemini
CacheReadTokens: int64(resp.UsageMetadata.CachedContentTokenCount),
}
}
func WithGeminiDisableCache() GeminiOption {
return func(options *geminiOptions) {
options.disableCache = true
}
}
// Helper functions
func parseJsonToMap(jsonStr string) (map[string]interface{}, error) {
var result map[string]interface{}
err := json.Unmarshal([]byte(jsonStr), &result)
return result, err
}
func convertSchemaProperties(parameters map[string]interface{}) map[string]*genai.Schema {
properties := make(map[string]*genai.Schema)
for name, param := range parameters {
properties[name] = convertToSchema(param)
}
return properties
}
func convertToSchema(param interface{}) *genai.Schema {
schema := &genai.Schema{Type: genai.TypeString}
paramMap, ok := param.(map[string]interface{})
if !ok {
return schema
}
if desc, ok := paramMap["description"].(string); ok {
schema.Description = desc
}
typeVal, hasType := paramMap["type"]
if !hasType {
return schema
}
typeStr, ok := typeVal.(string)
if !ok {
return schema
}
schema.Type = mapJSONTypeToGenAI(typeStr)
switch typeStr {
case "array":
schema.Items = processArrayItems(paramMap)
case "object":
if props, ok := paramMap["properties"].(map[string]interface{}); ok {
schema.Properties = convertSchemaProperties(props)
}
}
return schema
}
func processArrayItems(paramMap map[string]interface{}) *genai.Schema {
items, ok := paramMap["items"].(map[string]interface{})
if !ok {
return nil
}
return convertToSchema(items)
}
func mapJSONTypeToGenAI(jsonType string) genai.Type {
switch jsonType {
case "string":
return genai.TypeString
case "number":
return genai.TypeNumber
case "integer":
return genai.TypeInteger
case "boolean":
return genai.TypeBoolean
case "array":
return genai.TypeArray
case "object":
return genai.TypeObject
default:
return genai.TypeString // Default to string for unknown types
}
}
func contains(s string, substrs ...string) bool {
for _, substr := range substrs {
if strings.Contains(strings.ToLower(s), strings.ToLower(substr)) {
return true
}
}
return false
}
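For concreteness, a small illustrative function (not in the original file) showing the parameter-map shape convertSchemaProperties expects; the map below mirrors the bash tool's Info() parameters later in this diff:
func exampleSchemaConversion() map[string]*genai.Schema {
params := map[string]interface{}{
"command": map[string]interface{}{
"type":        "string",
"description": "The command to execute",
},
"timeout": map[string]interface{}{
"type":        "number",
"description": "Optional timeout in milliseconds (max 600000)",
},
}
// "command" maps to genai.TypeString and "timeout" to genai.TypeNumber.
return convertSchemaProperties(params)
}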

View File

@@ -1,395 +0,0 @@
package provider
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/message"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/openai/openai-go/shared"
)
type openaiOptions struct {
baseURL string
disableCache bool
reasoningEffort string
}
type OpenAIOption func(*openaiOptions)
type openaiClient struct {
providerOptions providerClientOptions
options openaiOptions
client openai.Client
}
type OpenAIClient ProviderClient
func newOpenAIClient(opts providerClientOptions) OpenAIClient {
openaiOpts := openaiOptions{
reasoningEffort: "medium",
}
for _, o := range opts.openaiOptions {
o(&openaiOpts)
}
openaiClientOptions := []option.RequestOption{}
if opts.apiKey != "" {
openaiClientOptions = append(openaiClientOptions, option.WithAPIKey(opts.apiKey))
}
if openaiOpts.baseURL != "" {
openaiClientOptions = append(openaiClientOptions, option.WithBaseURL(openaiOpts.baseURL))
}
client := openai.NewClient(openaiClientOptions...)
return &openaiClient{
providerOptions: opts,
options: openaiOpts,
client: client,
}
}
func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessages []openai.ChatCompletionMessageParamUnion) {
// Add system message first
openaiMessages = append(openaiMessages, openai.SystemMessage(o.providerOptions.systemMessage))
for _, msg := range messages {
switch msg.Role {
case message.User:
openaiMessages = append(openaiMessages, openai.UserMessage(msg.Content().String()))
case message.Assistant:
assistantMsg := openai.ChatCompletionAssistantMessageParam{
Role: "assistant",
}
if msg.Content().String() != "" {
assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
OfString: openai.String(msg.Content().String()),
}
}
if len(msg.ToolCalls()) > 0 {
assistantMsg.ToolCalls = make([]openai.ChatCompletionMessageToolCallParam, len(msg.ToolCalls()))
for i, call := range msg.ToolCalls() {
assistantMsg.ToolCalls[i] = openai.ChatCompletionMessageToolCallParam{
ID: call.ID,
Type: "function",
Function: openai.ChatCompletionMessageToolCallFunctionParam{
Name: call.Name,
Arguments: call.Input,
},
}
}
}
openaiMessages = append(openaiMessages, openai.ChatCompletionMessageParamUnion{
OfAssistant: &assistantMsg,
})
case message.Tool:
for _, result := range msg.ToolResults() {
openaiMessages = append(openaiMessages,
openai.ToolMessage(result.Content, result.ToolCallID),
)
}
}
}
return
}
func (o *openaiClient) convertTools(tools []tools.BaseTool) []openai.ChatCompletionToolParam {
openaiTools := make([]openai.ChatCompletionToolParam, len(tools))
for i, tool := range tools {
info := tool.Info()
openaiTools[i] = openai.ChatCompletionToolParam{
Function: openai.FunctionDefinitionParam{
Name: info.Name,
Description: openai.String(info.Description),
Parameters: openai.FunctionParameters{
"type": "object",
"properties": info.Parameters,
"required": info.Required,
},
},
}
}
return openaiTools
}
func (o *openaiClient) finishReason(reason string) message.FinishReason {
switch reason {
case "stop":
return message.FinishReasonEndTurn
case "length":
return message.FinishReasonMaxTokens
case "tool_calls":
return message.FinishReasonToolUse
default:
return message.FinishReasonUnknown
}
}
func (o *openaiClient) preparedParams(messages []openai.ChatCompletionMessageParamUnion, tools []openai.ChatCompletionToolParam) openai.ChatCompletionNewParams {
params := openai.ChatCompletionNewParams{
Model: openai.ChatModel(o.providerOptions.model.APIModel),
Messages: messages,
Tools: tools,
}
if o.providerOptions.model.CanReason {
params.MaxCompletionTokens = openai.Int(o.providerOptions.maxTokens)
switch o.options.reasoningEffort {
case "low":
params.ReasoningEffort = shared.ReasoningEffortLow
case "medium":
params.ReasoningEffort = shared.ReasoningEffortMedium
case "high":
params.ReasoningEffort = shared.ReasoningEffortHigh
default:
params.ReasoningEffort = shared.ReasoningEffortMedium
}
} else {
params.MaxTokens = openai.Int(o.providerOptions.maxTokens)
}
return params
}
func (o *openaiClient) send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (response *ProviderResponse, err error) {
params := o.preparedParams(o.convertMessages(messages), o.convertTools(tools))
cfg := config.Get()
if cfg.Debug {
jsonData, _ := json.Marshal(params)
logging.Debug("Prepared messages", "messages", string(jsonData))
}
attempts := 0
for {
attempts++
openaiResponse, err := o.client.Chat.Completions.New(
ctx,
params,
)
// If there is an error we are going to see if we can retry the call
if err != nil {
retry, after, retryErr := o.shouldRetry(attempts, err)
if retryErr != nil {
return nil, retryErr
}
if retry {
logging.WarnPersist("Retrying due to rate limit... attempt %d of %d", logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
select {
case <-ctx.Done():
return nil, ctx.Err()
case <-time.After(time.Duration(after) * time.Millisecond):
continue
}
}
return nil, retryErr
}
content := ""
if openaiResponse.Choices[0].Message.Content != "" {
content = openaiResponse.Choices[0].Message.Content
}
return &ProviderResponse{
Content: content,
ToolCalls: o.toolCalls(*openaiResponse),
Usage: o.usage(*openaiResponse),
FinishReason: o.finishReason(string(openaiResponse.Choices[0].FinishReason)),
}, nil
}
}
func (o *openaiClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
params := o.preparedParams(o.convertMessages(messages), o.convertTools(tools))
params.StreamOptions = openai.ChatCompletionStreamOptionsParam{
IncludeUsage: openai.Bool(true),
}
cfg := config.Get()
if cfg.Debug {
jsonData, _ := json.Marshal(params)
logging.Debug("Prepared messages", "messages", string(jsonData))
}
attempts := 0
eventChan := make(chan ProviderEvent)
go func() {
for {
attempts++
openaiStream := o.client.Chat.Completions.NewStreaming(
ctx,
params,
)
acc := openai.ChatCompletionAccumulator{}
currentContent := ""
toolCalls := make([]message.ToolCall, 0)
for openaiStream.Next() {
chunk := openaiStream.Current()
acc.AddChunk(chunk)
if tool, ok := acc.JustFinishedToolCall(); ok {
toolCalls = append(toolCalls, message.ToolCall{
ID: tool.Id,
Name: tool.Name,
Input: tool.Arguments,
Type: "function",
})
}
for _, choice := range chunk.Choices {
if choice.Delta.Content != "" {
eventChan <- ProviderEvent{
Type: EventContentDelta,
Content: choice.Delta.Content,
}
currentContent += choice.Delta.Content
}
}
}
err := openaiStream.Err()
if err == nil || errors.Is(err, io.EOF) {
// Stream completed successfully
eventChan <- ProviderEvent{
Type: EventComplete,
Response: &ProviderResponse{
Content: currentContent,
ToolCalls: toolCalls,
Usage: o.usage(acc.ChatCompletion),
FinishReason: o.finishReason(string(acc.ChatCompletion.Choices[0].FinishReason)),
},
}
close(eventChan)
return
}
// If there is an error we are going to see if we can retry the call
retry, after, retryErr := o.shouldRetry(attempts, err)
if retryErr != nil {
eventChan <- ProviderEvent{Type: EventError, Error: retryErr}
close(eventChan)
return
}
if retry {
logging.WarnPersist("Retrying due to rate limit... attempt %d of %d", logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
select {
case <-ctx.Done():
// context cancelled
if ctx.Err() != nil {
eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
}
close(eventChan)
return
case <-time.After(time.Duration(after) * time.Millisecond):
continue
}
}
eventChan <- ProviderEvent{Type: EventError, Error: retryErr}
close(eventChan)
return
}
}()
return eventChan
}
func (o *openaiClient) shouldRetry(attempts int, err error) (bool, int64, error) {
var apierr *openai.Error
if !errors.As(err, &apierr) {
return false, 0, err
}
if apierr.StatusCode != 429 && apierr.StatusCode != 500 {
return false, 0, err
}
if attempts > maxRetries {
return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
}
retryMs := 0
retryAfterValues := apierr.Response.Header.Values("Retry-After")
backoffMs := 2000 * (1 << (attempts - 1))
jitterMs := int(float64(backoffMs) * 0.2)
retryMs = backoffMs + jitterMs
if len(retryAfterValues) > 0 {
if _, err := fmt.Sscanf(retryAfterValues[0], "%d", &retryMs); err == nil {
retryMs = retryMs * 1000
}
}
return true, int64(retryMs), nil
}
func (o *openaiClient) toolCalls(completion openai.ChatCompletion) []message.ToolCall {
var toolCalls []message.ToolCall
if len(completion.Choices) > 0 && len(completion.Choices[0].Message.ToolCalls) > 0 {
for _, call := range completion.Choices[0].Message.ToolCalls {
toolCall := message.ToolCall{
ID: call.ID,
Name: call.Function.Name,
Input: call.Function.Arguments,
Type: "function",
Finished: true,
}
toolCalls = append(toolCalls, toolCall)
}
}
return toolCalls
}
func (o *openaiClient) usage(completion openai.ChatCompletion) TokenUsage {
cachedTokens := completion.Usage.PromptTokensDetails.CachedTokens
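// PromptTokens includes any cached prompt tokens, so subtract them to report cache reads separately from fresh input.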
inputTokens := completion.Usage.PromptTokens - cachedTokens
return TokenUsage{
InputTokens: inputTokens,
OutputTokens: completion.Usage.CompletionTokens,
CacheCreationTokens: 0, // OpenAI doesn't provide this directly
CacheReadTokens: cachedTokens,
}
}
func WithOpenAIBaseURL(baseURL string) OpenAIOption {
return func(options *openaiOptions) {
options.baseURL = baseURL
}
}
func WithOpenAIDisableCache() OpenAIOption {
return func(options *openaiOptions) {
options.disableCache = true
}
}
func WithReasoningEffort(effort string) OpenAIOption {
return func(options *openaiOptions) {
defaultReasoningEffort := "medium"
switch effort {
case "low", "medium", "high":
defaultReasoningEffort = effort
default:
logging.Warn("Invalid reasoning effort, using default: medium")
}
options.reasoningEffort = defaultReasoningEffort
}
}

View File

@@ -1,188 +0,0 @@
package provider
import (
"context"
"fmt"
"github.com/kujtimiihoxha/opencode/internal/llm/models"
"github.com/kujtimiihoxha/opencode/internal/llm/tools"
"github.com/kujtimiihoxha/opencode/internal/message"
)
type EventType string
const maxRetries = 8
const (
EventContentStart EventType = "content_start"
EventToolUseStart EventType = "tool_use_start"
EventToolUseDelta EventType = "tool_use_delta"
EventToolUseStop EventType = "tool_use_stop"
EventContentDelta EventType = "content_delta"
EventThinkingDelta EventType = "thinking_delta"
EventContentStop EventType = "content_stop"
EventComplete EventType = "complete"
EventError EventType = "error"
EventWarning EventType = "warning"
)
type TokenUsage struct {
InputTokens int64
OutputTokens int64
CacheCreationTokens int64
CacheReadTokens int64
}
type ProviderResponse struct {
Content string
ToolCalls []message.ToolCall
Usage TokenUsage
FinishReason message.FinishReason
}
type ProviderEvent struct {
Type EventType
Content string
Thinking string
Response *ProviderResponse
ToolCall *message.ToolCall
Error error
}
type Provider interface {
SendMessages(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error)
StreamResponse(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent
Model() models.Model
}
type providerClientOptions struct {
apiKey string
model models.Model
maxTokens int64
systemMessage string
anthropicOptions []AnthropicOption
openaiOptions []OpenAIOption
geminiOptions []GeminiOption
bedrockOptions []BedrockOption
}
type ProviderClientOption func(*providerClientOptions)
type ProviderClient interface {
send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error)
stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent
}
type baseProvider[C ProviderClient] struct {
options providerClientOptions
client C
}
func NewProvider(providerName models.ModelProvider, opts ...ProviderClientOption) (Provider, error) {
clientOptions := providerClientOptions{}
for _, o := range opts {
o(&clientOptions)
}
switch providerName {
case models.ProviderAnthropic:
return &baseProvider[AnthropicClient]{
options: clientOptions,
client: newAnthropicClient(clientOptions),
}, nil
case models.ProviderOpenAI:
return &baseProvider[OpenAIClient]{
options: clientOptions,
client: newOpenAIClient(clientOptions),
}, nil
case models.ProviderGemini:
return &baseProvider[GeminiClient]{
options: clientOptions,
client: newGeminiClient(clientOptions),
}, nil
case models.ProviderBedrock:
return &baseProvider[BedrockClient]{
options: clientOptions,
client: newBedrockClient(clientOptions),
}, nil
case models.ProviderMock:
// TODO: implement mock client for test
panic("not implemented")
}
return nil, fmt.Errorf("provider not supported: %s", providerName)
}
func (p *baseProvider[C]) cleanMessages(messages []message.Message) (cleaned []message.Message) {
for _, msg := range messages {
// The message has no content
if len(msg.Parts) == 0 {
continue
}
cleaned = append(cleaned, msg)
}
return
}
func (p *baseProvider[C]) SendMessages(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error) {
messages = p.cleanMessages(messages)
return p.client.send(ctx, messages, tools)
}
func (p *baseProvider[C]) Model() models.Model {
return p.options.model
}
func (p *baseProvider[C]) StreamResponse(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
messages = p.cleanMessages(messages)
return p.client.stream(ctx, messages, tools)
}
func WithAPIKey(apiKey string) ProviderClientOption {
return func(options *providerClientOptions) {
options.apiKey = apiKey
}
}
func WithModel(model models.Model) ProviderClientOption {
return func(options *providerClientOptions) {
options.model = model
}
}
func WithMaxTokens(maxTokens int64) ProviderClientOption {
return func(options *providerClientOptions) {
options.maxTokens = maxTokens
}
}
func WithSystemMessage(systemMessage string) ProviderClientOption {
return func(options *providerClientOptions) {
options.systemMessage = systemMessage
}
}
func WithAnthropicOptions(anthropicOptions ...AnthropicOption) ProviderClientOption {
return func(options *providerClientOptions) {
options.anthropicOptions = anthropicOptions
}
}
func WithOpenAIOptions(openaiOptions ...OpenAIOption) ProviderClientOption {
return func(options *providerClientOptions) {
options.openaiOptions = openaiOptions
}
}
func WithGeminiOptions(geminiOptions ...GeminiOption) ProviderClientOption {
return func(options *providerClientOptions) {
options.geminiOptions = geminiOptions
}
}
func WithBedrockOptions(bedrockOptions ...BedrockOption) ProviderClientOption {
return func(options *providerClientOptions) {
options.bedrockOptions = bedrockOptions
}
}
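To show how callers are expected to consume this interface, here is a minimal sketch (illustrative only, imports omitted) that drains the StreamResponse channel and reacts to the event types defined above:
func printStream(ctx context.Context, p Provider, msgs []message.Message, toolset []tools.BaseTool) error {
for event := range p.StreamResponse(ctx, msgs, toolset) {
switch event.Type {
case EventContentDelta:
fmt.Print(event.Content)
case EventToolUseStart:
fmt.Printf("\n[tool call: %s]\n", event.ToolCall.Name)
case EventError:
return event.Error
case EventComplete:
fmt.Printf("\n[finish: %v, %d output tokens]\n", event.Response.FinishReason, event.Response.Usage.OutputTokens)
}
}
return nil
}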

View File

@@ -1,347 +0,0 @@
package tools
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/llm/tools/shell"
"github.com/kujtimiihoxha/opencode/internal/permission"
)
type BashParams struct {
Command string `json:"command"`
Timeout int `json:"timeout"`
}
type BashPermissionsParams struct {
Command string `json:"command"`
Timeout int `json:"timeout"`
}
type BashResponseMetadata struct {
StartTime int64 `json:"start_time"`
EndTime int64 `json:"end_time"`
}
type bashTool struct {
permissions permission.Service
}
const (
BashToolName = "bash"
DefaultTimeout = 1 * 60 * 1000 // 1 minute in milliseconds
MaxTimeout = 10 * 60 * 1000 // 10 minutes in milliseconds
MaxOutputLength = 30000
)
var bannedCommands = []string{
"alias", "curl", "curlie", "wget", "axel", "aria2c",
"nc", "telnet", "lynx", "w3m", "links", "httpie", "xh",
"http-prompt", "chrome", "firefox", "safari",
}
var safeReadOnlyCommands = []string{
"ls", "echo", "pwd", "date", "cal", "uptime", "whoami", "id", "groups", "env", "printenv", "set", "unset", "which", "type", "whereis",
"whatis", "uname", "hostname", "df", "du", "free", "top", "ps", "kill", "killall", "nice", "nohup", "time", "timeout",
"git status", "git log", "git diff", "git show", "git branch", "git tag", "git remote", "git ls-files", "git ls-remote",
"git rev-parse", "git config --get", "git config --list", "git describe", "git blame", "git grep", "git shortlog",
"go version", "go help", "go list", "go env", "go doc", "go vet", "go fmt", "go mod", "go test", "go build", "go run", "go install", "go clean",
}
func bashDescription() string {
bannedCommandsStr := strings.Join(bannedCommands, ", ")
return fmt.Sprintf(`Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
Before executing the command, please follow these steps:
1. Directory Verification:
- If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location
- For example, before running "mkdir foo/bar", first use LS to check that "foo" exists and is the intended parent directory
2. Security Check:
- For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User.
- Verify that the command is not one of the banned commands: %s.
3. Command Execution:
- After ensuring proper quoting, execute the command.
- Capture the output of the command.
4. Output Processing:
- If the output exceeds %d characters, output will be truncated before being returned to you.
- Prepare the output for display to the user.
5. Return Result:
- Provide the processed output of the command.
- If any errors occurred during execution, include those in the output.
Usage notes:
- The command argument is required.
- You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). If not specified, commands will time out after the default of 1 minute.
- VERY IMPORTANT: You MUST avoid using search commands like 'find' and 'grep'. Instead use Grep, Glob, or Agent tools to search. You MUST avoid read tools like 'cat', 'head', 'tail', and 'ls', and use FileRead and LS tools to read files.
- When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings).
- IMPORTANT: All commands share the same shell session. Shell state (environment variables, virtual environments, current directory, etc.) persist between commands. For example, if you set an environment variable as part of a command, the environment variable will persist for subsequent commands.
- Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of 'cd'. You may use 'cd' if the User explicitly requests it.
<good-example>
pytest /foo/bar/tests
</good-example>
<bad-example>
cd /foo/bar && pytest tests
</bad-example>
# Committing changes with git
When the user asks you to create a new git commit, follow these steps carefully:
1. Start with a single message that contains exactly three tool_use blocks that do the following (it is VERY IMPORTANT that you send these tool_use blocks in a single message, otherwise it will feel slow to the user!):
- Run a git status command to see all untracked files.
- Run a git diff command to see both staged and unstaged changes that will be committed.
- Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.
2. Use the git context at the start of this conversation to determine which files are relevant to your commit. Add relevant untracked files to the staging area. Do not commit files that were already modified at the start of this conversation, if they are not relevant to your commit.
3. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in <commit_analysis> tags:
<commit_analysis>
- List the files that have been changed or added
- Summarize the nature of the changes (e.g. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
- Brainstorm the purpose or motivation behind these changes
- Do not use tools to explore code, beyond what is available in the git context
- Assess the impact of these changes on the overall project
- Check for any sensitive information that shouldn't be committed
- Draft a concise (1-2 sentences) commit message that focuses on the "why" rather than the "what"
- Ensure your language is clear, concise, and to the point
- Ensure the message accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
- Ensure the message is not generic (avoid words like "Update" or "Fix" without context)
- Review the draft message to ensure it accurately reflects the changes and their purpose
</commit_analysis>
4. Create the commit with a message ending with:
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:
<example>
git commit -m "$(cat <<'EOF'
Commit message here.
🤖 Generated with opencode
Co-Authored-By: opencode <noreply@opencode.ai>
EOF
)"
</example>
5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.
6. Finally, run git status to make sure the commit succeeded.
Important notes:
- When possible, combine the "git add" and "git commit" commands into a single "git commit -am" command, to speed things up
- However, be careful not to stage files (e.g. with 'git add .') that aren't part of the change; the user may have untracked files they want to keep around but not commit.
- NEVER update the git config
- DO NOT push to the remote repository
- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit
- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.
- Return an empty response - the user will see the git output directly
# Creating pull requests
Use the gh command via the Bash tool for ALL GitHub-related tasks, including working with issues, pull requests, checks, and releases. If given a GitHub URL, use the gh command to get the information needed.
IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:
1. Understand the current state of the branch. Remember to send a single message that contains multiple tool_use blocks (it is VERY IMPORTANT that you do this in a single message, otherwise it will feel slow to the user!):
- Run a git status command to see all untracked files.
- Run a git diff command to see both staged and unstaged changes that will be committed.
- Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote
- Run a git log command and 'git diff main...HEAD' to understand the full commit history for the current branch (from the time it diverged from the 'main' branch.)
2. Create new branch if needed
3. Commit changes if needed
4. Push to remote with -u flag if needed
5. Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (not just the latest commit, but all commits that will be included in the pull request!), and draft a pull request summary. Wrap your analysis process in <pr_analysis> tags:
<pr_analysis>
- List the commits since diverging from the main branch
- Summarize the nature of the changes (e.g. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
- Brainstorm the purpose or motivation behind these changes
- Assess the impact of these changes on the overall project
- Do not use tools to explore code, beyond what is available in the git context
- Check for any sensitive information that shouldn't be committed
- Draft a concise (1-2 bullet points) pull request summary that focuses on the "why" rather than the "what"
- Ensure the summary accurately reflects all changes since diverging from the main branch
- Ensure your language is clear, concise, and to the point
- Ensure the summary accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
- Ensure the summary is not generic (avoid words like "Update" or "Fix" without context)
- Review the draft summary to ensure it accurately reflects the changes and their purpose
</pr_analysis>
6. Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.
<example>
gh pr create --title "the pr title" --body "$(cat <<'EOF'
## Summary
<1-3 bullet points>
## Test plan
[Checklist of TODOs for testing the pull request...]
🤖 Generated with opencode
EOF
)"
</example>
Important:
- Return an empty response - the user will see the gh output directly
- Never update git config`, bannedCommandsStr, MaxOutputLength)
}
func NewBashTool(permission permission.Service) BaseTool {
return &bashTool{
permissions: permission,
}
}
func (b *bashTool) Info() ToolInfo {
return ToolInfo{
Name: BashToolName,
Description: bashDescription(),
Parameters: map[string]any{
"command": map[string]any{
"type": "string",
"description": "The command to execute",
},
"timeout": map[string]any{
"type": "number",
"description": "Optional timeout in milliseconds (max 600000)",
},
},
Required: []string{"command"},
}
}
func (b *bashTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params BashParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse("invalid parameters"), nil
}
if params.Timeout > MaxTimeout {
params.Timeout = MaxTimeout
} else if params.Timeout <= 0 {
params.Timeout = DefaultTimeout
}
if params.Command == "" {
return NewTextErrorResponse("missing command"), nil
}
baseCmd := strings.Fields(params.Command)[0]
for _, banned := range bannedCommands {
if strings.EqualFold(baseCmd, banned) {
return NewTextErrorResponse(fmt.Sprintf("command '%s' is not allowed", baseCmd)), nil
}
}
isSafeReadOnly := false
cmdLower := strings.ToLower(params.Command)
for _, safe := range safeReadOnlyCommands {
if strings.HasPrefix(cmdLower, strings.ToLower(safe)) {
if len(cmdLower) == len(safe) || cmdLower[len(safe)] == ' ' || cmdLower[len(safe)] == '-' {
isSafeReadOnly = true
break
}
}
}
sessionID, messageID := GetContextValues(ctx)
if sessionID == "" || messageID == "" {
return ToolResponse{}, fmt.Errorf("session ID and message ID are required for executing a command")
}
if !isSafeReadOnly {
p := b.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: config.WorkingDirectory(),
ToolName: BashToolName,
Action: "execute",
Description: fmt.Sprintf("Execute command: %s", params.Command),
Params: BashPermissionsParams{
Command: params.Command,
},
},
)
if !p {
return ToolResponse{}, permission.ErrorPermissionDenied
}
}
startTime := time.Now()
shell := shell.GetPersistentShell(config.WorkingDirectory())
stdout, stderr, exitCode, interrupted, err := shell.Exec(ctx, params.Command, params.Timeout)
if err != nil {
return ToolResponse{}, fmt.Errorf("error executing command: %w", err)
}
stdout = truncateOutput(stdout)
stderr = truncateOutput(stderr)
errorMessage := stderr
if interrupted {
if errorMessage != "" {
errorMessage += "\n"
}
errorMessage += "Command was aborted before completion"
} else if exitCode != 0 {
if errorMessage != "" {
errorMessage += "\n"
}
errorMessage += fmt.Sprintf("Exit code %d", exitCode)
}
hasBothOutputs := stdout != "" && stderr != ""
if hasBothOutputs {
stdout += "\n"
}
if errorMessage != "" {
stdout += "\n" + errorMessage
}
metadata := BashResponseMetadata{
StartTime: startTime.UnixMilli(),
EndTime: time.Now().UnixMilli(),
}
if stdout == "" {
return WithResponseMetadata(NewTextResponse("no output"), metadata), nil
}
return WithResponseMetadata(NewTextResponse(stdout), metadata), nil
}
func truncateOutput(content string) string {
if len(content) <= MaxOutputLength {
return content
}
halfLength := MaxOutputLength / 2
start := content[:halfLength]
end := content[len(content)-halfLength:]
truncatedLinesCount := countLines(content[halfLength : len(content)-halfLength])
return fmt.Sprintf("%s\n\n... [%d lines truncated] ...\n\n%s", start, truncatedLinesCount, end)
}
func countLines(s string) int {
if s == "" {
return 0
}
return len(strings.Split(s, "\n"))
}
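// Hedged usage sketch, not part of the original file: exercises the pure helpers
// above. truncateOutput keeps the first and last MaxOutputLength/2 bytes of an
// oversized string and reports how many lines were dropped in between; smaller
// strings are returned unchanged. The function name below is illustrative.
func exampleTruncateOutput() {
	long := strings.Repeat("line of output\n", 100000)
	short := truncateOutput(long)
	fmt.Println(len(short) <= len(long)) // true; output only shrinks once it exceeds MaxOutputLength
	fmt.Println(countLines("a\nb\nc"))   // 3
}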

View File

@@ -1,295 +0,0 @@
package tools
import (
"context"
"encoding/json"
"fmt"
"maps"
"sort"
"strings"
"time"
"github.com/kujtimiihoxha/opencode/internal/lsp"
"github.com/kujtimiihoxha/opencode/internal/lsp/protocol"
)
type DiagnosticsParams struct {
FilePath string `json:"file_path"`
}
type diagnosticsTool struct {
lspClients map[string]*lsp.Client
}
const (
DiagnosticsToolName = "diagnostics"
diagnosticsDescription = `Get diagnostics for a file and/or project.
WHEN TO USE THIS TOOL:
- Use when you need to check for errors or warnings in your code
- Helpful for debugging and ensuring code quality
- Good for getting a quick overview of issues in a file or project
HOW TO USE:
- Provide a path to a file to get diagnostics for that file
- Leave the path empty to get diagnostics for the entire project
- Results are displayed in a structured format with severity levels
FEATURES:
- Displays errors, warnings, and hints
- Groups diagnostics by severity
- Provides detailed information about each diagnostic
LIMITATIONS:
- Results are limited to the diagnostics provided by the LSP clients
- May not cover all possible issues in the code
- Does not provide suggestions for fixing issues
TIPS:
- Use in conjunction with other tools for a comprehensive code review
- Combine with the LSP client for real-time diagnostics
`
)
func NewDiagnosticsTool(lspClients map[string]*lsp.Client) BaseTool {
return &diagnosticsTool{
lspClients,
}
}
func (b *diagnosticsTool) Info() ToolInfo {
return ToolInfo{
Name: DiagnosticsToolName,
Description: diagnosticsDescription,
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
"description": "The path to the file to get diagnostics for (leave w empty for project diagnostics)",
},
},
Required: []string{},
}
}
func (b *diagnosticsTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params DiagnosticsParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
lsps := b.lspClients
if len(lsps) == 0 {
return NewTextErrorResponse("no LSP clients available"), nil
}
if params.FilePath != "" {
notifyLspOpenFile(ctx, params.FilePath, lsps)
waitForLspDiagnostics(ctx, params.FilePath, lsps)
}
output := getDiagnostics(params.FilePath, lsps)
return NewTextResponse(output), nil
}
func notifyLspOpenFile(ctx context.Context, filePath string, lsps map[string]*lsp.Client) {
for _, client := range lsps {
err := client.OpenFile(ctx, filePath)
if err != nil {
continue
}
}
}
func waitForLspDiagnostics(ctx context.Context, filePath string, lsps map[string]*lsp.Client) {
if len(lsps) == 0 {
return
}
diagChan := make(chan struct{}, 1)
for _, client := range lsps {
originalDiags := make(map[protocol.DocumentUri][]protocol.Diagnostic)
maps.Copy(originalDiags, client.GetDiagnostics())
handler := func(params json.RawMessage) {
lsp.HandleDiagnostics(client, params)
var diagParams protocol.PublishDiagnosticsParams
if err := json.Unmarshal(params, &diagParams); err != nil {
return
}
if diagParams.URI.Path() == filePath || hasDiagnosticsChanged(client.GetDiagnostics(), originalDiags) {
select {
case diagChan <- struct{}{}:
default:
}
}
}
client.RegisterNotificationHandler("textDocument/publishDiagnostics", handler)
if client.IsFileOpen(filePath) {
err := client.NotifyChange(ctx, filePath)
if err != nil {
continue
}
} else {
err := client.OpenFile(ctx, filePath)
if err != nil {
continue
}
}
}
select {
case <-diagChan:
case <-time.After(5 * time.Second):
case <-ctx.Done():
}
}
func hasDiagnosticsChanged(current, original map[protocol.DocumentUri][]protocol.Diagnostic) bool {
for uri, diags := range current {
origDiags, exists := original[uri]
if !exists || len(diags) != len(origDiags) {
return true
}
}
return false
}
func getDiagnostics(filePath string, lsps map[string]*lsp.Client) string {
fileDiagnostics := []string{}
projectDiagnostics := []string{}
formatDiagnostic := func(pth string, diagnostic protocol.Diagnostic, source string) string {
severity := "Info"
switch diagnostic.Severity {
case protocol.SeverityError:
severity = "Error"
case protocol.SeverityWarning:
severity = "Warn"
case protocol.SeverityHint:
severity = "Hint"
}
location := fmt.Sprintf("%s:%d:%d", pth, diagnostic.Range.Start.Line+1, diagnostic.Range.Start.Character+1)
sourceInfo := ""
if diagnostic.Source != "" {
sourceInfo = diagnostic.Source
} else if source != "" {
sourceInfo = source
}
codeInfo := ""
if diagnostic.Code != nil {
codeInfo = fmt.Sprintf("[%v]", diagnostic.Code)
}
tagsInfo := ""
if len(diagnostic.Tags) > 0 {
tags := []string{}
for _, tag := range diagnostic.Tags {
switch tag {
case protocol.Unnecessary:
tags = append(tags, "unnecessary")
case protocol.Deprecated:
tags = append(tags, "deprecated")
}
}
if len(tags) > 0 {
tagsInfo = fmt.Sprintf(" (%s)", strings.Join(tags, ", "))
}
}
return fmt.Sprintf("%s: %s [%s]%s%s %s",
severity,
location,
sourceInfo,
codeInfo,
tagsInfo,
diagnostic.Message)
}
for lspName, client := range lsps {
diagnostics := client.GetDiagnostics()
if len(diagnostics) > 0 {
for location, diags := range diagnostics {
isCurrentFile := location.Path() == filePath
for _, diag := range diags {
formattedDiag := formatDiagnostic(location.Path(), diag, lspName)
if isCurrentFile {
fileDiagnostics = append(fileDiagnostics, formattedDiag)
} else {
projectDiagnostics = append(projectDiagnostics, formattedDiag)
}
}
}
}
}
sort.Slice(fileDiagnostics, func(i, j int) bool {
iIsError := strings.HasPrefix(fileDiagnostics[i], "Error")
jIsError := strings.HasPrefix(fileDiagnostics[j], "Error")
if iIsError != jIsError {
return iIsError // Errors come first
}
return fileDiagnostics[i] < fileDiagnostics[j] // Then alphabetically
})
sort.Slice(projectDiagnostics, func(i, j int) bool {
iIsError := strings.HasPrefix(projectDiagnostics[i], "Error")
jIsError := strings.HasPrefix(projectDiagnostics[j], "Error")
if iIsError != jIsError {
return iIsError
}
return projectDiagnostics[i] < projectDiagnostics[j]
})
output := ""
if len(fileDiagnostics) > 0 {
output += "\n<file_diagnostics>\n"
if len(fileDiagnostics) > 10 {
output += strings.Join(fileDiagnostics[:10], "\n")
output += fmt.Sprintf("\n... and %d more diagnostics", len(fileDiagnostics)-10)
} else {
output += strings.Join(fileDiagnostics, "\n")
}
output += "\n</file_diagnostics>\n"
}
if len(projectDiagnostics) > 0 {
output += "\n<project_diagnostics>\n"
if len(projectDiagnostics) > 10 {
output += strings.Join(projectDiagnostics[:10], "\n")
output += fmt.Sprintf("\n... and %d more diagnostics", len(projectDiagnostics)-10)
} else {
output += strings.Join(projectDiagnostics, "\n")
}
output += "\n</project_diagnostics>\n"
}
if len(fileDiagnostics) > 0 || len(projectDiagnostics) > 0 {
fileErrors := countSeverity(fileDiagnostics, "Error")
fileWarnings := countSeverity(fileDiagnostics, "Warn")
projectErrors := countSeverity(projectDiagnostics, "Error")
projectWarnings := countSeverity(projectDiagnostics, "Warn")
output += "\n<diagnostic_summary>\n"
output += fmt.Sprintf("Current file: %d errors, %d warnings\n", fileErrors, fileWarnings)
output += fmt.Sprintf("Project: %d errors, %d warnings\n", projectErrors, projectWarnings)
output += "</diagnostic_summary>\n"
}
return output
}
func countSeverity(diagnostics []string, severity string) int {
count := 0
for _, diag := range diagnostics {
if strings.HasPrefix(diag, severity) {
count++
}
}
return count
}
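// Hedged usage sketch, not part of the original file: with an empty client map the
// tool short-circuits before touching any LSP server, so this is a minimal way to
// see Run's validation behavior. The function name and file path are illustrative.
func exampleDiagnosticsWithoutClients(ctx context.Context) {
	tool := NewDiagnosticsTool(map[string]*lsp.Client{})
	input, _ := json.Marshal(DiagnosticsParams{FilePath: "internal/llm/tools/bash.go"})
	resp, err := tool.Run(ctx, ToolCall{Name: DiagnosticsToolName, Input: string(input)})
	// Per Run above, resp carries the "no LSP clients available" error text.
	fmt.Println(resp.Content, err)
}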

View File

@@ -1,489 +0,0 @@
package tools
import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/diff"
"github.com/kujtimiihoxha/opencode/internal/history"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/lsp"
"github.com/kujtimiihoxha/opencode/internal/permission"
)
type EditParams struct {
FilePath string `json:"file_path"`
OldString string `json:"old_string"`
NewString string `json:"new_string"`
}
type EditPermissionsParams struct {
FilePath string `json:"file_path"`
Diff string `json:"diff"`
}
type EditResponseMetadata struct {
Diff string `json:"diff"`
Additions int `json:"additions"`
Removals int `json:"removals"`
}
type editTool struct {
lspClients map[string]*lsp.Client
permissions permission.Service
files history.Service
}
const (
EditToolName = "edit"
editDescription = `Edits files by replacing text, creating new files, or deleting content. For moving or renaming files, use the Bash tool with the 'mv' command instead. For larger file edits, use the FileWrite tool to overwrite files.
Before using this tool:
1. Use the FileRead tool to understand the file's contents and context
2. Verify the directory path is correct (only applicable when creating new files):
- Use the LS tool to verify the parent directory exists and is the correct location
To make a file edit, provide the following:
1. file_path: The absolute path to the file to modify (must be absolute, not relative)
2. old_string: The text to replace (must be unique within the file, and must match the file contents exactly, including all whitespace and indentation)
3. new_string: The edited text to replace the old_string
Special cases:
- To create a new file: provide file_path and new_string, leave old_string empty
- To delete content: provide file_path and old_string, leave new_string empty
The tool will replace ONE occurrence of old_string with new_string in the specified file.
CRITICAL REQUIREMENTS FOR USING THIS TOOL:
1. UNIQUENESS: The old_string MUST uniquely identify the specific instance you want to change. This means:
- Include AT LEAST 3-5 lines of context BEFORE the change point
- Include AT LEAST 3-5 lines of context AFTER the change point
- Include all whitespace, indentation, and surrounding code exactly as it appears in the file
2. SINGLE INSTANCE: This tool can only change ONE instance at a time. If you need to change multiple instances:
- Make separate calls to this tool for each instance
- Each call must uniquely identify its specific instance using extensive context
3. VERIFICATION: Before using this tool:
- Check how many instances of the target text exist in the file
- If multiple instances exist, gather enough context to uniquely identify each one
- Plan separate tool calls for each instance
WARNING: If you do not follow these requirements:
- The tool will fail if old_string matches multiple locations
- The tool will fail if old_string doesn't match exactly (including whitespace)
- You may change the wrong instance if you don't include enough context
When making edits:
- Ensure the edit results in idiomatic, correct code
- Do not leave the code in a broken state
- Always use absolute file paths (starting with /)
Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.`
)
func NewEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service) BaseTool {
return &editTool{
lspClients: lspClients,
permissions: permissions,
files: files,
}
}
func (e *editTool) Info() ToolInfo {
return ToolInfo{
Name: EditToolName,
Description: editDescription,
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
"description": "The absolute path to the file to modify",
},
"old_string": map[string]any{
"type": "string",
"description": "The text to replace",
},
"new_string": map[string]any{
"type": "string",
"description": "The text to replace it with",
},
},
Required: []string{"file_path", "old_string", "new_string"},
}
}
func (e *editTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params EditParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse("invalid parameters"), nil
}
if params.FilePath == "" {
return NewTextErrorResponse("file_path is required"), nil
}
if !filepath.IsAbs(params.FilePath) {
wd := config.WorkingDirectory()
params.FilePath = filepath.Join(wd, params.FilePath)
}
var response ToolResponse
var err error
if params.OldString == "" {
// Empty old_string means "create a new file" per the tool description
response, err = e.createNewFile(ctx, params.FilePath, params.NewString)
if err != nil {
return response, err
}
} else if params.NewString == "" {
// Empty new_string means "delete the matched content"
response, err = e.deleteContent(ctx, params.FilePath, params.OldString)
if err != nil {
return response, err
}
} else {
response, err = e.replaceContent(ctx, params.FilePath, params.OldString, params.NewString)
if err != nil {
return response, err
}
}
if response.IsError {
// Return early if there was an error during content replacement
// This prevents unnecessary LSP diagnostics processing
return response, nil
}
waitForLspDiagnostics(ctx, params.FilePath, e.lspClients)
text := fmt.Sprintf("<result>\n%s\n</result>\n", response.Content)
text += getDiagnostics(params.FilePath, e.lspClients)
response.Content = text
return response, nil
}
func (e *editTool) createNewFile(ctx context.Context, filePath, content string) (ToolResponse, error) {
fileInfo, err := os.Stat(filePath)
if err == nil {
if fileInfo.IsDir() {
return NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", filePath)), nil
}
return NewTextErrorResponse(fmt.Sprintf("file already exists: %s", filePath)), nil
} else if !os.IsNotExist(err) {
return ToolResponse{}, fmt.Errorf("failed to access file: %w", err)
}
dir := filepath.Dir(filePath)
if err = os.MkdirAll(dir, 0o755); err != nil {
return ToolResponse{}, fmt.Errorf("failed to create parent directories: %w", err)
}
sessionID, messageID := GetContextValues(ctx)
if sessionID == "" || messageID == "" {
return ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file")
}
diff, additions, removals := diff.GenerateDiff(
"",
content,
filePath,
)
rootDir := config.WorkingDirectory()
permissionPath := filepath.Dir(filePath)
if strings.HasPrefix(filePath, rootDir) {
permissionPath = rootDir
}
p := e.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: permissionPath,
ToolName: EditToolName,
Action: "write",
Description: fmt.Sprintf("Create file %s", filePath),
Params: EditPermissionsParams{
FilePath: filePath,
Diff: diff,
},
},
)
if !p {
return ToolResponse{}, permission.ErrorPermissionDenied
}
err = os.WriteFile(filePath, []byte(content), 0o644)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
}
// The file is new, so create a fresh history record for it
_, err = e.files.Create(ctx, sessionID, filePath, "")
if err != nil {
// Creating the history record failed; surface the error to the caller
return ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
}
// Add the new content to the file history
_, err = e.files.CreateVersion(ctx, sessionID, filePath, content)
if err != nil {
// Log error but don't fail the operation
logging.Debug("Error creating file history version", "error", err)
}
recordFileWrite(filePath)
recordFileRead(filePath)
return WithResponseMetadata(
NewTextResponse("File created: "+filePath),
EditResponseMetadata{
Diff: diff,
Additions: additions,
Removals: removals,
},
), nil
}
func (e *editTool) deleteContent(ctx context.Context, filePath, oldString string) (ToolResponse, error) {
fileInfo, err := os.Stat(filePath)
if err != nil {
if os.IsNotExist(err) {
return NewTextErrorResponse(fmt.Sprintf("file not found: %s", filePath)), nil
}
return ToolResponse{}, fmt.Errorf("failed to access file: %w", err)
}
if fileInfo.IsDir() {
return NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", filePath)), nil
}
if getLastReadTime(filePath).IsZero() {
return NewTextErrorResponse("you must read the file before editing it. Use the View tool first"), nil
}
modTime := fileInfo.ModTime()
lastRead := getLastReadTime(filePath)
if modTime.After(lastRead) {
return NewTextErrorResponse(
fmt.Sprintf("file %s has been modified since it was last read (mod time: %s, last read: %s)",
filePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339),
)), nil
}
content, err := os.ReadFile(filePath)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to read file: %w", err)
}
oldContent := string(content)
index := strings.Index(oldContent, oldString)
if index == -1 {
return NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil
}
lastIndex := strings.LastIndex(oldContent, oldString)
if index != lastIndex {
return NewTextErrorResponse("old_string appears multiple times in the file. Please provide more context to ensure a unique match"), nil
}
newContent := oldContent[:index] + oldContent[index+len(oldString):]
sessionID, messageID := GetContextValues(ctx)
if sessionID == "" || messageID == "" {
return ToolResponse{}, fmt.Errorf("session ID and message ID are required for editing a file")
}
diff, additions, removals := diff.GenerateDiff(
oldContent,
newContent,
filePath,
)
rootDir := config.WorkingDirectory()
permissionPath := filepath.Dir(filePath)
if strings.HasPrefix(filePath, rootDir) {
permissionPath = rootDir
}
p := e.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: permissionPath,
ToolName: EditToolName,
Action: "write",
Description: fmt.Sprintf("Delete content from file %s", filePath),
Params: EditPermissionsParams{
FilePath: filePath,
Diff: diff,
},
},
)
if !p {
return ToolResponse{}, permission.ErrorPermissionDenied
}
err = os.WriteFile(filePath, []byte(newContent), 0o644)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
}
// Check if file exists in history
file, err := e.files.GetByPathAndSession(ctx, filePath, sessionID)
if err != nil {
_, err = e.files.Create(ctx, sessionID, filePath, oldContent)
if err != nil {
// Creating the history record failed; surface the error to the caller
return ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
}
}
if file.Content != oldContent {
// The user manually changed the content; store an intermediate version
_, err = e.files.CreateVersion(ctx, sessionID, filePath, oldContent)
if err != nil {
logging.Debug("Error creating file history version", "error", err)
}
}
// Store the new version (the file content after the deletion)
_, err = e.files.CreateVersion(ctx, sessionID, filePath, newContent)
if err != nil {
logging.Debug("Error creating file history version", "error", err)
}
recordFileWrite(filePath)
recordFileRead(filePath)
return WithResponseMetadata(
NewTextResponse("Content deleted from file: "+filePath),
EditResponseMetadata{
Diff: diff,
Additions: additions,
Removals: removals,
},
), nil
}
func (e *editTool) replaceContent(ctx context.Context, filePath, oldString, newString string) (ToolResponse, error) {
fileInfo, err := os.Stat(filePath)
if err != nil {
if os.IsNotExist(err) {
return NewTextErrorResponse(fmt.Sprintf("file not found: %s", filePath)), nil
}
return ToolResponse{}, fmt.Errorf("failed to access file: %w", err)
}
if fileInfo.IsDir() {
return NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", filePath)), nil
}
if getLastReadTime(filePath).IsZero() {
return NewTextErrorResponse("you must read the file before editing it. Use the View tool first"), nil
}
modTime := fileInfo.ModTime()
lastRead := getLastReadTime(filePath)
if modTime.After(lastRead) {
return NewTextErrorResponse(
fmt.Sprintf("file %s has been modified since it was last read (mod time: %s, last read: %s)",
filePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339),
)), nil
}
content, err := os.ReadFile(filePath)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to read file: %w", err)
}
oldContent := string(content)
index := strings.Index(oldContent, oldString)
if index == -1 {
return NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil
}
lastIndex := strings.LastIndex(oldContent, oldString)
if index != lastIndex {
return NewTextErrorResponse("old_string appears multiple times in the file. Please provide more context to ensure a unique match"), nil
}
newContent := oldContent[:index] + newString + oldContent[index+len(oldString):]
if oldContent == newContent {
return NewTextErrorResponse("new content is the same as old content. No changes made."), nil
}
sessionID, messageID := GetContextValues(ctx)
if sessionID == "" || messageID == "" {
return ToolResponse{}, fmt.Errorf("session ID and message ID are required for editing a file")
}
diff, additions, removals := diff.GenerateDiff(
oldContent,
newContent,
filePath,
)
rootDir := config.WorkingDirectory()
permissionPath := filepath.Dir(filePath)
if strings.HasPrefix(filePath, rootDir) {
permissionPath = rootDir
}
p := e.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: permissionPath,
ToolName: EditToolName,
Action: "write",
Description: fmt.Sprintf("Replace content in file %s", filePath),
Params: EditPermissionsParams{
FilePath: filePath,
Diff: diff,
},
},
)
if !p {
return ToolResponse{}, permission.ErrorPermissionDenied
}
err = os.WriteFile(filePath, []byte(newContent), 0o644)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
}
// Check if file exists in history
file, err := e.files.GetByPathAndSession(ctx, filePath, sessionID)
if err != nil {
_, err = e.files.Create(ctx, sessionID, filePath, oldContent)
if err != nil {
// Creating the history record failed; surface the error to the caller
return ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
}
}
if file.Content != oldContent {
// The user manually changed the content; store an intermediate version
_, err = e.files.CreateVersion(ctx, sessionID, filePath, oldContent)
if err != nil {
logging.Debug("Error creating file history version", "error", err)
}
}
// Store the new version
_, err = e.files.CreateVersion(ctx, sessionID, filePath, newContent)
if err != nil {
logging.Debug("Error creating file history version", "error", err)
}
recordFileWrite(filePath)
recordFileRead(filePath)
return WithResponseMetadata(
NewTextResponse("Content replaced in file: "+filePath),
EditResponseMetadata{
Diff: diff,
Additions: additions,
Removals: removals,
}), nil
}
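// Hedged sketch, not part of the original file: how the three edit modes map onto
// EditParams per the description above — empty old_string creates a file, empty
// new_string deletes the match, and both set replaces one occurrence. The paths and
// strings below are illustrative only.
func exampleEditInputs() []string {
	params := []EditParams{
		{FilePath: "/tmp/example.txt", OldString: "", NewString: "hello\n"},          // create a new file
		{FilePath: "/tmp/example.txt", OldString: "hello\n", NewString: "goodbye\n"}, // replace content
		{FilePath: "/tmp/example.txt", OldString: "goodbye\n", NewString: ""},        // delete content
	}
	inputs := make([]string, 0, len(params))
	for _, p := range params {
		b, _ := json.Marshal(p)
		inputs = append(inputs, string(b)) // each string is a valid ToolCall.Input for the edit tool
	}
	return inputs
}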

View File

@@ -1,227 +0,0 @@
package tools
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"time"
md "github.com/JohannesKaufmann/html-to-markdown"
"github.com/PuerkitoBio/goquery"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/permission"
)
type FetchParams struct {
URL string `json:"url"`
Format string `json:"format"`
Timeout int `json:"timeout,omitempty"`
}
type FetchPermissionsParams struct {
URL string `json:"url"`
Format string `json:"format"`
Timeout int `json:"timeout,omitempty"`
}
type fetchTool struct {
client *http.Client
permissions permission.Service
}
const (
FetchToolName = "fetch"
fetchToolDescription = `Fetches content from a URL and returns it in the specified format.
WHEN TO USE THIS TOOL:
- Use when you need to download content from a URL
- Helpful for retrieving documentation, API responses, or web content
- Useful for getting external information to assist with tasks
HOW TO USE:
- Provide the URL to fetch content from
- Specify the desired output format (text, markdown, or html)
- Optionally set a timeout for the request
FEATURES:
- Supports three output formats: text, markdown, and html
- Automatically handles HTTP redirects
- Sets reasonable timeouts to prevent hanging
- Validates input parameters before making requests
LIMITATIONS:
- Maximum response size is 5MB
- Only supports HTTP and HTTPS protocols
- Cannot handle authentication or cookies
- Some websites may block automated requests
TIPS:
- Use text format for plain text content or simple API responses
- Use markdown format for content that should be rendered with formatting
- Use html format when you need the raw HTML structure
- Set appropriate timeouts for potentially slow websites`
)
func NewFetchTool(permissions permission.Service) BaseTool {
return &fetchTool{
client: &http.Client{
Timeout: 30 * time.Second,
},
permissions: permissions,
}
}
func (t *fetchTool) Info() ToolInfo {
return ToolInfo{
Name: FetchToolName,
Description: fetchToolDescription,
Parameters: map[string]any{
"url": map[string]any{
"type": "string",
"description": "The URL to fetch content from",
},
"format": map[string]any{
"type": "string",
"description": "The format to return the content in (text, markdown, or html)",
"enum": []string{"text", "markdown", "html"},
},
"timeout": map[string]any{
"type": "number",
"description": "Optional timeout in seconds (max 120)",
},
},
Required: []string{"url", "format"},
}
}
func (t *fetchTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params FetchParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse("Failed to parse fetch parameters: " + err.Error()), nil
}
if params.URL == "" {
return NewTextErrorResponse("URL parameter is required"), nil
}
format := strings.ToLower(params.Format)
if format != "text" && format != "markdown" && format != "html" {
return NewTextErrorResponse("Format must be one of: text, markdown, html"), nil
}
if !strings.HasPrefix(params.URL, "http://") && !strings.HasPrefix(params.URL, "https://") {
return NewTextErrorResponse("URL must start with http:// or https://"), nil
}
sessionID, messageID := GetContextValues(ctx)
if sessionID == "" || messageID == "" {
return ToolResponse{}, fmt.Errorf("session ID and message ID are required for fetching content")
}
p := t.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: config.WorkingDirectory(),
ToolName: FetchToolName,
Action: "fetch",
Description: fmt.Sprintf("Fetch content from URL: %s", params.URL),
Params: FetchPermissionsParams(params),
},
)
if !p {
return ToolResponse{}, permission.ErrorPermissionDenied
}
client := t.client
if params.Timeout > 0 {
maxTimeout := 120 // 2 minutes
if params.Timeout > maxTimeout {
params.Timeout = maxTimeout
}
client = &http.Client{
Timeout: time.Duration(params.Timeout) * time.Second,
}
}
req, err := http.NewRequestWithContext(ctx, "GET", params.URL, nil)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to create request: %w", err)
}
req.Header.Set("User-Agent", "opencode/1.0")
resp, err := client.Do(req)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to fetch URL: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d", resp.StatusCode)), nil
}
maxSize := int64(5 * 1024 * 1024) // 5MB
body, err := io.ReadAll(io.LimitReader(resp.Body, maxSize))
if err != nil {
return NewTextErrorResponse("Failed to read response body: " + err.Error()), nil
}
content := string(body)
contentType := resp.Header.Get("Content-Type")
switch format {
case "text":
if strings.Contains(contentType, "text/html") {
text, err := extractTextFromHTML(content)
if err != nil {
return NewTextErrorResponse("Failed to extract text from HTML: " + err.Error()), nil
}
return NewTextResponse(text), nil
}
return NewTextResponse(content), nil
case "markdown":
if strings.Contains(contentType, "text/html") {
markdown, err := convertHTMLToMarkdown(content)
if err != nil {
return NewTextErrorResponse("Failed to convert HTML to Markdown: " + err.Error()), nil
}
return NewTextResponse(markdown), nil
}
return NewTextResponse("```\n" + content + "\n```"), nil
case "html":
return NewTextResponse(content), nil
default:
return NewTextResponse(content), nil
}
}
func extractTextFromHTML(html string) (string, error) {
doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
if err != nil {
return "", err
}
text := doc.Text()
text = strings.Join(strings.Fields(text), " ")
return text, nil
}
func convertHTMLToMarkdown(html string) (string, error) {
converter := md.NewConverter("", true, nil)
markdown, err := converter.ConvertString(html)
if err != nil {
return "", err
}
return markdown, nil
}
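// Hedged sketch, not part of the original file: the two HTML helpers above are pure
// functions, so they can be exercised without a permission service or a network
// request. The HTML snippet is illustrative only.
func exampleHTMLConversion() {
	html := "<html><body><h1>Title</h1><p>Some <b>bold</b> text.</p></body></html>"
	text, err := extractTextFromHTML(html)
	fmt.Println(text, err) // whitespace-normalized plain text of the document
	markdown, err := convertHTMLToMarkdown(html)
	fmt.Println(markdown, err) // markdown produced by the html-to-markdown converter
}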

View File

@@ -1,53 +0,0 @@
package tools
import (
"sync"
"time"
)
// File record to track when files were read/written
type fileRecord struct {
path string
readTime time.Time
writeTime time.Time
}
var (
fileRecords = make(map[string]fileRecord)
fileRecordMutex sync.RWMutex
)
func recordFileRead(path string) {
fileRecordMutex.Lock()
defer fileRecordMutex.Unlock()
record, exists := fileRecords[path]
if !exists {
record = fileRecord{path: path}
}
record.readTime = time.Now()
fileRecords[path] = record
}
func getLastReadTime(path string) time.Time {
fileRecordMutex.RLock()
defer fileRecordMutex.RUnlock()
record, exists := fileRecords[path]
if !exists {
return time.Time{}
}
return record.readTime
}
func recordFileWrite(path string) {
fileRecordMutex.Lock()
defer fileRecordMutex.Unlock()
record, exists := fileRecords[path]
if !exists {
record = fileRecord{path: path}
}
record.writeTime = time.Now()
fileRecords[path] = record
}
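// Hedged sketch, not part of the original file: these records back the edit tool's
// "read the file before editing it" guard. Recording a read makes getLastReadTime
// return a non-zero time for that path. The path below is illustrative.
func exampleFileRecords() {
	path := "/tmp/example.txt"
	before := getLastReadTime(path) // zero time until the file is recorded as read
	recordFileRead(path)
	after := getLastReadTime(path) // now set to the time of the read
	_ = before.IsZero()            // true
	_ = after.IsZero()             // false
}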

View File

@@ -1,233 +0,0 @@
package tools
import (
"context"
"encoding/json"
"fmt"
"io/fs"
"os"
"path/filepath"
"sort"
"strings"
"time"
"github.com/bmatcuk/doublestar/v4"
"github.com/kujtimiihoxha/opencode/internal/config"
)
const (
GlobToolName = "glob"
globDescription = `Fast file pattern matching tool that finds files by name and pattern, returning matching paths sorted by modification time (newest first).
WHEN TO USE THIS TOOL:
- Use when you need to find files by name patterns or extensions
- Great for finding specific file types across a directory structure
- Useful for discovering files that match certain naming conventions
HOW TO USE:
- Provide a glob pattern to match against file paths
- Optionally specify a starting directory (defaults to current working directory)
- Results are sorted with most recently modified files first
GLOB PATTERN SYNTAX:
- '*' matches any sequence of non-separator characters
- '**' matches any sequence of characters, including separators
- '?' matches any single non-separator character
- '[...]' matches any character in the brackets
- '[!...]' matches any character not in the brackets
COMMON PATTERN EXAMPLES:
- '*.js' - Find all JavaScript files in the current directory
- '**/*.js' - Find all JavaScript files in any subdirectory
- 'src/**/*.{ts,tsx}' - Find all TypeScript files in the src directory
- '*.{html,css,js}' - Find all HTML, CSS, and JS files
LIMITATIONS:
- Results are limited to 100 files (newest first)
- Does not search file contents (use Grep tool for that)
- Hidden files (starting with '.') are skipped
TIPS:
- For the most useful results, combine with the Grep tool: first find files with Glob, then search their contents with Grep
- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
- Always check if results are truncated and refine your search pattern if needed`
)
type fileInfo struct {
path string
modTime time.Time
}
type GlobParams struct {
Pattern string `json:"pattern"`
Path string `json:"path"`
}
type GlobResponseMetadata struct {
NumberOfFiles int `json:"number_of_files"`
Truncated bool `json:"truncated"`
}
type globTool struct{}
func NewGlobTool() BaseTool {
return &globTool{}
}
func (g *globTool) Info() ToolInfo {
return ToolInfo{
Name: GlobToolName,
Description: globDescription,
Parameters: map[string]any{
"pattern": map[string]any{
"type": "string",
"description": "The glob pattern to match files against",
},
"path": map[string]any{
"type": "string",
"description": "The directory to search in. Defaults to the current working directory.",
},
},
Required: []string{"pattern"},
}
}
func (g *globTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params GlobParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
if params.Pattern == "" {
return NewTextErrorResponse("pattern is required"), nil
}
searchPath := params.Path
if searchPath == "" {
searchPath = config.WorkingDirectory()
}
files, truncated, err := globFiles(params.Pattern, searchPath, 100)
if err != nil {
return ToolResponse{}, fmt.Errorf("error finding files: %w", err)
}
var output string
if len(files) == 0 {
output = "No files found"
} else {
output = strings.Join(files, "\n")
if truncated {
output += "\n\n(Results are truncated. Consider using a more specific path or pattern.)"
}
}
return WithResponseMetadata(
NewTextResponse(output),
GlobResponseMetadata{
NumberOfFiles: len(files),
Truncated: truncated,
},
), nil
}
func globFiles(pattern, searchPath string, limit int) ([]string, bool, error) {
if !strings.HasPrefix(pattern, "/") && !strings.HasPrefix(pattern, searchPath) {
if !strings.HasSuffix(searchPath, "/") {
searchPath += "/"
}
pattern = searchPath + pattern
}
fsys := os.DirFS("/")
relPattern := strings.TrimPrefix(pattern, "/")
var matches []fileInfo
err := doublestar.GlobWalk(fsys, relPattern, func(path string, d fs.DirEntry) error {
if d.IsDir() {
return nil
}
if skipHidden(path) {
return nil
}
info, err := d.Info()
if err != nil {
return nil // Skip files we can't access
}
absPath := "/" + path // Restore absolute path
matches = append(matches, fileInfo{
path: absPath,
modTime: info.ModTime(),
})
if len(matches) >= limit*2 { // Collect more than needed for sorting
return fs.SkipAll
}
return nil
})
if err != nil {
return nil, false, fmt.Errorf("glob walk error: %w", err)
}
sort.Slice(matches, func(i, j int) bool {
return matches[i].modTime.After(matches[j].modTime)
})
truncated := len(matches) > limit
if truncated {
matches = matches[:limit]
}
results := make([]string, len(matches))
for i, m := range matches {
results[i] = m.path
}
return results, truncated, nil
}
func skipHidden(path string) bool {
// Check for hidden files (starting with a dot)
base := filepath.Base(path)
if base != "." && strings.HasPrefix(base, ".") {
return true
}
// List of commonly ignored directories in development projects
commonIgnoredDirs := map[string]bool{
"node_modules": true,
"vendor": true,
"dist": true,
"build": true,
"target": true,
".git": true,
".idea": true,
".vscode": true,
"__pycache__": true,
"bin": true,
"obj": true,
"out": true,
"coverage": true,
"tmp": true,
"temp": true,
"logs": true,
"generated": true,
"bower_components": true,
"jspm_packages": true,
}
// Check if any path component is in our ignore list
parts := strings.SplitSeq(path, string(os.PathSeparator))
for part := range parts {
if commonIgnoredDirs[part] {
return true
}
}
return false
}
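// Hedged sketch, not part of the original file: a minimal invocation of the glob
// tool through its ToolCall interface. The pattern and function name are
// illustrative; per globFiles above, results come back newest-first, capped at 100.
func exampleGlob(ctx context.Context) (ToolResponse, error) {
	tool := NewGlobTool()
	input, _ := json.Marshal(GlobParams{Pattern: "**/*.go"})
	return tool.Run(ctx, ToolCall{Name: GlobToolName, Input: string(input)})
}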

View File

@@ -1,358 +0,0 @@
package tools
import (
"bufio"
"context"
"encoding/json"
"fmt"
"os"
"os/exec"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
)
type GrepParams struct {
Pattern string `json:"pattern"`
Path string `json:"path"`
Include string `json:"include"`
LiteralText bool `json:"literal_text"`
}
type grepMatch struct {
path string
modTime time.Time
lineNum int
lineText string
}
type GrepResponseMetadata struct {
NumberOfMatches int `json:"number_of_matches"`
Truncated bool `json:"truncated"`
}
type grepTool struct{}
const (
GrepToolName = "grep"
grepDescription = `Fast content search tool that finds files containing specific text or patterns, returning matching file paths sorted by modification time (newest first).
WHEN TO USE THIS TOOL:
- Use when you need to find files containing specific text or patterns
- Great for searching code bases for function names, variable declarations, or error messages
- Useful for finding all files that use a particular API or pattern
HOW TO USE:
- Provide a regex pattern to search for within file contents
- Set literal_text=true if you want to search for the exact text with special characters (recommended for non-regex users)
- Optionally specify a starting directory (defaults to current working directory)
- Optionally provide an include pattern to filter which files to search
- Results are sorted with most recently modified files first
REGEX PATTERN SYNTAX (when literal_text=false):
- Supports standard regular expression syntax
- 'function' searches for the literal text "function"
- 'log\..*Error' finds text starting with "log." and ending with "Error"
- 'import\s+.*\s+from' finds import statements in JavaScript/TypeScript
COMMON INCLUDE PATTERN EXAMPLES:
- '*.js' - Only search JavaScript files
- '*.{ts,tsx}' - Only search TypeScript files
- '*.go' - Only search Go files
LIMITATIONS:
- Results are limited to 100 files (newest first)
- Performance depends on the number of files being searched
- Very large binary files may be skipped
- Hidden files (starting with '.') are skipped
TIPS:
- For faster, more targeted searches, first use Glob to find relevant files, then use Grep
- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
- Always check if results are truncated and refine your search pattern if needed
- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.`
)
func NewGrepTool() BaseTool {
return &grepTool{}
}
func (g *grepTool) Info() ToolInfo {
return ToolInfo{
Name: GrepToolName,
Description: grepDescription,
Parameters: map[string]any{
"pattern": map[string]any{
"type": "string",
"description": "The regex pattern to search for in file contents",
},
"path": map[string]any{
"type": "string",
"description": "The directory to search in. Defaults to the current working directory.",
},
"include": map[string]any{
"type": "string",
"description": "File pattern to include in the search (e.g. \"*.js\", \"*.{ts,tsx}\")",
},
"literal_text": map[string]any{
"type": "boolean",
"description": "If true, the pattern will be treated as literal text with special regex characters escaped. Default is false.",
},
},
Required: []string{"pattern"},
}
}
// escapeRegexPattern escapes special regex characters so they're treated as literal characters
func escapeRegexPattern(pattern string) string {
specialChars := []string{"\\", ".", "+", "*", "?", "(", ")", "[", "]", "{", "}", "^", "$", "|"}
escaped := pattern
for _, char := range specialChars {
escaped = strings.ReplaceAll(escaped, char, "\\"+char)
}
return escaped
}
func (g *grepTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params GrepParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
if params.Pattern == "" {
return NewTextErrorResponse("pattern is required"), nil
}
// If literal_text is true, escape the pattern
searchPattern := params.Pattern
if params.LiteralText {
searchPattern = escapeRegexPattern(params.Pattern)
}
searchPath := params.Path
if searchPath == "" {
searchPath = config.WorkingDirectory()
}
matches, truncated, err := searchFiles(searchPattern, searchPath, params.Include, 100)
if err != nil {
return ToolResponse{}, fmt.Errorf("error searching files: %w", err)
}
var output string
if len(matches) == 0 {
output = "No files found"
} else {
output = fmt.Sprintf("Found %d matches\n", len(matches))
currentFile := ""
for _, match := range matches {
if currentFile != match.path {
if currentFile != "" {
output += "\n"
}
currentFile = match.path
output += fmt.Sprintf("%s:\n", match.path)
}
if match.lineNum > 0 {
output += fmt.Sprintf(" Line %d: %s\n", match.lineNum, match.lineText)
} else {
output += fmt.Sprintf(" %s\n", match.path)
}
}
if truncated {
output += "\n(Results are truncated. Consider using a more specific path or pattern.)"
}
}
return WithResponseMetadata(
NewTextResponse(output),
GrepResponseMetadata{
NumberOfMatches: len(matches),
Truncated: truncated,
},
), nil
}
func searchFiles(pattern, rootPath, include string, limit int) ([]grepMatch, bool, error) {
matches, err := searchWithRipgrep(pattern, rootPath, include)
if err != nil {
matches, err = searchFilesWithRegex(pattern, rootPath, include)
if err != nil {
return nil, false, err
}
}
sort.Slice(matches, func(i, j int) bool {
return matches[i].modTime.After(matches[j].modTime)
})
truncated := len(matches) > limit
if truncated {
matches = matches[:limit]
}
return matches, truncated, nil
}
func searchWithRipgrep(pattern, path, include string) ([]grepMatch, error) {
_, err := exec.LookPath("rg")
if err != nil {
return nil, fmt.Errorf("ripgrep not found: %w", err)
}
// Use -n to show line numbers and include the matched line
args := []string{"-n", pattern}
if include != "" {
args = append(args, "--glob", include)
}
args = append(args, path)
cmd := exec.Command("rg", args...)
output, err := cmd.Output()
if err != nil {
if exitErr, ok := err.(*exec.ExitError); ok && exitErr.ExitCode() == 1 {
return []grepMatch{}, nil
}
return nil, err
}
lines := strings.Split(strings.TrimSpace(string(output)), "\n")
matches := make([]grepMatch, 0, len(lines))
for _, line := range lines {
if line == "" {
continue
}
// Parse ripgrep output format: file:line:content
parts := strings.SplitN(line, ":", 3)
if len(parts) < 3 {
continue
}
filePath := parts[0]
lineNum, err := strconv.Atoi(parts[1])
if err != nil {
continue
}
lineText := parts[2]
fileInfo, err := os.Stat(filePath)
if err != nil {
continue // Skip files we can't access
}
matches = append(matches, grepMatch{
path: filePath,
modTime: fileInfo.ModTime(),
lineNum: lineNum,
lineText: lineText,
})
}
return matches, nil
}
func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error) {
matches := []grepMatch{}
regex, err := regexp.Compile(pattern)
if err != nil {
return nil, fmt.Errorf("invalid regex pattern: %w", err)
}
var includePattern *regexp.Regexp
if include != "" {
regexPattern := globToRegex(include)
includePattern, err = regexp.Compile(regexPattern)
if err != nil {
return nil, fmt.Errorf("invalid include pattern: %w", err)
}
}
err = filepath.Walk(rootPath, func(path string, info os.FileInfo, err error) error {
if err != nil {
return nil // Skip errors
}
if info.IsDir() {
return nil // Skip directories
}
if skipHidden(path) {
return nil
}
if includePattern != nil && !includePattern.MatchString(path) {
return nil
}
match, lineNum, lineText, err := fileContainsPattern(path, regex)
if err != nil {
return nil // Skip files we can't read
}
if match {
matches = append(matches, grepMatch{
path: path,
modTime: info.ModTime(),
lineNum: lineNum,
lineText: lineText,
})
if len(matches) >= 200 {
return filepath.SkipAll
}
}
return nil
})
if err != nil {
return nil, err
}
return matches, nil
}
func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, string, error) {
file, err := os.Open(filePath)
if err != nil {
return false, 0, "", err
}
defer file.Close()
scanner := bufio.NewScanner(file)
lineNum := 0
for scanner.Scan() {
lineNum++
line := scanner.Text()
if pattern.MatchString(line) {
return true, lineNum, line, nil
}
}
return false, 0, "", scanner.Err()
}
func globToRegex(glob string) string {
regexPattern := strings.ReplaceAll(glob, ".", "\\.")
regexPattern = strings.ReplaceAll(regexPattern, "*", ".*")
regexPattern = strings.ReplaceAll(regexPattern, "?", ".")
re := regexp.MustCompile(`\{([^}]+)\}`)
regexPattern = re.ReplaceAllStringFunc(regexPattern, func(match string) string {
inner := match[1 : len(match)-1]
return "(" + strings.ReplaceAll(inner, ",", "|") + ")"
})
return regexPattern
}
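// Hedged sketch, not part of the original file: the two pattern helpers above are
// pure functions, so their behavior is easy to show in isolation.
func examplePatternHelpers() {
	// literal_text=true path: regex metacharacters are escaped before compiling.
	fmt.Println(escapeRegexPattern("config.Load()")) // config\.Load\(\)
	// include patterns are converted to regular expressions for the non-ripgrep fallback.
	fmt.Println(globToRegex("*.{ts,tsx}")) // .*\.(ts|tsx)
}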

View File

@@ -1,316 +0,0 @@
package tools
import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"github.com/kujtimiihoxha/opencode/internal/config"
)
type LSParams struct {
Path string `json:"path"`
Ignore []string `json:"ignore"`
}
type TreeNode struct {
Name string `json:"name"`
Path string `json:"path"`
Type string `json:"type"` // "file" or "directory"
Children []*TreeNode `json:"children,omitempty"`
}
type LSResponseMetadata struct {
NumberOfFiles int `json:"number_of_files"`
Truncated bool `json:"truncated"`
}
type lsTool struct{}
const (
LSToolName = "ls"
MaxLSFiles = 1000
lsDescription = `Directory listing tool that shows files and subdirectories in a tree structure, helping you explore and understand the project organization.
WHEN TO USE THIS TOOL:
- Use when you need to explore the structure of a directory
- Helpful for understanding the organization of a project
- Good first step when getting familiar with a new codebase
HOW TO USE:
- Provide a path to list (defaults to current working directory)
- Optionally specify glob patterns to ignore
- Results are displayed in a tree structure
FEATURES:
- Displays a hierarchical view of files and directories
- Automatically skips hidden files/directories (starting with '.')
- Skips common system directories like __pycache__
- Can filter out files matching specific patterns
LIMITATIONS:
- Results are limited to 1000 files
- Very large directories will be truncated
- Does not show file sizes or permissions
- Cannot recursively list all directories in a large project
TIPS:
- Use Glob tool for finding files by name patterns instead of browsing
- Use Grep tool for searching file contents
- Combine with other tools for more effective exploration`
)
func NewLsTool() BaseTool {
return &lsTool{}
}
func (l *lsTool) Info() ToolInfo {
return ToolInfo{
Name: LSToolName,
Description: lsDescription,
Parameters: map[string]any{
"path": map[string]any{
"type": "string",
"description": "The path to the directory to list (defaults to current working directory)",
},
"ignore": map[string]any{
"type": "array",
"description": "List of glob patterns to ignore",
"items": map[string]any{
"type": "string",
},
},
},
Required: []string{"path"},
}
}
func (l *lsTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params LSParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
searchPath := params.Path
if searchPath == "" {
searchPath = config.WorkingDirectory()
}
if !filepath.IsAbs(searchPath) {
searchPath = filepath.Join(config.WorkingDirectory(), searchPath)
}
if _, err := os.Stat(searchPath); os.IsNotExist(err) {
return NewTextErrorResponse(fmt.Sprintf("path does not exist: %s", searchPath)), nil
}
files, truncated, err := listDirectory(searchPath, params.Ignore, MaxLSFiles)
if err != nil {
return ToolResponse{}, fmt.Errorf("error listing directory: %w", err)
}
tree := createFileTree(files)
output := printTree(tree, searchPath)
if truncated {
output = fmt.Sprintf("There are more than %d files in the directory. Use a more specific path or use the Glob tool to find specific files. The first %d files and directories are included below:\n\n%s", MaxLSFiles, MaxLSFiles, output)
}
return WithResponseMetadata(
NewTextResponse(output),
LSResponseMetadata{
NumberOfFiles: len(files),
Truncated: truncated,
},
), nil
}
func listDirectory(initialPath string, ignorePatterns []string, limit int) ([]string, bool, error) {
var results []string
truncated := false
err := filepath.Walk(initialPath, func(path string, info os.FileInfo, err error) error {
if err != nil {
return nil // Skip files we don't have permission to access
}
if shouldSkip(path, ignorePatterns) {
if info.IsDir() {
return filepath.SkipDir
}
return nil
}
if path != initialPath {
if info.IsDir() {
path = path + string(filepath.Separator)
}
results = append(results, path)
}
if len(results) >= limit {
truncated = true
return filepath.SkipAll
}
return nil
})
if err != nil {
return nil, truncated, err
}
return results, truncated, nil
}
func shouldSkip(path string, ignorePatterns []string) bool {
base := filepath.Base(path)
if base != "." && strings.HasPrefix(base, ".") {
return true
}
commonIgnored := []string{
"__pycache__",
"node_modules",
"dist",
"build",
"target",
"vendor",
"bin",
"obj",
".git",
".idea",
".vscode",
".DS_Store",
"*.pyc",
"*.pyo",
"*.pyd",
"*.so",
"*.dll",
"*.exe",
}
if strings.Contains(path, filepath.Join("__pycache__", "")) {
return true
}
for _, ignored := range commonIgnored {
if strings.HasSuffix(ignored, "/") {
if strings.Contains(path, filepath.Join(ignored[:len(ignored)-1], "")) {
return true
}
} else if strings.HasPrefix(ignored, "*.") {
if strings.HasSuffix(base, ignored[1:]) {
return true
}
} else {
if base == ignored {
return true
}
}
}
for _, pattern := range ignorePatterns {
matched, err := filepath.Match(pattern, base)
if err == nil && matched {
return true
}
}
return false
}
func createFileTree(sortedPaths []string) []*TreeNode {
root := []*TreeNode{}
pathMap := make(map[string]*TreeNode)
for _, path := range sortedPaths {
parts := strings.Split(path, string(filepath.Separator))
currentPath := ""
var parentPath string
var cleanParts []string
for _, part := range parts {
if part != "" {
cleanParts = append(cleanParts, part)
}
}
parts = cleanParts
if len(parts) == 0 {
continue
}
for i, part := range parts {
if currentPath == "" {
currentPath = part
} else {
currentPath = filepath.Join(currentPath, part)
}
if _, exists := pathMap[currentPath]; exists {
parentPath = currentPath
continue
}
isLastPart := i == len(parts)-1
isDir := !isLastPart || strings.HasSuffix(path, string(filepath.Separator))
nodeType := "file"
if isDir {
nodeType = "directory"
}
newNode := &TreeNode{
Name: part,
Path: currentPath,
Type: nodeType,
Children: []*TreeNode{},
}
pathMap[currentPath] = newNode
if i > 0 && parentPath != "" {
if parent, ok := pathMap[parentPath]; ok {
parent.Children = append(parent.Children, newNode)
}
} else {
root = append(root, newNode)
}
parentPath = currentPath
}
}
return root
}
func printTree(tree []*TreeNode, rootPath string) string {
var result strings.Builder
result.WriteString(fmt.Sprintf("- %s%s\n", rootPath, string(filepath.Separator)))
for _, node := range tree {
printNode(&result, node, 1)
}
return result.String()
}
func printNode(builder *strings.Builder, node *TreeNode, level int) {
indent := strings.Repeat(" ", level)
nodeName := node.Name
if node.Type == "directory" {
nodeName += string(filepath.Separator)
}
fmt.Fprintf(builder, "%s- %s\n", indent, nodeName)
if node.Type == "directory" && len(node.Children) > 0 {
for _, child := range node.Children {
printNode(builder, child, level+1)
}
}
}
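// Hedged sketch, not part of the original file: createFileTree and printTree are
// pure helpers, so a small hand-built listing shows the output format. The paths are
// illustrative; directory entries carry a trailing separator, as in listDirectory.
func exampleTreeOutput() string {
	sep := string(filepath.Separator)
	paths := []string{
		"cmd" + sep,
		"cmd" + sep + "root.go",
		"internal" + sep,
		"internal" + sep + "config" + sep,
		"internal" + sep + "config" + sep + "config.go",
	}
	tree := createFileTree(paths)
	return printTree(tree, "some"+sep+"project") // indented "- name" lines, one per node
}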

View File

@@ -1,457 +0,0 @@
package tools
import (
"context"
"encoding/json"
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestLsTool_Info(t *testing.T) {
tool := NewLsTool()
info := tool.Info()
assert.Equal(t, LSToolName, info.Name)
assert.NotEmpty(t, info.Description)
assert.Contains(t, info.Parameters, "path")
assert.Contains(t, info.Parameters, "ignore")
assert.Contains(t, info.Required, "path")
}
func TestLsTool_Run(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "ls_tool_test")
require.NoError(t, err)
defer os.RemoveAll(tempDir)
// Create a test directory structure
testDirs := []string{
"dir1",
"dir2",
"dir2/subdir1",
"dir2/subdir2",
"dir3",
"dir3/.hidden_dir",
"__pycache__",
}
testFiles := []string{
"file1.txt",
"file2.txt",
"dir1/file3.txt",
"dir2/file4.txt",
"dir2/subdir1/file5.txt",
"dir2/subdir2/file6.txt",
"dir3/file7.txt",
"dir3/.hidden_file.txt",
"__pycache__/cache.pyc",
".hidden_root_file.txt",
}
// Create directories
for _, dir := range testDirs {
dirPath := filepath.Join(tempDir, dir)
err := os.MkdirAll(dirPath, 0755)
require.NoError(t, err)
}
// Create files
for _, file := range testFiles {
filePath := filepath.Join(tempDir, file)
err := os.WriteFile(filePath, []byte("test content"), 0644)
require.NoError(t, err)
}
t.Run("lists directory successfully", func(t *testing.T) {
tool := NewLsTool()
params := LSParams{
Path: tempDir,
}
paramsJSON, err := json.Marshal(params)
require.NoError(t, err)
call := ToolCall{
Name: LSToolName,
Input: string(paramsJSON),
}
response, err := tool.Run(context.Background(), call)
require.NoError(t, err)
// Check that visible directories and files are included
assert.Contains(t, response.Content, "dir1")
assert.Contains(t, response.Content, "dir2")
assert.Contains(t, response.Content, "dir3")
assert.Contains(t, response.Content, "file1.txt")
assert.Contains(t, response.Content, "file2.txt")
// Check that hidden files and directories are not included
assert.NotContains(t, response.Content, ".hidden_dir")
assert.NotContains(t, response.Content, ".hidden_file.txt")
assert.NotContains(t, response.Content, ".hidden_root_file.txt")
// Check that __pycache__ is not included
assert.NotContains(t, response.Content, "__pycache__")
})
t.Run("handles non-existent path", func(t *testing.T) {
tool := NewLsTool()
params := LSParams{
Path: filepath.Join(tempDir, "non_existent_dir"),
}
paramsJSON, err := json.Marshal(params)
require.NoError(t, err)
call := ToolCall{
Name: LSToolName,
Input: string(paramsJSON),
}
response, err := tool.Run(context.Background(), call)
require.NoError(t, err)
assert.Contains(t, response.Content, "path does not exist")
})
t.Run("handles empty path parameter", func(t *testing.T) {
// An empty path falls back to config.WorkingDirectory, which we can't easily mock here,
// so we only verify that the tool returns some non-empty response
tool := NewLsTool()
params := LSParams{
Path: "",
}
paramsJSON, err := json.Marshal(params)
require.NoError(t, err)
call := ToolCall{
Name: LSToolName,
Input: string(paramsJSON),
}
response, err := tool.Run(context.Background(), call)
require.NoError(t, err)
// The response should either contain a valid directory listing or an error
// We'll just check that it's not empty
assert.NotEmpty(t, response.Content)
})
t.Run("handles invalid parameters", func(t *testing.T) {
tool := NewLsTool()
call := ToolCall{
Name: LSToolName,
Input: "invalid json",
}
response, err := tool.Run(context.Background(), call)
require.NoError(t, err)
assert.Contains(t, response.Content, "error parsing parameters")
})
t.Run("respects ignore patterns", func(t *testing.T) {
tool := NewLsTool()
params := LSParams{
Path: tempDir,
Ignore: []string{"file1.txt", "dir1"},
}
paramsJSON, err := json.Marshal(params)
require.NoError(t, err)
call := ToolCall{
Name: LSToolName,
Input: string(paramsJSON),
}
response, err := tool.Run(context.Background(), call)
require.NoError(t, err)
// The output format is a tree, so we need to check for specific patterns
// Check that file1.txt is not directly mentioned
assert.NotContains(t, response.Content, "- file1.txt")
// Check that dir1/ is not directly mentioned
assert.NotContains(t, response.Content, "- dir1/")
})
t.Run("handles relative path", func(t *testing.T) {
// Save original working directory
origWd, err := os.Getwd()
require.NoError(t, err)
defer func() {
os.Chdir(origWd)
}()
// Change to a directory above the temp directory
parentDir := filepath.Dir(tempDir)
err = os.Chdir(parentDir)
require.NoError(t, err)
tool := NewLsTool()
params := LSParams{
Path: filepath.Base(tempDir),
}
paramsJSON, err := json.Marshal(params)
require.NoError(t, err)
call := ToolCall{
Name: LSToolName,
Input: string(paramsJSON),
}
response, err := tool.Run(context.Background(), call)
require.NoError(t, err)
// Should list the temp directory contents
assert.Contains(t, response.Content, "dir1")
assert.Contains(t, response.Content, "file1.txt")
})
}
func TestShouldSkip(t *testing.T) {
testCases := []struct {
name string
path string
ignorePatterns []string
expected bool
}{
{
name: "hidden file",
path: "/path/to/.hidden_file",
ignorePatterns: []string{},
expected: true,
},
{
name: "hidden directory",
path: "/path/to/.hidden_dir",
ignorePatterns: []string{},
expected: true,
},
{
name: "pycache directory",
path: "/path/to/__pycache__/file.pyc",
ignorePatterns: []string{},
expected: true,
},
{
name: "node_modules directory",
path: "/path/to/node_modules/package",
ignorePatterns: []string{},
expected: false, // shouldSkip only checks the final path element, so node_modules earlier in the path is not matched
},
{
name: "normal file",
path: "/path/to/normal_file.txt",
ignorePatterns: []string{},
expected: false,
},
{
name: "normal directory",
path: "/path/to/normal_dir",
ignorePatterns: []string{},
expected: false,
},
{
name: "ignored by pattern",
path: "/path/to/ignore_me.txt",
ignorePatterns: []string{"ignore_*.txt"},
expected: true,
},
{
name: "not ignored by pattern",
path: "/path/to/keep_me.txt",
ignorePatterns: []string{"ignore_*.txt"},
expected: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result := shouldSkip(tc.path, tc.ignorePatterns)
assert.Equal(t, tc.expected, result)
})
}
}
func TestCreateFileTree(t *testing.T) {
paths := []string{
"/path/to/file1.txt",
"/path/to/dir1/file2.txt",
"/path/to/dir1/subdir/file3.txt",
"/path/to/dir2/file4.txt",
}
tree := createFileTree(paths)
// Check the structure of the tree
assert.Len(t, tree, 1) // Should have one root node
// Check the root node
rootNode := tree[0]
assert.Equal(t, "path", rootNode.Name)
assert.Equal(t, "directory", rootNode.Type)
assert.Len(t, rootNode.Children, 1)
// Check the "to" node
toNode := rootNode.Children[0]
assert.Equal(t, "to", toNode.Name)
assert.Equal(t, "directory", toNode.Type)
assert.Len(t, toNode.Children, 3) // file1.txt, dir1, dir2
// Find the dir1 node
var dir1Node *TreeNode
for _, child := range toNode.Children {
if child.Name == "dir1" {
dir1Node = child
break
}
}
require.NotNil(t, dir1Node)
assert.Equal(t, "directory", dir1Node.Type)
assert.Len(t, dir1Node.Children, 2) // file2.txt and subdir
}
func TestPrintTree(t *testing.T) {
// Create a simple tree
tree := []*TreeNode{
{
Name: "dir1",
Path: "dir1",
Type: "directory",
Children: []*TreeNode{
{
Name: "file1.txt",
Path: "dir1/file1.txt",
Type: "file",
},
{
Name: "subdir",
Path: "dir1/subdir",
Type: "directory",
Children: []*TreeNode{
{
Name: "file2.txt",
Path: "dir1/subdir/file2.txt",
Type: "file",
},
},
},
},
},
{
Name: "file3.txt",
Path: "file3.txt",
Type: "file",
},
}
result := printTree(tree, "/root")
// Check the output format
assert.Contains(t, result, "- /root/")
assert.Contains(t, result, " - dir1/")
assert.Contains(t, result, " - file1.txt")
assert.Contains(t, result, " - subdir/")
assert.Contains(t, result, " - file2.txt")
assert.Contains(t, result, " - file3.txt")
}
func TestListDirectory(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "list_directory_test")
require.NoError(t, err)
defer os.RemoveAll(tempDir)
// Create a test directory structure
testDirs := []string{
"dir1",
"dir1/subdir1",
".hidden_dir",
}
testFiles := []string{
"file1.txt",
"file2.txt",
"dir1/file3.txt",
"dir1/subdir1/file4.txt",
".hidden_file.txt",
}
// Create directories
for _, dir := range testDirs {
dirPath := filepath.Join(tempDir, dir)
err := os.MkdirAll(dirPath, 0755)
require.NoError(t, err)
}
// Create files
for _, file := range testFiles {
filePath := filepath.Join(tempDir, file)
err := os.WriteFile(filePath, []byte("test content"), 0644)
require.NoError(t, err)
}
t.Run("lists files with no limit", func(t *testing.T) {
files, truncated, err := listDirectory(tempDir, []string{}, 1000)
require.NoError(t, err)
assert.False(t, truncated)
// Check that visible files and directories are included
containsPath := func(paths []string, target string) bool {
targetPath := filepath.Join(tempDir, target)
for _, path := range paths {
if strings.HasPrefix(path, targetPath) {
return true
}
}
return false
}
assert.True(t, containsPath(files, "dir1"))
assert.True(t, containsPath(files, "file1.txt"))
assert.True(t, containsPath(files, "file2.txt"))
assert.True(t, containsPath(files, "dir1/file3.txt"))
// Check that hidden files and directories are not included
assert.False(t, containsPath(files, ".hidden_dir"))
assert.False(t, containsPath(files, ".hidden_file.txt"))
})
t.Run("respects limit and returns truncated flag", func(t *testing.T) {
files, truncated, err := listDirectory(tempDir, []string{}, 2)
require.NoError(t, err)
assert.True(t, truncated)
assert.Len(t, files, 2)
})
t.Run("respects ignore patterns", func(t *testing.T) {
files, truncated, err := listDirectory(tempDir, []string{"*.txt"}, 1000)
require.NoError(t, err)
assert.False(t, truncated)
// Check that no .txt files are included
for _, file := range files {
assert.False(t, strings.HasSuffix(file, ".txt"), "Found .txt file: %s", file)
}
// But directories should still be included
containsDir := false
for _, file := range files {
if strings.Contains(file, "dir1") {
containsDir = true
break
}
}
assert.True(t, containsDir)
})
}

@@ -1,372 +0,0 @@
package tools
import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/diff"
"github.com/kujtimiihoxha/opencode/internal/history"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/lsp"
"github.com/kujtimiihoxha/opencode/internal/permission"
)
type PatchParams struct {
PatchText string `json:"patch_text"`
}
type PatchResponseMetadata struct {
FilesChanged []string `json:"files_changed"`
Additions int `json:"additions"`
Removals int `json:"removals"`
}
type patchTool struct {
lspClients map[string]*lsp.Client
permissions permission.Service
files history.Service
}
const (
PatchToolName = "patch"
patchDescription = `Applies a patch to multiple files in one operation. This tool is useful for making coordinated changes across multiple files.
The patch text must follow this format:
*** Begin Patch
*** Update File: /path/to/file
@@ Context line (unique within the file)
Line to keep
-Line to remove
+Line to add
Line to keep
*** Add File: /path/to/new/file
+Content of the new file
+More content
*** Delete File: /path/to/file/to/delete
*** End Patch
Before using this tool:
1. Use the FileRead tool to understand the files' contents and context
2. Verify all file paths are correct (use the LS tool)
CRITICAL REQUIREMENTS FOR USING THIS TOOL:
1. UNIQUENESS: Context lines MUST uniquely identify the specific sections you want to change
2. PRECISION: All whitespace, indentation, and surrounding code must match exactly
3. VALIDATION: Ensure edits result in idiomatic, correct code
4. PATHS: Always use absolute file paths (starting with /)
The tool will apply all changes in a single atomic operation.`
)
func NewPatchTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service) BaseTool {
return &patchTool{
lspClients: lspClients,
permissions: permissions,
files: files,
}
}
func (p *patchTool) Info() ToolInfo {
return ToolInfo{
Name: PatchToolName,
Description: patchDescription,
Parameters: map[string]any{
"patch_text": map[string]any{
"type": "string",
"description": "The full patch text that describes all changes to be made",
},
},
Required: []string{"patch_text"},
}
}
func (p *patchTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params PatchParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse("invalid parameters"), nil
}
if params.PatchText == "" {
return NewTextErrorResponse("patch_text is required"), nil
}
// Identify all files needed for the patch and verify they've been read
filesToRead := diff.IdentifyFilesNeeded(params.PatchText)
for _, filePath := range filesToRead {
absPath := filePath
if !filepath.IsAbs(absPath) {
wd := config.WorkingDirectory()
absPath = filepath.Join(wd, absPath)
}
if getLastReadTime(absPath).IsZero() {
return NewTextErrorResponse(fmt.Sprintf("you must read the file %s before patching it. Use the FileRead tool first", filePath)), nil
}
fileInfo, err := os.Stat(absPath)
if err != nil {
if os.IsNotExist(err) {
return NewTextErrorResponse(fmt.Sprintf("file not found: %s", absPath)), nil
}
return ToolResponse{}, fmt.Errorf("failed to access file: %w", err)
}
if fileInfo.IsDir() {
return NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", absPath)), nil
}
modTime := fileInfo.ModTime()
lastRead := getLastReadTime(absPath)
if modTime.After(lastRead) {
return NewTextErrorResponse(
fmt.Sprintf("file %s has been modified since it was last read (mod time: %s, last read: %s)",
absPath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339),
)), nil
}
}
// Check for new files to ensure they don't already exist
filesToAdd := diff.IdentifyFilesAdded(params.PatchText)
for _, filePath := range filesToAdd {
absPath := filePath
if !filepath.IsAbs(absPath) {
wd := config.WorkingDirectory()
absPath = filepath.Join(wd, absPath)
}
_, err := os.Stat(absPath)
if err == nil {
return NewTextErrorResponse(fmt.Sprintf("file already exists and cannot be added: %s", absPath)), nil
} else if !os.IsNotExist(err) {
return ToolResponse{}, fmt.Errorf("failed to check file: %w", err)
}
}
// Load all required files
currentFiles := make(map[string]string)
for _, filePath := range filesToRead {
absPath := filePath
if !filepath.IsAbs(absPath) {
wd := config.WorkingDirectory()
absPath = filepath.Join(wd, absPath)
}
content, err := os.ReadFile(absPath)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to read file %s: %w", absPath, err)
}
currentFiles[filePath] = string(content)
}
// Process the patch
patch, fuzz, err := diff.TextToPatch(params.PatchText, currentFiles)
if err != nil {
return NewTextErrorResponse(fmt.Sprintf("failed to parse patch: %s", err)), nil
}
if fuzz > 3 {
return NewTextErrorResponse(fmt.Sprintf("patch contains fuzzy matches (fuzz level: %d). Please make your context lines more precise", fuzz)), nil
}
// Convert patch to commit
commit, err := diff.PatchToCommit(patch, currentFiles)
if err != nil {
return NewTextErrorResponse(fmt.Sprintf("failed to create commit from patch: %s", err)), nil
}
// Get session ID and message ID
sessionID, messageID := GetContextValues(ctx)
if sessionID == "" || messageID == "" {
return ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a patch")
}
// Request permission for all changes
for path, change := range commit.Changes {
switch change.Type {
case diff.ActionAdd:
dir := filepath.Dir(path)
patchDiff, _, _ := diff.GenerateDiff("", *change.NewContent, path)
p := p.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: dir,
ToolName: PatchToolName,
Action: "create",
Description: fmt.Sprintf("Create file %s", path),
Params: EditPermissionsParams{
FilePath: path,
Diff: patchDiff,
},
},
)
if !p {
return ToolResponse{}, permission.ErrorPermissionDenied
}
case diff.ActionUpdate:
currentContent := ""
if change.OldContent != nil {
currentContent = *change.OldContent
}
newContent := ""
if change.NewContent != nil {
newContent = *change.NewContent
}
patchDiff, _, _ := diff.GenerateDiff(currentContent, newContent, path)
dir := filepath.Dir(path)
p := p.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: dir,
ToolName: PatchToolName,
Action: "update",
Description: fmt.Sprintf("Update file %s", path),
Params: EditPermissionsParams{
FilePath: path,
Diff: patchDiff,
},
},
)
if !p {
return ToolResponse{}, permission.ErrorPermissionDenied
}
case diff.ActionDelete:
dir := filepath.Dir(path)
patchDiff, _, _ := diff.GenerateDiff(*change.OldContent, "", path)
p := p.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: dir,
ToolName: PatchToolName,
Action: "delete",
Description: fmt.Sprintf("Delete file %s", path),
Params: EditPermissionsParams{
FilePath: path,
Diff: patchDiff,
},
},
)
if !p {
return ToolResponse{}, permission.ErrorPermissionDenied
}
}
}
// Apply the changes to the filesystem
err = diff.ApplyCommit(commit, func(path string, content string) error {
absPath := path
if !filepath.IsAbs(absPath) {
wd := config.WorkingDirectory()
absPath = filepath.Join(wd, absPath)
}
// Create parent directories if needed
dir := filepath.Dir(absPath)
if err := os.MkdirAll(dir, 0o755); err != nil {
return fmt.Errorf("failed to create parent directories for %s: %w", absPath, err)
}
return os.WriteFile(absPath, []byte(content), 0o644)
}, func(path string) error {
absPath := path
if !filepath.IsAbs(absPath) {
wd := config.WorkingDirectory()
absPath = filepath.Join(wd, absPath)
}
return os.Remove(absPath)
})
if err != nil {
return NewTextErrorResponse(fmt.Sprintf("failed to apply patch: %s", err)), nil
}
// Update file history for all modified files
changedFiles := []string{}
totalAdditions := 0
totalRemovals := 0
for path, change := range commit.Changes {
absPath := path
if !filepath.IsAbs(absPath) {
wd := config.WorkingDirectory()
absPath = filepath.Join(wd, absPath)
}
changedFiles = append(changedFiles, absPath)
oldContent := ""
if change.OldContent != nil {
oldContent = *change.OldContent
}
newContent := ""
if change.NewContent != nil {
newContent = *change.NewContent
}
// Calculate diff statistics
_, additions, removals := diff.GenerateDiff(oldContent, newContent, path)
totalAdditions += additions
totalRemovals += removals
// Update history
file, err := p.files.GetByPathAndSession(ctx, absPath, sessionID)
if err != nil && change.Type != diff.ActionAdd {
// If not adding a file, create history entry for existing file
_, err = p.files.Create(ctx, sessionID, absPath, oldContent)
if err != nil {
logging.Debug("Error creating file history", "error", err)
}
}
if err == nil && change.Type != diff.ActionAdd && file.Content != oldContent {
// User manually changed content, store intermediate version
_, err = p.files.CreateVersion(ctx, sessionID, absPath, oldContent)
if err != nil {
logging.Debug("Error creating file history version", "error", err)
}
}
// Store new version
if change.Type == diff.ActionDelete {
_, err = p.files.CreateVersion(ctx, sessionID, absPath, "")
} else {
_, err = p.files.CreateVersion(ctx, sessionID, absPath, newContent)
}
if err != nil {
logging.Debug("Error creating file history version", "error", err)
}
// Record file operations
recordFileWrite(absPath)
recordFileRead(absPath)
}
// Run LSP diagnostics on all changed files
for _, filePath := range changedFiles {
waitForLspDiagnostics(ctx, filePath, p.lspClients)
}
result := fmt.Sprintf("Patch applied successfully. %d files changed, %d additions, %d removals",
len(changedFiles), totalAdditions, totalRemovals)
diagnosticsText := ""
for _, filePath := range changedFiles {
diagnosticsText += getDiagnostics(filePath, p.lspClients)
}
if diagnosticsText != "" {
result += "\n\nDiagnostics:\n" + diagnosticsText
}
return WithResponseMetadata(
NewTextResponse(result),
PatchResponseMetadata{
FilesChanged: changedFiles,
Additions: totalAdditions,
Removals: totalRemovals,
}), nil
}
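
For reference, a sketch of a patch_text value that mirrors the format laid out in patchDescription above. The file paths and content are invented, and whether context lines need a leading space is up to the diff parser, so treat this as a shape reference only.

// Example (illustrative only): a patch_text payload in the documented format.
const examplePatchText = `*** Begin Patch
*** Update File: /srv/app/main.go
@@ func main() {
fmt.Println("starting")
-fmt.Println("old message")
+fmt.Println("new message")
fmt.Println("done")
*** Add File: /srv/app/NOTES.md
+Release notes go here.
*** End Patch`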

@@ -1,304 +0,0 @@
package shell
import (
"context"
"errors"
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"sync"
"syscall"
"time"
)
type PersistentShell struct {
cmd *exec.Cmd
stdin *os.File
isAlive bool
cwd string
mu sync.Mutex
commandQueue chan *commandExecution
}
type commandExecution struct {
command string
timeout time.Duration
resultChan chan commandResult
ctx context.Context
}
type commandResult struct {
stdout string
stderr string
exitCode int
interrupted bool
err error
}
var (
shellInstance *PersistentShell
shellInstanceOnce sync.Once
)
func GetPersistentShell(workingDir string) *PersistentShell {
shellInstanceOnce.Do(func() {
shellInstance = newPersistentShell(workingDir)
})
if !shellInstance.isAlive {
shellInstance = newPersistentShell(shellInstance.cwd)
}
return shellInstance
}
func newPersistentShell(cwd string) *PersistentShell {
shellPath := os.Getenv("SHELL")
if shellPath == "" {
shellPath = "/bin/bash"
}
cmd := exec.Command(shellPath, "-l")
cmd.Dir = cwd
stdinPipe, err := cmd.StdinPipe()
if err != nil {
return nil
}
cmd.Env = append(os.Environ(), "GIT_EDITOR=true")
err = cmd.Start()
if err != nil {
return nil
}
shell := &PersistentShell{
cmd: cmd,
stdin: stdinPipe.(*os.File),
isAlive: true,
cwd: cwd,
commandQueue: make(chan *commandExecution, 10),
}
go func() {
defer func() {
if r := recover(); r != nil {
fmt.Fprintf(os.Stderr, "Panic in shell command processor: %v\n", r)
shell.isAlive = false
close(shell.commandQueue)
}
}()
shell.processCommands()
}()
go func() {
err := cmd.Wait()
if err != nil {
// Log the error if needed
}
shell.isAlive = false
close(shell.commandQueue)
}()
return shell
}
func (s *PersistentShell) processCommands() {
for cmd := range s.commandQueue {
result := s.execCommand(cmd.command, cmd.timeout, cmd.ctx)
cmd.resultChan <- result
}
}
func (s *PersistentShell) execCommand(command string, timeout time.Duration, ctx context.Context) commandResult {
s.mu.Lock()
defer s.mu.Unlock()
if !s.isAlive {
return commandResult{
stderr: "Shell is not alive",
exitCode: 1,
err: errors.New("shell is not alive"),
}
}
tempDir := os.TempDir()
stdoutFile := filepath.Join(tempDir, fmt.Sprintf("opencode-stdout-%d", time.Now().UnixNano()))
stderrFile := filepath.Join(tempDir, fmt.Sprintf("opencode-stderr-%d", time.Now().UnixNano()))
statusFile := filepath.Join(tempDir, fmt.Sprintf("opencode-status-%d", time.Now().UnixNano()))
cwdFile := filepath.Join(tempDir, fmt.Sprintf("opencode-cwd-%d", time.Now().UnixNano()))
defer func() {
os.Remove(stdoutFile)
os.Remove(stderrFile)
os.Remove(statusFile)
os.Remove(cwdFile)
}()
fullCommand := fmt.Sprintf(`
eval %s < /dev/null > %s 2> %s
EXEC_EXIT_CODE=$?
pwd > %s
echo $EXEC_EXIT_CODE > %s
`,
shellQuote(command),
shellQuote(stdoutFile),
shellQuote(stderrFile),
shellQuote(cwdFile),
shellQuote(statusFile),
)
_, err := s.stdin.Write([]byte(fullCommand + "\n"))
if err != nil {
return commandResult{
stderr: fmt.Sprintf("Failed to write command to shell: %v", err),
exitCode: 1,
err: err,
}
}
interrupted := false
startTime := time.Now()
done := make(chan bool)
go func() {
for {
select {
case <-ctx.Done():
s.killChildren()
interrupted = true
done <- true
return
case <-time.After(10 * time.Millisecond):
if fileExists(statusFile) && fileSize(statusFile) > 0 {
done <- true
return
}
if timeout > 0 {
elapsed := time.Since(startTime)
if elapsed > timeout {
s.killChildren()
interrupted = true
done <- true
return
}
}
}
}
}()
<-done
stdout := readFileOrEmpty(stdoutFile)
stderr := readFileOrEmpty(stderrFile)
exitCodeStr := readFileOrEmpty(statusFile)
newCwd := readFileOrEmpty(cwdFile)
exitCode := 0
if exitCodeStr != "" {
fmt.Sscanf(exitCodeStr, "%d", &exitCode)
} else if interrupted {
exitCode = 143
stderr += "\nCommand execution timed out or was interrupted"
}
if newCwd != "" {
s.cwd = strings.TrimSpace(newCwd)
}
return commandResult{
stdout: stdout,
stderr: stderr,
exitCode: exitCode,
interrupted: interrupted,
}
}
func (s *PersistentShell) killChildren() {
if s.cmd == nil || s.cmd.Process == nil {
return
}
pgrepCmd := exec.Command("pgrep", "-P", fmt.Sprintf("%d", s.cmd.Process.Pid))
output, err := pgrepCmd.Output()
if err != nil {
return
}
for pidStr := range strings.SplitSeq(string(output), "\n") {
if pidStr = strings.TrimSpace(pidStr); pidStr != "" {
var pid int
fmt.Sscanf(pidStr, "%d", &pid)
if pid > 0 {
proc, err := os.FindProcess(pid)
if err == nil {
proc.Signal(syscall.SIGTERM)
}
}
}
}
}
func (s *PersistentShell) Exec(ctx context.Context, command string, timeoutMs int) (string, string, int, bool, error) {
if !s.isAlive {
return "", "Shell is not alive", 1, false, errors.New("shell is not alive")
}
timeout := time.Duration(timeoutMs) * time.Millisecond
resultChan := make(chan commandResult)
s.commandQueue <- &commandExecution{
command: command,
timeout: timeout,
resultChan: resultChan,
ctx: ctx,
}
result := <-resultChan
return result.stdout, result.stderr, result.exitCode, result.interrupted, result.err
}
func (s *PersistentShell) Close() {
s.mu.Lock()
defer s.mu.Unlock()
if !s.isAlive {
return
}
s.stdin.Write([]byte("exit\n"))
s.cmd.Process.Kill()
s.isAlive = false
}
func shellQuote(s string) string {
return "'" + strings.ReplaceAll(s, "'", "'\\''") + "'"
}
func readFileOrEmpty(path string) string {
content, err := os.ReadFile(path)
if err != nil {
return ""
}
return string(content)
}
func fileExists(path string) bool {
_, err := os.Stat(path)
return err == nil
}
func fileSize(path string) int64 {
info, err := os.Stat(path)
if err != nil {
return 0
}
return info.Size()
}
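
A minimal usage sketch, not part of the original file, for the persistent shell above; the command and timeout are arbitrary and the caller is assumed to already hold a context.

// Example (illustrative only): run a command through the persistent shell.
func exampleShellUsage(ctx context.Context, workingDir string) error {
	sh := GetPersistentShell(workingDir)
	stdout, stderr, exitCode, interrupted, err := sh.Exec(ctx, "git status --short", 30000)
	if err != nil {
		return err
	}
	if interrupted || exitCode != 0 {
		return fmt.Errorf("command failed (exit %d): %s", exitCode, stderr)
	}
	fmt.Print(stdout)
	return nil
}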

@@ -1,383 +0,0 @@
package tools
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"time"
)
type SourcegraphParams struct {
Query string `json:"query"`
Count int `json:"count,omitempty"`
ContextWindow int `json:"context_window,omitempty"`
Timeout int `json:"timeout,omitempty"`
}
type SourcegraphResponseMetadata struct {
NumberOfMatches int `json:"number_of_matches"`
Truncated bool `json:"truncated"`
}
type sourcegraphTool struct {
client *http.Client
}
const (
SourcegraphToolName = "sourcegraph"
sourcegraphToolDescription = `Search code across public repositories using Sourcegraph's GraphQL API.
WHEN TO USE THIS TOOL:
- Use when you need to find code examples or implementations across public repositories
- Helpful for researching how others have solved similar problems
- Useful for discovering patterns and best practices in open source code
HOW TO USE:
- Provide a search query using Sourcegraph's query syntax
- Optionally specify the number of results to return (default: 10)
- Optionally set a timeout for the request
QUERY SYNTAX:
- Basic search: "fmt.Println" searches for exact matches
- File filters: "file:.go fmt.Println" limits to Go files
- Repository filters: "repo:^github\.com/golang/go$ fmt.Println" limits to specific repos
- Language filters: "lang:go fmt.Println" limits to Go code
- Boolean operators: "fmt.Println AND log.Fatal" for combined terms
- Regular expressions: "fmt\.(Print|Printf|Println)" for pattern matching
- Quoted strings: "\"exact phrase\"" for exact phrase matching
- Exclude filters: "-file:test" or "-repo:forks" to exclude matches
ADVANCED FILTERS:
- Repository filters:
* "repo:name" - Match repositories with name containing "name"
* "repo:^github\.com/org/repo$" - Exact repository match
* "repo:org/repo@branch" - Search specific branch
* "repo:org/repo rev:branch" - Alternative branch syntax
* "-repo:name" - Exclude repositories
* "fork:yes" or "fork:only" - Include or only show forks
* "archived:yes" or "archived:only" - Include or only show archived repos
* "visibility:public" or "visibility:private" - Filter by visibility
- File filters:
* "file:\.js$" - Files with .js extension
* "file:internal/" - Files in internal directory
* "-file:test" - Exclude test files
* "file:has.content(Copyright)" - Files containing "Copyright"
* "file:has.contributor([email protected])" - Files with specific contributor
- Content filters:
* "content:\"exact string\"" - Search for exact string
* "-content:\"unwanted\"" - Exclude files with unwanted content
* "case:yes" - Case-sensitive search
- Type filters:
* "type:symbol" - Search for symbols (functions, classes, etc.)
* "type:file" - Search file content only
* "type:path" - Search filenames only
* "type:diff" - Search code changes
* "type:commit" - Search commit messages
- Commit/diff search:
* "after:\"1 month ago\"" - Commits after date
* "before:\"2023-01-01\"" - Commits before date
* "author:name" - Commits by author
* "message:\"fix bug\"" - Commits with message
- Result selection:
* "select:repo" - Show only repository names
* "select:file" - Show only file paths
* "select:content" - Show only matching content
* "select:symbol" - Show only matching symbols
- Result control:
* "count:100" - Return up to 100 results
* "count:all" - Return all results
* "timeout:30s" - Set search timeout
EXAMPLES:
- "file:.go context.WithTimeout" - Find Go code using context.WithTimeout
- "lang:typescript useState type:symbol" - Find TypeScript React useState hooks
- "repo:^github\.com/kubernetes/kubernetes$ pod list type:file" - Find Kubernetes files related to pod listing
- "repo:sourcegraph/sourcegraph$ after:\"3 months ago\" type:diff database" - Recent changes to database code
- "file:Dockerfile (alpine OR ubuntu) -content:alpine:latest" - Dockerfiles with specific base images
- "repo:has.path(\.py) file:requirements.txt tensorflow" - Python projects using TensorFlow
BOOLEAN OPERATORS:
- "term1 AND term2" - Results containing both terms
- "term1 OR term2" - Results containing either term
- "term1 NOT term2" - Results with term1 but not term2
- "term1 and (term2 or term3)" - Grouping with parentheses
LIMITATIONS:
- Only searches public repositories
- Rate limits may apply
- Complex queries may take longer to execute
- Maximum of 20 results per query
TIPS:
- Use specific file extensions to narrow results
- Add repo: filters for more targeted searches
- Use type:symbol to find function/method definitions
- Use type:file to find relevant files`
)
func NewSourcegraphTool() BaseTool {
return &sourcegraphTool{
client: &http.Client{
Timeout: 30 * time.Second,
},
}
}
func (t *sourcegraphTool) Info() ToolInfo {
return ToolInfo{
Name: SourcegraphToolName,
Description: sourcegraphToolDescription,
Parameters: map[string]any{
"query": map[string]any{
"type": "string",
"description": "The Sourcegraph search query",
},
"count": map[string]any{
"type": "number",
"description": "Optional number of results to return (default: 10, max: 20)",
},
"context_window": map[string]any{
"type": "number",
"description": "The context around the match to return (default: 10 lines)",
},
"timeout": map[string]any{
"type": "number",
"description": "Optional timeout in seconds (max 120)",
},
},
Required: []string{"query"},
}
}
func (t *sourcegraphTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params SourcegraphParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse("Failed to parse sourcegraph parameters: " + err.Error()), nil
}
if params.Query == "" {
return NewTextErrorResponse("Query parameter is required"), nil
}
if params.Count <= 0 {
params.Count = 10
} else if params.Count > 20 {
params.Count = 20 // Limit to 20 results
}
if params.ContextWindow <= 0 {
params.ContextWindow = 10 // Default context window
}
client := t.client
if params.Timeout > 0 {
maxTimeout := 120 // 2 minutes
if params.Timeout > maxTimeout {
params.Timeout = maxTimeout
}
client = &http.Client{
Timeout: time.Duration(params.Timeout) * time.Second,
}
}
type graphqlRequest struct {
Query string `json:"query"`
Variables struct {
Query string `json:"query"`
} `json:"variables"`
}
request := graphqlRequest{
Query: "query Search($query: String!) { search(query: $query, version: V2, patternType: keyword ) { results { matchCount, limitHit, resultCount, approximateResultCount, missing { name }, timedout { name }, indexUnavailable, results { __typename, ... on FileMatch { repository { name }, file { path, url, content }, lineMatches { preview, lineNumber, offsetAndLengths } } } } } }",
}
request.Variables.Query = params.Query
graphqlQueryBytes, err := json.Marshal(request)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to marshal GraphQL request: %w", err)
}
graphqlQuery := string(graphqlQueryBytes)
req, err := http.NewRequestWithContext(
ctx,
"POST",
"https://sourcegraph.com/.api/graphql",
bytes.NewBuffer([]byte(graphqlQuery)),
)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to create request: %w", err)
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("User-Agent", "opencode/1.0")
resp, err := client.Do(req)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to fetch URL: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
if len(body) > 0 {
return NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d, response: %s", resp.StatusCode, string(body))), nil
}
return NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d", resp.StatusCode)), nil
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to read response body: %w", err)
}
var result map[string]any
if err = json.Unmarshal(body, &result); err != nil {
return ToolResponse{}, fmt.Errorf("failed to unmarshal response: %w", err)
}
formattedResults, err := formatSourcegraphResults(result, params.ContextWindow)
if err != nil {
return NewTextErrorResponse("Failed to format results: " + err.Error()), nil
}
return NewTextResponse(formattedResults), nil
}
func formatSourcegraphResults(result map[string]any, contextWindow int) (string, error) {
var buffer strings.Builder
if errors, ok := result["errors"].([]any); ok && len(errors) > 0 {
buffer.WriteString("## Sourcegraph API Error\n\n")
for _, err := range errors {
if errMap, ok := err.(map[string]any); ok {
if message, ok := errMap["message"].(string); ok {
buffer.WriteString(fmt.Sprintf("- %s\n", message))
}
}
}
return buffer.String(), nil
}
data, ok := result["data"].(map[string]any)
if !ok {
return "", fmt.Errorf("invalid response format: missing data field")
}
search, ok := data["search"].(map[string]any)
if !ok {
return "", fmt.Errorf("invalid response format: missing search field")
}
searchResults, ok := search["results"].(map[string]any)
if !ok {
return "", fmt.Errorf("invalid response format: missing results field")
}
matchCount, _ := searchResults["matchCount"].(float64)
resultCount, _ := searchResults["resultCount"].(float64)
limitHit, _ := searchResults["limitHit"].(bool)
buffer.WriteString("# Sourcegraph Search Results\n\n")
buffer.WriteString(fmt.Sprintf("Found %d matches across %d results\n", int(matchCount), int(resultCount)))
if limitHit {
buffer.WriteString("(Result limit reached, try a more specific query)\n")
}
buffer.WriteString("\n")
results, ok := searchResults["results"].([]any)
if !ok || len(results) == 0 {
buffer.WriteString("No results found. Try a different query.\n")
return buffer.String(), nil
}
maxResults := 10
if len(results) > maxResults {
results = results[:maxResults]
}
for i, res := range results {
fileMatch, ok := res.(map[string]any)
if !ok {
continue
}
typeName, _ := fileMatch["__typename"].(string)
if typeName != "FileMatch" {
continue
}
repo, _ := fileMatch["repository"].(map[string]any)
file, _ := fileMatch["file"].(map[string]any)
lineMatches, _ := fileMatch["lineMatches"].([]any)
if repo == nil || file == nil {
continue
}
repoName, _ := repo["name"].(string)
filePath, _ := file["path"].(string)
fileURL, _ := file["url"].(string)
fileContent, _ := file["content"].(string)
buffer.WriteString(fmt.Sprintf("## Result %d: %s/%s\n\n", i+1, repoName, filePath))
if fileURL != "" {
buffer.WriteString(fmt.Sprintf("URL: %s\n\n", fileURL))
}
if len(lineMatches) > 0 {
for _, lm := range lineMatches {
lineMatch, ok := lm.(map[string]any)
if !ok {
continue
}
lineNumber, _ := lineMatch["lineNumber"].(float64)
preview, _ := lineMatch["preview"].(string)
if fileContent != "" {
lines := strings.Split(fileContent, "\n")
buffer.WriteString("```\n")
startLine := max(1, int(lineNumber)-contextWindow)
for j := startLine - 1; j < int(lineNumber)-1 && j < len(lines); j++ {
if j >= 0 {
buffer.WriteString(fmt.Sprintf("%d| %s\n", j+1, lines[j]))
}
}
buffer.WriteString(fmt.Sprintf("%d| %s\n", int(lineNumber), preview))
endLine := int(lineNumber) + contextWindow
for j := int(lineNumber); j < endLine && j < len(lines); j++ {
if j < len(lines) {
buffer.WriteString(fmt.Sprintf("%d| %s\n", j+1, lines[j]))
}
}
buffer.WriteString("```\n\n")
} else {
buffer.WriteString("```\n")
buffer.WriteString(fmt.Sprintf("%d| %s\n", int(lineNumber), preview))
buffer.WriteString("```\n\n")
}
}
}
}
return buffer.String(), nil
}
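
An illustrative sketch of invoking the tool above the way a caller would, by marshalling SourcegraphParams into a ToolCall; the query string and result count are just examples.

// Example (illustrative only): calling the sourcegraph tool directly.
func exampleSourcegraphSearch(ctx context.Context) (string, error) {
	tool := NewSourcegraphTool()
	params := SourcegraphParams{
		Query: "lang:go context.WithTimeout",
		Count: 5,
	}
	input, err := json.Marshal(params)
	if err != nil {
		return "", err
	}
	resp, err := tool.Run(ctx, ToolCall{Name: SourcegraphToolName, Input: string(input)})
	if err != nil {
		return "", err
	}
	return resp.Content, nil
}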

@@ -1,84 +0,0 @@
package tools
import (
"context"
"encoding/json"
)
type ToolInfo struct {
Name string
Description string
Parameters map[string]any
Required []string
}
type toolResponseType string
type (
sessionIDContextKey string
messageIDContextKey string
)
const (
ToolResponseTypeText toolResponseType = "text"
ToolResponseTypeImage toolResponseType = "image"
SessionIDContextKey sessionIDContextKey = "session_id"
MessageIDContextKey messageIDContextKey = "message_id"
)
type ToolResponse struct {
Type toolResponseType `json:"type"`
Content string `json:"content"`
Metadata string `json:"metadata,omitempty"`
IsError bool `json:"is_error"`
}
func NewTextResponse(content string) ToolResponse {
return ToolResponse{
Type: ToolResponseTypeText,
Content: content,
}
}
func WithResponseMetadata(response ToolResponse, metadata any) ToolResponse {
if metadata != nil {
metadataBytes, err := json.Marshal(metadata)
if err != nil {
return response
}
response.Metadata = string(metadataBytes)
}
return response
}
func NewTextErrorResponse(content string) ToolResponse {
return ToolResponse{
Type: ToolResponseTypeText,
Content: content,
IsError: true,
}
}
type ToolCall struct {
ID string `json:"id"`
Name string `json:"name"`
Input string `json:"input"`
}
type BaseTool interface {
Info() ToolInfo
Run(ctx context.Context, params ToolCall) (ToolResponse, error)
}
func GetContextValues(ctx context.Context) (string, string) {
sessionID := ctx.Value(SessionIDContextKey)
messageID := ctx.Value(MessageIDContextKey)
if sessionID == nil {
return "", ""
}
if messageID == nil {
return sessionID.(string), ""
}
return sessionID.(string), messageID.(string)
}
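
A sketch, not from the original file, of a minimal BaseTool implementation together with the context wiring that GetContextValues reads back. The echo tool and the ID values are invented for illustration.

// Example (illustrative only): a trivial BaseTool and the context values it expects.
type echoTool struct{}

func (echoTool) Info() ToolInfo {
	return ToolInfo{
		Name:        "echo",
		Description: "Returns its input unchanged",
		Parameters:  map[string]any{"text": map[string]any{"type": "string"}},
		Required:    []string{"text"},
	}
}

func (echoTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
	sessionID, messageID := GetContextValues(ctx)
	return NewTextResponse(sessionID + "/" + messageID + ": " + call.Input), nil
}

func exampleWithIDs(ctx context.Context) context.Context {
	ctx = context.WithValue(ctx, SessionIDContextKey, "session-123")
	return context.WithValue(ctx, MessageIDContextKey, "message-456")
}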

@@ -1,312 +0,0 @@
package tools
import (
"bufio"
"context"
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/lsp"
)
type ViewParams struct {
FilePath string `json:"file_path"`
Offset int `json:"offset"`
Limit int `json:"limit"`
}
type viewTool struct {
lspClients map[string]*lsp.Client
}
type ViewResponseMetadata struct {
FilePath string `json:"file_path"`
Content string `json:"content"`
}
const (
ViewToolName = "view"
MaxReadSize = 250 * 1024
DefaultReadLimit = 2000
MaxLineLength = 2000
viewDescription = `File viewing tool that reads and displays the contents of files with line numbers, allowing you to examine code, logs, or text data.
WHEN TO USE THIS TOOL:
- Use when you need to read the contents of a specific file
- Helpful for examining source code, configuration files, or log files
- Perfect for looking at text-based file formats
HOW TO USE:
- Provide the path to the file you want to view
- Optionally specify an offset to start reading from a specific line
- Optionally specify a limit to control how many lines are read
FEATURES:
- Displays file contents with line numbers for easy reference
- Can read from any position in a file using the offset parameter
- Handles large files by limiting the number of lines read
- Automatically truncates very long lines for better display
- Suggests similar file names when the requested file isn't found
LIMITATIONS:
- Maximum file size is 250KB
- Default reading limit is 2000 lines
- Lines longer than 2000 characters are truncated
- Cannot display binary files or images
- Images can be identified but not displayed
TIPS:
- Use with Glob tool to first find files you want to view
- For code exploration, first use Grep to find relevant files, then View to examine them
- When viewing large files, use the offset parameter to read specific sections`
)
func NewViewTool(lspClients map[string]*lsp.Client) BaseTool {
return &viewTool{
lspClients,
}
}
func (v *viewTool) Info() ToolInfo {
return ToolInfo{
Name: ViewToolName,
Description: viewDescription,
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
"description": "The path to the file to read",
},
"offset": map[string]any{
"type": "integer",
"description": "The line number to start reading from (0-based)",
},
"limit": map[string]any{
"type": "integer",
"description": "The number of lines to read (defaults to 2000)",
},
},
Required: []string{"file_path"},
}
}
// Run implements Tool.
func (v *viewTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params ViewParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
if params.FilePath == "" {
return NewTextErrorResponse("file_path is required"), nil
}
// Handle relative paths
filePath := params.FilePath
if !filepath.IsAbs(filePath) {
filePath = filepath.Join(config.WorkingDirectory(), filePath)
}
// Check if file exists
fileInfo, err := os.Stat(filePath)
if err != nil {
if os.IsNotExist(err) {
// Try to offer suggestions for similarly named files
dir := filepath.Dir(filePath)
base := filepath.Base(filePath)
dirEntries, dirErr := os.ReadDir(dir)
if dirErr == nil {
var suggestions []string
for _, entry := range dirEntries {
if strings.Contains(strings.ToLower(entry.Name()), strings.ToLower(base)) ||
strings.Contains(strings.ToLower(base), strings.ToLower(entry.Name())) {
suggestions = append(suggestions, filepath.Join(dir, entry.Name()))
if len(suggestions) >= 3 {
break
}
}
}
if len(suggestions) > 0 {
return NewTextErrorResponse(fmt.Sprintf("File not found: %s\n\nDid you mean one of these?\n%s",
filePath, strings.Join(suggestions, "\n"))), nil
}
}
return NewTextErrorResponse(fmt.Sprintf("File not found: %s", filePath)), nil
}
return ToolResponse{}, fmt.Errorf("error accessing file: %w", err)
}
// Check if it's a directory
if fileInfo.IsDir() {
return NewTextErrorResponse(fmt.Sprintf("Path is a directory, not a file: %s", filePath)), nil
}
// Check file size
if fileInfo.Size() > MaxReadSize {
return NewTextErrorResponse(fmt.Sprintf("File is too large (%d bytes). Maximum size is %d bytes",
fileInfo.Size(), MaxReadSize)), nil
}
// Set default limit if not provided
if params.Limit <= 0 {
params.Limit = DefaultReadLimit
}
// Check if it's an image file
isImage, imageType := isImageFile(filePath)
// TODO: handle images
if isImage {
return NewTextErrorResponse(fmt.Sprintf("This is an image file of type: %s\nUse a different tool to process images", imageType)), nil
}
// Read the file content
content, lineCount, err := readTextFile(filePath, params.Offset, params.Limit)
if err != nil {
return ToolResponse{}, fmt.Errorf("error reading file: %w", err)
}
notifyLspOpenFile(ctx, filePath, v.lspClients)
output := "<file>\n"
// Format the output with line numbers
output += addLineNumbers(content, params.Offset+1)
// Add a note if the content was truncated
if lineCount > params.Offset+len(strings.Split(content, "\n")) {
output += fmt.Sprintf("\n\n(File has more lines. Use 'offset' parameter to read beyond line %d)",
params.Offset+len(strings.Split(content, "\n")))
}
output += "\n</file>\n"
output += getDiagnostics(filePath, v.lspClients)
recordFileRead(filePath)
return WithResponseMetadata(
NewTextResponse(output),
ViewResponseMetadata{
FilePath: filePath,
Content: content,
},
), nil
}
func addLineNumbers(content string, startLine int) string {
if content == "" {
return ""
}
lines := strings.Split(content, "\n")
var result []string
for i, line := range lines {
line = strings.TrimSuffix(line, "\r")
lineNum := i + startLine
numStr := fmt.Sprintf("%d", lineNum)
if len(numStr) >= 6 {
result = append(result, fmt.Sprintf("%s|%s", numStr, line))
} else {
paddedNum := fmt.Sprintf("%6s", numStr)
result = append(result, fmt.Sprintf("%s|%s", paddedNum, line))
}
}
return strings.Join(result, "\n")
}
func readTextFile(filePath string, offset, limit int) (string, int, error) {
file, err := os.Open(filePath)
if err != nil {
return "", 0, err
}
defer file.Close()
lineCount := 0
scanner := NewLineScanner(file)
if offset > 0 {
for lineCount < offset && scanner.Scan() {
lineCount++
}
if err = scanner.Err(); err != nil {
return "", 0, err
}
}
if offset == 0 {
_, err = file.Seek(0, io.SeekStart)
if err != nil {
return "", 0, err
}
}
var lines []string
lineCount = offset
for scanner.Scan() && len(lines) < limit {
lineCount++
lineText := scanner.Text()
if len(lineText) > MaxLineLength {
lineText = lineText[:MaxLineLength] + "..."
}
lines = append(lines, lineText)
}
// Continue scanning to get total line count
for scanner.Scan() {
lineCount++
}
if err := scanner.Err(); err != nil {
return "", 0, err
}
return strings.Join(lines, "\n"), lineCount, nil
}
func isImageFile(filePath string) (bool, string) {
ext := strings.ToLower(filepath.Ext(filePath))
switch ext {
case ".jpg", ".jpeg":
return true, "JPEG"
case ".png":
return true, "PNG"
case ".gif":
return true, "GIF"
case ".bmp":
return true, "BMP"
case ".svg":
return true, "SVG"
case ".webp":
return true, "WebP"
default:
return false, ""
}
}
type LineScanner struct {
scanner *bufio.Scanner
}
func NewLineScanner(r io.Reader) *LineScanner {
return &LineScanner{
scanner: bufio.NewScanner(r),
}
}
func (s *LineScanner) Scan() bool {
return s.scanner.Scan()
}
func (s *LineScanner) Text() string {
return s.scanner.Text()
}
func (s *LineScanner) Err() error {
return s.scanner.Err()
}
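
A usage sketch, not part of the original file, for the view tool above: it marshals ViewParams into a ToolCall and returns the numbered file listing. An empty LSP client map is assumed so no diagnostics are collected.

// Example (illustrative only): reading the first 40 lines of a file via the view tool.
func exampleViewFile(ctx context.Context, path string) (string, error) {
	tool := NewViewTool(map[string]*lsp.Client{})
	input, err := json.Marshal(ViewParams{FilePath: path, Offset: 0, Limit: 40})
	if err != nil {
		return "", err
	}
	resp, err := tool.Run(ctx, ToolCall{Name: ViewToolName, Input: string(input)})
	if err != nil {
		return "", err
	}
	return resp.Content, nil
}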

@@ -1,227 +0,0 @@
package tools
import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/diff"
"github.com/kujtimiihoxha/opencode/internal/history"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/lsp"
"github.com/kujtimiihoxha/opencode/internal/permission"
)
type WriteParams struct {
FilePath string `json:"file_path"`
Content string `json:"content"`
}
type WritePermissionsParams struct {
FilePath string `json:"file_path"`
Diff string `json:"diff"`
}
type writeTool struct {
lspClients map[string]*lsp.Client
permissions permission.Service
files history.Service
}
type WriteResponseMetadata struct {
Diff string `json:"diff"`
Additions int `json:"additions"`
Removals int `json:"removals"`
}
const (
WriteToolName = "write"
writeDescription = `File writing tool that creates or updates files in the filesystem, allowing you to save or modify text content.
WHEN TO USE THIS TOOL:
- Use when you need to create a new file
- Helpful for updating existing files with modified content
- Perfect for saving generated code, configurations, or text data
HOW TO USE:
- Provide the path to the file you want to write
- Include the content to be written to the file
- The tool will create any necessary parent directories
FEATURES:
- Can create new files or overwrite existing ones
- Creates parent directories automatically if they don't exist
- Checks if the file has been modified since last read for safety
- Avoids unnecessary writes when content hasn't changed
LIMITATIONS:
- You should read a file before writing to it to avoid conflicts
- Cannot append to files (rewrites the entire file)
TIPS:
- Use the View tool first to examine existing files before modifying them
- Use the LS tool to verify the correct location when creating new files
- Combine with Glob and Grep tools to find and modify multiple files
- Always include descriptive comments when making changes to existing code`
)
func NewWriteTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service) BaseTool {
return &writeTool{
lspClients: lspClients,
permissions: permissions,
files: files,
}
}
func (w *writeTool) Info() ToolInfo {
return ToolInfo{
Name: WriteToolName,
Description: writeDescription,
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
"description": "The path to the file to write",
},
"content": map[string]any{
"type": "string",
"description": "The content to write to the file",
},
},
Required: []string{"file_path", "content"},
}
}
func (w *writeTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
var params WriteParams
if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
if params.FilePath == "" {
return NewTextErrorResponse("file_path is required"), nil
}
if params.Content == "" {
return NewTextErrorResponse("content is required"), nil
}
filePath := params.FilePath
if !filepath.IsAbs(filePath) {
filePath = filepath.Join(config.WorkingDirectory(), filePath)
}
fileInfo, err := os.Stat(filePath)
if err == nil {
if fileInfo.IsDir() {
return NewTextErrorResponse(fmt.Sprintf("Path is a directory, not a file: %s", filePath)), nil
}
modTime := fileInfo.ModTime()
lastRead := getLastReadTime(filePath)
if modTime.After(lastRead) {
return NewTextErrorResponse(fmt.Sprintf("File %s has been modified since it was last read.\nLast modification: %s\nLast read: %s\n\nPlease read the file again before modifying it.",
filePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339))), nil
}
oldContent, readErr := os.ReadFile(filePath)
if readErr == nil && string(oldContent) == params.Content {
return NewTextErrorResponse(fmt.Sprintf("File %s already contains the exact content. No changes made.", filePath)), nil
}
} else if !os.IsNotExist(err) {
return ToolResponse{}, fmt.Errorf("error checking file: %w", err)
}
dir := filepath.Dir(filePath)
if err = os.MkdirAll(dir, 0o755); err != nil {
return ToolResponse{}, fmt.Errorf("error creating directory: %w", err)
}
oldContent := ""
if fileInfo != nil && !fileInfo.IsDir() {
oldBytes, readErr := os.ReadFile(filePath)
if readErr == nil {
oldContent = string(oldBytes)
}
}
sessionID, messageID := GetContextValues(ctx)
if sessionID == "" || messageID == "" {
return ToolResponse{}, fmt.Errorf("session_id and message_id are required")
}
diff, additions, removals := diff.GenerateDiff(
oldContent,
params.Content,
filePath,
)
rootDir := config.WorkingDirectory()
permissionPath := filepath.Dir(filePath)
if strings.HasPrefix(filePath, rootDir) {
permissionPath = rootDir
}
p := w.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
Path: permissionPath,
ToolName: WriteToolName,
Action: "write",
Description: fmt.Sprintf("Create file %s", filePath),
Params: WritePermissionsParams{
FilePath: filePath,
Diff: diff,
},
},
)
if !p {
return ToolResponse{}, permission.ErrorPermissionDenied
}
err = os.WriteFile(filePath, []byte(params.Content), 0o644)
if err != nil {
return ToolResponse{}, fmt.Errorf("error writing file: %w", err)
}
// Check if file exists in history
file, err := w.files.GetByPathAndSession(ctx, filePath, sessionID)
if err != nil {
_, err = w.files.Create(ctx, sessionID, filePath, oldContent)
if err != nil {
// Creating the history entry failed; return the error to the caller
return ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
}
}
if file.Content != oldContent {
// The user manually changed the content; store an intermediate version
_, err = w.files.CreateVersion(ctx, sessionID, filePath, oldContent)
if err != nil {
logging.Debug("Error creating file history version", "error", err)
}
}
// Store the new version
_, err = w.files.CreateVersion(ctx, sessionID, filePath, params.Content)
if err != nil {
logging.Debug("Error creating file history version", "error", err)
}
recordFileWrite(filePath)
recordFileRead(filePath)
waitForLspDiagnostics(ctx, filePath, w.lspClients)
result := fmt.Sprintf("File successfully written: %s", filePath)
result = fmt.Sprintf("<result>\n%s\n</result>", result)
result += getDiagnostics(filePath, w.lspClients)
return WithResponseMetadata(NewTextResponse(result),
WriteResponseMetadata{
Diff: diff,
Additions: additions,
Removals: removals,
},
), nil
}
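
A small sketch, not part of the original file, showing how a caller could recover the diff statistics that Run attaches via WithResponseMetadata; the response value is assumed to come from a prior call to the write tool.

// Example (illustrative only): reading the metadata attached to a write response.
func exampleWriteStats(resp ToolResponse) (added, removed int, err error) {
	var meta WriteResponseMetadata
	if resp.Metadata != "" {
		if err = json.Unmarshal([]byte(resp.Metadata), &meta); err != nil {
			return 0, 0, err
		}
	}
	return meta.Additions, meta.Removals, nil
}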

@@ -1,78 +0,0 @@
package logging
import (
"fmt"
"log/slog"
"os"
"runtime/debug"
"time"
)
func Info(msg string, args ...any) {
slog.Info(msg, args...)
}
func Debug(msg string, args ...any) {
slog.Debug(msg, args...)
}
func Warn(msg string, args ...any) {
slog.Warn(msg, args...)
}
func Error(msg string, args ...any) {
slog.Error(msg, args...)
}
func InfoPersist(msg string, args ...any) {
args = append(args, persistKeyArg, true)
slog.Info(msg, args...)
}
func DebugPersist(msg string, args ...any) {
args = append(args, persistKeyArg, true)
slog.Debug(msg, args...)
}
func WarnPersist(msg string, args ...any) {
args = append(args, persistKeyArg, true)
slog.Warn(msg, args...)
}
func ErrorPersist(msg string, args ...any) {
args = append(args, persistKeyArg, true)
slog.Error(msg, args...)
}
// RecoverPanic is a common function to handle panics gracefully.
// It logs the error, creates a panic log file with stack trace,
// and executes an optional cleanup function before returning.
func RecoverPanic(name string, cleanup func()) {
if r := recover(); r != nil {
// Log the panic
ErrorPersist(fmt.Sprintf("Panic in %s: %v", name, r))
// Create a timestamped panic log file
timestamp := time.Now().Format("20060102-150405")
filename := fmt.Sprintf("opencode-panic-%s-%s.log", name, timestamp)
file, err := os.Create(filename)
if err != nil {
ErrorPersist(fmt.Sprintf("Failed to create panic log: %v", err))
} else {
defer file.Close()
// Write panic information and stack trace
fmt.Fprintf(file, "Panic in %s: %v\n\n", name, r)
fmt.Fprintf(file, "Time: %s\n\n", time.Now().Format(time.RFC3339))
fmt.Fprintf(file, "Stack Trace:\n%s\n", debug.Stack())
InfoPersist(fmt.Sprintf("Panic details written to %s", filename))
}
// Execute cleanup function if provided
if cleanup != nil {
cleanup()
}
}
}
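
A usage sketch for RecoverPanic, mirroring how the LSP client further below wires it into a goroutine; the worker name and the cleanup message are placeholders.

// Example (illustrative only): guarding a background goroutine with RecoverPanic.
func exampleBackgroundWorker(work func()) {
	go func() {
		defer RecoverPanic("background-worker", func() {
			ErrorPersist("background worker crashed")
		})
		work()
	}()
}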

@@ -1,21 +0,0 @@
package logging
import (
"time"
)
// LogMessage is the event payload for a log message
type LogMessage struct {
ID string
Time time.Time
Level string
Persist bool // used when we want to show the message in the status bar
PersistTime time.Duration // used when we want to show the message in the status bar
Message string `json:"msg"`
Attributes []Attr
}
type Attr struct {
Key string
Value string
}

@@ -1,101 +0,0 @@
package logging
import (
"bytes"
"context"
"fmt"
"strings"
"sync"
"time"
"github.com/go-logfmt/logfmt"
"github.com/kujtimiihoxha/opencode/internal/pubsub"
)
const (
persistKeyArg = "$_persist"
PersistTimeArg = "$_persist_time"
)
type LogData struct {
messages []LogMessage
*pubsub.Broker[LogMessage]
lock sync.Mutex
}
func (l *LogData) Add(msg LogMessage) {
l.lock.Lock()
defer l.lock.Unlock()
l.messages = append(l.messages, msg)
l.Publish(pubsub.CreatedEvent, msg)
}
func (l *LogData) List() []LogMessage {
l.lock.Lock()
defer l.lock.Unlock()
return l.messages
}
var defaultLogData = &LogData{
messages: make([]LogMessage, 0),
Broker: pubsub.NewBroker[LogMessage](),
}
type writer struct{}
func (w *writer) Write(p []byte) (int, error) {
d := logfmt.NewDecoder(bytes.NewReader(p))
for d.ScanRecord() {
msg := LogMessage{
ID: fmt.Sprintf("%d", time.Now().UnixNano()),
Time: time.Now(),
}
for d.ScanKeyval() {
switch string(d.Key()) {
case "time":
parsed, err := time.Parse(time.RFC3339, string(d.Value()))
if err != nil {
return 0, fmt.Errorf("parsing time: %w", err)
}
msg.Time = parsed
case "level":
msg.Level = strings.ToLower(string(d.Value()))
case "msg":
msg.Message = string(d.Value())
default:
if string(d.Key()) == persistKeyArg {
msg.Persist = true
} else if string(d.Key()) == PersistTimeArg {
parsed, err := time.ParseDuration(string(d.Value()))
if err != nil {
continue
}
msg.PersistTime = parsed
} else {
msg.Attributes = append(msg.Attributes, Attr{
Key: string(d.Key()),
Value: string(d.Value()),
})
}
}
}
defaultLogData.Add(msg)
}
if d.Err() != nil {
return 0, d.Err()
}
return len(p), nil
}
func NewWriter() *writer {
w := &writer{}
return w
}
func Subscribe(ctx context.Context) <-chan pubsub.Event[LogMessage] {
return defaultLogData.Subscribe(ctx)
}
func List() []LogMessage {
return defaultLogData.List()
}
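
A sketch of how this logfmt writer could be plugged into slog so that log records land in the in-memory LogData store. The handler options and the wiring are an assumption for illustration, not taken from the original setup code, and "log/slog" would need to be imported.

// Example (illustrative only): routing slog output through the logfmt writer.
func exampleSetupLogging() {
	// slog's text handler emits key=value records, which Write above parses.
	handler := slog.NewTextHandler(NewWriter(), &slog.HandlerOptions{
		Level: slog.LevelDebug,
	})
	slog.SetDefault(slog.New(handler))
}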

@@ -1,778 +0,0 @@
package lsp
import (
"bufio"
"context"
"encoding/json"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/lsp/protocol"
)
type Client struct {
Cmd *exec.Cmd
stdin io.WriteCloser
stdout *bufio.Reader
stderr io.ReadCloser
// Request ID counter
nextID atomic.Int32
// Response handlers
handlers map[int32]chan *Message
handlersMu sync.RWMutex
// Server request handlers
serverRequestHandlers map[string]ServerRequestHandler
serverHandlersMu sync.RWMutex
// Notification handlers
notificationHandlers map[string]NotificationHandler
notificationMu sync.RWMutex
// Diagnostic cache
diagnostics map[protocol.DocumentUri][]protocol.Diagnostic
diagnosticsMu sync.RWMutex
// Files are currently opened by the LSP
openFiles map[string]*OpenFileInfo
openFilesMu sync.RWMutex
// Server state
serverState atomic.Value
}
func NewClient(ctx context.Context, command string, args ...string) (*Client, error) {
cmd := exec.CommandContext(ctx, command, args...)
// Copy env
cmd.Env = os.Environ()
stdin, err := cmd.StdinPipe()
if err != nil {
return nil, fmt.Errorf("failed to create stdin pipe: %w", err)
}
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, fmt.Errorf("failed to create stdout pipe: %w", err)
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, fmt.Errorf("failed to create stderr pipe: %w", err)
}
client := &Client{
Cmd: cmd,
stdin: stdin,
stdout: bufio.NewReader(stdout),
stderr: stderr,
handlers: make(map[int32]chan *Message),
notificationHandlers: make(map[string]NotificationHandler),
serverRequestHandlers: make(map[string]ServerRequestHandler),
diagnostics: make(map[protocol.DocumentUri][]protocol.Diagnostic),
openFiles: make(map[string]*OpenFileInfo),
}
// Initialize server state
client.serverState.Store(StateStarting)
// Start the LSP server process
if err := cmd.Start(); err != nil {
return nil, fmt.Errorf("failed to start LSP server: %w", err)
}
// Handle stderr in a separate goroutine
go func() {
scanner := bufio.NewScanner(stderr)
for scanner.Scan() {
fmt.Fprintf(os.Stderr, "LSP Server: %s\n", scanner.Text())
}
if err := scanner.Err(); err != nil {
fmt.Fprintf(os.Stderr, "Error reading stderr: %v\n", err)
}
}()
// Start message handling loop
go func() {
defer logging.RecoverPanic("LSP-message-handler", func() {
logging.ErrorPersist("LSP message handler crashed, LSP functionality may be impaired")
})
client.handleMessages()
}()
return client, nil
}
func (c *Client) RegisterNotificationHandler(method string, handler NotificationHandler) {
c.notificationMu.Lock()
defer c.notificationMu.Unlock()
c.notificationHandlers[method] = handler
}
func (c *Client) RegisterServerRequestHandler(method string, handler ServerRequestHandler) {
c.serverHandlersMu.Lock()
defer c.serverHandlersMu.Unlock()
c.serverRequestHandlers[method] = handler
}
func (c *Client) InitializeLSPClient(ctx context.Context, workspaceDir string) (*protocol.InitializeResult, error) {
initParams := &protocol.InitializeParams{
WorkspaceFoldersInitializeParams: protocol.WorkspaceFoldersInitializeParams{
WorkspaceFolders: []protocol.WorkspaceFolder{
{
URI: protocol.URI("file://" + workspaceDir),
Name: workspaceDir,
},
},
},
XInitializeParams: protocol.XInitializeParams{
ProcessID: int32(os.Getpid()),
ClientInfo: &protocol.ClientInfo{
Name: "mcp-language-server",
Version: "0.1.0",
},
RootPath: workspaceDir,
RootURI: protocol.DocumentUri("file://" + workspaceDir),
Capabilities: protocol.ClientCapabilities{
Workspace: protocol.WorkspaceClientCapabilities{
Configuration: true,
DidChangeConfiguration: protocol.DidChangeConfigurationClientCapabilities{
DynamicRegistration: true,
},
DidChangeWatchedFiles: protocol.DidChangeWatchedFilesClientCapabilities{
DynamicRegistration: true,
RelativePatternSupport: true,
},
},
TextDocument: protocol.TextDocumentClientCapabilities{
Synchronization: &protocol.TextDocumentSyncClientCapabilities{
DynamicRegistration: true,
DidSave: true,
},
Completion: protocol.CompletionClientCapabilities{
CompletionItem: protocol.ClientCompletionItemOptions{},
},
CodeLens: &protocol.CodeLensClientCapabilities{
DynamicRegistration: true,
},
DocumentSymbol: protocol.DocumentSymbolClientCapabilities{},
CodeAction: protocol.CodeActionClientCapabilities{
CodeActionLiteralSupport: protocol.ClientCodeActionLiteralOptions{
CodeActionKind: protocol.ClientCodeActionKindOptions{
ValueSet: []protocol.CodeActionKind{},
},
},
},
PublishDiagnostics: protocol.PublishDiagnosticsClientCapabilities{
VersionSupport: true,
},
SemanticTokens: protocol.SemanticTokensClientCapabilities{
Requests: protocol.ClientSemanticTokensRequestOptions{
Range: &protocol.Or_ClientSemanticTokensRequestOptions_range{},
Full: &protocol.Or_ClientSemanticTokensRequestOptions_full{},
},
TokenTypes: []string{},
TokenModifiers: []string{},
Formats: []protocol.TokenFormat{},
},
},
Window: protocol.WindowClientCapabilities{},
},
InitializationOptions: map[string]any{
"codelenses": map[string]bool{
"generate": true,
"regenerate_cgo": true,
"test": true,
"tidy": true,
"upgrade_dependency": true,
"vendor": true,
"vulncheck": false,
},
},
},
}
var result protocol.InitializeResult
if err := c.Call(ctx, "initialize", initParams, &result); err != nil {
return nil, fmt.Errorf("initialize failed: %w", err)
}
if err := c.Notify(ctx, "initialized", struct{}{}); err != nil {
return nil, fmt.Errorf("initialized notification failed: %w", err)
}
// Register handlers
c.RegisterServerRequestHandler("workspace/applyEdit", HandleApplyEdit)
c.RegisterServerRequestHandler("workspace/configuration", HandleWorkspaceConfiguration)
c.RegisterServerRequestHandler("client/registerCapability", HandleRegisterCapability)
c.RegisterNotificationHandler("window/showMessage", HandleServerMessage)
c.RegisterNotificationHandler("textDocument/publishDiagnostics",
func(params json.RawMessage) { HandleDiagnostics(c, params) })
// Notify the LSP server
err := c.Initialized(ctx, protocol.InitializedParams{})
if err != nil {
return nil, fmt.Errorf("initialization failed: %w", err)
}
return &result, nil
}
func (c *Client) Close() error {
// Try to close all open files first
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
// Attempt to close files but continue shutdown regardless
c.CloseAllFiles(ctx)
// Close stdin to signal the server
if err := c.stdin.Close(); err != nil {
return fmt.Errorf("failed to close stdin: %w", err)
}
// Use a channel to handle the Wait with timeout
done := make(chan error, 1)
go func() {
done <- c.Cmd.Wait()
}()
// Wait for process to exit with timeout
select {
case err := <-done:
return err
case <-time.After(2 * time.Second):
// If we timeout, try to kill the process
if err := c.Cmd.Process.Kill(); err != nil {
return fmt.Errorf("failed to kill process: %w", err)
}
return fmt.Errorf("process killed after timeout")
}
}
type ServerState int
const (
StateStarting ServerState = iota
StateReady
StateError
)
// GetServerState returns the current state of the LSP server
func (c *Client) GetServerState() ServerState {
if val := c.serverState.Load(); val != nil {
return val.(ServerState)
}
return StateStarting
}
// SetServerState sets the current state of the LSP server
func (c *Client) SetServerState(state ServerState) {
c.serverState.Store(state)
}
// WaitForServerReady waits for the server to be ready by polling the server
// with a simple request until it responds successfully or times out
func (c *Client) WaitForServerReady(ctx context.Context) error {
cnf := config.Get()
// Set initial state
c.SetServerState(StateStarting)
// Create a context with timeout
ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
// Try to ping the server with a simple request
ticker := time.NewTicker(500 * time.Millisecond)
defer ticker.Stop()
if cnf.DebugLSP {
logging.Debug("Waiting for LSP server to be ready...")
}
// Determine server type for specialized initialization
serverType := c.detectServerType()
// For TypeScript-like servers, we need to open some key files first
if serverType == ServerTypeTypeScript {
if cnf.DebugLSP {
logging.Debug("TypeScript-like server detected, opening key configuration files")
}
c.openKeyConfigFiles(ctx)
}
for {
select {
case <-ctx.Done():
c.SetServerState(StateError)
return fmt.Errorf("timeout waiting for LSP server to be ready")
case <-ticker.C:
// Try a ping method appropriate for this server type
err := c.pingServerByType(ctx, serverType)
if err == nil {
// Server responded successfully
c.SetServerState(StateReady)
if cnf.DebugLSP {
logging.Debug("LSP server is ready")
}
return nil
} else if cnf.DebugLSP {
logging.Debug("LSP server not ready yet", "error", err, "serverType", serverType)
}
}
}
}
// ServerType represents the type of LSP server
type ServerType int
const (
ServerTypeUnknown ServerType = iota
ServerTypeGo
ServerTypeTypeScript
ServerTypeRust
ServerTypePython
ServerTypeGeneric
)
// detectServerType tries to determine what type of LSP server we're dealing with
func (c *Client) detectServerType() ServerType {
if c.Cmd == nil {
return ServerTypeUnknown
}
cmdPath := strings.ToLower(c.Cmd.Path)
switch {
case strings.Contains(cmdPath, "gopls"):
return ServerTypeGo
case strings.Contains(cmdPath, "typescript") || strings.Contains(cmdPath, "vtsls") || strings.Contains(cmdPath, "tsserver"):
return ServerTypeTypeScript
case strings.Contains(cmdPath, "rust-analyzer"):
return ServerTypeRust
case strings.Contains(cmdPath, "pyright") || strings.Contains(cmdPath, "pylsp") || strings.Contains(cmdPath, "python"):
return ServerTypePython
default:
return ServerTypeGeneric
}
}
// openKeyConfigFiles opens important configuration files that help initialize the server
func (c *Client) openKeyConfigFiles(ctx context.Context) {
workDir := config.WorkingDirectory()
serverType := c.detectServerType()
var filesToOpen []string
switch serverType {
case ServerTypeTypeScript:
// TypeScript servers need these config files to properly initialize
filesToOpen = []string{
filepath.Join(workDir, "tsconfig.json"),
filepath.Join(workDir, "package.json"),
filepath.Join(workDir, "jsconfig.json"),
}
// Also find and open a few TypeScript files to help the server initialize
c.openTypeScriptFiles(ctx, workDir)
case ServerTypeGo:
filesToOpen = []string{
filepath.Join(workDir, "go.mod"),
filepath.Join(workDir, "go.sum"),
}
case ServerTypeRust:
filesToOpen = []string{
filepath.Join(workDir, "Cargo.toml"),
filepath.Join(workDir, "Cargo.lock"),
}
}
// Try to open each file, ignoring errors if they don't exist
for _, file := range filesToOpen {
if _, err := os.Stat(file); err == nil {
// File exists, try to open it
if err := c.OpenFile(ctx, file); err != nil {
logging.Debug("Failed to open key config file", "file", file, "error", err)
} else {
logging.Debug("Opened key config file for initialization", "file", file)
}
}
}
}
// pingServerByType sends a ping request appropriate for the server type
func (c *Client) pingServerByType(ctx context.Context, serverType ServerType) error {
switch serverType {
case ServerTypeTypeScript:
// For TypeScript, try a document symbol request on an open file
return c.pingTypeScriptServer(ctx)
case ServerTypeGo:
// For Go, workspace/symbol works well
return c.pingWithWorkspaceSymbol(ctx)
case ServerTypeRust:
// For Rust, workspace/symbol works well
return c.pingWithWorkspaceSymbol(ctx)
default:
// Default ping method
return c.pingWithWorkspaceSymbol(ctx)
}
}
// pingTypeScriptServer tries to ping a TypeScript server with appropriate methods
func (c *Client) pingTypeScriptServer(ctx context.Context) error {
// First try workspace/symbol which works for many servers
if err := c.pingWithWorkspaceSymbol(ctx); err == nil {
return nil
}
// If that fails, try to find an open file and request document symbols.
// Copy the open URIs under the read lock and release it before continuing,
// since OpenFile below needs the write lock on the same mutex.
c.openFilesMu.RLock()
openURIs := make([]string, 0, len(c.openFiles))
for uri := range c.openFiles {
openURIs = append(openURIs, uri)
}
c.openFilesMu.RUnlock()
// If we have any open files, try to get document symbols for one
for _, uri := range openURIs {
filePath := strings.TrimPrefix(uri, "file://")
if strings.HasSuffix(filePath, ".ts") || strings.HasSuffix(filePath, ".js") ||
strings.HasSuffix(filePath, ".tsx") || strings.HasSuffix(filePath, ".jsx") {
var symbols []protocol.DocumentSymbol
err := c.Call(ctx, "textDocument/documentSymbol", protocol.DocumentSymbolParams{
TextDocument: protocol.TextDocumentIdentifier{
URI: protocol.DocumentUri(uri),
},
}, &symbols)
if err == nil {
return nil
}
}
}
// If we have no open TypeScript files, try to find and open one
workDir := config.WorkingDirectory()
err := filepath.WalkDir(workDir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
// Skip directories and non-TypeScript files
if d.IsDir() {
return nil
}
ext := filepath.Ext(path)
if ext == ".ts" || ext == ".js" || ext == ".tsx" || ext == ".jsx" {
// Found a TypeScript file, try to open it
if err := c.OpenFile(ctx, path); err == nil {
// Successfully opened, stop walking
return filepath.SkipAll
}
}
return nil
})
if err != nil {
logging.Debug("Error walking directory for TypeScript files", "error", err)
}
// Final fallback - just try a generic capability
return c.pingWithServerCapabilities(ctx)
}
// openTypeScriptFiles finds and opens TypeScript files to help initialize the server
func (c *Client) openTypeScriptFiles(ctx context.Context, workDir string) {
cnf := config.Get()
filesOpened := 0
maxFilesToOpen := 5 // Limit to a reasonable number of files
// Find and open TypeScript files
err := filepath.WalkDir(workDir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
// Skip directories and non-TypeScript files
if d.IsDir() {
// Skip common directories to avoid wasting time
if shouldSkipDir(path) {
return filepath.SkipDir
}
return nil
}
// Check if we've opened enough files
if filesOpened >= maxFilesToOpen {
return filepath.SkipAll
}
// Check file extension
ext := filepath.Ext(path)
if ext == ".ts" || ext == ".tsx" || ext == ".js" || ext == ".jsx" {
// Try to open the file
if err := c.OpenFile(ctx, path); err == nil {
filesOpened++
if cnf.DebugLSP {
logging.Debug("Opened TypeScript file for initialization", "file", path)
}
}
}
return nil
})
if err != nil && cnf.DebugLSP {
logging.Debug("Error walking directory for TypeScript files", "error", err)
}
if cnf.DebugLSP {
logging.Debug("Opened TypeScript files for initialization", "count", filesOpened)
}
}
// shouldSkipDir returns true if the directory should be skipped during file search
func shouldSkipDir(path string) bool {
dirName := filepath.Base(path)
// Skip hidden directories
if strings.HasPrefix(dirName, ".") {
return true
}
// Skip common directories that won't contain relevant source files
skipDirs := map[string]bool{
"node_modules": true,
"dist": true,
"build": true,
"coverage": true,
"vendor": true,
"target": true,
}
return skipDirs[dirName]
}
// pingWithWorkspaceSymbol tries a workspace/symbol request
func (c *Client) pingWithWorkspaceSymbol(ctx context.Context) error {
var result []protocol.SymbolInformation
return c.Call(ctx, "workspace/symbol", protocol.WorkspaceSymbolParams{
Query: "",
}, &result)
}
// pingWithServerCapabilities tries to get server capabilities
func (c *Client) pingWithServerCapabilities(ctx context.Context) error {
// This is a very lightweight notification that should be accepted by most servers
return c.Notify(ctx, "$/cancelRequest", struct{ ID int }{ID: -1})
}
type OpenFileInfo struct {
Version int32
URI protocol.DocumentUri
}
func (c *Client) OpenFile(ctx context.Context, filepath string) error {
uri := fmt.Sprintf("file://%s", filepath)
c.openFilesMu.Lock()
if _, exists := c.openFiles[uri]; exists {
c.openFilesMu.Unlock()
return nil // Already open
}
c.openFilesMu.Unlock()
// Refuse to open files that do not exist or cannot be read
content, err := os.ReadFile(filepath)
if err != nil {
return fmt.Errorf("error reading file: %w", err)
}
params := protocol.DidOpenTextDocumentParams{
TextDocument: protocol.TextDocumentItem{
URI: protocol.DocumentUri(uri),
LanguageID: DetectLanguageID(uri),
Version: 1,
Text: string(content),
},
}
if err := c.Notify(ctx, "textDocument/didOpen", params); err != nil {
return err
}
c.openFilesMu.Lock()
c.openFiles[uri] = &OpenFileInfo{
Version: 1,
URI: protocol.DocumentUri(uri),
}
c.openFilesMu.Unlock()
return nil
}
func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
uri := fmt.Sprintf("file://%s", filepath)
content, err := os.ReadFile(filepath)
if err != nil {
return fmt.Errorf("error reading file: %w", err)
}
c.openFilesMu.Lock()
fileInfo, isOpen := c.openFiles[uri]
if !isOpen {
c.openFilesMu.Unlock()
return fmt.Errorf("cannot notify change for unopened file: %s", filepath)
}
// Increment version
fileInfo.Version++
version := fileInfo.Version
c.openFilesMu.Unlock()
params := protocol.DidChangeTextDocumentParams{
TextDocument: protocol.VersionedTextDocumentIdentifier{
TextDocumentIdentifier: protocol.TextDocumentIdentifier{
URI: protocol.DocumentUri(uri),
},
Version: version,
},
ContentChanges: []protocol.TextDocumentContentChangeEvent{
{
Value: protocol.TextDocumentContentChangeWholeDocument{
Text: string(content),
},
},
},
}
return c.Notify(ctx, "textDocument/didChange", params)
}
func (c *Client) CloseFile(ctx context.Context, filepath string) error {
cnf := config.Get()
uri := fmt.Sprintf("file://%s", filepath)
c.openFilesMu.Lock()
if _, exists := c.openFiles[uri]; !exists {
c.openFilesMu.Unlock()
return nil // Already closed
}
c.openFilesMu.Unlock()
params := protocol.DidCloseTextDocumentParams{
TextDocument: protocol.TextDocumentIdentifier{
URI: protocol.DocumentUri(uri),
},
}
if cnf.DebugLSP {
logging.Debug("Closing file", "file", filepath)
}
if err := c.Notify(ctx, "textDocument/didClose", params); err != nil {
return err
}
c.openFilesMu.Lock()
delete(c.openFiles, uri)
c.openFilesMu.Unlock()
return nil
}
func (c *Client) IsFileOpen(filepath string) bool {
uri := fmt.Sprintf("file://%s", filepath)
c.openFilesMu.RLock()
defer c.openFilesMu.RUnlock()
_, exists := c.openFiles[uri]
return exists
}
// CloseAllFiles closes all currently open files
func (c *Client) CloseAllFiles(ctx context.Context) {
cnf := config.Get()
c.openFilesMu.Lock()
filesToClose := make([]string, 0, len(c.openFiles))
// First collect all URIs that need to be closed
for uri := range c.openFiles {
// Convert URI back to file path by trimming "file://" prefix
filePath := strings.TrimPrefix(uri, "file://")
filesToClose = append(filesToClose, filePath)
}
c.openFilesMu.Unlock()
// Then close them all
for _, filePath := range filesToClose {
err := c.CloseFile(ctx, filePath)
if err != nil && cnf.DebugLSP {
logging.Warn("Error closing file", "file", filePath, "error", err)
}
}
if cnf.DebugLSP {
logging.Debug("Closed all files", "files", filesToClose)
}
}
func (c *Client) GetFileDiagnostics(uri protocol.DocumentUri) []protocol.Diagnostic {
c.diagnosticsMu.RLock()
defer c.diagnosticsMu.RUnlock()
return c.diagnostics[uri]
}
// GetDiagnostics returns all diagnostics for all files
func (c *Client) GetDiagnostics() map[protocol.DocumentUri][]protocol.Diagnostic {
c.diagnosticsMu.RLock()
defer c.diagnosticsMu.RUnlock()
return c.diagnostics
}
// OpenFileOnDemand opens a file only if it's not already open
// This is used for lazy-loading files when they're actually needed
func (c *Client) OpenFileOnDemand(ctx context.Context, filepath string) error {
// Check if the file is already open
if c.IsFileOpen(filepath) {
return nil
}
// Open the file
return c.OpenFile(ctx, filepath)
}
// GetDiagnosticsForFile ensures a file is open and returns its diagnostics
// This is useful for on-demand diagnostics when using lazy loading
func (c *Client) GetDiagnosticsForFile(ctx context.Context, filepath string) ([]protocol.Diagnostic, error) {
uri := fmt.Sprintf("file://%s", filepath)
documentUri := protocol.DocumentUri(uri)
// Make sure the file is open
if !c.IsFileOpen(filepath) {
if err := c.OpenFile(ctx, filepath); err != nil {
return nil, fmt.Errorf("failed to open file for diagnostics: %w", err)
}
// Give the LSP server a moment to process the file
time.Sleep(100 * time.Millisecond)
}
// Get diagnostics
c.diagnosticsMu.RLock()
diagnostics := c.diagnostics[documentUri]
c.diagnosticsMu.RUnlock()
return diagnostics, nil
}
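
Putting the client above together: start a language server, initialize it against a workspace, wait until it answers pings, and pull diagnostics for one file. A minimal sketch assuming gopls is on PATH, that the package's config has already been loaded (WaitForServerReady and several helpers call config.Get), and an illustrative workspace path; diagnostic field names follow the vendored protocol types:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/kujtimiihoxha/opencode/internal/lsp"
)

func main() {
	ctx := context.Background()

	client, err := lsp.NewClient(ctx, "gopls")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	if _, err := client.InitializeLSPClient(ctx, "/path/to/workspace"); err != nil {
		log.Fatal(err)
	}
	if err := client.WaitForServerReady(ctx); err != nil {
		log.Fatal(err)
	}

	// Opens the file on demand if necessary, then returns cached diagnostics.
	diags, err := client.GetDiagnosticsForFile(ctx, "/path/to/workspace/main.go")
	if err != nil {
		log.Fatal(err)
	}
	for _, d := range diags {
		fmt.Println(d.Severity, d.Message)
	}
}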

View File

@@ -1,108 +0,0 @@
package lsp
import (
"encoding/json"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/lsp/protocol"
"github.com/kujtimiihoxha/opencode/internal/lsp/util"
)
// Requests
func HandleWorkspaceConfiguration(params json.RawMessage) (any, error) {
return []map[string]any{{}}, nil
}
func HandleRegisterCapability(params json.RawMessage) (any, error) {
var registerParams protocol.RegistrationParams
if err := json.Unmarshal(params, &registerParams); err != nil {
logging.Error("Error unmarshaling registration params", "error", err)
return nil, err
}
for _, reg := range registerParams.Registrations {
switch reg.Method {
case "workspace/didChangeWatchedFiles":
// Parse the registration options
optionsJSON, err := json.Marshal(reg.RegisterOptions)
if err != nil {
logging.Error("Error marshaling registration options", "error", err)
continue
}
var options protocol.DidChangeWatchedFilesRegistrationOptions
if err := json.Unmarshal(optionsJSON, &options); err != nil {
logging.Error("Error unmarshaling registration options", "error", err)
continue
}
// Store the file watchers registrations
notifyFileWatchRegistration(reg.ID, options.Watchers)
}
}
return nil, nil
}
func HandleApplyEdit(params json.RawMessage) (any, error) {
var edit protocol.ApplyWorkspaceEditParams
if err := json.Unmarshal(params, &edit); err != nil {
return nil, err
}
err := util.ApplyWorkspaceEdit(edit.Edit)
if err != nil {
logging.Error("Error applying workspace edit", "error", err)
return protocol.ApplyWorkspaceEditResult{Applied: false, FailureReason: err.Error()}, nil
}
return protocol.ApplyWorkspaceEditResult{Applied: true}, nil
}
// FileWatchRegistrationHandler is a function that will be called when file watch registrations are received
type FileWatchRegistrationHandler func(id string, watchers []protocol.FileSystemWatcher)
// fileWatchHandler holds the current handler for file watch registrations
var fileWatchHandler FileWatchRegistrationHandler
// RegisterFileWatchHandler sets the handler for file watch registrations
func RegisterFileWatchHandler(handler FileWatchRegistrationHandler) {
fileWatchHandler = handler
}
// notifyFileWatchRegistration notifies the handler about new file watch registrations
func notifyFileWatchRegistration(id string, watchers []protocol.FileSystemWatcher) {
if fileWatchHandler != nil {
fileWatchHandler(id, watchers)
}
}
// Notifications
func HandleServerMessage(params json.RawMessage) {
cnf := config.Get()
var msg struct {
Type int `json:"type"`
Message string `json:"message"`
}
if err := json.Unmarshal(params, &msg); err == nil {
if cnf.DebugLSP {
logging.Debug("Server message", "type", msg.Type, "message", msg.Message)
}
}
}
func HandleDiagnostics(client *Client, params json.RawMessage) {
var diagParams protocol.PublishDiagnosticsParams
if err := json.Unmarshal(params, &diagParams); err != nil {
logging.Error("Error unmarshaling diagnostics params", "error", err)
return
}
client.diagnosticsMu.Lock()
defer client.diagnosticsMu.Unlock()
client.diagnostics[diagParams.URI] = diagParams.Diagnostics
}
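
The file-watch plumbing above only stores a callback; the application is expected to register one and translate the server's glob patterns into real watches. A minimal sketch, where addWatch stands in for a hypothetical hook into the application's file watcher and the GlobPattern field name follows the vendored protocol type:

package watcher

import (
	"github.com/kujtimiihoxha/opencode/internal/logging"
	"github.com/kujtimiihoxha/opencode/internal/lsp"
	"github.com/kujtimiihoxha/opencode/internal/lsp/protocol"
)

func init() {
	lsp.RegisterFileWatchHandler(func(id string, watchers []protocol.FileSystemWatcher) {
		for _, w := range watchers {
			logging.Debug("server requested file watch", "registration", id, "glob", w.GlobPattern)
			// addWatch(w.GlobPattern) // hypothetical hook into the app's watcher
		}
	})
}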

View File

@@ -1,132 +0,0 @@
package lsp
import (
"path/filepath"
"strings"
"github.com/kujtimiihoxha/opencode/internal/lsp/protocol"
)
func DetectLanguageID(uri string) protocol.LanguageKind {
ext := strings.ToLower(filepath.Ext(uri))
switch ext {
case ".abap":
return protocol.LangABAP
case ".bat":
return protocol.LangWindowsBat
case ".bib", ".bibtex":
return protocol.LangBibTeX
case ".clj":
return protocol.LangClojure
case ".coffee":
return protocol.LangCoffeescript
case ".c":
return protocol.LangC
case ".cpp", ".cxx", ".cc", ".c++":
return protocol.LangCPP
case ".cs":
return protocol.LangCSharp
case ".css":
return protocol.LangCSS
case ".d":
return protocol.LangD
case ".pas", ".pascal":
return protocol.LangDelphi
case ".diff", ".patch":
return protocol.LangDiff
case ".dart":
return protocol.LangDart
case ".dockerfile":
return protocol.LangDockerfile
case ".ex", ".exs":
return protocol.LangElixir
case ".erl", ".hrl":
return protocol.LangErlang
case ".fs", ".fsi", ".fsx", ".fsscript":
return protocol.LangFSharp
case ".gitcommit":
return protocol.LangGitCommit
case ".gitrebase":
return protocol.LangGitRebase
case ".go":
return protocol.LangGo
case ".groovy":
return protocol.LangGroovy
case ".hbs", ".handlebars":
return protocol.LangHandlebars
case ".hs":
return protocol.LangHaskell
case ".html", ".htm":
return protocol.LangHTML
case ".ini":
return protocol.LangIni
case ".java":
return protocol.LangJava
case ".js":
return protocol.LangJavaScript
case ".jsx":
return protocol.LangJavaScriptReact
case ".json":
return protocol.LangJSON
case ".tex", ".latex":
return protocol.LangLaTeX
case ".less":
return protocol.LangLess
case ".lua":
return protocol.LangLua
case ".makefile", "makefile":
return protocol.LangMakefile
case ".md", ".markdown":
return protocol.LangMarkdown
case ".m":
return protocol.LangObjectiveC
case ".mm":
return protocol.LangObjectiveCPP
case ".pl":
return protocol.LangPerl
case ".pm":
return protocol.LangPerl6
case ".php":
return protocol.LangPHP
case ".ps1", ".psm1":
return protocol.LangPowershell
case ".pug", ".jade":
return protocol.LangPug
case ".py":
return protocol.LangPython
case ".r":
return protocol.LangR
case ".cshtml", ".razor":
return protocol.LangRazor
case ".rb":
return protocol.LangRuby
case ".rs":
return protocol.LangRust
case ".scss":
return protocol.LangSCSS
case ".sass":
return protocol.LangSASS
case ".scala":
return protocol.LangScala
case ".shader":
return protocol.LangShaderLab
case ".sh", ".bash", ".zsh", ".ksh":
return protocol.LangShellScript
case ".sql":
return protocol.LangSQL
case ".swift":
return protocol.LangSwift
case ".ts":
return protocol.LangTypeScript
case ".tsx":
return protocol.LangTypeScriptReact
case ".xml":
return protocol.LangXML
case ".xsl":
return protocol.LangXSL
case ".yaml", ".yml":
return protocol.LangYAML
default:
return protocol.LanguageKind("") // Unknown language
}
}
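
DetectLanguageID keys off the lowercased file extension, so it accepts either a bare path or a file:// URI, and unknown extensions fall through to an empty LanguageKind. A small sketch; the printed values assume the usual string constants behind the protocol.Lang* identifiers:

package main

import (
	"fmt"

	"github.com/kujtimiihoxha/opencode/internal/lsp"
)

func main() {
	fmt.Println(lsp.DetectLanguageID("file:///src/main.go"))    // "go"
	fmt.Println(lsp.DetectLanguageID("file:///web/app.tsx"))    // "typescriptreact"
	fmt.Println(lsp.DetectLanguageID("file:///notes/todo.txt")) // "" (unknown language)
}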

View File

@@ -1,554 +0,0 @@
// Generated code. Do not edit
package lsp
import (
"context"
"github.com/kujtimiihoxha/opencode/internal/lsp/protocol"
)
// Implementation sends a textDocument/implementation request to the LSP server.
// A request to resolve the implementation locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Definition or a Thenable that resolves to such.
func (c *Client) Implementation(ctx context.Context, params protocol.ImplementationParams) (protocol.Or_Result_textDocument_implementation, error) {
var result protocol.Or_Result_textDocument_implementation
err := c.Call(ctx, "textDocument/implementation", params, &result)
return result, err
}
// TypeDefinition sends a textDocument/typeDefinition request to the LSP server.
// A request to resolve the type definition locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Definition or a Thenable that resolves to such.
func (c *Client) TypeDefinition(ctx context.Context, params protocol.TypeDefinitionParams) (protocol.Or_Result_textDocument_typeDefinition, error) {
var result protocol.Or_Result_textDocument_typeDefinition
err := c.Call(ctx, "textDocument/typeDefinition", params, &result)
return result, err
}
// DocumentColor sends a textDocument/documentColor request to the LSP server.
// A request to list all color symbols found in a given text document. The request's parameter is of type DocumentColorParams the response is of type ColorInformation ColorInformation[] or a Thenable that resolves to such.
func (c *Client) DocumentColor(ctx context.Context, params protocol.DocumentColorParams) ([]protocol.ColorInformation, error) {
var result []protocol.ColorInformation
err := c.Call(ctx, "textDocument/documentColor", params, &result)
return result, err
}
// ColorPresentation sends a textDocument/colorPresentation request to the LSP server.
// A request to list all presentation for a color. The request's parameter is of type ColorPresentationParams the response is of type ColorInformation ColorInformation[] or a Thenable that resolves to such.
func (c *Client) ColorPresentation(ctx context.Context, params protocol.ColorPresentationParams) ([]protocol.ColorPresentation, error) {
var result []protocol.ColorPresentation
err := c.Call(ctx, "textDocument/colorPresentation", params, &result)
return result, err
}
// FoldingRange sends a textDocument/foldingRange request to the LSP server.
// A request to provide folding ranges in a document. The request's parameter is of type FoldingRangeParams, the response is of type FoldingRangeList or a Thenable that resolves to such.
func (c *Client) FoldingRange(ctx context.Context, params protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) {
var result []protocol.FoldingRange
err := c.Call(ctx, "textDocument/foldingRange", params, &result)
return result, err
}
// Declaration sends a textDocument/declaration request to the LSP server.
// A request to resolve the type definition locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Declaration or a typed array of DeclarationLink or a Thenable that resolves to such.
func (c *Client) Declaration(ctx context.Context, params protocol.DeclarationParams) (protocol.Or_Result_textDocument_declaration, error) {
var result protocol.Or_Result_textDocument_declaration
err := c.Call(ctx, "textDocument/declaration", params, &result)
return result, err
}
// SelectionRange sends a textDocument/selectionRange request to the LSP server.
// A request to provide selection ranges in a document. The request's parameter is of type SelectionRangeParams, the response is of type SelectionRange SelectionRange[] or a Thenable that resolves to such.
func (c *Client) SelectionRange(ctx context.Context, params protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) {
var result []protocol.SelectionRange
err := c.Call(ctx, "textDocument/selectionRange", params, &result)
return result, err
}
// PrepareCallHierarchy sends a textDocument/prepareCallHierarchy request to the LSP server.
// A request to result a CallHierarchyItem in a document at a given position. Can be used as an input to an incoming or outgoing call hierarchy. Since 3.16.0
func (c *Client) PrepareCallHierarchy(ctx context.Context, params protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) {
var result []protocol.CallHierarchyItem
err := c.Call(ctx, "textDocument/prepareCallHierarchy", params, &result)
return result, err
}
// IncomingCalls sends a callHierarchy/incomingCalls request to the LSP server.
// A request to resolve the incoming calls for a given CallHierarchyItem. Since 3.16.0
func (c *Client) IncomingCalls(ctx context.Context, params protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) {
var result []protocol.CallHierarchyIncomingCall
err := c.Call(ctx, "callHierarchy/incomingCalls", params, &result)
return result, err
}
// OutgoingCalls sends a callHierarchy/outgoingCalls request to the LSP server.
// A request to resolve the outgoing calls for a given CallHierarchyItem. Since 3.16.0
func (c *Client) OutgoingCalls(ctx context.Context, params protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) {
var result []protocol.CallHierarchyOutgoingCall
err := c.Call(ctx, "callHierarchy/outgoingCalls", params, &result)
return result, err
}
// SemanticTokensFull sends a textDocument/semanticTokens/full request to the LSP server.
// Since 3.16.0
func (c *Client) SemanticTokensFull(ctx context.Context, params protocol.SemanticTokensParams) (protocol.SemanticTokens, error) {
var result protocol.SemanticTokens
err := c.Call(ctx, "textDocument/semanticTokens/full", params, &result)
return result, err
}
// SemanticTokensFullDelta sends a textDocument/semanticTokens/full/delta request to the LSP server.
// Since 3.16.0
func (c *Client) SemanticTokensFullDelta(ctx context.Context, params protocol.SemanticTokensDeltaParams) (protocol.Or_Result_textDocument_semanticTokens_full_delta, error) {
var result protocol.Or_Result_textDocument_semanticTokens_full_delta
err := c.Call(ctx, "textDocument/semanticTokens/full/delta", params, &result)
return result, err
}
// SemanticTokensRange sends a textDocument/semanticTokens/range request to the LSP server.
// Since 3.16.0
func (c *Client) SemanticTokensRange(ctx context.Context, params protocol.SemanticTokensRangeParams) (protocol.SemanticTokens, error) {
var result protocol.SemanticTokens
err := c.Call(ctx, "textDocument/semanticTokens/range", params, &result)
return result, err
}
// LinkedEditingRange sends a textDocument/linkedEditingRange request to the LSP server.
// A request to provide ranges that can be edited together. Since 3.16.0
func (c *Client) LinkedEditingRange(ctx context.Context, params protocol.LinkedEditingRangeParams) (protocol.LinkedEditingRanges, error) {
var result protocol.LinkedEditingRanges
err := c.Call(ctx, "textDocument/linkedEditingRange", params, &result)
return result, err
}
// WillCreateFiles sends a workspace/willCreateFiles request to the LSP server.
// The will create files request is sent from the client to the server before files are actually created as long as the creation is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are created. Hence the WorkspaceEdit can not manipulate the content of the file to be created. Since 3.16.0
func (c *Client) WillCreateFiles(ctx context.Context, params protocol.CreateFilesParams) (protocol.WorkspaceEdit, error) {
var result protocol.WorkspaceEdit
err := c.Call(ctx, "workspace/willCreateFiles", params, &result)
return result, err
}
// WillRenameFiles sends a workspace/willRenameFiles request to the LSP server.
// The will rename files request is sent from the client to the server before files are actually renamed as long as the rename is triggered from within the client. Since 3.16.0
func (c *Client) WillRenameFiles(ctx context.Context, params protocol.RenameFilesParams) (protocol.WorkspaceEdit, error) {
var result protocol.WorkspaceEdit
err := c.Call(ctx, "workspace/willRenameFiles", params, &result)
return result, err
}
// WillDeleteFiles sends a workspace/willDeleteFiles request to the LSP server.
// The will delete files request is sent from the client to the server before files are actually deleted as long as the deletion is triggered from within the client. Since 3.16.0
func (c *Client) WillDeleteFiles(ctx context.Context, params protocol.DeleteFilesParams) (protocol.WorkspaceEdit, error) {
var result protocol.WorkspaceEdit
err := c.Call(ctx, "workspace/willDeleteFiles", params, &result)
return result, err
}
// Moniker sends a textDocument/moniker request to the LSP server.
// A request to get the moniker of a symbol at a given text document position. The request parameter is of type TextDocumentPositionParams. The response is of type Moniker Moniker[] or null.
func (c *Client) Moniker(ctx context.Context, params protocol.MonikerParams) ([]protocol.Moniker, error) {
var result []protocol.Moniker
err := c.Call(ctx, "textDocument/moniker", params, &result)
return result, err
}
// PrepareTypeHierarchy sends a textDocument/prepareTypeHierarchy request to the LSP server.
// A request to result a TypeHierarchyItem in a document at a given position. Can be used as an input to a subtypes or supertypes type hierarchy. Since 3.17.0
func (c *Client) PrepareTypeHierarchy(ctx context.Context, params protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) {
var result []protocol.TypeHierarchyItem
err := c.Call(ctx, "textDocument/prepareTypeHierarchy", params, &result)
return result, err
}
// Supertypes sends a typeHierarchy/supertypes request to the LSP server.
// A request to resolve the supertypes for a given TypeHierarchyItem. Since 3.17.0
func (c *Client) Supertypes(ctx context.Context, params protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) {
var result []protocol.TypeHierarchyItem
err := c.Call(ctx, "typeHierarchy/supertypes", params, &result)
return result, err
}
// Subtypes sends a typeHierarchy/subtypes request to the LSP server.
// A request to resolve the subtypes for a given TypeHierarchyItem. Since 3.17.0
func (c *Client) Subtypes(ctx context.Context, params protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) {
var result []protocol.TypeHierarchyItem
err := c.Call(ctx, "typeHierarchy/subtypes", params, &result)
return result, err
}
// InlineValue sends a textDocument/inlineValue request to the LSP server.
// A request to provide inline values in a document. The request's parameter is of type InlineValueParams, the response is of type InlineValue InlineValue[] or a Thenable that resolves to such. Since 3.17.0
func (c *Client) InlineValue(ctx context.Context, params protocol.InlineValueParams) ([]protocol.InlineValue, error) {
var result []protocol.InlineValue
err := c.Call(ctx, "textDocument/inlineValue", params, &result)
return result, err
}
// InlayHint sends a textDocument/inlayHint request to the LSP server.
// A request to provide inlay hints in a document. The request's parameter is of type InlayHintsParams, the response is of type InlayHint InlayHint[] or a Thenable that resolves to such. Since 3.17.0
func (c *Client) InlayHint(ctx context.Context, params protocol.InlayHintParams) ([]protocol.InlayHint, error) {
var result []protocol.InlayHint
err := c.Call(ctx, "textDocument/inlayHint", params, &result)
return result, err
}
// Resolve sends a inlayHint/resolve request to the LSP server.
// A request to resolve additional properties for an inlay hint. The request's parameter is of type InlayHint, the response is of type InlayHint or a Thenable that resolves to such. Since 3.17.0
func (c *Client) Resolve(ctx context.Context, params protocol.InlayHint) (protocol.InlayHint, error) {
var result protocol.InlayHint
err := c.Call(ctx, "inlayHint/resolve", params, &result)
return result, err
}
// Diagnostic sends a textDocument/diagnostic request to the LSP server.
// The document diagnostic request definition. Since 3.17.0
func (c *Client) Diagnostic(ctx context.Context, params protocol.DocumentDiagnosticParams) (protocol.DocumentDiagnosticReport, error) {
var result protocol.DocumentDiagnosticReport
err := c.Call(ctx, "textDocument/diagnostic", params, &result)
return result, err
}
// DiagnosticWorkspace sends a workspace/diagnostic request to the LSP server.
// The workspace diagnostic request definition. Since 3.17.0
func (c *Client) DiagnosticWorkspace(ctx context.Context, params protocol.WorkspaceDiagnosticParams) (protocol.WorkspaceDiagnosticReport, error) {
var result protocol.WorkspaceDiagnosticReport
err := c.Call(ctx, "workspace/diagnostic", params, &result)
return result, err
}
// InlineCompletion sends a textDocument/inlineCompletion request to the LSP server.
// A request to provide inline completions in a document. The request's parameter is of type InlineCompletionParams, the response is of type InlineCompletion InlineCompletion[] or a Thenable that resolves to such. Since 3.18.0 PROPOSED
func (c *Client) InlineCompletion(ctx context.Context, params protocol.InlineCompletionParams) (protocol.Or_Result_textDocument_inlineCompletion, error) {
var result protocol.Or_Result_textDocument_inlineCompletion
err := c.Call(ctx, "textDocument/inlineCompletion", params, &result)
return result, err
}
// TextDocumentContent sends a workspace/textDocumentContent request to the LSP server.
// The workspace/textDocumentContent request is sent from the client to the server to request the content of a text document. Since 3.18.0 PROPOSED
func (c *Client) TextDocumentContent(ctx context.Context, params protocol.TextDocumentContentParams) (string, error) {
var result string
err := c.Call(ctx, "workspace/textDocumentContent", params, &result)
return result, err
}
// Initialize sends a initialize request to the LSP server.
// The initialize request is sent from the client to the server. It is sent once as the request after starting up the server. The request's parameter is of type InitializeParams; the response is of type InitializeResult or a Thenable that resolves to such.
func (c *Client) Initialize(ctx context.Context, params protocol.ParamInitialize) (protocol.InitializeResult, error) {
var result protocol.InitializeResult
err := c.Call(ctx, "initialize", params, &result)
return result, err
}
// Shutdown sends a shutdown request to the LSP server.
// A shutdown request is sent from the client to the server. It is sent once when the client decides to shutdown the server. The only notification that is sent after a shutdown request is the exit event.
func (c *Client) Shutdown(ctx context.Context) error {
return c.Call(ctx, "shutdown", nil, nil)
}
// WillSaveWaitUntil sends a textDocument/willSaveWaitUntil request to the LSP server.
// A document will save request is sent from the client to the server before the document is actually saved. The request can return an array of TextEdits which will be applied to the text document before it is saved. Please note that clients might drop results if computing the text edits took too long or if a server constantly fails on this request. This is done to keep the save fast and reliable.
func (c *Client) WillSaveWaitUntil(ctx context.Context, params protocol.WillSaveTextDocumentParams) ([]protocol.TextEdit, error) {
var result []protocol.TextEdit
err := c.Call(ctx, "textDocument/willSaveWaitUntil", params, &result)
return result, err
}
// Completion sends a textDocument/completion request to the LSP server.
// Request to request completion at a given text document position. The request's parameter is of type TextDocumentPosition the response is of type CompletionItem CompletionItem[] or CompletionList or a Thenable that resolves to such. The request can delay the computation of the CompletionItem.detail detail and CompletionItem.documentation documentation properties to the completionItem/resolve request. However, properties that are needed for the initial sorting and filtering, like sortText, filterText, insertText, and textEdit, must not be changed during resolve.
func (c *Client) Completion(ctx context.Context, params protocol.CompletionParams) (protocol.Or_Result_textDocument_completion, error) {
var result protocol.Or_Result_textDocument_completion
err := c.Call(ctx, "textDocument/completion", params, &result)
return result, err
}
// ResolveCompletionItem sends a completionItem/resolve request to the LSP server.
// Request to resolve additional information for a given completion item.The request's parameter is of type CompletionItem the response is of type CompletionItem or a Thenable that resolves to such.
func (c *Client) ResolveCompletionItem(ctx context.Context, params protocol.CompletionItem) (protocol.CompletionItem, error) {
var result protocol.CompletionItem
err := c.Call(ctx, "completionItem/resolve", params, &result)
return result, err
}
// Hover sends a textDocument/hover request to the LSP server.
// Request to request hover information at a given text document position. The request's parameter is of type TextDocumentPosition the response is of type Hover or a Thenable that resolves to such.
func (c *Client) Hover(ctx context.Context, params protocol.HoverParams) (protocol.Hover, error) {
var result protocol.Hover
err := c.Call(ctx, "textDocument/hover", params, &result)
return result, err
}
// SignatureHelp sends a textDocument/signatureHelp request to the LSP server.
func (c *Client) SignatureHelp(ctx context.Context, params protocol.SignatureHelpParams) (protocol.SignatureHelp, error) {
var result protocol.SignatureHelp
err := c.Call(ctx, "textDocument/signatureHelp", params, &result)
return result, err
}
// Definition sends a textDocument/definition request to the LSP server.
// A request to resolve the definition location of a symbol at a given text document position. The request's parameter is of type TextDocumentPosition the response is of either type Definition or a typed array of DefinitionLink or a Thenable that resolves to such.
func (c *Client) Definition(ctx context.Context, params protocol.DefinitionParams) (protocol.Or_Result_textDocument_definition, error) {
var result protocol.Or_Result_textDocument_definition
err := c.Call(ctx, "textDocument/definition", params, &result)
return result, err
}
// References sends a textDocument/references request to the LSP server.
// A request to resolve project-wide references for the symbol denoted by the given text document position. The request's parameter is of type ReferenceParams the response is of type Location Location[] or a Thenable that resolves to such.
func (c *Client) References(ctx context.Context, params protocol.ReferenceParams) ([]protocol.Location, error) {
var result []protocol.Location
err := c.Call(ctx, "textDocument/references", params, &result)
return result, err
}
// DocumentHighlight sends a textDocument/documentHighlight request to the LSP server.
// Request to resolve a DocumentHighlight for a given text document position. The request's parameter is of type TextDocumentPosition the request response is an array of type DocumentHighlight or a Thenable that resolves to such.
func (c *Client) DocumentHighlight(ctx context.Context, params protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) {
var result []protocol.DocumentHighlight
err := c.Call(ctx, "textDocument/documentHighlight", params, &result)
return result, err
}
// DocumentSymbol sends a textDocument/documentSymbol request to the LSP server.
// A request to list all symbols found in a given text document. The request's parameter is of type TextDocumentIdentifier the response is of type SymbolInformation SymbolInformation[] or a Thenable that resolves to such.
func (c *Client) DocumentSymbol(ctx context.Context, params protocol.DocumentSymbolParams) (protocol.Or_Result_textDocument_documentSymbol, error) {
var result protocol.Or_Result_textDocument_documentSymbol
err := c.Call(ctx, "textDocument/documentSymbol", params, &result)
return result, err
}
// CodeAction sends a textDocument/codeAction request to the LSP server.
// A request to provide commands for the given text document and range.
func (c *Client) CodeAction(ctx context.Context, params protocol.CodeActionParams) ([]protocol.Or_Result_textDocument_codeAction_Item0_Elem, error) {
var result []protocol.Or_Result_textDocument_codeAction_Item0_Elem
err := c.Call(ctx, "textDocument/codeAction", params, &result)
return result, err
}
// ResolveCodeAction sends a codeAction/resolve request to the LSP server.
// Request to resolve additional information for a given code action.The request's parameter is of type CodeAction the response is of type CodeAction or a Thenable that resolves to such.
func (c *Client) ResolveCodeAction(ctx context.Context, params protocol.CodeAction) (protocol.CodeAction, error) {
var result protocol.CodeAction
err := c.Call(ctx, "codeAction/resolve", params, &result)
return result, err
}
// Symbol sends a workspace/symbol request to the LSP server.
// A request to list project-wide symbols matching the query string given by the WorkspaceSymbolParams. The response is of type SymbolInformation SymbolInformation[] or a Thenable that resolves to such. Since 3.17.0 - support for WorkspaceSymbol in the returned data. Clients need to advertise support for WorkspaceSymbols via the client capability workspace.symbol.resolveSupport.
func (c *Client) Symbol(ctx context.Context, params protocol.WorkspaceSymbolParams) (protocol.Or_Result_workspace_symbol, error) {
var result protocol.Or_Result_workspace_symbol
err := c.Call(ctx, "workspace/symbol", params, &result)
return result, err
}
// ResolveWorkspaceSymbol sends a workspaceSymbol/resolve request to the LSP server.
// A request to resolve the range inside the workspace symbol's location. Since 3.17.0
func (c *Client) ResolveWorkspaceSymbol(ctx context.Context, params protocol.WorkspaceSymbol) (protocol.WorkspaceSymbol, error) {
var result protocol.WorkspaceSymbol
err := c.Call(ctx, "workspaceSymbol/resolve", params, &result)
return result, err
}
// CodeLens sends a textDocument/codeLens request to the LSP server.
// A request to provide code lens for the given text document.
func (c *Client) CodeLens(ctx context.Context, params protocol.CodeLensParams) ([]protocol.CodeLens, error) {
var result []protocol.CodeLens
err := c.Call(ctx, "textDocument/codeLens", params, &result)
return result, err
}
// ResolveCodeLens sends a codeLens/resolve request to the LSP server.
// A request to resolve a command for a given code lens.
func (c *Client) ResolveCodeLens(ctx context.Context, params protocol.CodeLens) (protocol.CodeLens, error) {
var result protocol.CodeLens
err := c.Call(ctx, "codeLens/resolve", params, &result)
return result, err
}
// DocumentLink sends a textDocument/documentLink request to the LSP server.
// A request to provide document links
func (c *Client) DocumentLink(ctx context.Context, params protocol.DocumentLinkParams) ([]protocol.DocumentLink, error) {
var result []protocol.DocumentLink
err := c.Call(ctx, "textDocument/documentLink", params, &result)
return result, err
}
// ResolveDocumentLink sends a documentLink/resolve request to the LSP server.
// Request to resolve additional information for a given document link. The request's parameter is of type DocumentLink the response is of type DocumentLink or a Thenable that resolves to such.
func (c *Client) ResolveDocumentLink(ctx context.Context, params protocol.DocumentLink) (protocol.DocumentLink, error) {
var result protocol.DocumentLink
err := c.Call(ctx, "documentLink/resolve", params, &result)
return result, err
}
// Formatting sends a textDocument/formatting request to the LSP server.
// A request to format a whole document.
func (c *Client) Formatting(ctx context.Context, params protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) {
var result []protocol.TextEdit
err := c.Call(ctx, "textDocument/formatting", params, &result)
return result, err
}
// RangeFormatting sends a textDocument/rangeFormatting request to the LSP server.
// A request to format a range in a document.
func (c *Client) RangeFormatting(ctx context.Context, params protocol.DocumentRangeFormattingParams) ([]protocol.TextEdit, error) {
var result []protocol.TextEdit
err := c.Call(ctx, "textDocument/rangeFormatting", params, &result)
return result, err
}
// RangesFormatting sends a textDocument/rangesFormatting request to the LSP server.
// A request to format ranges in a document. Since 3.18.0 PROPOSED
func (c *Client) RangesFormatting(ctx context.Context, params protocol.DocumentRangesFormattingParams) ([]protocol.TextEdit, error) {
var result []protocol.TextEdit
err := c.Call(ctx, "textDocument/rangesFormatting", params, &result)
return result, err
}
// OnTypeFormatting sends a textDocument/onTypeFormatting request to the LSP server.
// A request to format a document on type.
func (c *Client) OnTypeFormatting(ctx context.Context, params protocol.DocumentOnTypeFormattingParams) ([]protocol.TextEdit, error) {
var result []protocol.TextEdit
err := c.Call(ctx, "textDocument/onTypeFormatting", params, &result)
return result, err
}
// Rename sends a textDocument/rename request to the LSP server.
// A request to rename a symbol.
func (c *Client) Rename(ctx context.Context, params protocol.RenameParams) (protocol.WorkspaceEdit, error) {
var result protocol.WorkspaceEdit
err := c.Call(ctx, "textDocument/rename", params, &result)
return result, err
}
// PrepareRename sends a textDocument/prepareRename request to the LSP server.
// A request to test and perform the setup necessary for a rename. Since 3.16 - support for default behavior
func (c *Client) PrepareRename(ctx context.Context, params protocol.PrepareRenameParams) (protocol.PrepareRenameResult, error) {
var result protocol.PrepareRenameResult
err := c.Call(ctx, "textDocument/prepareRename", params, &result)
return result, err
}
// ExecuteCommand sends a workspace/executeCommand request to the LSP server.
// A request send from the client to the server to execute a command. The request might return a workspace edit which the client will apply to the workspace.
func (c *Client) ExecuteCommand(ctx context.Context, params protocol.ExecuteCommandParams) (any, error) {
var result any
err := c.Call(ctx, "workspace/executeCommand", params, &result)
return result, err
}
// DidChangeWorkspaceFolders sends a workspace/didChangeWorkspaceFolders notification to the LSP server.
// The workspace/didChangeWorkspaceFolders notification is sent from the client to the server when the workspace folder configuration changes.
func (c *Client) DidChangeWorkspaceFolders(ctx context.Context, params protocol.DidChangeWorkspaceFoldersParams) error {
return c.Notify(ctx, "workspace/didChangeWorkspaceFolders", params)
}
// WorkDoneProgressCancel sends a window/workDoneProgress/cancel notification to the LSP server.
// The window/workDoneProgress/cancel notification is sent from the client to the server to cancel a progress initiated on the server side.
func (c *Client) WorkDoneProgressCancel(ctx context.Context, params protocol.WorkDoneProgressCancelParams) error {
return c.Notify(ctx, "window/workDoneProgress/cancel", params)
}
// DidCreateFiles sends a workspace/didCreateFiles notification to the LSP server.
// The did create files notification is sent from the client to the server when files were created from within the client. Since 3.16.0
func (c *Client) DidCreateFiles(ctx context.Context, params protocol.CreateFilesParams) error {
return c.Notify(ctx, "workspace/didCreateFiles", params)
}
// DidRenameFiles sends a workspace/didRenameFiles notification to the LSP server.
// The did rename files notification is sent from the client to the server when files were renamed from within the client. Since 3.16.0
func (c *Client) DidRenameFiles(ctx context.Context, params protocol.RenameFilesParams) error {
return c.Notify(ctx, "workspace/didRenameFiles", params)
}
// DidDeleteFiles sends a workspace/didDeleteFiles notification to the LSP server.
// The did delete files notification is sent from the client to the server when files were deleted from within the client. Since 3.16.0
func (c *Client) DidDeleteFiles(ctx context.Context, params protocol.DeleteFilesParams) error {
return c.Notify(ctx, "workspace/didDeleteFiles", params)
}
// DidOpenNotebookDocument sends a notebookDocument/didOpen notification to the LSP server.
// A notification sent when a notebook opens. Since 3.17.0
func (c *Client) DidOpenNotebookDocument(ctx context.Context, params protocol.DidOpenNotebookDocumentParams) error {
return c.Notify(ctx, "notebookDocument/didOpen", params)
}
// DidChangeNotebookDocument sends a notebookDocument/didChange notification to the LSP server.
func (c *Client) DidChangeNotebookDocument(ctx context.Context, params protocol.DidChangeNotebookDocumentParams) error {
return c.Notify(ctx, "notebookDocument/didChange", params)
}
// DidSaveNotebookDocument sends a notebookDocument/didSave notification to the LSP server.
// A notification sent when a notebook document is saved. Since 3.17.0
func (c *Client) DidSaveNotebookDocument(ctx context.Context, params protocol.DidSaveNotebookDocumentParams) error {
return c.Notify(ctx, "notebookDocument/didSave", params)
}
// DidCloseNotebookDocument sends a notebookDocument/didClose notification to the LSP server.
// A notification sent when a notebook closes. Since 3.17.0
func (c *Client) DidCloseNotebookDocument(ctx context.Context, params protocol.DidCloseNotebookDocumentParams) error {
return c.Notify(ctx, "notebookDocument/didClose", params)
}
// Initialized sends a initialized notification to the LSP server.
// The initialized notification is sent from the client to the server after the client is fully initialized and the server is allowed to send requests from the server to the client.
func (c *Client) Initialized(ctx context.Context, params protocol.InitializedParams) error {
return c.Notify(ctx, "initialized", params)
}
// Exit sends a exit notification to the LSP server.
// The exit event is sent from the client to the server to ask the server to exit its process.
func (c *Client) Exit(ctx context.Context) error {
return c.Notify(ctx, "exit", nil)
}
// DidChangeConfiguration sends a workspace/didChangeConfiguration notification to the LSP server.
// The configuration change notification is sent from the client to the server when the client's configuration has changed. The notification contains the changed configuration as defined by the language client.
func (c *Client) DidChangeConfiguration(ctx context.Context, params protocol.DidChangeConfigurationParams) error {
return c.Notify(ctx, "workspace/didChangeConfiguration", params)
}
// DidOpen sends a textDocument/didOpen notification to the LSP server.
// The document open notification is sent from the client to the server to signal newly opened text documents. The document's truth is now managed by the client and the server must not try to read the document's truth using the document's uri. Open in this sense means it is managed by the client. It doesn't necessarily mean that its content is presented in an editor. An open notification must not be sent more than once without a corresponding close notification sent before. This means open and close notifications must be balanced and the max open count is one.
func (c *Client) DidOpen(ctx context.Context, params protocol.DidOpenTextDocumentParams) error {
return c.Notify(ctx, "textDocument/didOpen", params)
}
// DidChange sends a textDocument/didChange notification to the LSP server.
// The document change notification is sent from the client to the server to signal changes to a text document.
func (c *Client) DidChange(ctx context.Context, params protocol.DidChangeTextDocumentParams) error {
return c.Notify(ctx, "textDocument/didChange", params)
}
// DidClose sends a textDocument/didClose notification to the LSP server.
// The document close notification is sent from the client to the server when the document got closed in the client. The document's truth now exists where the document's uri points to (e.g. if the document's uri is a file uri the truth now exists on disk). As with the open notification the close notification is about managing the document's content. Receiving a close notification doesn't mean that the document was open in an editor before. A close notification requires a previous open notification to be sent.
func (c *Client) DidClose(ctx context.Context, params protocol.DidCloseTextDocumentParams) error {
return c.Notify(ctx, "textDocument/didClose", params)
}
// DidSave sends a textDocument/didSave notification to the LSP server.
// The document save notification is sent from the client to the server when the document got saved in the client.
func (c *Client) DidSave(ctx context.Context, params protocol.DidSaveTextDocumentParams) error {
return c.Notify(ctx, "textDocument/didSave", params)
}
// WillSave sends a textDocument/willSave notification to the LSP server.
// A document will save notification is sent from the client to the server before the document is actually saved.
func (c *Client) WillSave(ctx context.Context, params protocol.WillSaveTextDocumentParams) error {
return c.Notify(ctx, "textDocument/willSave", params)
}
// DidChangeWatchedFiles sends a workspace/didChangeWatchedFiles notification to the LSP server.
// The watched files notification is sent from the client to the server when the client detects changes to files watched by the language client.
func (c *Client) DidChangeWatchedFiles(ctx context.Context, params protocol.DidChangeWatchedFilesParams) error {
return c.Notify(ctx, "workspace/didChangeWatchedFiles", params)
}
// SetTrace sends a $/setTrace notification to the LSP server.
func (c *Client) SetTrace(ctx context.Context, params protocol.SetTraceParams) error {
return c.Notify(ctx, "$/setTrace", params)
}
// Progress sends a $/progress notification to the LSP server.
func (c *Client) Progress(ctx context.Context, params protocol.ProgressParams) error {
return c.Notify(ctx, "$/progress", params)
}
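A minimal usage sketch, not part of the original file: the wrappers above are thin shims over Client.Notify, so reporting a deleted file is a single typed call. The helper name notifyFileDeleted is hypothetical; the params and field names come from code later in this diff, and the snippet assumes it compiles inside the same lsp package.

func notifyFileDeleted(ctx context.Context, c *Client, uri string) error {
	// Hypothetical helper: forwards one delete event through the typed wrapper above.
	return c.DidChangeWatchedFiles(ctx, protocol.DidChangeWatchedFilesParams{
		Changes: []protocol.FileEvent{{
			URI:  protocol.DocumentUri(uri),
			Type: protocol.FileChangeType(protocol.Deleted),
		}},
	})
}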

View File

@@ -1,48 +0,0 @@
package lsp
import (
"encoding/json"
)
// Message represents a JSON-RPC 2.0 message
type Message struct {
JSONRPC string `json:"jsonrpc"`
ID int32 `json:"id,omitempty"`
Method string `json:"method,omitempty"`
Params json.RawMessage `json:"params,omitempty"`
Result json.RawMessage `json:"result,omitempty"`
Error *ResponseError `json:"error,omitempty"`
}
// ResponseError represents a JSON-RPC 2.0 error
type ResponseError struct {
Code int `json:"code"`
Message string `json:"message"`
}
func NewRequest(id int32, method string, params any) (*Message, error) {
paramsJSON, err := json.Marshal(params)
if err != nil {
return nil, err
}
return &Message{
JSONRPC: "2.0",
ID: id,
Method: method,
Params: paramsJSON,
}, nil
}
func NewNotification(method string, params any) (*Message, error) {
paramsJSON, err := json.Marshal(params)
if err != nil {
return nil, err
}
return &Message{
JSONRPC: "2.0",
Method: method,
Params: paramsJSON,
}, nil
}
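A sketch of what NewNotification puts on the wire, assuming it sits in the same package (exampleNotificationJSON is hypothetical): a notification is a Message without an ID, so the ID field is omitted when marshaled.

func exampleNotificationJSON() (string, error) {
	// Build an "initialized" notification with empty params.
	msg, err := NewNotification("initialized", struct{}{})
	if err != nil {
		return "", err
	}
	data, err := json.Marshal(msg)
	if err != nil {
		return "", err
	}
	// Expected shape: {"jsonrpc":"2.0","method":"initialized","params":{}}
	return string(data), nil
}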

View File

@@ -1,27 +0,0 @@
Copyright 2009 The Go Authors.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google LLC nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -1,117 +0,0 @@
package protocol
import "fmt"
// WorkspaceSymbolResult is an interface for types that represent workspace symbols
type WorkspaceSymbolResult interface {
GetName() string
GetLocation() Location
isWorkspaceSymbol() // marker method
}
func (ws *WorkspaceSymbol) GetName() string { return ws.Name }
func (ws *WorkspaceSymbol) GetLocation() Location {
switch v := ws.Location.Value.(type) {
case Location:
return v
case LocationUriOnly:
return Location{URI: v.URI}
}
return Location{}
}
func (ws *WorkspaceSymbol) isWorkspaceSymbol() {}
func (si *SymbolInformation) GetName() string { return si.Name }
func (si *SymbolInformation) GetLocation() Location { return si.Location }
func (si *SymbolInformation) isWorkspaceSymbol() {}
// Results converts the Value to a slice of WorkspaceSymbolResult
func (r Or_Result_workspace_symbol) Results() ([]WorkspaceSymbolResult, error) {
if r.Value == nil {
return make([]WorkspaceSymbolResult, 0), nil
}
switch v := r.Value.(type) {
case []WorkspaceSymbol:
results := make([]WorkspaceSymbolResult, len(v))
for i := range v {
results[i] = &v[i]
}
return results, nil
case []SymbolInformation:
results := make([]WorkspaceSymbolResult, len(v))
for i := range v {
results[i] = &v[i]
}
return results, nil
default:
return nil, fmt.Errorf("unknown symbol type: %T", r.Value)
}
}
// DocumentSymbolResult is an interface for types that represent document symbols
type DocumentSymbolResult interface {
GetRange() Range
GetName() string
isDocumentSymbol() // marker method
}
func (ds *DocumentSymbol) GetRange() Range { return ds.Range }
func (ds *DocumentSymbol) GetName() string { return ds.Name }
func (ds *DocumentSymbol) isDocumentSymbol() {}
func (si *SymbolInformation) GetRange() Range { return si.Location.Range }
// Note: SymbolInformation already has GetName() implemented above
func (si *SymbolInformation) isDocumentSymbol() {}
// Results converts the Value to a slice of DocumentSymbolResult
func (r Or_Result_textDocument_documentSymbol) Results() ([]DocumentSymbolResult, error) {
if r.Value == nil {
return make([]DocumentSymbolResult, 0), nil
}
switch v := r.Value.(type) {
case []DocumentSymbol:
results := make([]DocumentSymbolResult, len(v))
for i := range v {
results[i] = &v[i]
}
return results, nil
case []SymbolInformation:
results := make([]DocumentSymbolResult, len(v))
for i := range v {
results[i] = &v[i]
}
return results, nil
default:
return nil, fmt.Errorf("unknown document symbol type: %T", v)
}
}
// TextEditResult is an interface for types that can be used as text edits
type TextEditResult interface {
GetRange() Range
GetNewText() string
isTextEdit() // marker method
}
func (te *TextEdit) GetRange() Range { return te.Range }
func (te *TextEdit) GetNewText() string { return te.NewText }
func (te *TextEdit) isTextEdit() {}
// Convert Or_TextDocumentEdit_edits_Elem to TextEdit
func (e Or_TextDocumentEdit_edits_Elem) AsTextEdit() (TextEdit, error) {
if e.Value == nil {
return TextEdit{}, fmt.Errorf("nil text edit")
}
switch v := e.Value.(type) {
case TextEdit:
return v, nil
case AnnotatedTextEdit:
return TextEdit{
Range: v.Range,
NewText: v.NewText,
}, nil
default:
return TextEdit{}, fmt.Errorf("unknown text edit type: %T", e.Value)
}
}
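A consumer sketch, assuming the same protocol package (printSymbolNames is hypothetical): Results() hides whether the server returned []WorkspaceSymbol or []SymbolInformation, so callers iterate a single interface.

func printSymbolNames(r Or_Result_workspace_symbol) error {
	// Results() normalizes both symbol representations into WorkspaceSymbolResult.
	symbols, err := r.Results()
	if err != nil {
		return err
	}
	for _, s := range symbols {
		fmt.Println(s.GetName(), s.GetLocation().URI)
	}
	return nil
}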

View File

@@ -1,58 +0,0 @@
package protocol
import (
"fmt"
"strings"
)
// PatternInfo is an interface for types that represent glob patterns
type PatternInfo interface {
GetPattern() string
GetBasePath() string
isPattern() // marker method
}
// StringPattern implements PatternInfo for string patterns
type StringPattern struct {
Pattern string
}
func (p StringPattern) GetPattern() string { return p.Pattern }
func (p StringPattern) GetBasePath() string { return "" }
func (p StringPattern) isPattern() {}
// RelativePatternInfo implements PatternInfo for RelativePattern
type RelativePatternInfo struct {
RP RelativePattern
BasePath string
}
func (p RelativePatternInfo) GetPattern() string { return string(p.RP.Pattern) }
func (p RelativePatternInfo) GetBasePath() string { return p.BasePath }
func (p RelativePatternInfo) isPattern() {}
// AsPattern converts GlobPattern to a PatternInfo object
func (g *GlobPattern) AsPattern() (PatternInfo, error) {
if g.Value == nil {
return nil, fmt.Errorf("nil pattern")
}
switch v := g.Value.(type) {
case string:
return StringPattern{Pattern: v}, nil
case RelativePattern:
// Handle BaseURI which could be string or DocumentUri
basePath := ""
switch baseURI := v.BaseURI.Value.(type) {
case string:
basePath = strings.TrimPrefix(baseURI, "file://")
case DocumentUri:
basePath = strings.TrimPrefix(string(baseURI), "file://")
default:
return nil, fmt.Errorf("unknown BaseURI type: %T", v.BaseURI.Value)
}
return RelativePatternInfo{RP: v, BasePath: basePath}, nil
default:
return nil, fmt.Errorf("unknown pattern type: %T", g.Value)
}
}
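A usage sketch, assuming the same package (describePattern is hypothetical): AsPattern lets callers treat string and relative glob registrations uniformly, with relative patterns carrying a base path.

func describePattern(g GlobPattern) (string, error) {
	info, err := g.AsPattern()
	if err != nil {
		return "", err
	}
	// Relative patterns carry a base path; plain string patterns do not.
	if base := info.GetBasePath(); base != "" {
		return fmt.Sprintf("%s (relative to %s)", info.GetPattern(), base), nil
	}
	return info.GetPattern(), nil
}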

View File

@@ -1,30 +0,0 @@
package protocol
var TableKindMap = map[SymbolKind]string{
File: "File",
Module: "Module",
Namespace: "Namespace",
Package: "Package",
Class: "Class",
Method: "Method",
Property: "Property",
Field: "Field",
Constructor: "Constructor",
Enum: "Enum",
Interface: "Interface",
Function: "Function",
Variable: "Variable",
Constant: "Constant",
String: "String",
Number: "Number",
Boolean: "Boolean",
Array: "Array",
Object: "Object",
Key: "Key",
Null: "Null",
EnumMember: "EnumMember",
Struct: "Struct",
Event: "Event",
Operator: "Operator",
TypeParameter: "TypeParameter",
}
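A lookup sketch, assuming the same package (kindLabel is hypothetical): TableKindMap is a display-name table keyed by SymbolKind, so unmapped kinds need an explicit fallback.

func kindLabel(k SymbolKind) string {
	if name, ok := TableKindMap[k]; ok {
		return name
	}
	// Fall back for kinds the table does not cover.
	return "Unknown"
}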

View File

@@ -1,81 +0,0 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package protocol
import (
"encoding/json"
"fmt"
)
// DocumentChange is a union of various file edit operations.
//
// Exactly one field of this struct is non-nil; see [DocumentChange.Valid].
//
// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#resourceChanges
type DocumentChange struct {
TextDocumentEdit *TextDocumentEdit
CreateFile *CreateFile
RenameFile *RenameFile
DeleteFile *DeleteFile
}
// Valid reports whether the DocumentChange sum-type value is valid,
// that is, exactly one of create, delete, edit, or rename.
func (ch DocumentChange) Valid() bool {
n := 0
if ch.TextDocumentEdit != nil {
n++
}
if ch.CreateFile != nil {
n++
}
if ch.RenameFile != nil {
n++
}
if ch.DeleteFile != nil {
n++
}
return n == 1
}
func (d *DocumentChange) UnmarshalJSON(data []byte) error {
var m map[string]any
if err := json.Unmarshal(data, &m); err != nil {
return err
}
if _, ok := m["textDocument"]; ok {
d.TextDocumentEdit = new(TextDocumentEdit)
return json.Unmarshal(data, d.TextDocumentEdit)
}
// The {Create,Rename,Delete}File types all share a 'kind' field.
kind := m["kind"]
switch kind {
case "create":
d.CreateFile = new(CreateFile)
return json.Unmarshal(data, d.CreateFile)
case "rename":
d.RenameFile = new(RenameFile)
return json.Unmarshal(data, d.RenameFile)
case "delete":
d.DeleteFile = new(DeleteFile)
return json.Unmarshal(data, d.DeleteFile)
}
return fmt.Errorf("DocumentChanges: unexpected kind: %q", kind)
}
func (d *DocumentChange) MarshalJSON() ([]byte, error) {
if d.TextDocumentEdit != nil {
return json.Marshal(d.TextDocumentEdit)
} else if d.CreateFile != nil {
return json.Marshal(d.CreateFile)
} else if d.RenameFile != nil {
return json.Marshal(d.RenameFile)
} else if d.DeleteFile != nil {
return json.Marshal(d.DeleteFile)
}
return nil, fmt.Errorf("empty DocumentChanges union value")
}
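A decoding sketch, assuming the same package (decodeRenameExample is hypothetical, and the oldUri/newUri field casing follows the LSP specification): the "kind" discriminator is what UnmarshalJSON above switches on.

func decodeRenameExample() (*RenameFile, error) {
	raw := []byte(`{"kind":"rename","oldUri":"file:///tmp/old.go","newUri":"file:///tmp/new.go"}`)
	var ch DocumentChange
	if err := json.Unmarshal(raw, &ch); err != nil {
		return nil, err
	}
	// Exactly one branch of the union should be populated after decoding.
	if !ch.Valid() || ch.RenameFile == nil {
		return nil, fmt.Errorf("expected a rename operation")
	}
	return ch.RenameFile, nil
}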

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,218 +0,0 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package protocol
// This file declares URI, DocumentUri, and its methods.
//
// For the LSP definition of these types, see
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#uri
import (
"fmt"
"net/url"
"path/filepath"
"strings"
"unicode"
)
// A DocumentUri is the URI of a client editor document.
//
// According to the LSP specification:
//
// Care should be taken to handle encoding in URIs. For
// example, some clients (such as VS Code) may encode colons
// in drive letters while others do not. The URIs below are
// both valid, but clients and servers should be consistent
// with the form they use themselves to ensure the other party
// doesn't interpret them as distinct URIs. Clients and
// servers should not assume that each other are encoding the
// same way (for example a client encoding colons in drive
// letters cannot assume server responses will have encoded
// colons). The same applies to casing of drive letters - one
// party should not assume the other party will return paths
// with drive letters cased the same as it.
//
// file:///c:/project/readme.md
// file:///C%3A/project/readme.md
//
// This is done during JSON unmarshalling;
// see [DocumentUri.UnmarshalText] for details.
type DocumentUri string
// A URI is an arbitrary URL (e.g. https), not necessarily a file.
type URI = string
// UnmarshalText implements decoding of DocumentUri values.
//
// In particular, it implements a systematic correction of various odd
// features of the definition of DocumentUri in the LSP spec that
// appear to be workarounds for bugs in VS Code. For example, it may
// URI-encode the URI itself, so that colon becomes %3A, and it may
// send file://foo.go URIs that have two slashes (not three) and no
// hostname.
//
// We use UnmarshalText, not UnmarshalJSON, because it is called even
// for non-addressable values such as keys and values of map[K]V,
// where there is no pointer of type *K or *V on which to call
// UnmarshalJSON. (See Go issue #28189 for more detail.)
//
// Non-empty DocumentUris are valid "file"-scheme URIs.
// The empty DocumentUri is valid.
func (uri *DocumentUri) UnmarshalText(data []byte) (err error) {
*uri, err = ParseDocumentUri(string(data))
return
}
// Path returns the file path for the given URI.
//
// DocumentUri("").Path() returns the empty string.
//
// Path panics if called on a URI that is not a valid filename.
func (uri DocumentUri) Path() string {
filename, err := filename(uri)
if err != nil {
// e.g. ParseRequestURI failed.
//
// This can only affect DocumentUris created by
// direct string manipulation; all DocumentUris
// received from the client pass through
// ParseRequestURI, which ensures validity.
panic(err)
}
return filepath.FromSlash(filename)
}
// Dir returns the URI for the directory containing the receiver.
func (uri DocumentUri) Dir() DocumentUri {
// This function could be more efficiently implemented by avoiding any call
// to Path(), but at least consolidates URI manipulation.
return URIFromPath(uri.DirPath())
}
// DirPath returns the file path to the directory containing this URI, which
// must be a file URI.
func (uri DocumentUri) DirPath() string {
return filepath.Dir(uri.Path())
}
func filename(uri DocumentUri) (string, error) {
if uri == "" {
return "", nil
}
// This conservative check for the common case
// of a simple non-empty absolute POSIX filename
// avoids the allocation of a net.URL.
if strings.HasPrefix(string(uri), "file:///") {
rest := string(uri)[len("file://"):] // leave one slash
for i := range len(rest) {
b := rest[i]
// Reject these cases:
if b < ' ' || b == 0x7f || // control character
b == '%' || b == '+' || // URI escape
b == ':' || // Windows drive letter
b == '@' || b == '&' || b == '?' { // authority or query
goto slow
}
}
return rest, nil
}
slow:
u, err := url.ParseRequestURI(string(uri))
if err != nil {
return "", err
}
if u.Scheme != fileScheme {
return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri)
}
// If the URI is a Windows URI, we trim the leading "/" and uppercase
// the drive letter, which will never be case sensitive.
if isWindowsDriveURIPath(u.Path) {
u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:]
}
return u.Path, nil
}
// ParseDocumentUri interprets a string as a DocumentUri, applying VS
// Code workarounds; see [DocumentUri.UnmarshalText] for details.
func ParseDocumentUri(s string) (DocumentUri, error) {
if s == "" {
return "", nil
}
if !strings.HasPrefix(s, "file://") {
return "", fmt.Errorf("DocumentUri scheme is not 'file': %s", s)
}
// VS Code sends URLs with only two slashes,
// which are invalid. golang/go#39789.
if !strings.HasPrefix(s, "file:///") {
s = "file:///" + s[len("file://"):]
}
// Even though the input is a URI, it may not be in canonical form. VS Code
// in particular over-escapes :, @, etc. Unescape and re-encode to canonicalize.
path, err := url.PathUnescape(s[len("file://"):])
if err != nil {
return "", err
}
// File URIs from Windows may have lowercase drive letters.
// Since drive letters are guaranteed to be case insensitive,
// we change them to uppercase to remain consistent.
// For example, file:///c:/x/y/z becomes file:///C:/x/y/z.
if isWindowsDriveURIPath(path) {
path = path[:1] + strings.ToUpper(string(path[1])) + path[2:]
}
u := url.URL{Scheme: fileScheme, Path: path}
return DocumentUri(u.String()), nil
}
// URIFromPath returns DocumentUri for the supplied file path.
// Given "", it returns "".
func URIFromPath(path string) DocumentUri {
if path == "" {
return ""
}
if !isWindowsDrivePath(path) {
if abs, err := filepath.Abs(path); err == nil {
path = abs
}
}
// Check the file path again, in case it became absolute.
if isWindowsDrivePath(path) {
path = "/" + strings.ToUpper(string(path[0])) + path[1:]
}
path = filepath.ToSlash(path)
u := url.URL{
Scheme: fileScheme,
Path: path,
}
return DocumentUri(u.String())
}
const fileScheme = "file"
// isWindowsDrivePath returns true if the file path is of the form used by
// Windows. We check if the path begins with a drive letter, followed by a ":".
// For example: C:/x/y/z.
func isWindowsDrivePath(path string) bool {
if len(path) < 3 {
return false
}
return unicode.IsLetter(rune(path[0])) && path[1] == ':'
}
// isWindowsDriveURIPath returns true if the file URI is of the format used by
// Windows URIs. The url.Parse package does not specially handle Windows paths
// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:").
func isWindowsDriveURIPath(uri string) bool {
if len(uri) < 4 {
return false
}
return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':'
}
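A round-trip sketch, assuming the same package and a Unix-style absolute path (uriRoundTrip is hypothetical): URIFromPath and DocumentUri.Path invert each other, with the Windows drive-letter normalization described above applied when relevant.

func uriRoundTrip() {
	uri := URIFromPath("/home/user/project/main.go")
	fmt.Println(uri)           // file:///home/user/project/main.go
	fmt.Println(uri.Path())    // /home/user/project/main.go (OS-native separators)
	fmt.Println(uri.DirPath()) // /home/user/project
}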

View File

@@ -1,272 +0,0 @@
package lsp
import (
"bufio"
"context"
"encoding/json"
"fmt"
"io"
"strings"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/logging"
)
// Write writes an LSP message to the given writer
func WriteMessage(w io.Writer, msg *Message) error {
data, err := json.Marshal(msg)
if err != nil {
return fmt.Errorf("failed to marshal message: %w", err)
}
cnf := config.Get()
if cnf.DebugLSP {
logging.Debug("Sending message to server", "method", msg.Method, "id", msg.ID)
}
_, err = fmt.Fprintf(w, "Content-Length: %d\r\n\r\n", len(data))
if err != nil {
return fmt.Errorf("failed to write header: %w", err)
}
_, err = w.Write(data)
if err != nil {
return fmt.Errorf("failed to write message: %w", err)
}
return nil
}
// ReadMessage reads a single LSP message from the given reader
func ReadMessage(r *bufio.Reader) (*Message, error) {
cnf := config.Get()
// Read headers
var contentLength int
for {
line, err := r.ReadString('\n')
if err != nil {
return nil, fmt.Errorf("failed to read header: %w", err)
}
line = strings.TrimSpace(line)
if cnf.DebugLSP {
logging.Debug("Received header", "line", line)
}
if line == "" {
break // End of headers
}
if strings.HasPrefix(line, "Content-Length: ") {
_, err := fmt.Sscanf(line, "Content-Length: %d", &contentLength)
if err != nil {
return nil, fmt.Errorf("invalid Content-Length: %w", err)
}
}
}
if cnf.DebugLSP {
logging.Debug("Content-Length", "length", contentLength)
}
// Read content
content := make([]byte, contentLength)
_, err := io.ReadFull(r, content)
if err != nil {
return nil, fmt.Errorf("failed to read content: %w", err)
}
if cnf.DebugLSP {
logging.Debug("Received content", "content", string(content))
}
// Parse message
var msg Message
if err := json.Unmarshal(content, &msg); err != nil {
return nil, fmt.Errorf("failed to unmarshal message: %w", err)
}
return &msg, nil
}
// handleMessages reads and dispatches messages in a loop
func (c *Client) handleMessages() {
cnf := config.Get()
for {
msg, err := ReadMessage(c.stdout)
if err != nil {
if cnf.DebugLSP {
logging.Error("Error reading message", "error", err)
}
return
}
// Handle server->client request (has both Method and ID)
if msg.Method != "" && msg.ID != 0 {
if cnf.DebugLSP {
logging.Debug("Received request from server", "method", msg.Method, "id", msg.ID)
}
response := &Message{
JSONRPC: "2.0",
ID: msg.ID,
}
// Look up handler for this method
c.serverHandlersMu.RLock()
handler, ok := c.serverRequestHandlers[msg.Method]
c.serverHandlersMu.RUnlock()
if ok {
result, err := handler(msg.Params)
if err != nil {
response.Error = &ResponseError{
Code: -32603,
Message: err.Error(),
}
} else {
rawJSON, err := json.Marshal(result)
if err != nil {
response.Error = &ResponseError{
Code: -32603,
Message: fmt.Sprintf("failed to marshal response: %v", err),
}
} else {
response.Result = rawJSON
}
}
} else {
response.Error = &ResponseError{
Code: -32601,
Message: fmt.Sprintf("method not found: %s", msg.Method),
}
}
// Send response back to server
if err := WriteMessage(c.stdin, response); err != nil {
logging.Error("Error sending response to server", "error", err)
}
continue
}
// Handle notification (has Method but no ID)
if msg.Method != "" && msg.ID == 0 {
c.notificationMu.RLock()
handler, ok := c.notificationHandlers[msg.Method]
c.notificationMu.RUnlock()
if ok {
if cnf.DebugLSP {
logging.Debug("Handling notification", "method", msg.Method)
}
go handler(msg.Params)
} else if cnf.DebugLSP {
logging.Debug("No handler for notification", "method", msg.Method)
}
continue
}
// Handle response to our request (has ID but no Method)
if msg.ID != 0 && msg.Method == "" {
c.handlersMu.RLock()
ch, ok := c.handlers[msg.ID]
c.handlersMu.RUnlock()
if ok {
if cnf.DebugLSP {
logging.Debug("Received response for request", "id", msg.ID)
}
ch <- msg
close(ch)
} else if cnf.DebugLSP {
logging.Debug("No handler for response", "id", msg.ID)
}
}
}
}
// Call makes a request and waits for the response
func (c *Client) Call(ctx context.Context, method string, params any, result any) error {
cnf := config.Get()
id := c.nextID.Add(1)
if cnf.DebugLSP {
logging.Debug("Making call", "method", method, "id", id)
}
msg, err := NewRequest(id, method, params)
if err != nil {
return fmt.Errorf("failed to create request: %w", err)
}
// Create response channel
ch := make(chan *Message, 1)
c.handlersMu.Lock()
c.handlers[id] = ch
c.handlersMu.Unlock()
defer func() {
c.handlersMu.Lock()
delete(c.handlers, id)
c.handlersMu.Unlock()
}()
// Send request
if err := WriteMessage(c.stdin, msg); err != nil {
return fmt.Errorf("failed to send request: %w", err)
}
if cnf.DebugLSP {
logging.Debug("Request sent", "method", method, "id", id)
}
// Wait for the response or context cancellation
var resp *Message
select {
case resp = <-ch:
case <-ctx.Done():
return ctx.Err()
}
if cnf.DebugLSP {
logging.Debug("Received response", "id", id)
}
if resp.Error != nil {
return fmt.Errorf("request failed: %s (code: %d)", resp.Error.Message, resp.Error.Code)
}
if result != nil {
// If result is a json.RawMessage, just copy the raw bytes
if rawMsg, ok := result.(*json.RawMessage); ok {
*rawMsg = resp.Result
return nil
}
// Otherwise unmarshal into the provided type
if err := json.Unmarshal(resp.Result, result); err != nil {
return fmt.Errorf("failed to unmarshal result: %w", err)
}
}
return nil
}
// Notify sends a notification (a request without an ID that doesn't expect a response)
func (c *Client) Notify(ctx context.Context, method string, params any) error {
cnf := config.Get()
if cnf.DebugLSP {
logging.Debug("Sending notification", "method", method)
}
msg, err := NewNotification(method, params)
if err != nil {
return fmt.Errorf("failed to create notification: %w", err)
}
if err := WriteMessage(c.stdin, msg); err != nil {
return fmt.Errorf("failed to send notification: %w", err)
}
return nil
}
type (
NotificationHandler func(params json.RawMessage)
ServerRequestHandler func(params json.RawMessage) (any, error)
)
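A framing sketch, assuming the same package, an extra bytes import, and an initialized config since both helpers consult config.Get() (framingRoundTrip is hypothetical): WriteMessage and ReadMessage agree on the LSP wire format of a Content-Length header, a blank line, then the JSON body, so a message survives a round trip through an in-memory buffer.

func framingRoundTrip() (string, error) {
	msg, err := NewNotification("initialized", struct{}{})
	if err != nil {
		return "", err
	}
	var buf bytes.Buffer
	// Writes "Content-Length: <n>\r\n\r\n" followed by the JSON payload.
	if err := WriteMessage(&buf, msg); err != nil {
		return "", err
	}
	decoded, err := ReadMessage(bufio.NewReader(&buf))
	if err != nil {
		return "", err
	}
	return decoded.Method, nil // "initialized"
}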

View File

@@ -1,239 +0,0 @@
package util
import (
"bytes"
"fmt"
"os"
"sort"
"strings"
"github.com/kujtimiihoxha/opencode/internal/lsp/protocol"
)
func applyTextEdits(uri protocol.DocumentUri, edits []protocol.TextEdit) error {
path := strings.TrimPrefix(string(uri), "file://")
// Read the file content
content, err := os.ReadFile(path)
if err != nil {
return fmt.Errorf("failed to read file: %w", err)
}
// Detect line ending style
var lineEnding string
if bytes.Contains(content, []byte("\r\n")) {
lineEnding = "\r\n"
} else {
lineEnding = "\n"
}
// Track if file ends with a newline
endsWithNewline := len(content) > 0 && bytes.HasSuffix(content, []byte(lineEnding))
// Split into lines without the endings
lines := strings.Split(string(content), lineEnding)
// Check for overlapping edits
for i, edit1 := range edits {
for j := i + 1; j < len(edits); j++ {
if rangesOverlap(edit1.Range, edits[j].Range) {
return fmt.Errorf("overlapping edits detected between edit %d and %d", i, j)
}
}
}
// Sort edits in reverse order
sortedEdits := make([]protocol.TextEdit, len(edits))
copy(sortedEdits, edits)
sort.Slice(sortedEdits, func(i, j int) bool {
if sortedEdits[i].Range.Start.Line != sortedEdits[j].Range.Start.Line {
return sortedEdits[i].Range.Start.Line > sortedEdits[j].Range.Start.Line
}
return sortedEdits[i].Range.Start.Character > sortedEdits[j].Range.Start.Character
})
// Apply each edit
for _, edit := range sortedEdits {
newLines, err := applyTextEdit(lines, edit)
if err != nil {
return fmt.Errorf("failed to apply edit: %w", err)
}
lines = newLines
}
// Join lines with proper line endings
var newContent strings.Builder
for i, line := range lines {
if i > 0 {
newContent.WriteString(lineEnding)
}
newContent.WriteString(line)
}
// Only add a newline if the original file had one and we haven't already added it
if endsWithNewline && !strings.HasSuffix(newContent.String(), lineEnding) {
newContent.WriteString(lineEnding)
}
if err := os.WriteFile(path, []byte(newContent.String()), 0o644); err != nil {
return fmt.Errorf("failed to write file: %w", err)
}
return nil
}
func applyTextEdit(lines []string, edit protocol.TextEdit) ([]string, error) {
startLine := int(edit.Range.Start.Line)
endLine := int(edit.Range.End.Line)
startChar := int(edit.Range.Start.Character)
endChar := int(edit.Range.End.Character)
// Validate positions
if startLine < 0 || startLine >= len(lines) {
return nil, fmt.Errorf("invalid start line: %d", startLine)
}
if endLine < 0 || endLine >= len(lines) {
endLine = len(lines) - 1
}
// Create result slice with initial capacity
result := make([]string, 0, len(lines))
// Copy lines before edit
result = append(result, lines[:startLine]...)
// Get the prefix of the start line
startLineContent := lines[startLine]
if startChar < 0 || startChar > len(startLineContent) {
startChar = len(startLineContent)
}
prefix := startLineContent[:startChar]
// Get the suffix of the end line
endLineContent := lines[endLine]
if endChar < 0 || endChar > len(endLineContent) {
endChar = len(endLineContent)
}
suffix := endLineContent[endChar:]
// Handle the edit
if edit.NewText == "" {
if prefix+suffix != "" {
result = append(result, prefix+suffix)
}
} else {
// Split new text into lines, being careful not to add extra newlines
// newLines := strings.Split(strings.TrimRight(edit.NewText, "\n"), "\n")
newLines := strings.Split(edit.NewText, "\n")
if len(newLines) == 1 {
// Single line change
result = append(result, prefix+newLines[0]+suffix)
} else {
// Multi-line change
result = append(result, prefix+newLines[0])
result = append(result, newLines[1:len(newLines)-1]...)
result = append(result, newLines[len(newLines)-1]+suffix)
}
}
// Add remaining lines
if endLine+1 < len(lines) {
result = append(result, lines[endLine+1:]...)
}
return result, nil
}
// applyDocumentChange applies a DocumentChange (create/rename/delete operations)
func applyDocumentChange(change protocol.DocumentChange) error {
if change.CreateFile != nil {
path := strings.TrimPrefix(string(change.CreateFile.URI), "file://")
if change.CreateFile.Options != nil {
if change.CreateFile.Options.Overwrite {
// Proceed with overwrite
} else if change.CreateFile.Options.IgnoreIfExists {
if _, err := os.Stat(path); err == nil {
return nil // File exists and we're ignoring it
}
}
}
if err := os.WriteFile(path, []byte(""), 0o644); err != nil {
return fmt.Errorf("failed to create file: %w", err)
}
}
if change.DeleteFile != nil {
path := strings.TrimPrefix(string(change.DeleteFile.URI), "file://")
if change.DeleteFile.Options != nil && change.DeleteFile.Options.Recursive {
if err := os.RemoveAll(path); err != nil {
return fmt.Errorf("failed to delete directory recursively: %w", err)
}
} else {
if err := os.Remove(path); err != nil {
return fmt.Errorf("failed to delete file: %w", err)
}
}
}
if change.RenameFile != nil {
oldPath := strings.TrimPrefix(string(change.RenameFile.OldURI), "file://")
newPath := strings.TrimPrefix(string(change.RenameFile.NewURI), "file://")
if change.RenameFile.Options != nil {
if !change.RenameFile.Options.Overwrite {
if _, err := os.Stat(newPath); err == nil {
return fmt.Errorf("target file already exists and overwrite is not allowed: %s", newPath)
}
}
}
if err := os.Rename(oldPath, newPath); err != nil {
return fmt.Errorf("failed to rename file: %w", err)
}
}
if change.TextDocumentEdit != nil {
textEdits := make([]protocol.TextEdit, len(change.TextDocumentEdit.Edits))
for i, edit := range change.TextDocumentEdit.Edits {
var err error
textEdits[i], err = edit.AsTextEdit()
if err != nil {
return fmt.Errorf("invalid edit type: %w", err)
}
}
return applyTextEdits(change.TextDocumentEdit.TextDocument.URI, textEdits)
}
return nil
}
// ApplyWorkspaceEdit applies the given WorkspaceEdit to the filesystem
func ApplyWorkspaceEdit(edit protocol.WorkspaceEdit) error {
// Handle Changes field
for uri, textEdits := range edit.Changes {
if err := applyTextEdits(uri, textEdits); err != nil {
return fmt.Errorf("failed to apply text edits: %w", err)
}
}
// Handle DocumentChanges field
for _, change := range edit.DocumentChanges {
if err := applyDocumentChange(change); err != nil {
return fmt.Errorf("failed to apply document change: %w", err)
}
}
return nil
}
func rangesOverlap(r1, r2 protocol.Range) bool {
if r1.Start.Line > r2.End.Line || r2.Start.Line > r1.End.Line {
return false
}
if r1.Start.Line == r2.End.Line && r1.Start.Character > r2.End.Character {
return false
}
if r2.Start.Line == r1.End.Line && r2.Start.Character > r1.End.Character {
return false
}
return true
}
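A usage sketch, assuming the same util package and that the target file already exists on disk (prependToFirstLine is hypothetical; the Position field names follow the LSP specification): a zero-width range expressed as a WorkspaceEdit and applied through ApplyWorkspaceEdit above.

func prependToFirstLine(path, text string) error {
	uri := protocol.URIFromPath(path)
	edit := protocol.WorkspaceEdit{
		Changes: map[protocol.DocumentUri][]protocol.TextEdit{
			uri: {{
				// A zero-width range at line 0, character 0 acts as an insertion.
				Range: protocol.Range{
					Start: protocol.Position{Line: 0, Character: 0},
					End:   protocol.Position{Line: 0, Character: 0},
				},
				NewText: text,
			}},
		},
	}
	return ApplyWorkspaceEdit(edit)
}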

View File

@@ -1,981 +0,0 @@
package watcher
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"sync"
"time"
"github.com/bmatcuk/doublestar/v4"
"github.com/fsnotify/fsnotify"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/logging"
"github.com/kujtimiihoxha/opencode/internal/lsp"
"github.com/kujtimiihoxha/opencode/internal/lsp/protocol"
)
// WorkspaceWatcher manages LSP file watching
type WorkspaceWatcher struct {
client *lsp.Client
workspacePath string
debounceTime time.Duration
debounceMap map[string]*time.Timer
debounceMu sync.Mutex
// File watchers registered by the server
registrations []protocol.FileSystemWatcher
registrationMu sync.RWMutex
}
// NewWorkspaceWatcher creates a new workspace watcher
func NewWorkspaceWatcher(client *lsp.Client) *WorkspaceWatcher {
return &WorkspaceWatcher{
client: client,
debounceTime: 300 * time.Millisecond,
debounceMap: make(map[string]*time.Timer),
registrations: []protocol.FileSystemWatcher{},
}
}
// AddRegistrations adds file watchers to track
func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watchers []protocol.FileSystemWatcher) {
cnf := config.Get()
logging.Debug("Adding file watcher registrations")
w.registrationMu.Lock()
defer w.registrationMu.Unlock()
// Add new watchers
w.registrations = append(w.registrations, watchers...)
// Print detailed registration information for debugging
if cnf.DebugLSP {
logging.Debug("Adding file watcher registrations",
"id", id,
"watchers", len(watchers),
"total", len(w.registrations),
)
for i, watcher := range watchers {
logging.Debug("Registration", "index", i+1)
// Log the GlobPattern
switch v := watcher.GlobPattern.Value.(type) {
case string:
logging.Debug("GlobPattern", "pattern", v)
case protocol.RelativePattern:
logging.Debug("GlobPattern", "pattern", v.Pattern)
// Log BaseURI details
switch u := v.BaseURI.Value.(type) {
case string:
logging.Debug("BaseURI", "baseURI", u)
case protocol.DocumentUri:
logging.Debug("BaseURI", "baseURI", u)
default:
logging.Debug("BaseURI", "baseURI", u)
}
default:
logging.Debug("GlobPattern", "unknown type", fmt.Sprintf("%T", v))
}
// Log WatchKind
watchKind := protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
if watcher.Kind != nil {
watchKind = *watcher.Kind
}
logging.Debug("WatchKind", "kind", watchKind)
}
}
// Determine server type for specialized handling
serverName := getServerNameFromContext(ctx)
logging.Debug("Server type detected", "serverName", serverName)
// Check if this server has sent file watchers
hasFileWatchers := len(watchers) > 0
// For servers that need file preloading, we'll use a smart approach
if shouldPreloadFiles(serverName) || !hasFileWatchers {
go func() {
startTime := time.Now()
filesOpened := 0
// Determine max files to open based on server type
maxFilesToOpen := 50 // Default conservative limit
switch serverName {
case "typescript", "typescript-language-server", "tsserver", "vtsls":
// TypeScript servers benefit from seeing more files
maxFilesToOpen = 100
case "java", "jdtls":
// Java servers need to see many files for project model
maxFilesToOpen = 200
}
// First, open high-priority files
highPriorityFilesOpened := w.openHighPriorityFiles(ctx, serverName)
filesOpened += highPriorityFilesOpened
if cnf.DebugLSP {
logging.Debug("Opened high-priority files",
"count", highPriorityFilesOpened,
"serverName", serverName)
}
// If we've already opened enough high-priority files, we might not need more
if filesOpened >= maxFilesToOpen {
if cnf.DebugLSP {
logging.Debug("Reached file limit with high-priority files",
"filesOpened", filesOpened,
"maxFiles", maxFilesToOpen)
}
return
}
// For the remaining slots, walk the directory and open matching files
err := filepath.WalkDir(w.workspacePath, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
// Skip directories that should be excluded
if d.IsDir() {
if path != w.workspacePath && shouldExcludeDir(path) {
if cnf.DebugLSP {
logging.Debug("Skipping excluded directory", "path", path)
}
return filepath.SkipDir
}
} else {
// Process files, but limit the total number
if filesOpened < maxFilesToOpen {
// Only process if it's not already open (high-priority files were opened earlier)
if !w.client.IsFileOpen(path) {
w.openMatchingFile(ctx, path)
filesOpened++
// Add a small delay after every 10 files to prevent overwhelming the server
if filesOpened%10 == 0 {
time.Sleep(50 * time.Millisecond)
}
}
} else {
// We've reached our limit, stop walking
return filepath.SkipAll
}
}
return nil
})
elapsedTime := time.Since(startTime)
if cnf.DebugLSP {
logging.Debug("Limited workspace scan complete",
"filesOpened", filesOpened,
"maxFiles", maxFilesToOpen,
"elapsedTime", elapsedTime.Seconds(),
"workspacePath", w.workspacePath,
)
}
if err != nil && cnf.DebugLSP {
logging.Debug("Error scanning workspace for files to open", "error", err)
}
}()
} else if cnf.DebugLSP {
logging.Debug("Using on-demand file loading for server", "server", serverName)
}
}
// openHighPriorityFiles opens important files for the server type
// Returns the number of files opened
func (w *WorkspaceWatcher) openHighPriorityFiles(ctx context.Context, serverName string) int {
cnf := config.Get()
filesOpened := 0
// Define patterns for high-priority files based on server type
var patterns []string
switch serverName {
case "typescript", "typescript-language-server", "tsserver", "vtsls":
patterns = []string{
"**/tsconfig.json",
"**/package.json",
"**/jsconfig.json",
"**/index.ts",
"**/index.js",
"**/main.ts",
"**/main.js",
}
case "gopls":
patterns = []string{
"**/go.mod",
"**/go.sum",
"**/main.go",
}
case "rust-analyzer":
patterns = []string{
"**/Cargo.toml",
"**/Cargo.lock",
"**/src/lib.rs",
"**/src/main.rs",
}
case "python", "pyright", "pylsp":
patterns = []string{
"**/pyproject.toml",
"**/setup.py",
"**/requirements.txt",
"**/__init__.py",
"**/__main__.py",
}
case "clangd":
patterns = []string{
"**/CMakeLists.txt",
"**/Makefile",
"**/compile_commands.json",
}
case "java", "jdtls":
patterns = []string{
"**/pom.xml",
"**/build.gradle",
"**/src/main/java/**/*.java",
}
default:
// For unknown servers, use common configuration files
patterns = []string{
"**/package.json",
"**/Makefile",
"**/CMakeLists.txt",
"**/.editorconfig",
}
}
// For each pattern, find and open matching files
for _, pattern := range patterns {
// Use doublestar.Glob to find files matching the pattern (supports ** patterns)
matches, err := doublestar.Glob(os.DirFS(w.workspacePath), pattern)
if err != nil {
if cnf.DebugLSP {
logging.Debug("Error finding high-priority files", "pattern", pattern, "error", err)
}
continue
}
for _, match := range matches {
// Convert relative path to absolute
fullPath := filepath.Join(w.workspacePath, match)
// Skip directories and excluded files
info, err := os.Stat(fullPath)
if err != nil || info.IsDir() || shouldExcludeFile(fullPath) {
continue
}
// Open the file
if err := w.client.OpenFile(ctx, fullPath); err != nil {
if cnf.DebugLSP {
logging.Debug("Error opening high-priority file", "path", fullPath, "error", err)
}
} else {
filesOpened++
if cnf.DebugLSP {
logging.Debug("Opened high-priority file", "path", fullPath)
}
}
// Add a small delay to prevent overwhelming the server
time.Sleep(20 * time.Millisecond)
// Limit the number of files opened per pattern
if filesOpened >= 5 && (serverName != "java" && serverName != "jdtls") {
break
}
}
}
return filesOpened
}
// WatchWorkspace sets up file watching for a workspace
func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath string) {
cnf := config.Get()
w.workspacePath = workspacePath
// Store the watcher in the context for later use
ctx = context.WithValue(ctx, "workspaceWatcher", w)
// If the server name isn't already in the context, try to detect it
if _, ok := ctx.Value("serverName").(string); !ok {
serverName := getServerNameFromContext(ctx)
ctx = context.WithValue(ctx, "serverName", serverName)
}
serverName := getServerNameFromContext(ctx)
logging.Debug("Starting workspace watcher", "workspacePath", workspacePath, "serverName", serverName)
// Register handler for file watcher registrations from the server
lsp.RegisterFileWatchHandler(func(id string, watchers []protocol.FileSystemWatcher) {
w.AddRegistrations(ctx, id, watchers)
})
watcher, err := fsnotify.NewWatcher()
if err != nil {
logging.Error("Error creating watcher", "error", err)
return
}
defer watcher.Close()
// Watch the workspace recursively
err = filepath.WalkDir(workspacePath, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
// Skip excluded directories (except workspace root)
if d.IsDir() && path != workspacePath {
if shouldExcludeDir(path) {
if cnf.DebugLSP {
logging.Debug("Skipping excluded directory", "path", path)
}
return filepath.SkipDir
}
}
// Add directories to watcher
if d.IsDir() {
err = watcher.Add(path)
if err != nil {
logging.Error("Error watching path", "path", path, "error", err)
}
}
return nil
})
if err != nil {
logging.Error("Error walking workspace", "error", err)
}
// Event loop
for {
select {
case <-ctx.Done():
return
case event, ok := <-watcher.Events:
if !ok {
return
}
uri := fmt.Sprintf("file://%s", event.Name)
// Add new directories to the watcher
if event.Op&fsnotify.Create != 0 {
if info, err := os.Stat(event.Name); err == nil {
if info.IsDir() {
// Skip excluded directories
if !shouldExcludeDir(event.Name) {
if err := watcher.Add(event.Name); err != nil {
logging.Error("Error adding directory to watcher", "path", event.Name, "error", err)
}
}
} else {
// For newly created files
if !shouldExcludeFile(event.Name) {
w.openMatchingFile(ctx, event.Name)
}
}
}
}
// Debug logging
if cnf.DebugLSP {
matched, kind := w.isPathWatched(event.Name)
logging.Debug("File event",
"path", event.Name,
"operation", event.Op.String(),
"watched", matched,
"kind", kind,
)
}
// Check if this path should be watched according to server registrations
if watched, watchKind := w.isPathWatched(event.Name); watched {
switch {
case event.Op&fsnotify.Write != 0:
if watchKind&protocol.WatchChange != 0 {
w.debounceHandleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Changed))
}
case event.Op&fsnotify.Create != 0:
// Already handled earlier in the event loop
// Just send the notification if needed
info, err := os.Stat(event.Name)
if err == nil && !info.IsDir() && watchKind&protocol.WatchCreate != 0 {
w.debounceHandleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Created))
}
case event.Op&fsnotify.Remove != 0:
if watchKind&protocol.WatchDelete != 0 {
w.handleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Deleted))
}
case event.Op&fsnotify.Rename != 0:
// For renames, first delete
if watchKind&protocol.WatchDelete != 0 {
w.handleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Deleted))
}
// Then check if the new file exists and create an event
if info, err := os.Stat(event.Name); err == nil && !info.IsDir() {
if watchKind&protocol.WatchCreate != 0 {
w.debounceHandleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Created))
}
}
}
}
case err, ok := <-watcher.Errors:
if !ok {
return
}
logging.Error("Error watching file", "error", err)
}
}
}
// isPathWatched checks if a path should be watched based on server registrations
func (w *WorkspaceWatcher) isPathWatched(path string) (bool, protocol.WatchKind) {
w.registrationMu.RLock()
defer w.registrationMu.RUnlock()
// If no explicit registrations, watch everything
if len(w.registrations) == 0 {
return true, protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
}
// Check each registration
for _, reg := range w.registrations {
isMatch := w.matchesPattern(path, reg.GlobPattern)
if isMatch {
kind := protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
if reg.Kind != nil {
kind = *reg.Kind
}
return true, kind
}
}
return false, 0
}
// matchesGlob handles advanced glob patterns including ** and alternatives
func matchesGlob(pattern, path string) bool {
// Handle file extension patterns with braces like *.{go,mod,sum}
if strings.Contains(pattern, "{") && strings.Contains(pattern, "}") {
// Extract extensions from pattern like "*.{go,mod,sum}"
parts := strings.SplitN(pattern, "{", 2)
if len(parts) == 2 {
prefix := parts[0]
extPart := strings.SplitN(parts[1], "}", 2)
if len(extPart) == 2 {
extensions := strings.Split(extPart[0], ",")
suffix := extPart[1]
// Check if the path matches any of the extensions
for _, ext := range extensions {
extPattern := prefix + ext + suffix
isMatch := matchesSimpleGlob(extPattern, path)
if isMatch {
return true
}
}
return false
}
}
}
return matchesSimpleGlob(pattern, path)
}
// matchesSimpleGlob handles glob patterns with ** wildcards
func matchesSimpleGlob(pattern, path string) bool {
// Handle special case for **/*.ext pattern (common in LSP)
if strings.HasPrefix(pattern, "**/") {
rest := strings.TrimPrefix(pattern, "**/")
// If the rest is a simple file extension pattern like *.go
if strings.HasPrefix(rest, "*.") {
ext := strings.TrimPrefix(rest, "*")
isMatch := strings.HasSuffix(path, ext)
return isMatch
}
// Otherwise, try to check if the path ends with the rest part
isMatch := strings.HasSuffix(path, rest)
// If it matches directly, great!
if isMatch {
return true
}
// Otherwise, check if any path component matches
pathComponents := strings.Split(path, "/")
for i := range pathComponents {
subPath := strings.Join(pathComponents[i:], "/")
if strings.HasSuffix(subPath, rest) {
return true
}
}
return false
}
// Handle other ** wildcard pattern cases
if strings.Contains(pattern, "**") {
parts := strings.Split(pattern, "**")
// Validate the path starts with the first part
if !strings.HasPrefix(path, parts[0]) && parts[0] != "" {
return false
}
// For patterns like "**/*.go", just check the suffix
if len(parts) == 2 && parts[0] == "" {
isMatch := strings.HasSuffix(path, parts[1])
return isMatch
}
// For other patterns, handle middle part
remaining := strings.TrimPrefix(path, parts[0])
if len(parts) == 2 {
isMatch := strings.HasSuffix(remaining, parts[1])
return isMatch
}
}
// Handle simple * wildcard for file extension patterns (*.go, *.sum, etc)
if strings.HasPrefix(pattern, "*.") {
ext := strings.TrimPrefix(pattern, "*")
isMatch := strings.HasSuffix(path, ext)
return isMatch
}
// Fall back to simple matching for simpler patterns
matched, err := filepath.Match(pattern, path)
if err != nil {
logging.Error("Error matching pattern", "pattern", pattern, "path", path, "error", err)
return false
}
return matched
}
// matchesPattern checks if a path matches the glob pattern
func (w *WorkspaceWatcher) matchesPattern(path string, pattern protocol.GlobPattern) bool {
patternInfo, err := pattern.AsPattern()
if err != nil {
logging.Error("Error parsing pattern", "pattern", pattern, "error", err)
return false
}
basePath := patternInfo.GetBasePath()
patternText := patternInfo.GetPattern()
path = filepath.ToSlash(path)
// For simple patterns without base path
if basePath == "" {
// Check if the pattern matches the full path or just the file extension
fullPathMatch := matchesGlob(patternText, path)
baseNameMatch := matchesGlob(patternText, filepath.Base(path))
return fullPathMatch || baseNameMatch
}
// For relative patterns
basePath = strings.TrimPrefix(basePath, "file://")
basePath = filepath.ToSlash(basePath)
// Make path relative to basePath for matching
relPath, err := filepath.Rel(basePath, path)
if err != nil {
logging.Error("Error getting relative path", "path", path, "basePath", basePath, "error", err)
return false
}
relPath = filepath.ToSlash(relPath)
isMatch := matchesGlob(patternText, relPath)
return isMatch
}
// debounceHandleFileEvent handles file events with debouncing to reduce notifications
func (w *WorkspaceWatcher) debounceHandleFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) {
w.debounceMu.Lock()
defer w.debounceMu.Unlock()
// Create a unique key based on URI and change type
key := fmt.Sprintf("%s:%d", uri, changeType)
// Cancel existing timer if any
if timer, exists := w.debounceMap[key]; exists {
timer.Stop()
}
// Create new timer
w.debounceMap[key] = time.AfterFunc(w.debounceTime, func() {
w.handleFileEvent(ctx, uri, changeType)
// Cleanup timer after execution
w.debounceMu.Lock()
delete(w.debounceMap, key)
w.debounceMu.Unlock()
})
}
// handleFileEvent sends file change notifications
func (w *WorkspaceWatcher) handleFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) {
// If the file is open and it's a change event, use didChange notification
filePath := uri[7:] // Remove "file://" prefix
if changeType == protocol.FileChangeType(protocol.Changed) && w.client.IsFileOpen(filePath) {
err := w.client.NotifyChange(ctx, filePath)
if err != nil {
logging.Error("Error notifying change", "error", err)
}
return
}
// Notify LSP server about the file event using didChangeWatchedFiles
if err := w.notifyFileEvent(ctx, uri, changeType); err != nil {
logging.Error("Error notifying LSP server about file event", "error", err)
}
}
// notifyFileEvent sends a didChangeWatchedFiles notification for a file event
func (w *WorkspaceWatcher) notifyFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) error {
cnf := config.Get()
if cnf.DebugLSP {
logging.Debug("Notifying file event",
"uri", uri,
"changeType", changeType,
)
}
params := protocol.DidChangeWatchedFilesParams{
Changes: []protocol.FileEvent{
{
URI: protocol.DocumentUri(uri),
Type: changeType,
},
},
}
return w.client.DidChangeWatchedFiles(ctx, params)
}
// getServerNameFromContext extracts the server name from the context
// This is a best-effort function that tries to identify which LSP server we're dealing with
func getServerNameFromContext(ctx context.Context) string {
// First check if the server name is directly stored in the context
if serverName, ok := ctx.Value("serverName").(string); ok && serverName != "" {
return strings.ToLower(serverName)
}
// Otherwise, try to extract server name from the client command path
if w, ok := ctx.Value("workspaceWatcher").(*WorkspaceWatcher); ok && w != nil && w.client != nil && w.client.Cmd != nil {
path := strings.ToLower(w.client.Cmd.Path)
// Extract server name from path
if strings.Contains(path, "typescript") || strings.Contains(path, "tsserver") || strings.Contains(path, "vtsls") {
return "typescript"
} else if strings.Contains(path, "gopls") {
return "gopls"
} else if strings.Contains(path, "rust-analyzer") {
return "rust-analyzer"
} else if strings.Contains(path, "pyright") || strings.Contains(path, "pylsp") || strings.Contains(path, "python") {
return "python"
} else if strings.Contains(path, "clangd") {
return "clangd"
} else if strings.Contains(path, "jdtls") || strings.Contains(path, "java") {
return "java"
}
// Return the base name as fallback
return filepath.Base(path)
}
return "unknown"
}
// shouldPreloadFiles determines if we should preload files for a specific language server
// Some servers work better with preloaded files, others don't need it
func shouldPreloadFiles(serverName string) bool {
// TypeScript/JavaScript servers typically need some files preloaded
// to properly resolve imports and provide intellisense
switch serverName {
case "typescript", "typescript-language-server", "tsserver", "vtsls":
return true
case "java", "jdtls":
// Java servers often need to see source files to build the project model
return true
default:
// For most servers, we'll use lazy loading by default
return false
}
}
// Common patterns for directories and files to exclude
// TODO: make configurable
var (
excludedDirNames = map[string]bool{
".git": true,
"node_modules": true,
"dist": true,
"build": true,
"out": true,
"bin": true,
".idea": true,
".vscode": true,
".cache": true,
"coverage": true,
"target": true, // Rust build output
"vendor": true, // Go vendor directory
}
excludedFileExtensions = map[string]bool{
".swp": true,
".swo": true,
".tmp": true,
".temp": true,
".bak": true,
".log": true,
".o": true, // Object files
".so": true, // Shared libraries
".dylib": true, // macOS shared libraries
".dll": true, // Windows shared libraries
".a": true, // Static libraries
".exe": true, // Windows executables
".lock": true, // Lock files
}
// Large binary files that shouldn't be opened
largeBinaryExtensions = map[string]bool{
".png": true,
".jpg": true,
".jpeg": true,
".gif": true,
".bmp": true,
".ico": true,
".zip": true,
".tar": true,
".gz": true,
".rar": true,
".7z": true,
".pdf": true,
".mp3": true,
".mp4": true,
".mov": true,
".wav": true,
".wasm": true,
}
// Maximum file size to open (5MB)
maxFileSize int64 = 5 * 1024 * 1024
)
// shouldExcludeDir returns true if the directory should be excluded from watching/opening
func shouldExcludeDir(dirPath string) bool {
dirName := filepath.Base(dirPath)
// Skip dot directories
if strings.HasPrefix(dirName, ".") {
return true
}
// Skip common excluded directories
if excludedDirNames[dirName] {
return true
}
return false
}
// shouldExcludeFile returns true if the file should be excluded from opening
func shouldExcludeFile(filePath string) bool {
fileName := filepath.Base(filePath)
cnf := config.Get()
// Skip dot files
if strings.HasPrefix(fileName, ".") {
return true
}
// Check file extension
ext := strings.ToLower(filepath.Ext(filePath))
if excludedFileExtensions[ext] || largeBinaryExtensions[ext] {
return true
}
// Skip temporary files
if strings.HasSuffix(filePath, "~") {
return true
}
// Check file size
info, err := os.Stat(filePath)
if err != nil {
// If we can't stat the file, skip it
return true
}
// Skip large files
if info.Size() > maxFileSize {
if cnf.DebugLSP {
logging.Debug("Skipping large file",
"path", filePath,
"size", info.Size(),
"maxSize", maxFileSize,
"debug", cnf.Debug,
"sizeMB", float64(info.Size())/(1024*1024),
"maxSizeMB", float64(maxFileSize)/(1024*1024),
)
}
return true
}
return false
}
// openMatchingFile opens a file if it matches any of the registered patterns
func (w *WorkspaceWatcher) openMatchingFile(ctx context.Context, path string) {
cnf := config.Get()
// Skip directories
info, err := os.Stat(path)
if err != nil || info.IsDir() {
return
}
// Skip excluded files
if shouldExcludeFile(path) {
return
}
// Check if this path should be watched according to server registrations
if watched, _ := w.isPathWatched(path); watched {
// Get server name for specialized handling
serverName := getServerNameFromContext(ctx)
// Check if the file is a high-priority file that should be opened immediately
// This helps with project initialization for certain language servers
if isHighPriorityFile(path, serverName) {
if cnf.DebugLSP {
logging.Debug("Opening high-priority file", "path", path, "serverName", serverName)
}
if err := w.client.OpenFile(ctx, path); err != nil && cnf.DebugLSP {
logging.Error("Error opening high-priority file", "path", path, "error", err)
}
return
}
// For non-high-priority files, we'll use different strategies based on server type
if shouldPreloadFiles(serverName) {
// For servers that benefit from preloading, open files but with limits
// Check file size - for preloading we're more conservative
if info.Size() > (1 * 1024 * 1024) { // 1MB limit for preloaded files
if cnf.DebugLSP {
logging.Debug("Skipping large file for preloading", "path", path, "size", info.Size())
}
return
}
// Check file extension for common source files
ext := strings.ToLower(filepath.Ext(path))
// Only preload source files for the specific language
shouldOpen := false
switch serverName {
case "typescript", "typescript-language-server", "tsserver", "vtsls":
shouldOpen = ext == ".ts" || ext == ".js" || ext == ".tsx" || ext == ".jsx"
case "gopls":
shouldOpen = ext == ".go"
case "rust-analyzer":
shouldOpen = ext == ".rs"
case "python", "pyright", "pylsp":
shouldOpen = ext == ".py"
case "clangd":
shouldOpen = ext == ".c" || ext == ".cpp" || ext == ".h" || ext == ".hpp"
case "java", "jdtls":
shouldOpen = ext == ".java"
default:
// For unknown servers, be conservative
shouldOpen = false
}
if shouldOpen {
// Don't need to check if it's already open - the client.OpenFile handles that
if err := w.client.OpenFile(ctx, path); err != nil && cnf.DebugLSP {
logging.Error("Error opening file", "path", path, "error", err)
}
}
}
}
}
// isHighPriorityFile determines if a file should be opened immediately
// regardless of the preloading strategy
func isHighPriorityFile(path string, serverName string) bool {
fileName := filepath.Base(path)
ext := filepath.Ext(path)
switch serverName {
case "typescript", "typescript-language-server", "tsserver", "vtsls":
// For TypeScript, we want to open configuration files immediately
return fileName == "tsconfig.json" ||
fileName == "package.json" ||
fileName == "jsconfig.json" ||
// Also open main entry points
fileName == "index.ts" ||
fileName == "index.js" ||
fileName == "main.ts" ||
fileName == "main.js"
case "gopls":
// For Go, we want to open go.mod files immediately
return fileName == "go.mod" ||
fileName == "go.sum" ||
// Also open main.go files
fileName == "main.go"
case "rust-analyzer":
// For Rust, we want to open Cargo.toml files immediately
return fileName == "Cargo.toml" ||
fileName == "Cargo.lock" ||
// Also open lib.rs and main.rs
fileName == "lib.rs" ||
fileName == "main.rs"
case "python", "pyright", "pylsp":
// For Python, open key project files
return fileName == "pyproject.toml" ||
fileName == "setup.py" ||
fileName == "requirements.txt" ||
fileName == "__init__.py" ||
fileName == "__main__.py"
case "clangd":
// For C/C++, open key project files
return fileName == "CMakeLists.txt" ||
fileName == "Makefile" ||
fileName == "compile_commands.json"
case "java", "jdtls":
// For Java, open key project files
return fileName == "pom.xml" ||
fileName == "build.gradle" ||
ext == ".java" // Java servers often need to see source files
}
// For unknown servers, prioritize common configuration files
return fileName == "package.json" ||
fileName == "Makefile" ||
fileName == "CMakeLists.txt" ||
fileName == ".editorconfig"
}
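
As a reference point for the preload gating above, here is a minimal standalone sketch; the serverExts table and shouldPreload helper are hypothetical stand-ins for the per-server switch in openMatchingFile, not part of the watcher itself.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// serverExts is an illustrative, table-driven version of the per-server
// switch above: it maps an LSP server name to the source extensions worth
// preloading for it.
var serverExts = map[string][]string{
	"gopls":         {".go"},
	"rust-analyzer": {".rs"},
	"vtsls":         {".ts", ".tsx", ".js", ".jsx"},
}

// shouldPreload reports whether path looks like a source file the given
// server benefits from having open ahead of time.
func shouldPreload(serverName, path string) bool {
	ext := strings.ToLower(filepath.Ext(path))
	for _, e := range serverExts[serverName] {
		if e == ext {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(shouldPreload("gopls", "pkg/server/server.go")) // true
	fmt.Println(shouldPreload("gopls", "README.md"))            // false
}

A table keeps the mapping easy to extend for new servers, at the cost of losing the per-server size limits and config-file special cases handled above.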


@@ -1,324 +0,0 @@
package message
import (
"encoding/base64"
"slices"
"time"
"github.com/kujtimiihoxha/opencode/internal/llm/models"
)
type MessageRole string
const (
Assistant MessageRole = "assistant"
User MessageRole = "user"
System MessageRole = "system"
Tool MessageRole = "tool"
)
type FinishReason string
const (
FinishReasonEndTurn FinishReason = "end_turn"
FinishReasonMaxTokens FinishReason = "max_tokens"
FinishReasonToolUse FinishReason = "tool_use"
FinishReasonCanceled FinishReason = "canceled"
FinishReasonError FinishReason = "error"
FinishReasonPermissionDenied FinishReason = "permission_denied"
// Should never happen
FinishReasonUnknown FinishReason = "unknown"
)
type ContentPart interface {
isPart()
}
type ReasoningContent struct {
Thinking string `json:"thinking"`
}
func (tc ReasoningContent) String() string {
return tc.Thinking
}
func (ReasoningContent) isPart() {}
type TextContent struct {
Text string `json:"text"`
}
func (tc TextContent) String() string {
return tc.Text
}
func (TextContent) isPart() {}
type ImageURLContent struct {
URL string `json:"url"`
Detail string `json:"detail,omitempty"`
}
func (iuc ImageURLContent) String() string {
return iuc.URL
}
func (ImageURLContent) isPart() {}
type BinaryContent struct {
MIMEType string
Data []byte
}
func (bc BinaryContent) String() string {
base64Encoded := base64.StdEncoding.EncodeToString(bc.Data)
return "data:" + bc.MIMEType + ";base64," + base64Encoded
}
func (BinaryContent) isPart() {}
type ToolCall struct {
ID string `json:"id"`
Name string `json:"name"`
Input string `json:"input"`
Type string `json:"type"`
Finished bool `json:"finished"`
}
func (ToolCall) isPart() {}
type ToolResult struct {
ToolCallID string `json:"tool_call_id"`
Name string `json:"name"`
Content string `json:"content"`
Metadata string `json:"metadata"`
IsError bool `json:"is_error"`
}
func (ToolResult) isPart() {}
type Finish struct {
Reason FinishReason `json:"reason"`
Time int64 `json:"time"`
}
func (Finish) isPart() {}
type Message struct {
ID string
Role MessageRole
SessionID string
Parts []ContentPart
Model models.ModelID
CreatedAt int64
UpdatedAt int64
}
func (m *Message) Content() TextContent {
for _, part := range m.Parts {
if c, ok := part.(TextContent); ok {
return c
}
}
return TextContent{}
}
func (m *Message) ReasoningContent() ReasoningContent {
for _, part := range m.Parts {
if c, ok := part.(ReasoningContent); ok {
return c
}
}
return ReasoningContent{}
}
func (m *Message) ImageURLContent() []ImageURLContent {
imageURLContents := make([]ImageURLContent, 0)
for _, part := range m.Parts {
if c, ok := part.(ImageURLContent); ok {
imageURLContents = append(imageURLContents, c)
}
}
return imageURLContents
}
func (m *Message) BinaryContent() []BinaryContent {
binaryContents := make([]BinaryContent, 0)
for _, part := range m.Parts {
if c, ok := part.(BinaryContent); ok {
binaryContents = append(binaryContents, c)
}
}
return binaryContents
}
func (m *Message) ToolCalls() []ToolCall {
toolCalls := make([]ToolCall, 0)
for _, part := range m.Parts {
if c, ok := part.(ToolCall); ok {
toolCalls = append(toolCalls, c)
}
}
return toolCalls
}
func (m *Message) ToolResults() []ToolResult {
toolResults := make([]ToolResult, 0)
for _, part := range m.Parts {
if c, ok := part.(ToolResult); ok {
toolResults = append(toolResults, c)
}
}
return toolResults
}
func (m *Message) IsFinished() bool {
for _, part := range m.Parts {
if _, ok := part.(Finish); ok {
return true
}
}
return false
}
func (m *Message) FinishPart() *Finish {
for _, part := range m.Parts {
if c, ok := part.(Finish); ok {
return &c
}
}
return nil
}
func (m *Message) FinishReason() FinishReason {
for _, part := range m.Parts {
if c, ok := part.(Finish); ok {
return c.Reason
}
}
return ""
}
func (m *Message) IsThinking() bool {
if m.ReasoningContent().Thinking != "" && m.Content().Text == "" && !m.IsFinished() {
return true
}
return false
}
func (m *Message) AppendContent(delta string) {
found := false
for i, part := range m.Parts {
if c, ok := part.(TextContent); ok {
m.Parts[i] = TextContent{Text: c.Text + delta}
found = true
}
}
if !found {
m.Parts = append(m.Parts, TextContent{Text: delta})
}
}
func (m *Message) AppendReasoningContent(delta string) {
found := false
for i, part := range m.Parts {
if c, ok := part.(ReasoningContent); ok {
m.Parts[i] = ReasoningContent{Thinking: c.Thinking + delta}
found = true
}
}
if !found {
m.Parts = append(m.Parts, ReasoningContent{Thinking: delta})
}
}
func (m *Message) FinishToolCall(toolCallID string) {
for i, part := range m.Parts {
if c, ok := part.(ToolCall); ok {
if c.ID == toolCallID {
m.Parts[i] = ToolCall{
ID: c.ID,
Name: c.Name,
Input: c.Input,
Type: c.Type,
Finished: true,
}
return
}
}
}
}
func (m *Message) AppendToolCallInput(toolCallID string, inputDelta string) {
for i, part := range m.Parts {
if c, ok := part.(ToolCall); ok {
if c.ID == toolCallID {
m.Parts[i] = ToolCall{
ID: c.ID,
Name: c.Name,
Input: c.Input + inputDelta,
Type: c.Type,
Finished: c.Finished,
}
return
}
}
}
}
func (m *Message) AddToolCall(tc ToolCall) {
for i, part := range m.Parts {
if c, ok := part.(ToolCall); ok {
if c.ID == tc.ID {
m.Parts[i] = tc
return
}
}
}
m.Parts = append(m.Parts, tc)
}
func (m *Message) SetToolCalls(tc []ToolCall) {
// remove any existing tool call parts; there could be more than one
parts := make([]ContentPart, 0)
for _, part := range m.Parts {
if _, ok := part.(ToolCall); ok {
continue
}
parts = append(parts, part)
}
m.Parts = parts
for _, toolCall := range tc {
m.Parts = append(m.Parts, toolCall)
}
}
func (m *Message) AddToolResult(tr ToolResult) {
m.Parts = append(m.Parts, tr)
}
func (m *Message) SetToolResults(tr []ToolResult) {
for _, toolResult := range tr {
m.Parts = append(m.Parts, toolResult)
}
}
func (m *Message) AddFinish(reason FinishReason) {
// remove any existing finish part
for i, part := range m.Parts {
if _, ok := part.(Finish); ok {
m.Parts = slices.Delete(m.Parts, i, i+1)
break
}
}
m.Parts = append(m.Parts, Finish{Reason: reason, Time: time.Now().Unix()})
}
func (m *Message) AddImageURL(url, detail string) {
m.Parts = append(m.Parts, ImageURLContent{URL: url, Detail: detail})
}
func (m *Message) AddBinary(mimeType string, data []byte) {
m.Parts = append(m.Parts, BinaryContent{MIMEType: mimeType, Data: data})
}
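
A short usage sketch of the part helpers in this message package, assuming the import path shown in this diff is still valid; it assembles an assistant message roughly the way a streaming provider loop would.

package main

import (
	"fmt"

	"github.com/kujtimiihoxha/opencode/internal/message"
)

func main() {
	m := message.Message{Role: message.Assistant}

	// Text arrives as deltas and is folded into a single TextContent part.
	m.AppendContent("Let me check that file")
	m.AppendContent(" for you.")

	// A tool call is added, then marked finished by ID.
	m.AddToolCall(message.ToolCall{ID: "call_1", Name: "view", Input: `{"path":"main.go"}`})
	m.FinishToolCall("call_1")

	// The finish part records why the turn ended.
	m.AddFinish(message.FinishReasonToolUse)

	fmt.Println(m.Content().Text)          // Let me check that file for you.
	fmt.Println(m.ToolCalls()[0].Finished) // true
	fmt.Println(m.FinishReason())          // tool_use
}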


@@ -1,282 +0,0 @@
package message
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"time"
"github.com/google/uuid"
"github.com/kujtimiihoxha/opencode/internal/db"
"github.com/kujtimiihoxha/opencode/internal/llm/models"
"github.com/kujtimiihoxha/opencode/internal/pubsub"
)
type CreateMessageParams struct {
Role MessageRole
Parts []ContentPart
Model models.ModelID
}
type Service interface {
pubsub.Suscriber[Message]
Create(ctx context.Context, sessionID string, params CreateMessageParams) (Message, error)
Update(ctx context.Context, message Message) error
Get(ctx context.Context, id string) (Message, error)
List(ctx context.Context, sessionID string) ([]Message, error)
Delete(ctx context.Context, id string) error
DeleteSessionMessages(ctx context.Context, sessionID string) error
}
type service struct {
*pubsub.Broker[Message]
q db.Querier
}
func NewService(q db.Querier) Service {
return &service{
Broker: pubsub.NewBroker[Message](),
q: q,
}
}
func (s *service) Delete(ctx context.Context, id string) error {
message, err := s.Get(ctx, id)
if err != nil {
return err
}
err = s.q.DeleteMessage(ctx, message.ID)
if err != nil {
return err
}
s.Publish(pubsub.DeletedEvent, message)
return nil
}
func (s *service) Create(ctx context.Context, sessionID string, params CreateMessageParams) (Message, error) {
if params.Role != Assistant {
params.Parts = append(params.Parts, Finish{
Reason: "stop",
})
}
partsJSON, err := marshallParts(params.Parts)
if err != nil {
return Message{}, err
}
dbMessage, err := s.q.CreateMessage(ctx, db.CreateMessageParams{
ID: uuid.New().String(),
SessionID: sessionID,
Role: string(params.Role),
Parts: string(partsJSON),
Model: sql.NullString{String: string(params.Model), Valid: true},
})
if err != nil {
return Message{}, err
}
message, err := s.fromDBItem(dbMessage)
if err != nil {
return Message{}, err
}
s.Publish(pubsub.CreatedEvent, message)
return message, nil
}
func (s *service) DeleteSessionMessages(ctx context.Context, sessionID string) error {
messages, err := s.List(ctx, sessionID)
if err != nil {
return err
}
for _, message := range messages {
if message.SessionID == sessionID {
err = s.Delete(ctx, message.ID)
if err != nil {
return err
}
}
}
return nil
}
func (s *service) Update(ctx context.Context, message Message) error {
parts, err := marshallParts(message.Parts)
if err != nil {
return err
}
finishedAt := sql.NullInt64{}
if f := message.FinishPart(); f != nil {
finishedAt.Int64 = f.Time
finishedAt.Valid = true
}
err = s.q.UpdateMessage(ctx, db.UpdateMessageParams{
ID: message.ID,
Parts: string(parts),
FinishedAt: finishedAt,
})
if err != nil {
return err
}
message.UpdatedAt = time.Now().Unix()
s.Publish(pubsub.UpdatedEvent, message)
return nil
}
func (s *service) Get(ctx context.Context, id string) (Message, error) {
dbMessage, err := s.q.GetMessage(ctx, id)
if err != nil {
return Message{}, err
}
return s.fromDBItem(dbMessage)
}
func (s *service) List(ctx context.Context, sessionID string) ([]Message, error) {
dbMessages, err := s.q.ListMessagesBySession(ctx, sessionID)
if err != nil {
return nil, err
}
messages := make([]Message, len(dbMessages))
for i, dbMessage := range dbMessages {
messages[i], err = s.fromDBItem(dbMessage)
if err != nil {
return nil, err
}
}
return messages, nil
}
func (s *service) fromDBItem(item db.Message) (Message, error) {
parts, err := unmarshallParts([]byte(item.Parts))
if err != nil {
return Message{}, err
}
return Message{
ID: item.ID,
SessionID: item.SessionID,
Role: MessageRole(item.Role),
Parts: parts,
Model: models.ModelID(item.Model.String),
CreatedAt: item.CreatedAt,
UpdatedAt: item.UpdatedAt,
}, nil
}
type partType string
const (
reasoningType partType = "reasoning"
textType partType = "text"
imageURLType partType = "image_url"
binaryType partType = "binary"
toolCallType partType = "tool_call"
toolResultType partType = "tool_result"
finishType partType = "finish"
)
type partWrapper struct {
Type partType `json:"type"`
Data ContentPart `json:"data"`
}
func marshallParts(parts []ContentPart) ([]byte, error) {
wrappedParts := make([]partWrapper, len(parts))
for i, part := range parts {
var typ partType
switch part.(type) {
case ReasoningContent:
typ = reasoningType
case TextContent:
typ = textType
case ImageURLContent:
typ = imageURLType
case BinaryContent:
typ = binaryType
case ToolCall:
typ = toolCallType
case ToolResult:
typ = toolResultType
case Finish:
typ = finishType
default:
return nil, fmt.Errorf("unknown part type: %T", part)
}
wrappedParts[i] = partWrapper{
Type: typ,
Data: part,
}
}
return json.Marshal(wrappedParts)
}
func unmarshallParts(data []byte) ([]ContentPart, error) {
temp := []json.RawMessage{}
if err := json.Unmarshal(data, &temp); err != nil {
return nil, err
}
parts := make([]ContentPart, 0)
for _, rawPart := range temp {
var wrapper struct {
Type partType `json:"type"`
Data json.RawMessage `json:"data"`
}
if err := json.Unmarshal(rawPart, &wrapper); err != nil {
return nil, err
}
switch wrapper.Type {
case reasoningType:
part := ReasoningContent{}
if err := json.Unmarshal(wrapper.Data, &part); err != nil {
return nil, err
}
parts = append(parts, part)
case textType:
part := TextContent{}
if err := json.Unmarshal(wrapper.Data, &part); err != nil {
return nil, err
}
parts = append(parts, part)
case imageURLType:
part := ImageURLContent{}
if err := json.Unmarshal(wrapper.Data, &part); err != nil {
return nil, err
}
parts = append(parts, part)
case binaryType:
part := BinaryContent{}
if err := json.Unmarshal(wrapper.Data, &part); err != nil {
return nil, err
}
parts = append(parts, part)
case toolCallType:
part := ToolCall{}
if err := json.Unmarshal(wrapper.Data, &part); err != nil {
return nil, err
}
parts = append(parts, part)
case toolResultType:
part := ToolResult{}
if err := json.Unmarshal(wrapper.Data, &part); err != nil {
return nil, err
}
parts = append(parts, part)
case finishType:
part := Finish{}
if err := json.Unmarshal(wrapper.Data, &part); err != nil {
return nil, err
}
parts = append(parts, part)
default:
return nil, fmt.Errorf("unknown part type: %s", wrapper.Type)
}
}
return parts, nil
}
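
A sketch of an in-package round-trip test (a hypothetical message_test.go, not part of this diff) that would exercise marshallParts and unmarshallParts, including the image_url case fixed above.

package message

import "testing"

// TestMarshallRoundTrip checks that every part kind written by marshallParts
// comes back from unmarshallParts.
func TestMarshallRoundTrip(t *testing.T) {
	in := []ContentPart{
		TextContent{Text: "hello"},
		ReasoningContent{Thinking: "step 1"},
		ImageURLContent{URL: "https://example.com/a.png", Detail: "low"},
		ToolCall{ID: "call_1", Name: "view", Input: "{}", Finished: true},
		ToolResult{ToolCallID: "call_1", Content: "ok"},
		Finish{Reason: FinishReasonEndTurn, Time: 1},
	}

	data, err := marshallParts(in)
	if err != nil {
		t.Fatalf("marshallParts: %v", err)
	}
	out, err := unmarshallParts(data)
	if err != nil {
		t.Fatalf("unmarshallParts: %v", err)
	}
	if len(out) != len(in) {
		t.Fatalf("round-trip returned %d parts, want %d", len(out), len(in))
	}
}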


@@ -1,124 +0,0 @@
package permission
import (
"errors"
"path/filepath"
"slices"
"sync"
"time"
"github.com/google/uuid"
"github.com/kujtimiihoxha/opencode/internal/config"
"github.com/kujtimiihoxha/opencode/internal/pubsub"
)
var ErrorPermissionDenied = errors.New("permission denied")
type CreatePermissionRequest struct {
SessionID string `json:"session_id"`
ToolName string `json:"tool_name"`
Description string `json:"description"`
Action string `json:"action"`
Params any `json:"params"`
Path string `json:"path"`
}
type PermissionRequest struct {
ID string `json:"id"`
SessionID string `json:"session_id"`
ToolName string `json:"tool_name"`
Description string `json:"description"`
Action string `json:"action"`
Params any `json:"params"`
Path string `json:"path"`
}
type Service interface {
pubsub.Suscriber[PermissionRequest]
GrantPersistant(permission PermissionRequest)
Grant(permission PermissionRequest)
Deny(permission PermissionRequest)
Request(opts CreatePermissionRequest) bool
AutoApproveSession(sessionID string)
}
type permissionService struct {
*pubsub.Broker[PermissionRequest]
sessionPermissions []PermissionRequest
pendingRequests sync.Map
autoApproveSessions []string
}
func (s *permissionService) GrantPersistant(permission PermissionRequest) {
respCh, ok := s.pendingRequests.Load(permission.ID)
if ok {
respCh.(chan bool) <- true
}
s.sessionPermissions = append(s.sessionPermissions, permission)
}
func (s *permissionService) Grant(permission PermissionRequest) {
respCh, ok := s.pendingRequests.Load(permission.ID)
if ok {
respCh.(chan bool) <- true
}
}
func (s *permissionService) Deny(permission PermissionRequest) {
respCh, ok := s.pendingRequests.Load(permission.ID)
if ok {
respCh.(chan bool) <- false
}
}
func (s *permissionService) Request(opts CreatePermissionRequest) bool {
if slices.Contains(s.autoApproveSessions, opts.SessionID) {
return true
}
dir := filepath.Dir(opts.Path)
if dir == "." {
dir = config.WorkingDirectory()
}
permission := PermissionRequest{
ID: uuid.New().String(),
Path: dir,
SessionID: opts.SessionID,
ToolName: opts.ToolName,
Description: opts.Description,
Action: opts.Action,
Params: opts.Params,
}
for _, p := range s.sessionPermissions {
if p.ToolName == permission.ToolName && p.Action == permission.Action && p.SessionID == permission.SessionID && p.Path == permission.Path {
return true
}
}
respCh := make(chan bool, 1)
s.pendingRequests.Store(permission.ID, respCh)
defer s.pendingRequests.Delete(permission.ID)
s.Publish(pubsub.CreatedEvent, permission)
// Wait for the response with a timeout
select {
case resp := <-respCh:
return resp
case <-time.After(10 * time.Minute):
return false
}
}
func (s *permissionService) AutoApproveSession(sessionID string) {
s.autoApproveSessions = append(s.autoApproveSessions, sessionID)
}
func NewPermissionService() Service {
return &permissionService{
Broker: pubsub.NewBroker[PermissionRequest](),
sessionPermissions: make([]PermissionRequest, 0),
}
}
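
A sketch of the request/grant handshake, using the import path from this diff and assuming pubsub.Suscriber exposes the Broker's Subscribe method; the inline approver goroutine stands in for whatever UI owns the permission prompt.

package main

import (
	"context"
	"fmt"

	"github.com/kujtimiihoxha/opencode/internal/permission"
)

func main() {
	svc := permission.NewPermissionService()

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// The approver subscribes to permission events and grants everything it sees.
	events := svc.Subscribe(ctx)
	go func() {
		for ev := range events {
			svc.Grant(ev.Payload)
		}
	}()

	// Request blocks until the approver answers or the 10-minute timeout fires.
	ok := svc.Request(permission.CreatePermissionRequest{
		SessionID:   "ses_1",
		ToolName:    "bash",
		Action:      "execute",
		Description: "run go test ./...",
		Path:        "internal/llm",
	})
	fmt.Println("granted:", ok)
}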


@@ -1,116 +0,0 @@
package pubsub
import (
"context"
"sync"
)
const bufferSize = 64
type Broker[T any] struct {
subs map[chan Event[T]]struct{}
mu sync.RWMutex
done chan struct{}
subCount int
maxEvents int
}
func NewBroker[T any]() *Broker[T] {
return NewBrokerWithOptions[T](bufferSize, 1000)
}
func NewBrokerWithOptions[T any](channelBufferSize, maxEvents int) *Broker[T] {
b := &Broker[T]{
subs: make(map[chan Event[T]]struct{}),
done: make(chan struct{}),
subCount: 0,
maxEvents: maxEvents,
}
return b
}
func (b *Broker[T]) Shutdown() {
select {
case <-b.done: // Already closed
return
default:
close(b.done)
}
b.mu.Lock()
defer b.mu.Unlock()
for ch := range b.subs {
delete(b.subs, ch)
close(ch)
}
b.subCount = 0
}
func (b *Broker[T]) Subscribe(ctx context.Context) <-chan Event[T] {
b.mu.Lock()
defer b.mu.Unlock()
select {
case <-b.done:
ch := make(chan Event[T])
close(ch)
return ch
default:
}
sub := make(chan Event[T], bufferSize)
b.subs[sub] = struct{}{}
b.subCount++
go func() {
<-ctx.Done()
b.mu.Lock()
defer b.mu.Unlock()
select {
case <-b.done:
return
default:
}
delete(b.subs, sub)
close(sub)
b.subCount--
}()
return sub
}
func (b *Broker[T]) GetSubscriberCount() int {
b.mu.RLock()
defer b.mu.RUnlock()
return b.subCount
}
func (b *Broker[T]) Publish(t EventType, payload T) {
b.mu.RLock()
select {
case <-b.done:
b.mu.RUnlock()
return
default:
}
subscribers := make([]chan Event[T], 0, len(b.subs))
for sub := range b.subs {
subscribers = append(subscribers, sub)
}
b.mu.RUnlock()
event := Event[T]{Type: t, Payload: payload}
for _, sub := range subscribers {
select {
case sub <- event:
default:
}
}
}
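
A minimal usage sketch of the broker as listed above. Publish never blocks: if a subscriber's 64-event buffer is full, the event is dropped for that subscriber instead of stalling the publisher.

package main

import (
	"context"
	"fmt"

	"github.com/kujtimiihoxha/opencode/internal/pubsub"
)

func main() {
	b := pubsub.NewBroker[string]()
	defer b.Shutdown()

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Subscribers receive events until their context is canceled or the
	// broker shuts down.
	events := b.Subscribe(ctx)

	b.Publish(pubsub.CreatedEvent, "session created")

	ev := <-events
	fmt.Println(ev.Type, ev.Payload)
}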

Some files were not shown because too many files have changed in this diff.