Compare commits

...

35 Commits

Author SHA1 Message Date
Dax Raad
fcd5ff7ebe sync 2026-01-24 00:04:37 -05:00
Dax Raad
c2e234ec4d sync 2026-01-24 00:04:10 -05:00
Dax Raad
38f735bfc6 sync 2026-01-24 00:02:32 -05:00
Dax Raad
a4183c3b2c sync 2026-01-23 23:57:20 -05:00
Dax Raad
2c234b8d62 core: migrate project table from JSON to structured columns for better query performance 2026-01-23 23:55:18 -05:00
Github Action
9f96d8aa78 Update aarch64-darwin hash 2026-01-23 23:54:07 -05:00
Github Action
4007e57c52 Update Nix flake.lock and x86_64-linux hash 2026-01-23 23:53:59 -05:00
Dax Raad
d472512eba core: consolidate session-related SQL tables into single file 2026-01-23 23:53:29 -05:00
Dax Raad
f6b28b61c7 core: fix message ordering and add custom storage dir support for migration 2026-01-23 23:53:29 -05:00
Github Action
0bf9d66da5 Update aarch64-darwin hash 2026-01-23 23:53:28 -05:00
Github Action
eabd78cab6 Update Nix flake.lock and x86_64-linux hash 2026-01-23 23:52:47 -05:00
Dax Raad
7bc8851fc4 commit 2026-01-23 23:51:53 -05:00
Frank
af5e405391 zen: remove grok code model 2026-01-23 23:25:34 -05:00
Alex Yaroshuk
8a216a6ad5 fix(app): normalize path separators for session diff filtering on Windows (#10291) 2026-01-23 16:17:47 -06:00
Ariane Emory
225b72ca36 feat: always center selected item in selection dialogs (resolves #10209) (#10207) 2026-01-23 11:59:39 -06:00
Rahul A Mistry
8105f186dc fix(app): center checkbox indicator in provider selection (#10267) 2026-01-23 10:23:24 -06:00
GitHub Action
4f1bdf1c59 chore: generate 2026-01-23 16:22:07 +00:00
Frank
472695caca zen: fix balance not shown 2026-01-23 11:21:08 -05:00
GitHub Action
469fd43c71 chore: generate 2026-01-23 15:59:00 +00:00
Frank
24d942349f zen: use balance after rate limited 2026-01-23 10:58:00 -05:00
Edin
65c236c071 feat(app): auto-open oauth links for codex and copilot (#10258) 2026-01-23 09:35:44 -06:00
GitHub Action
d6c5ddd6dc ignore: update download stats 2026-01-23 12:05:37 +00:00
Adam
e5fe50f7da fix(app): close delete workspace dialog immediately 2026-01-23 05:41:51 -06:00
Adam
b6beda1569 fix: type error 2026-01-23 05:32:37 -06:00
GitHub Action
f34b509fe7 chore: generate 2026-01-23 11:19:35 +00:00
Adam
2a2d800ac4 fix: type error 2026-01-23 05:18:57 -06:00
Adam
4afb46f571 perf(app): don't remount directory layout 2026-01-23 05:18:57 -06:00
Adam
c4d223eb99 perf(app): faster workspace creation 2026-01-23 05:18:42 -06:00
GitHub Action
3fbda54045 chore: generate 2026-01-23 11:10:22 +00:00
Shantur Rathore
41ede06b20 docs(ecosystem): Add CodeNomad entry to ecosystem documentation (#10222) 2026-01-23 05:09:38 -06:00
Adam
82ec84982e Reapply "wip(app): line selection"
This reverts commit df7b6792cd.
2026-01-23 05:01:10 -06:00
Adam
df7b6792cd Revert "wip(app): line selection"
This reverts commit 1780bab1ce.
2026-01-23 04:58:41 -06:00
Devin Griffin
c72d9a473c fix(app): View all sessions flakiness (#10149) 2026-01-23 04:57:10 -06:00
GitHub Action
d3688b150a chore: generate 2026-01-23 10:55:37 +00:00
Rahul A Mistry
e376e1de16 fix(app): enable dialog dismiss on model selector (dialog.tsx) (#10203) 2026-01-23 04:55:00 -06:00
89 changed files with 3783 additions and 716 deletions

View File

@@ -208,3 +208,4 @@
| 2026-01-20 | 5,128,999 (+267,891) | 1,903,665 (+40,553) | 7,032,664 (+308,444) |
| 2026-01-21 | 5,444,842 (+315,843) | 1,962,531 (+58,866) | 7,407,373 (+374,709) |
| 2026-01-22 | 5,766,340 (+321,498) | 2,029,487 (+66,956) | 7,795,827 (+388,454) |
| 2026-01-23 | 6,096,236 (+329,896) | 2,096,235 (+66,748) | 8,192,471 (+396,644) |

bun.lock (117 changed lines)
View File

@@ -311,6 +311,7 @@
"clipboardy": "4.0.0",
"decimal.js": "10.5.0",
"diff": "catalog:",
"drizzle-orm": "0.45.1",
"fuzzysort": "3.1.0",
"gray-matter": "4.0.3",
"hono": "catalog:",
@@ -352,6 +353,8 @@
"@types/turndown": "5.0.5",
"@types/yargs": "17.0.33",
"@typescript/native-preview": "catalog:",
"better-sqlite3": "12.6.0",
"drizzle-kit": "0.31.8",
"typescript": "catalog:",
"vscode-languageserver-types": "3.17.5",
"why-is-node-running": "3.2.2",
@@ -2042,12 +2045,18 @@
"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],
"better-sqlite3": ["better-sqlite3@12.6.0", "", { "dependencies": { "bindings": "^1.5.0", "prebuild-install": "^7.1.1" } }, "sha512-FXI191x+D6UPWSze5IzZjhz+i9MK9nsuHsmTX9bXVl52k06AfZ2xql0lrgIUuzsMsJ7Vgl5kIptvDgBLIV3ZSQ=="],
"bignumber.js": ["bignumber.js@9.3.1", "", {}, "sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ=="],
"binary": ["binary@0.3.0", "", { "dependencies": { "buffers": "~0.1.1", "chainsaw": "~0.1.0" } }, "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg=="],
"binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="],
"bindings": ["bindings@1.5.0", "", { "dependencies": { "file-uri-to-path": "1.0.0" } }, "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ=="],
"bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="],
"blake3-wasm": ["blake3-wasm@2.1.5", "", {}, "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g=="],
"blob-to-buffer": ["blob-to-buffer@1.2.9", "", {}, "sha512-BF033y5fN6OCofD3vgHmNtwZWRcq9NLyyxyILx9hfMy1sXYy4ojFl765hJ2lP0YaN2fuxPaLO2Vzzoxy0FLFFA=="],
@@ -2254,6 +2263,10 @@
"decode-named-character-reference": ["decode-named-character-reference@1.2.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q=="],
"decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="],
"deep-extend": ["deep-extend@0.6.0", "", {}, "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="],
"deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],
"default-browser": ["default-browser@5.4.0", "", { "dependencies": { "bundle-name": "^4.1.0", "default-browser-id": "^5.0.0" } }, "sha512-XDuvSq38Hr1MdN47EDvYtx3U0MTqpCEn+F6ft8z2vYDzMrvQhVp0ui9oQdqW3MvK3vqUETglt1tVGgjLuJ5izg=="],
@@ -2346,6 +2359,8 @@
"encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="],
"end-of-stream": ["end-of-stream@1.4.5", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg=="],
"engine.io-client": ["engine.io-client@6.6.4", "", { "dependencies": { "@socket.io/component-emitter": "~3.1.0", "debug": "~4.4.1", "engine.io-parser": "~5.2.1", "ws": "~8.18.3", "xmlhttprequest-ssl": "~2.1.1" } }, "sha512-+kjUJnZGwzewFDw951CDWcwj35vMNf2fcj7xQWOctq1F2i1jkDdVvdFG9kM/BEChymCH36KgjnW0NsL58JYRxw=="],
"engine.io-parser": ["engine.io-parser@5.2.3", "", {}, "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q=="],
@@ -2430,6 +2445,8 @@
"exit-hook": ["exit-hook@2.2.1", "", {}, "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw=="],
"expand-template": ["expand-template@2.0.3", "", {}, "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg=="],
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
"express": ["express@4.21.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.19.0", "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA=="],
@@ -2464,6 +2481,8 @@
"file-type": ["file-type@16.5.4", "", { "dependencies": { "readable-web-to-node-stream": "^3.0.0", "strtok3": "^6.2.4", "token-types": "^4.1.1" } }, "sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw=="],
"file-uri-to-path": ["file-uri-to-path@1.0.0", "", {}, "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="],
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
"finalhandler": ["finalhandler@1.3.1", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" } }, "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ=="],
@@ -2502,6 +2521,8 @@
"fresh": ["fresh@0.5.2", "", {}, "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="],
"fs-constants": ["fs-constants@1.0.0", "", {}, "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="],
"fs-extra": ["fs-extra@10.1.0", "", { "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ=="],
"fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="],
@@ -2552,6 +2573,8 @@
"giget": ["giget@2.0.0", "", { "dependencies": { "citty": "^0.1.6", "consola": "^3.4.0", "defu": "^6.1.4", "node-fetch-native": "^1.6.6", "nypm": "^0.6.0", "pathe": "^2.0.3" }, "bin": { "giget": "dist/cli.mjs" } }, "sha512-L5bGsVkxJbJgdnwyuheIunkGatUF/zssUoxxjACCseZYAVbaqdh9Tsmmlkl8vYan09H7sbvKt4pS8GqKLBrEzA=="],
"github-from-package": ["github-from-package@0.0.0", "", {}, "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="],
"github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="],
"glob": ["glob@11.1.0", "", { "dependencies": { "foreground-child": "^3.3.1", "jackspeak": "^4.1.1", "minimatch": "^10.1.1", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw=="],
@@ -3090,6 +3113,8 @@
"mimic-fn": ["mimic-fn@4.0.0", "", {}, "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw=="],
"mimic-response": ["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="],
"miniflare": ["miniflare@4.20251118.1", "", { "dependencies": { "@cspotcode/source-map-support": "0.8.1", "acorn": "8.14.0", "acorn-walk": "8.3.2", "exit-hook": "2.2.1", "glob-to-regexp": "0.4.1", "sharp": "^0.33.5", "stoppable": "1.1.0", "undici": "7.14.0", "workerd": "1.20251118.0", "ws": "8.18.0", "youch": "4.1.0-beta.10", "zod": "3.22.3" }, "bin": { "miniflare": "bootstrap.js" } }, "sha512-uLSAE/DvOm392fiaig4LOaatxLjM7xzIniFRG5Y3yF9IduOYLLK/pkCPQNCgKQH3ou0YJRHnTN+09LPfqYNTQQ=="],
"minimatch": ["minimatch@10.0.3", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw=="],
@@ -3102,6 +3127,8 @@
"mkdirp": ["mkdirp@0.5.6", "", { "dependencies": { "minimist": "^1.2.6" }, "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw=="],
"mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="],
"mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
@@ -3120,6 +3147,8 @@
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
"napi-build-utils": ["napi-build-utils@2.0.0", "", {}, "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA=="],
"negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="],
"neotraverse": ["neotraverse@0.6.18", "", {}, "sha512-Z4SmBUweYa09+o6pG+eASabEpP6QkQ70yHj351pQoEXIs8uHbaU2DWVmzBANKgflPa47A50PtB2+NgRpQvr7vA=="],
@@ -3132,6 +3161,8 @@
"no-case": ["no-case@3.0.4", "", { "dependencies": { "lower-case": "^2.0.2", "tslib": "^2.0.3" } }, "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg=="],
"node-abi": ["node-abi@3.85.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-zsFhmbkAzwhTft6nd3VxcG0cvJsT70rL+BIGHWVq5fi6MwGrHwzqKaxXE+Hl2GmnGItnDKPPkO5/LQqjVkIdFg=="],
"node-addon-api": ["node-addon-api@7.1.1", "", {}, "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ=="],
"node-domexception": ["node-domexception@1.0.0", "", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="],
@@ -3328,6 +3359,8 @@
"powershell-utils": ["powershell-utils@0.1.0", "", {}, "sha512-dM0jVuXJPsDN6DvRpea484tCUaMiXWjuCn++HGTqUWzGDjv5tZkEZldAJ/UMlqRYGFrD/etByo4/xOuC/snX2A=="],
"prebuild-install": ["prebuild-install@7.1.3", "", { "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", "github-from-package": "0.0.0", "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^2.0.0", "node-abi": "^3.3.0", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", "tar-fs": "^2.0.0", "tunnel-agent": "^0.6.0" }, "bin": { "prebuild-install": "bin.js" } }, "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug=="],
"prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],
"pretty": ["pretty@2.0.0", "", { "dependencies": { "condense-newlines": "^0.2.1", "extend-shallow": "^2.0.1", "js-beautify": "^1.6.12" } }, "sha512-G9xUchgTEiNpormdYBl+Pha50gOUovT18IvAe7EYMZ1/f9W/WWMPRn+xI68yXNMUk3QXHDwo/1wV/4NejVNe1w=="],
@@ -3350,6 +3383,8 @@
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
"pump": ["pump@3.0.3", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA=="],
"punycode": ["punycode@1.3.2", "", {}, "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw=="],
"qs": ["qs@6.14.0", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w=="],
@@ -3366,6 +3401,8 @@
"raw-body": ["raw-body@2.5.2", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA=="],
"rc": ["rc@1.2.8", "", { "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "./cli.js" } }, "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw=="],
"rc9": ["rc9@2.1.2", "", { "dependencies": { "defu": "^6.1.4", "destr": "^2.0.3" } }, "sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg=="],
"react": ["react@18.2.0", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ=="],
@@ -3552,6 +3589,10 @@
"signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="],
"simple-concat": ["simple-concat@1.0.1", "", {}, "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q=="],
"simple-get": ["simple-get@4.0.1", "", { "dependencies": { "decompress-response": "^6.0.0", "once": "^1.3.1", "simple-concat": "^1.0.0" } }, "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA=="],
"simple-swizzle": ["simple-swizzle@0.2.4", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw=="],
"simple-xml-to-json": ["simple-xml-to-json@1.2.3", "", {}, "sha512-kWJDCr9EWtZ+/EYYM5MareWj2cRnZGF93YDNpH4jQiHB+hBIZnfPFSQiVMzZOdk+zXWqTZ/9fTeQNu2DqeiudA=="],
@@ -3656,6 +3697,8 @@
"strip-final-newline": ["strip-final-newline@3.0.0", "", {}, "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw=="],
"strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="],
"stripe": ["stripe@18.0.0", "", { "dependencies": { "@types/node": ">=8.1.0", "qs": "^6.11.0" } }, "sha512-3Fs33IzKUby//9kCkCa1uRpinAoTvj6rJgQ2jrBEysoxEvfsclvXdna1amyEYbA2EKkjynuB4+L/kleCCaWTpA=="],
"strnum": ["strnum@1.1.2", "", {}, "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA=="],
@@ -3682,6 +3725,8 @@
"tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],
"tar-fs": ["tar-fs@2.1.4", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ=="],
"tar-stream": ["tar-stream@3.1.7", "", { "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ=="],
"terracotta": ["terracotta@1.0.6", "", { "dependencies": { "solid-use": "^0.9.0" }, "peerDependencies": { "solid-js": "^1.8" } }, "sha512-yVrmT/Lg6a3tEbeYEJH8ksb1PYkR5FA9k5gr1TchaSNIiA2ZWs5a+koEbePXwlBP0poaV7xViZ/v50bQFcMgqw=="],
@@ -3746,6 +3791,8 @@
"tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="],
"tunnel-agent": ["tunnel-agent@0.6.0", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w=="],
"turbo": ["turbo@2.5.6", "", { "optionalDependencies": { "turbo-darwin-64": "2.5.6", "turbo-darwin-arm64": "2.5.6", "turbo-linux-64": "2.5.6", "turbo-linux-arm64": "2.5.6", "turbo-windows-64": "2.5.6", "turbo-windows-arm64": "2.5.6" }, "bin": { "turbo": "bin/turbo" } }, "sha512-gxToHmi9oTBNB05UjUsrWf0OyN5ZXtD0apOarC1KIx232Vp3WimRNy3810QzeNSgyD5rsaIDXlxlbnOzlouo+w=="],
"turbo-darwin-64": ["turbo-darwin-64@2.5.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-3C1xEdo4aFwMJAPvtlPqz1Sw/+cddWIOmsalHFMrsqqydcptwBfu26WW2cDm3u93bUzMbBJ8k3zNKFqxJ9ei2A=="],
@@ -4310,6 +4357,10 @@
"babel-plugin-module-resolver/glob": ["glob@9.3.5", "", { "dependencies": { "fs.realpath": "^1.0.0", "minimatch": "^8.0.2", "minipass": "^4.2.4", "path-scurry": "^1.6.1" } }, "sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q=="],
"bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="],
"bl/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
"body-parser/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="],
"body-parser/iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="],
@@ -4414,6 +4465,10 @@
"opencode/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@1.0.30", "", { "dependencies": { "@ai-sdk/provider": "2.0.1", "@ai-sdk/provider-utils": "3.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-thubwhRtv9uicAxSWwNpinM7hiL/0CkhL/ymPaHuKvI494J7HIzn8KQZQ2ymRz284WTIZnI7VMyyejxW4RMM6w=="],
"opencode/drizzle-kit": ["drizzle-kit@0.31.8", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.4", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-O9EC/miwdnRDY10qRxM8P3Pg8hXe3LyU4ZipReKOgTwn4OqANmftj8XJz1UPUAS6NMHf0E2htjsbQujUTkncCg=="],
"opencode/drizzle-orm": ["drizzle-orm@0.45.1", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@upstash/redis", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-Te0FOdKIistGNPMq2jscdqngBRfBpC8uMFVwqjf6gtTVJHIQ/dosgV/CLBU2N4ZJBsXL5savCba9b0YJskKdcA=="],
"opencontrol/@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="],
"opencontrol/@tsconfig/bun": ["@tsconfig/bun@1.0.7", "", {}, "sha512-udGrGJBNQdXGVulehc1aWT73wkR9wdaGBtB6yL70RJsqwW/yJhIg6ZbRlPOfIUiFNrnBuYLBi9CSmMKfDC7dvA=="],
@@ -4444,6 +4499,8 @@
"postcss-load-config/lilconfig": ["lilconfig@3.1.3", "", {}, "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw=="],
"prebuild-install/detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"prompts/kleur": ["kleur@3.0.3", "", {}, "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w=="],
"raw-body/iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="],
@@ -4492,6 +4549,10 @@
"tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
"tar-fs/chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="],
"tar-fs/tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="],
"terser/commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="],
"token-types/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
@@ -4946,6 +5007,8 @@
"babel-plugin-module-resolver/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="],
"bl/buffer/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
"body-parser/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
"c12/chokidar/readdirp": ["readdirp@5.0.0", "", {}, "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ=="],
@@ -5020,6 +5083,8 @@
"lazystream/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="],
"opencode/drizzle-kit/esbuild": ["esbuild@0.25.12", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.12", "@esbuild/android-arm": "0.25.12", "@esbuild/android-arm64": "0.25.12", "@esbuild/android-x64": "0.25.12", "@esbuild/darwin-arm64": "0.25.12", "@esbuild/darwin-x64": "0.25.12", "@esbuild/freebsd-arm64": "0.25.12", "@esbuild/freebsd-x64": "0.25.12", "@esbuild/linux-arm": "0.25.12", "@esbuild/linux-arm64": "0.25.12", "@esbuild/linux-ia32": "0.25.12", "@esbuild/linux-loong64": "0.25.12", "@esbuild/linux-mips64el": "0.25.12", "@esbuild/linux-ppc64": "0.25.12", "@esbuild/linux-riscv64": "0.25.12", "@esbuild/linux-s390x": "0.25.12", "@esbuild/linux-x64": "0.25.12", "@esbuild/netbsd-arm64": "0.25.12", "@esbuild/netbsd-x64": "0.25.12", "@esbuild/openbsd-arm64": "0.25.12", "@esbuild/openbsd-x64": "0.25.12", "@esbuild/openharmony-arm64": "0.25.12", "@esbuild/sunos-x64": "0.25.12", "@esbuild/win32-arm64": "0.25.12", "@esbuild/win32-ia32": "0.25.12", "@esbuild/win32-x64": "0.25.12" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg=="],
"opencontrol/@modelcontextprotocol/sdk/express": ["express@5.1.0", "", { "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.0", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA=="],
"opencontrol/@modelcontextprotocol/sdk/pkce-challenge": ["pkce-challenge@4.1.0", "", {}, "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ=="],
@@ -5046,6 +5111,8 @@
"string-width-cjs/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
"tar-fs/tar-stream/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
"tw-to-css/tailwindcss/chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="],
"tw-to-css/tailwindcss/glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
@@ -5192,6 +5259,56 @@
"js-beautify/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="],
"opencode/drizzle-kit/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="],
"opencode/drizzle-kit/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.12", "", { "os": "android", "cpu": "arm64" }, "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg=="],
"opencode/drizzle-kit/esbuild/@esbuild/android-x64": ["@esbuild/android-x64@0.25.12", "", { "os": "android", "cpu": "x64" }, "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg=="],
"opencode/drizzle-kit/esbuild/@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg=="],
"opencode/drizzle-kit/esbuild/@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA=="],
"opencode/drizzle-kit/esbuild/@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.12", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg=="],
"opencode/drizzle-kit/esbuild/@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.12", "", { "os": "freebsd", "cpu": "x64" }, "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.12", "", { "os": "linux", "cpu": "arm" }, "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.12", "", { "os": "linux", "cpu": "ia32" }, "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.12", "", { "os": "linux", "cpu": "ppc64" }, "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.12", "", { "os": "linux", "cpu": "s390x" }, "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.12", "", { "os": "linux", "cpu": "x64" }, "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw=="],
"opencode/drizzle-kit/esbuild/@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.12", "", { "os": "none", "cpu": "arm64" }, "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg=="],
"opencode/drizzle-kit/esbuild/@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.12", "", { "os": "none", "cpu": "x64" }, "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.12", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A=="],
"opencode/drizzle-kit/esbuild/@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.12", "", { "os": "openbsd", "cpu": "x64" }, "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw=="],
"opencode/drizzle-kit/esbuild/@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.12", "", { "os": "sunos", "cpu": "x64" }, "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w=="],
"opencode/drizzle-kit/esbuild/@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg=="],
"opencode/drizzle-kit/esbuild/@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.12", "", { "os": "win32", "cpu": "ia32" }, "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.12", "", { "os": "win32", "cpu": "x64" }, "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA=="],
"opencontrol/@modelcontextprotocol/sdk/express/accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="],
"opencontrol/@modelcontextprotocol/sdk/express/body-parser": ["body-parser@2.2.0", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.0", "http-errors": "^2.0.0", "iconv-lite": "^0.6.3", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.0", "type-is": "^2.0.0" } }, "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg=="],

View File

@@ -6,7 +6,7 @@ export const domain = (() => {
export const zoneID = "430ba34c138cfb5360826c4909f99be8"
new cloudflare.RegionalHostname("RegionalHostname", {
hostname: domain,
regionKey: "us",
zoneId: zoneID,

View File

@@ -1,8 +1,17 @@
{
"nodeModules": {
"x86_64-linux": "sha256-wSkJcUnS0ODOYkbkjRnxnjfWYKQOVXwkDNB8qrikuLA=",
<<<<<<< HEAD
"x86_64-linux": "sha256-H8QVUC5shGI97Ut/wDSYsSuprHpwssJ1MHSHojn+zNI=",
"aarch64-linux": "sha256-4BlpH/oIXRJEjkQydXDv1oi1Yx7li3k1dKHUy2/Gb10=",
"aarch64-darwin": "sha256-awW0ooZo/QfB2xmRdZ9XLNzQ9sP/mbN+rTg215id6nc=",
"aarch64-darwin": "sha256-IOgZ/LP4lvFX3OlalaFuQFYAEFwP+lxz3BRwvu4Hmj4=",
"x86_64-darwin": "sha256-CHrE2z+LqY2WXTQeGWG5LNMF1AY4UGSwViJAy4IwIVw="
=======
"x86_64-linux": "sha256-9QHW6Ue9VO1VKsu6sg4gRtxgifQGNJlfVVXaa0Uc0XQ=",
<<<<<<< HEAD
"aarch64-darwin": "sha256-IOgZ/LP4lvFX3OlalaFuQFYAEFwP+lxz3BRwvu4Hmj4="
>>>>>>> 6e0a58c50 (Update Nix flake.lock and x86_64-linux hash)
=======
"aarch64-darwin": "sha256-G8tTkuUSFQNOmjbu6cIi6qeyNWtGogtUVNi2CSgcgX0="
>>>>>>> 8a0e3e909 (Update aarch64-darwin hash)
}
}

View File

@@ -187,7 +187,7 @@ export function DialogConnectProvider(props: { provider: string }) {
{(i) => (
<div class="w-full flex items-center gap-x-2">
<div class="w-4 h-2 rounded-[1px] bg-input-base shadow-xs-border-base flex items-center justify-center">
<div class="w-2.5 h-0.5 bg-icon-strong-base hidden" data-slot="list-item-extra-icon" />
<div class="w-2.5 h-0.5 ml-0 bg-icon-strong-base hidden" data-slot="list-item-extra-icon" />
</div>
<span>{methodLabel(i)}</span>
</div>
@@ -373,6 +373,9 @@ export function DialogConnectProvider(props: { provider: string }) {
})
onMount(async () => {
if (store.authorization?.url) {
platform.openLink(store.authorization.url)
}
const result = await globalSDK.client.provider.oauth
.callback({
providerID: props.provider,

View File

@@ -32,8 +32,8 @@ export function DialogSelectFile() {
const dialog = useDialog()
const params = useParams()
const sessionKey = createMemo(() => `${params.dir}${params.id ? "/" + params.id : ""}`)
const tabs = createMemo(() => layout.tabs(sessionKey()))
const view = createMemo(() => layout.view(sessionKey()))
const tabs = createMemo(() => layout.tabs(sessionKey))
const view = createMemo(() => layout.view(sessionKey))
const state = { cleanup: undefined as (() => void) | void, committed: false }
const [grouped, setGrouped] = createSignal(false)
const common = [
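
The change above hands layout.tabs and layout.view the sessionKey accessor itself instead of its current string value. With sessionKey() the helpers received a frozen key and had to be rebuilt whenever the route changed; passing the function lets them re-read the key reactively (see the on(key, ...) effects added in layout.tsx later in this compare). A minimal Solid sketch of the distinction, with createRoot omitted:

import { createSignal, createMemo, type Accessor } from "solid-js"

function view(sessionKey: string | Accessor<string>) {
  // Normalize, mirroring the layout.tsx change further down.
  const key = typeof sessionKey === "function" ? sessionKey : () => sessionKey
  return createMemo(() => `view for ${key()}`) // tracks the accessor, not a snapshot
}

const [id, setId] = createSignal("dir/session-1")
const v = view(id) // pass the accessor: v() follows later changes
setId("dir/session-2") // v() now reads "view for dir/session-2"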

View File

@@ -56,6 +56,12 @@ export const DialogSelectProvider: Component = () => {
<Show when={i.id === "anthropic"}>
<div class="text-14-regular text-text-weak">{language.t("dialog.provider.anthropic.note")}</div>
</Show>
<Show when={i.id === "openai"}>
<div class="text-14-regular text-text-weak">{language.t("dialog.provider.openai.note")}</div>
</Show>
<Show when={i.id.startsWith("github-copilot")}>
<div class="text-14-regular text-text-weak">{language.t("dialog.provider.copilot.note")}</div>
</Show>
</div>
)}
</List>

View File

@@ -48,6 +48,7 @@ import { useProviders } from "@/hooks/use-providers"
import { useCommand } from "@/context/command"
import { Persist, persisted } from "@/utils/persist"
import { Identifier } from "@/utils/id"
import { Worktree as WorktreeState } from "@/utils/worktree"
import { SessionContextUsage } from "@/components/session-context-usage"
import { usePermission } from "@/context/permission"
import { useLanguage } from "@/context/language"
@@ -61,6 +62,13 @@ import { base64Encode } from "@opencode-ai/util/encode"
const ACCEPTED_IMAGE_TYPES = ["image/png", "image/jpeg", "image/gif", "image/webp"]
const ACCEPTED_FILE_TYPES = [...ACCEPTED_IMAGE_TYPES, "application/pdf"]
type PendingPrompt = {
abort: AbortController
cleanup: VoidFunction
}
const pending = new Map<string, PendingPrompt>()
interface PromptInputProps {
class?: string
ref?: (el: HTMLDivElement) => void
@@ -159,8 +167,8 @@ export const PromptInput: Component<PromptInputProps> = (props) => {
}
const sessionKey = createMemo(() => `${params.dir}${params.id ? "/" + params.id : ""}`)
const tabs = createMemo(() => layout.tabs(sessionKey()))
const view = createMemo(() => layout.view(sessionKey()))
const tabs = createMemo(() => layout.tabs(sessionKey))
const view = createMemo(() => layout.view(sessionKey))
const recent = createMemo(() => {
const all = tabs().all()
@@ -846,12 +854,22 @@ export const PromptInput: Component<PromptInputProps> = (props) => {
setStore("popover", null)
}
const abort = () =>
sdk.client.session
const abort = async () => {
const sessionID = params.id
if (!sessionID) return Promise.resolve()
const queued = pending.get(sessionID)
if (queued) {
queued.abort.abort()
queued.cleanup()
pending.delete(sessionID)
return Promise.resolve()
}
return sdk.client.session
.abort({
sessionID: params.id!,
sessionID,
})
.catch(() => {})
}
const addToHistory = (prompt: Prompt, mode: "normal" | "shell") => {
const text = prompt
@@ -1111,6 +1129,7 @@ export const PromptInput: Component<PromptInputProps> = (props) => {
})
return
}
WorktreeState.pending(createdWorktree.directory)
sessionDirectory = createdWorktree.directory
}
@@ -1369,10 +1388,27 @@ export const PromptInput: Component<PromptInputProps> = (props) => {
model,
}
const setSyncStore = sessionDirectory === projectDirectory ? sync.set : globalSync.child(sessionDirectory)[1]
const addOptimisticMessage = () => {
setSyncStore(
if (sessionDirectory === projectDirectory) {
sync.set(
produce((draft) => {
const messages = draft.message[session.id]
if (!messages) {
draft.message[session.id] = [optimisticMessage]
} else {
const result = Binary.search(messages, messageID, (m) => m.id)
messages.splice(result.index, 0, optimisticMessage)
}
draft.part[messageID] = optimisticParts
.filter((p) => !!p?.id)
.slice()
.sort((a, b) => a.id.localeCompare(b.id))
}),
)
return
}
globalSync.child(sessionDirectory)[1](
produce((draft) => {
const messages = draft.message[session.id]
if (!messages) {
@@ -1390,7 +1426,21 @@ export const PromptInput: Component<PromptInputProps> = (props) => {
}
const removeOptimisticMessage = () => {
setSyncStore(
if (sessionDirectory === projectDirectory) {
sync.set(
produce((draft) => {
const messages = draft.message[session.id]
if (messages) {
const result = Binary.search(messages, messageID, (m) => m.id)
if (result.found) messages.splice(result.index, 1)
}
delete draft.part[messageID]
}),
)
return
}
globalSync.child(sessionDirectory)[1](
produce((draft) => {
const messages = draft.message[session.id]
if (messages) {
@@ -1409,20 +1459,20 @@ export const PromptInput: Component<PromptInputProps> = (props) => {
clearInput()
addOptimisticMessage()
client.session
.prompt({
sessionID: session.id,
agent,
model,
messageID,
parts: requestParts,
variant,
})
.catch((err) => {
showToast({
title: language.t("prompt.toast.promptSendFailed.title"),
description: errorMessage(err),
})
const waitForWorktree = async () => {
const worktree = WorktreeState.get(sessionDirectory)
if (!worktree || worktree.status !== "pending") return true
if (sessionDirectory === projectDirectory) {
sync.set("session_status", session.id, { type: "busy" })
}
const controller = new AbortController()
const cleanup = () => {
if (sessionDirectory === projectDirectory) {
sync.set("session_status", session.id, { type: "idle" })
}
removeOptimisticMessage()
for (const item of commentItems) {
prompt.context.add({
@@ -1435,7 +1485,73 @@ export const PromptInput: Component<PromptInputProps> = (props) => {
})
}
restoreInput()
}
pending.set(session.id, { abort: controller, cleanup })
const abort = new Promise<Awaited<ReturnType<typeof WorktreeState.wait>>>((resolve) => {
if (controller.signal.aborted) {
resolve({ status: "failed", message: "aborted" })
return
}
controller.signal.addEventListener(
"abort",
() => {
resolve({ status: "failed", message: "aborted" })
},
{ once: true },
)
})
const timeoutMs = 5 * 60 * 1000
const timeout = new Promise<Awaited<ReturnType<typeof WorktreeState.wait>>>((resolve) => {
setTimeout(() => {
resolve({ status: "failed", message: "Workspace is still preparing" })
}, timeoutMs)
})
const result = await Promise.race([WorktreeState.wait(sessionDirectory), abort, timeout])
pending.delete(session.id)
if (controller.signal.aborted) return false
if (result.status === "failed") throw new Error(result.message)
return true
}
const send = async () => {
const ok = await waitForWorktree()
if (!ok) return
await client.session.prompt({
sessionID: session.id,
agent,
model,
messageID,
parts: requestParts,
variant,
})
}
void send().catch((err) => {
pending.delete(session.id)
if (sessionDirectory === projectDirectory) {
sync.set("session_status", session.id, { type: "idle" })
}
showToast({
title: language.t("prompt.toast.promptSendFailed.title"),
description: errorMessage(err),
})
removeOptimisticMessage()
for (const item of commentItems) {
prompt.context.add({
type: "file",
path: item.path,
selection: item.selection,
comment: item.comment,
commentID: item.commentID,
preview: item.preview,
})
}
restoreInput()
})
}
return (
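
The waitForWorktree logic above races three outcomes: the worktree becoming ready, a user abort (routed through the pending map so the abort button can cancel a prompt that is still queued), and a five-minute timeout. A self-contained sketch of that race; waitReady is a hypothetical stand-in for WorktreeState.wait:

type WaitResult = { status: "ready" } | { status: "failed"; message: string }

async function waitWithAbort(
  waitReady: () => Promise<WaitResult>, // stand-in for WorktreeState.wait
  signal: AbortSignal,
  timeoutMs = 5 * 60 * 1000,
): Promise<WaitResult> {
  const aborted = new Promise<WaitResult>((resolve) => {
    if (signal.aborted) return resolve({ status: "failed", message: "aborted" })
    signal.addEventListener("abort", () => resolve({ status: "failed", message: "aborted" }), { once: true })
  })
  const timeout = new Promise<WaitResult>((resolve) => {
    setTimeout(() => resolve({ status: "failed", message: "Workspace is still preparing" }), timeoutMs)
  })
  // First settled result wins, matching the Promise.race in the diff.
  return Promise.race([waitReady(), aborted, timeout])
}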

View File

@@ -21,8 +21,8 @@ export function SessionContextUsage(props: SessionContextUsageProps) {
const variant = createMemo(() => props.variant ?? "button")
const sessionKey = createMemo(() => `${params.dir}${params.id ? "/" + params.id : ""}`)
const tabs = createMemo(() => layout.tabs(sessionKey()))
const view = createMemo(() => layout.view(sessionKey()))
const tabs = createMemo(() => layout.tabs(sessionKey))
const view = createMemo(() => layout.view(sessionKey))
const messages = createMemo(() => (params.id ? (sync.data.message[params.id] ?? []) : []))
const cost = createMemo(() => {

View File

@@ -50,7 +50,7 @@ export function SessionHeader() {
const showShare = createMemo(() => shareEnabled() && !!currentSession())
const showReview = createMemo(() => !!currentSession())
const sessionKey = createMemo(() => `${params.dir}${params.id ? "/" + params.id : ""}`)
const view = createMemo(() => layout.view(sessionKey()))
const view = createMemo(() => layout.view(sessionKey))
const [state, setState] = createStore({
share: false,

View File

@@ -189,6 +189,8 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({
const params = useParams()
const language = useLanguage()
const scope = createMemo(() => sdk.directory)
const directory = createMemo(() => sync.data.path.directory)
function normalize(input: string) {
@@ -234,6 +236,12 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({
file: {},
})
createEffect(() => {
scope()
inflight.clear()
setStore("file", {})
})
const viewCache = new Map<string, ViewCacheEntry>()
const disposeViews = () => {
@@ -284,12 +292,16 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({
const path = normalize(input)
if (!path) return Promise.resolve()
const directory = scope()
const key = `${directory}\n${path}`
const client = sdk.client
ensure(path)
const current = store.file[path]
if (!options?.force && current?.loaded) return Promise.resolve()
const pending = inflight.get(path)
const pending = inflight.get(key)
if (pending) return pending
setStore(
@@ -301,9 +313,10 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({
}),
)
const promise = sdk.client.file
const promise = client.file
.read({ path })
.then((x) => {
if (scope() !== directory) return
setStore(
"file",
path,
@@ -315,6 +328,7 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({
)
})
.catch((e) => {
if (scope() !== directory) return
setStore(
"file",
path,
@@ -330,10 +344,10 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({
})
})
.finally(() => {
inflight.delete(path)
inflight.delete(key)
})
inflight.set(path, promise)
inflight.set(key, promise)
return promise
}
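
The file-context diff above switches the inflight map from path keys to directory-plus-path keys, so two workspaces reading the same relative path no longer share one pending promise. A minimal sketch of the dedup, assuming (as the diff does) that the "\n" separator never occurs inside a path:

const inflight = new Map<string, Promise<string>>()

function readOnce(directory: string, path: string, read: (p: string) => Promise<string>) {
  const key = `${directory}\n${path}` // composite key, same scheme as the diff
  const pending = inflight.get(key)
  if (pending) return pending // coalesce concurrent reads of the same file
  const promise = read(path).finally(() => inflight.delete(key))
  inflight.set(key, promise)
  return promise
}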

View File

@@ -1,5 +1,5 @@
import { createStore, produce } from "solid-js/store"
import { batch, createEffect, createMemo, onCleanup, onMount } from "solid-js"
import { batch, createEffect, createMemo, on, onCleanup, onMount, type Accessor } from "solid-js"
import { createSimpleContext } from "@opencode-ai/ui/context"
import { useGlobalSync } from "./global-sync"
import { useGlobalSDK } from "./global-sdk"
@@ -432,10 +432,24 @@ export const { use: useLayout, provider: LayoutProvider } = createSimpleContext(
setStore("mobileSidebar", "opened", (x) => !x)
},
},
view(sessionKey: string) {
touch(sessionKey)
scroll.seed(sessionKey)
const s = createMemo(() => store.sessionView[sessionKey] ?? { scroll: {} })
view(sessionKey: string | Accessor<string>) {
const key = typeof sessionKey === "function" ? sessionKey : () => sessionKey
touch(key())
scroll.seed(key())
createEffect(
on(
key,
(value) => {
touch(value)
scroll.seed(value)
},
{ defer: true },
),
)
const s = createMemo(() => store.sessionView[key()] ?? { scroll: {} })
const terminalOpened = createMemo(() => store.terminal?.opened ?? false)
const reviewPanelOpened = createMemo(() => store.review?.panelOpened ?? true)
@@ -465,10 +479,10 @@ export const { use: useLayout, provider: LayoutProvider } = createSimpleContext(
return {
scroll(tab: string) {
return scroll.scroll(sessionKey, tab)
return scroll.scroll(key(), tab)
},
setScroll(tab: string, pos: SessionScroll) {
scroll.setScroll(sessionKey, tab, pos)
scroll.setScroll(key(), tab, pos)
},
terminal: {
opened: terminalOpened,
@@ -497,9 +511,10 @@ export const { use: useLayout, provider: LayoutProvider } = createSimpleContext(
review: {
open: createMemo(() => s().reviewOpen),
setOpen(open: string[]) {
const current = store.sessionView[sessionKey]
const session = key()
const current = store.sessionView[session]
if (!current) {
setStore("sessionView", sessionKey, {
setStore("sessionView", session, {
scroll: {},
reviewOpen: open,
})
@@ -507,93 +522,111 @@ export const { use: useLayout, provider: LayoutProvider } = createSimpleContext(
}
if (same(current.reviewOpen, open)) return
setStore("sessionView", sessionKey, "reviewOpen", open)
setStore("sessionView", session, "reviewOpen", open)
},
},
}
},
tabs(sessionKey: string) {
touch(sessionKey)
const tabs = createMemo(() => store.sessionTabs[sessionKey] ?? { all: [] })
tabs(sessionKey: string | Accessor<string>) {
const key = typeof sessionKey === "function" ? sessionKey : () => sessionKey
touch(key())
createEffect(
on(
key,
(value) => {
touch(value)
},
{ defer: true },
),
)
const tabs = createMemo(() => store.sessionTabs[key()] ?? { all: [] })
return {
tabs,
active: createMemo(() => tabs().active),
all: createMemo(() => tabs().all),
setActive(tab: string | undefined) {
if (!store.sessionTabs[sessionKey]) {
setStore("sessionTabs", sessionKey, { all: [], active: tab })
const session = key()
if (!store.sessionTabs[session]) {
setStore("sessionTabs", session, { all: [], active: tab })
} else {
setStore("sessionTabs", sessionKey, "active", tab)
setStore("sessionTabs", session, "active", tab)
}
},
setAll(all: string[]) {
if (!store.sessionTabs[sessionKey]) {
setStore("sessionTabs", sessionKey, { all, active: undefined })
const session = key()
if (!store.sessionTabs[session]) {
setStore("sessionTabs", session, { all, active: undefined })
} else {
setStore("sessionTabs", sessionKey, "all", all)
setStore("sessionTabs", session, "all", all)
}
},
async open(tab: string) {
const current = store.sessionTabs[sessionKey] ?? { all: [] }
const session = key()
const current = store.sessionTabs[session] ?? { all: [] }
if (tab === "review") {
if (!store.sessionTabs[sessionKey]) {
setStore("sessionTabs", sessionKey, { all: [], active: tab })
if (!store.sessionTabs[session]) {
setStore("sessionTabs", session, { all: [], active: tab })
return
}
setStore("sessionTabs", sessionKey, "active", tab)
setStore("sessionTabs", session, "active", tab)
return
}
if (tab === "context") {
const all = [tab, ...current.all.filter((x) => x !== tab)]
if (!store.sessionTabs[sessionKey]) {
setStore("sessionTabs", sessionKey, { all, active: tab })
if (!store.sessionTabs[session]) {
setStore("sessionTabs", session, { all, active: tab })
return
}
setStore("sessionTabs", sessionKey, "all", all)
setStore("sessionTabs", sessionKey, "active", tab)
setStore("sessionTabs", session, "all", all)
setStore("sessionTabs", session, "active", tab)
return
}
if (!current.all.includes(tab)) {
if (!store.sessionTabs[sessionKey]) {
setStore("sessionTabs", sessionKey, { all: [tab], active: tab })
if (!store.sessionTabs[session]) {
setStore("sessionTabs", session, { all: [tab], active: tab })
return
}
setStore("sessionTabs", sessionKey, "all", [...current.all, tab])
setStore("sessionTabs", sessionKey, "active", tab)
setStore("sessionTabs", session, "all", [...current.all, tab])
setStore("sessionTabs", session, "active", tab)
return
}
if (!store.sessionTabs[sessionKey]) {
setStore("sessionTabs", sessionKey, { all: current.all, active: tab })
if (!store.sessionTabs[session]) {
setStore("sessionTabs", session, { all: current.all, active: tab })
return
}
setStore("sessionTabs", sessionKey, "active", tab)
setStore("sessionTabs", session, "active", tab)
},
close(tab: string) {
const current = store.sessionTabs[sessionKey]
const session = key()
const current = store.sessionTabs[session]
if (!current) return
const all = current.all.filter((x) => x !== tab)
batch(() => {
setStore("sessionTabs", sessionKey, "all", all)
setStore("sessionTabs", session, "all", all)
if (current.active !== tab) return
const index = current.all.findIndex((f) => f === tab)
const next = all[index - 1] ?? all[0]
setStore("sessionTabs", sessionKey, "active", next)
setStore("sessionTabs", session, "active", next)
})
},
move(tab: string, to: number) {
const current = store.sessionTabs[sessionKey]
const session = key()
const current = store.sessionTabs[session]
if (!current) return
const index = current.all.findIndex((f) => f === tab)
if (index === -1) return
setStore(
"sessionTabs",
sessionKey,
session,
"all",
produce((opened) => {
opened.splice(to, 0, opened.splice(index, 1)[0])

View File

@@ -1,5 +1,5 @@
import { createStore, produce, reconcile } from "solid-js/store"
import { batch, createMemo, onCleanup } from "solid-js"
import { batch, createEffect, createMemo, onCleanup } from "solid-js"
import { filter, firstBy, flat, groupBy, mapValues, pipe, uniqueBy, values } from "remeda"
import type { FileContent, FileNode, Model, Provider, File as FileStatus } from "@opencode-ai/sdk/v2"
import { createSimpleContext } from "@opencode-ai/ui/context"
@@ -338,6 +338,12 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
node: {}, // Object.fromEntries(sync.data.node.map((x) => [x.path, x])),
})
const scope = createMemo(() => sdk.directory)
createEffect(() => {
scope()
setStore("node", {})
})
// const changeset = createMemo(() => new Set(sync.data.changes.map((f) => f.path)))
// const changes = createMemo(() => Array.from(changeset()).sort((a, b) => a.localeCompare(b)))
@@ -394,10 +400,13 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
const relative = (path: string) => path.replace(sync.data.path.directory + "/", "")
const load = async (path: string) => {
const directory = scope()
const client = sdk.client
const relativePath = relative(path)
await sdk.client.file
await client.file
.read({ path: relativePath })
.then((x) => {
if (scope() !== directory) return
if (!store.node[relativePath]) return
setStore(
"node",
@@ -409,6 +418,7 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
)
})
.catch((e) => {
if (scope() !== directory) return
showToast({
variant: "error",
title: language.t("toast.file.loadFailed.title"),
@@ -453,9 +463,12 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
}
const list = async (path: string) => {
return sdk.client.file
const directory = scope()
const client = sdk.client
return client.file
.list({ path: path + "/" })
.then((x) => {
if (scope() !== directory) return
setStore(
"node",
produce((draft) => {
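
The scope() !== directory checks threaded through this diff are a stale-response guard: snapshot the active workspace before issuing the request, then drop the result if the user switched workspaces while it was in flight. Sketched generically, with read and apply as hypothetical placeholders:

async function load(
  path: string,
  scope: () => string, // current workspace directory
  read: (p: string) => Promise<string>,
  apply: (content: string) => void,
) {
  const directory = scope() // snapshot at request time
  const content = await read(path)
  if (scope() !== directory) return // workspace changed mid-flight; discard
  apply(content)
}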

View File

@@ -1,7 +1,7 @@
import { createOpencodeClient, type Event } from "@opencode-ai/sdk/v2/client"
import { createSimpleContext } from "@opencode-ai/ui/context"
import { createGlobalEmitter } from "@solid-primitives/event-bus"
import { onCleanup } from "solid-js"
import { createEffect, createMemo, onCleanup } from "solid-js"
import { useGlobalSDK } from "./global-sdk"
import { usePlatform } from "./platform"
@@ -10,22 +10,39 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({
init: (props: { directory: string }) => {
const platform = usePlatform()
const globalSDK = useGlobalSDK()
const sdk = createOpencodeClient({
baseUrl: globalSDK.url,
fetch: platform.fetch,
directory: props.directory,
throwOnError: true,
})
const directory = createMemo(() => props.directory)
const client = createMemo(() =>
createOpencodeClient({
baseUrl: globalSDK.url,
fetch: platform.fetch,
directory: directory(),
throwOnError: true,
}),
)
const emitter = createGlobalEmitter<{
[key in Event["type"]]: Extract<Event, { type: key }>
}>()
const unsub = globalSDK.event.on(props.directory, (event) => {
emitter.emit(event.type, event)
createEffect(() => {
const unsub = globalSDK.event.on(directory(), (event) => {
emitter.emit(event.type, event)
})
onCleanup(unsub)
})
onCleanup(unsub)
return { directory: props.directory, client: sdk, event: emitter, url: globalSDK.url }
return {
get directory() {
return directory()
},
get client() {
return client()
},
event: emitter,
get url() {
return globalSDK.url
},
}
},
})
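
The shape of the return value is the point of this refactor: returning `client: client()` would freeze the first client forever, while property getters re-read the memo on every access, so consumers stay reactive when `props.directory` changes. A minimal sketch of the difference, assuming standard solid-js reactivity semantics:

import { createMemo, createRoot, createSignal } from "solid-js"

createRoot(() => {
  const [dir, setDir] = createSignal("/a")
  const client = createMemo(() => ({ baseUrl: dir() }))
  const frozen = { client: client() } // snapshot taken once at construction
  const live = {
    get client() {
      return client() // re-evaluated on every property access
    },
  }
  setDir("/b")
  console.log(frozen.client.baseUrl) // "/a" (stale)
  console.log(live.client.baseUrl) // "/b" (current)
})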

View File

@@ -7,13 +7,20 @@ import { useGlobalSync } from "./global-sync"
import { useSDK } from "./sdk"
import type { Message, Part } from "@opencode-ai/sdk/v2/client"
const keyFor = (directory: string, id: string) => `${directory}\n${id}`
export const { use: useSync, provider: SyncProvider } = createSimpleContext({
name: "Sync",
init: () => {
const globalSync = useGlobalSync()
const sdk = useSDK()
const [store, setStore] = globalSync.child(sdk.directory)
const absolute = (path: string) => (store.path.directory + "/" + path).replace("//", "/")
type Child = ReturnType<(typeof globalSync)["child"]>
type Store = Child[0]
type Setter = Child[1]
const current = createMemo(() => globalSync.child(sdk.directory))
const absolute = (path: string) => (current()[0].path.directory + "/" + path).replace("//", "/")
const chunk = 400
const inflight = new Map<string, Promise<void>>()
const inflightDiff = new Map<string, Promise<void>>()
@@ -25,6 +32,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
})
const getSession = (sessionID: string) => {
const store = current()[0]
const match = Binary.search(store.session, sessionID, (s) => s.id)
if (match.found) return store.session[match.index]
return undefined
@@ -35,22 +43,30 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
return Math.ceil(count / chunk) * chunk
}
const hydrateMessages = (sessionID: string) => {
if (meta.limit[sessionID] !== undefined) return
const hydrateMessages = (directory: string, store: Store, sessionID: string) => {
const key = keyFor(directory, sessionID)
if (meta.limit[key] !== undefined) return
const messages = store.message[sessionID]
if (!messages) return
const limit = limitFor(messages.length)
setMeta("limit", sessionID, limit)
setMeta("complete", sessionID, messages.length < limit)
setMeta("limit", key, limit)
setMeta("complete", key, messages.length < limit)
}
const loadMessages = async (sessionID: string, limit: number) => {
if (meta.loading[sessionID]) return
const loadMessages = async (input: {
directory: string
client: typeof sdk.client
setStore: Setter
sessionID: string
limit: number
}) => {
const key = keyFor(input.directory, input.sessionID)
if (meta.loading[key]) return
setMeta("loading", sessionID, true)
await retry(() => sdk.client.session.messages({ sessionID, limit }))
setMeta("loading", key, true)
await retry(() => input.client.session.messages({ sessionID: input.sessionID, limit: input.limit }))
.then((messages) => {
const items = (messages.data ?? []).filter((x) => !!x?.info?.id)
const next = items
@@ -60,10 +76,10 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
.sort((a, b) => a.id.localeCompare(b.id))
batch(() => {
setStore("message", sessionID, reconcile(next, { key: "id" }))
input.setStore("message", input.sessionID, reconcile(next, { key: "id" }))
for (const message of items) {
setStore(
input.setStore(
"part",
message.info.id,
reconcile(
@@ -76,25 +92,30 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
)
}
setMeta("limit", sessionID, limit)
setMeta("complete", sessionID, next.length < limit)
setMeta("limit", key, input.limit)
setMeta("complete", key, next.length < input.limit)
})
})
.finally(() => {
setMeta("loading", sessionID, false)
setMeta("loading", key, false)
})
}
return {
data: store,
set: setStore,
get data() {
return current()[0]
},
get set(): Setter {
return current()[1]
},
get status() {
return store.status
return current()[0].status
},
get ready() {
return store.status !== "loading"
return current()[0].status !== "loading"
},
get project() {
const store = current()[0]
const match = Binary.search(globalSync.data.project, store.project, (p) => p.id)
if (match.found) return globalSync.data.project[match.index]
return undefined
@@ -116,7 +137,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
agent: input.agent,
model: input.model,
}
setStore(
current()[1](
produce((draft) => {
const messages = draft.message[input.sessionID]
if (!messages) {
@@ -133,20 +154,28 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
)
},
async sync(sessionID: string) {
const hasSession = getSession(sessionID) !== undefined
hydrateMessages(sessionID)
const directory = sdk.directory
const client = sdk.client
const [store, setStore] = globalSync.child(directory)
const hasSession = (() => {
const match = Binary.search(store.session, sessionID, (s) => s.id)
return match.found
})()
hydrateMessages(directory, store, sessionID)
const hasMessages = store.message[sessionID] !== undefined
if (hasSession && hasMessages) return
const pending = inflight.get(sessionID)
const key = keyFor(directory, sessionID)
const pending = inflight.get(key)
if (pending) return pending
const limit = meta.limit[sessionID] ?? chunk
const limit = meta.limit[key] ?? chunk
const sessionReq = hasSession
? Promise.resolve()
: retry(() => sdk.client.session.get({ sessionID })).then((session) => {
: retry(() => client.session.get({ sessionID })).then((session) => {
const data = session.data
if (!data) return
setStore(
@@ -162,72 +191,104 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
)
})
const messagesReq = hasMessages ? Promise.resolve() : loadMessages(sessionID, limit)
const messagesReq = hasMessages
? Promise.resolve()
: loadMessages({
directory,
client,
setStore,
sessionID,
limit,
})
const promise = Promise.all([sessionReq, messagesReq])
.then(() => {})
.finally(() => {
inflight.delete(sessionID)
inflight.delete(key)
})
inflight.set(sessionID, promise)
inflight.set(key, promise)
return promise
},
async diff(sessionID: string) {
const directory = sdk.directory
const client = sdk.client
const [store, setStore] = globalSync.child(directory)
if (store.session_diff[sessionID] !== undefined) return
const pending = inflightDiff.get(sessionID)
const key = keyFor(directory, sessionID)
const pending = inflightDiff.get(key)
if (pending) return pending
const promise = retry(() => sdk.client.session.diff({ sessionID }))
const promise = retry(() => client.session.diff({ sessionID }))
.then((diff) => {
setStore("session_diff", sessionID, reconcile(diff.data ?? [], { key: "file" }))
})
.finally(() => {
inflightDiff.delete(sessionID)
inflightDiff.delete(key)
})
inflightDiff.set(sessionID, promise)
inflightDiff.set(key, promise)
return promise
},
async todo(sessionID: string) {
const directory = sdk.directory
const client = sdk.client
const [store, setStore] = globalSync.child(directory)
if (store.todo[sessionID] !== undefined) return
const pending = inflightTodo.get(sessionID)
const key = keyFor(directory, sessionID)
const pending = inflightTodo.get(key)
if (pending) return pending
const promise = retry(() => sdk.client.session.todo({ sessionID }))
const promise = retry(() => client.session.todo({ sessionID }))
.then((todo) => {
setStore("todo", sessionID, reconcile(todo.data ?? [], { key: "id" }))
})
.finally(() => {
inflightTodo.delete(sessionID)
inflightTodo.delete(key)
})
inflightTodo.set(sessionID, promise)
inflightTodo.set(key, promise)
return promise
},
history: {
more(sessionID: string) {
const store = current()[0]
const key = keyFor(sdk.directory, sessionID)
if (store.message[sessionID] === undefined) return false
if (meta.limit[sessionID] === undefined) return false
if (meta.complete[sessionID]) return false
if (meta.limit[key] === undefined) return false
if (meta.complete[key]) return false
return true
},
loading(sessionID: string) {
return meta.loading[sessionID] ?? false
const key = keyFor(sdk.directory, sessionID)
return meta.loading[key] ?? false
},
async loadMore(sessionID: string, count = chunk) {
if (meta.loading[sessionID]) return
if (meta.complete[sessionID]) return
const directory = sdk.directory
const client = sdk.client
const [, setStore] = globalSync.child(directory)
const key = keyFor(directory, sessionID)
if (meta.loading[key]) return
if (meta.complete[key]) return
const current = meta.limit[sessionID] ?? chunk
await loadMessages(sessionID, current + count)
const currentLimit = meta.limit[key] ?? chunk
await loadMessages({
directory,
client,
setStore,
sessionID,
limit: currentLimit + count,
})
},
},
fetch: async (count = 10) => {
const directory = sdk.directory
const client = sdk.client
const [store, setStore] = globalSync.child(directory)
setStore("limit", (x) => x + count)
await sdk.client.session.list().then((x) => {
await client.session.list().then((x) => {
const sessions = (x.data ?? [])
.filter((s) => !!s?.id)
.slice()
@@ -236,9 +297,12 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
setStore("session", reconcile(sessions, { key: "id" }))
})
},
more: createMemo(() => store.session.length >= store.limit),
more: createMemo(() => current()[0].session.length >= current()[0].limit),
archive: async (sessionID: string) => {
await sdk.client.session.update({ sessionID, time: { archived: Date.now() } })
const directory = sdk.directory
const client = sdk.client
const [, setStore] = globalSync.child(directory)
await client.session.update({ sessionID, time: { archived: Date.now() } })
setStore(
produce((draft) => {
const match = Binary.search(draft.session, sessionID, (s) => s.id)
@@ -249,7 +313,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
},
absolute,
get directory() {
return store.path.directory
return current()[0].path.directory
},
}
},
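
A note on the `keyFor` helper this file now leans on: the meta maps (`limit`, `loading`, `complete`) and the inflight maps were previously keyed by session ID alone, so two worktrees touching the same session could collide; joining directory and ID with `\n` scopes every entry to its workspace. A plain-Map restatement (the real code uses a solid store), assuming neither a directory path nor a session ID contains a newline:

const keyFor = (directory: string, id: string) => `${directory}\n${id}` // as in the diff

const loading = new Map<string, boolean>()
loading.set(keyFor("/repo/main", "ses_123"), true) // IDs illustrative

// The same session ID under another worktree stays independent:
console.log(loading.get(keyFor("/repo/feature", "ses_123"))) // undefined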

View File

@@ -88,6 +88,8 @@ export const dict = {
"dialog.provider.group.other": "آخر",
"dialog.provider.tag.recommended": "موصى به",
"dialog.provider.anthropic.note": "اتصل باستخدام Claude Pro/Max أو مفتاح API",
"dialog.provider.openai.note": "اتصل باستخدام ChatGPT Pro/Plus أو مفتاح API",
"dialog.provider.copilot.note": "اتصل باستخدام Copilot أو مفتاح API",
"dialog.model.select.title": "تحديد نموذج",
"dialog.model.search.placeholder": "البحث عن نماذج",

View File

@@ -88,6 +88,8 @@ export const dict = {
"dialog.provider.group.other": "Outro",
"dialog.provider.tag.recommended": "Recomendado",
"dialog.provider.anthropic.note": "Conectar com Claude Pro/Max ou chave de API",
"dialog.provider.openai.note": "Conectar com ChatGPT Pro/Plus ou chave de API",
"dialog.provider.copilot.note": "Conectar com Copilot ou chave de API",
"dialog.model.select.title": "Selecionar modelo",
"dialog.model.search.placeholder": "Buscar modelos",

View File

@@ -86,6 +86,8 @@ export const dict = {
"dialog.provider.group.other": "Andre",
"dialog.provider.tag.recommended": "Anbefalet",
"dialog.provider.anthropic.note": "Forbind med Claude Pro/Max eller API-nøgle",
"dialog.provider.openai.note": "Forbind med ChatGPT Pro/Plus eller API-nøgle",
"dialog.provider.copilot.note": "Forbind med Copilot eller API-nøgle",
"dialog.model.select.title": "Vælg model",
"dialog.model.search.placeholder": "Søg modeller",

View File

@@ -90,6 +90,8 @@ export const dict = {
"dialog.provider.group.other": "Andere",
"dialog.provider.tag.recommended": "Empfohlen",
"dialog.provider.anthropic.note": "Mit Claude Pro/Max oder API-Schlüssel verbinden",
"dialog.provider.openai.note": "Mit ChatGPT Pro/Plus oder API-Schlüssel verbinden",
"dialog.provider.copilot.note": "Mit Copilot oder API-Schlüssel verbinden",
"dialog.model.select.title": "Modell auswählen",
"dialog.model.search.placeholder": "Modelle durchsuchen",

View File

@@ -88,6 +88,8 @@ export const dict = {
"dialog.provider.group.other": "Other",
"dialog.provider.tag.recommended": "Recommended",
"dialog.provider.anthropic.note": "Connect with Claude Pro/Max or API key",
"dialog.provider.openai.note": "Connect with ChatGPT Pro/Plus or API key",
"dialog.provider.copilot.note": "Connect with Copilot or API key",
"dialog.model.select.title": "Select model",
"dialog.model.search.placeholder": "Search models",

View File

@@ -86,6 +86,8 @@ export const dict = {
"dialog.provider.group.other": "Otro",
"dialog.provider.tag.recommended": "Recomendado",
"dialog.provider.anthropic.note": "Conectar con Claude Pro/Max o clave API",
"dialog.provider.openai.note": "Conectar con ChatGPT Pro/Plus o clave API",
"dialog.provider.copilot.note": "Conectar con Copilot o clave API",
"dialog.model.select.title": "Seleccionar modelo",
"dialog.model.search.placeholder": "Buscar modelos",

View File

@@ -86,6 +86,8 @@ export const dict = {
"dialog.provider.group.other": "Autre",
"dialog.provider.tag.recommended": "Recommandé",
"dialog.provider.anthropic.note": "Connectez-vous avec Claude Pro/Max ou une clé API",
"dialog.provider.openai.note": "Connectez-vous avec ChatGPT Pro/Plus ou une clé API",
"dialog.provider.copilot.note": "Connectez-vous avec Copilot ou une clé API",
"dialog.model.select.title": "Sélectionner un modèle",
"dialog.model.search.placeholder": "Rechercher des modèles",

View File

@@ -86,6 +86,8 @@ export const dict = {
"dialog.provider.group.other": "その他",
"dialog.provider.tag.recommended": "推奨",
"dialog.provider.anthropic.note": "Claude Pro/MaxまたはAPIキーで接続",
"dialog.provider.openai.note": "ChatGPT Pro/PlusまたはAPIキーで接続",
"dialog.provider.copilot.note": "CopilotまたはAPIキーで接続",
"dialog.model.select.title": "モデルを選択",
"dialog.model.search.placeholder": "モデルを検索",

View File

@@ -90,6 +90,8 @@ export const dict = {
"dialog.provider.group.other": "기타",
"dialog.provider.tag.recommended": "추천",
"dialog.provider.anthropic.note": "Claude Pro/Max 또는 API 키로 연결",
"dialog.provider.openai.note": "ChatGPT Pro/Plus 또는 API 키로 연결",
"dialog.provider.copilot.note": "Copilot 또는 API 키로 연결",
"dialog.model.select.title": "모델 선택",
"dialog.model.search.placeholder": "모델 검색",

View File

@@ -91,6 +91,8 @@ export const dict = {
"dialog.provider.group.other": "Andre",
"dialog.provider.tag.recommended": "Anbefalt",
"dialog.provider.anthropic.note": "Koble til med Claude Pro/Max eller API-nøkkel",
"dialog.provider.openai.note": "Koble til med ChatGPT Pro/Plus eller API-nøkkel",
"dialog.provider.copilot.note": "Koble til med Copilot eller API-nøkkel",
"dialog.model.select.title": "Velg modell",
"dialog.model.search.placeholder": "Søk etter modeller",

View File

@@ -88,6 +88,8 @@ export const dict = {
"dialog.provider.group.other": "Inne",
"dialog.provider.tag.recommended": "Zalecane",
"dialog.provider.anthropic.note": "Połącz z Claude Pro/Max lub kluczem API",
"dialog.provider.openai.note": "Połącz z ChatGPT Pro/Plus lub kluczem API",
"dialog.provider.copilot.note": "Połącz z Copilot lub kluczem API",
"dialog.model.select.title": "Wybierz model",
"dialog.model.search.placeholder": "Szukaj modeli",

View File

@@ -88,6 +88,8 @@ export const dict = {
"dialog.provider.group.other": "Другие",
"dialog.provider.tag.recommended": "Рекомендуемые",
"dialog.provider.anthropic.note": "Подключитесь с помощью Claude Pro/Max или API ключа",
"dialog.provider.openai.note": "Подключитесь с помощью ChatGPT Pro/Plus или API ключа",
"dialog.provider.copilot.note": "Подключитесь с помощью Copilot или API ключа",
"dialog.model.select.title": "Выбрать модель",
"dialog.model.search.placeholder": "Поиск моделей",

View File

@@ -90,6 +90,8 @@ export const dict = {
"dialog.provider.group.other": "其他",
"dialog.provider.tag.recommended": "推荐",
"dialog.provider.anthropic.note": "使用 Claude Pro/Max 或 API 密钥连接",
"dialog.provider.openai.note": "使用 ChatGPT Pro/Plus 或 API 密钥连接",
"dialog.provider.copilot.note": "使用 Copilot 或 API 密钥连接",
"dialog.model.select.title": "选择模型",
"dialog.model.search.placeholder": "搜索模型",

View File

@@ -90,6 +90,8 @@ export const dict = {
"dialog.provider.group.other": "其他",
"dialog.provider.tag.recommended": "推薦",
"dialog.provider.anthropic.note": "使用 Claude Pro/Max 或 API 金鑰連線",
"dialog.provider.openai.note": "使用 ChatGPT Pro/Plus 或 API 金鑰連線",
"dialog.provider.copilot.note": "使用 Copilot 或 API 金鑰連線",
"dialog.model.select.title": "選擇模型",
"dialog.model.search.placeholder": "搜尋模型",

View File

@@ -16,7 +16,7 @@ export default function Layout(props: ParentProps) {
return base64Decode(params.dir!)
})
return (
<Show when={params.dir} keyed>
<Show when={params.dir}>
<SDKProvider directory={directory()}>
<SyncProvider>
{iife(() => {

View File

@@ -56,6 +56,7 @@ import { usePermission } from "@/context/permission"
import { Binary } from "@opencode-ai/util/binary"
import { retry } from "@opencode-ai/util/retry"
import { playSound, soundSrc } from "@/utils/sound"
import { Worktree as WorktreeState } from "@/utils/worktree"
import { useDialog } from "@opencode-ai/ui/context/dialog"
import { useTheme, type ColorScheme } from "@opencode-ai/ui/theme"
@@ -332,6 +333,18 @@ export default function Layout(props: ParentProps) {
const cooldownMs = 5000
const unsub = globalSDK.event.listen((e) => {
if (e.details?.type === "worktree.ready") {
setBusy(e.name, false)
WorktreeState.ready(e.name)
return
}
if (e.details?.type === "worktree.failed") {
setBusy(e.name, false)
WorktreeState.failed(e.name, e.details.properties?.message ?? language.t("common.requestFailed"))
return
}
if (e.details?.type !== "permission.asked" && e.details?.type !== "question.asked") return
const title =
e.details.type === "permission.asked"
@@ -551,6 +564,7 @@ export default function Layout(props: ParentProps) {
const project = currentProject()
if (!project) return
const local = project.worktree
const dirs = [project.worktree, ...(project.sandboxes ?? [])]
const existing = store.workspaceOrder[project.worktree]
if (!existing) {
@@ -558,9 +572,9 @@ export default function Layout(props: ParentProps) {
return
}
const keep = existing.filter((d) => dirs.includes(d))
const missing = dirs.filter((d) => !existing.includes(d))
const merged = [...keep, ...missing]
const keep = existing.filter((d) => d !== local && dirs.includes(d))
const missing = dirs.filter((d) => d !== local && !existing.includes(d))
const merged = [local, ...missing, ...keep]
if (merged.length !== existing.length) {
setStore("workspaceOrder", project.worktree, merged)
@@ -1246,9 +1260,9 @@ export default function Layout(props: ParentProps) {
})
})
const handleDelete = async () => {
await deleteWorkspace(props.directory)
const handleDelete = () => {
dialog.close()
void deleteWorkspace(props.directory)
}
const description = () => {
@@ -1434,17 +1448,22 @@ export default function Layout(props: ParentProps) {
function workspaceIds(project: LocalProject | undefined) {
if (!project) return []
const dirs = [project.worktree, ...(project.sandboxes ?? [])]
const local = project.worktree
const dirs = [local, ...(project.sandboxes ?? [])]
const active = currentProject()
const directory = active?.worktree === project.worktree && params.dir ? base64Decode(params.dir) : undefined
const next = directory && directory !== project.worktree && !dirs.includes(directory) ? [...dirs, directory] : dirs
const extra = directory && directory !== local && !dirs.includes(directory) ? directory : undefined
const pending = extra ? WorktreeState.get(extra)?.status === "pending" : false
const existing = store.workspaceOrder[project.worktree]
if (!existing) return next
if (!existing) return extra ? [...dirs, extra] : dirs
const keep = existing.filter((d) => next.includes(d))
const missing = next.filter((d) => !existing.includes(d))
return [...keep, ...missing]
const keep = existing.filter((d) => d !== local && dirs.includes(d))
const missing = dirs.filter((d) => d !== local && !existing.includes(d))
const merged = [local, ...(pending && extra ? [extra] : []), ...missing, ...keep]
if (!extra) return merged
if (pending) return merged
return [...merged, extra]
}
function handleWorkspaceDragStart(event: unknown) {
@@ -2136,11 +2155,11 @@ export default function Layout(props: ParentProps) {
variant="ghost"
class="flex w-full text-left justify-start text-text-base px-2 hover:bg-transparent active:bg-transparent"
onClick={() => {
layout.sidebar.open()
if (selected()) {
setOpen(false)
return
}
layout.sidebar.open()
navigateToProject(props.project.worktree)
}}
>
@@ -2237,8 +2256,19 @@ export default function Layout(props: ParentProps) {
if (!created?.directory) return
setBusy(created.directory, true)
WorktreeState.pending(created.directory)
setStore("workspaceExpanded", created.directory, true)
setStore("workspaceOrder", current.worktree, (prev) => {
const existing = prev ?? []
const local = current.worktree
const next = existing.filter((d) => d !== local && d !== created.directory)
return [local, created.directory, ...next]
})
globalSync.child(created.directory)
navigate(`/${base64Encode(created.directory)}/session`)
layout.mobileSidebar.hide()
}
command.register(() => [

View File

@@ -199,8 +199,8 @@ export default function Page() {
const permission = usePermission()
const [pendingMessage, setPendingMessage] = createSignal<string | undefined>(undefined)
const sessionKey = createMemo(() => `${params.dir}${params.id ? "/" + params.id : ""}`)
const tabs = createMemo(() => layout.tabs(sessionKey()))
const view = createMemo(() => layout.view(sessionKey()))
const tabs = createMemo(() => layout.tabs(sessionKey))
const view = createMemo(() => layout.view(sessionKey))
if (import.meta.env.DEV) {
createEffect(

View File

@@ -0,0 +1,58 @@
const normalize = (directory: string) => directory.replace(/[\\/]+$/, "")
type State =
| {
status: "pending"
}
| {
status: "ready"
}
| {
status: "failed"
message: string
}
const state = new Map<string, State>()
const waiters = new Map<string, Array<(state: State) => void>>()
export const Worktree = {
get(directory: string) {
return state.get(normalize(directory))
},
pending(directory: string) {
const key = normalize(directory)
const current = state.get(key)
if (current && current.status !== "pending") return
state.set(key, { status: "pending" })
},
ready(directory: string) {
const key = normalize(directory)
state.set(key, { status: "ready" })
const list = waiters.get(key)
if (!list) return
waiters.delete(key)
for (const fn of list) fn({ status: "ready" })
},
failed(directory: string, message: string) {
const key = normalize(directory)
state.set(key, { status: "failed", message })
const list = waiters.get(key)
if (!list) return
waiters.delete(key)
for (const fn of list) fn({ status: "failed", message })
},
wait(directory: string) {
const key = normalize(directory)
const current = state.get(key)
if (current && current.status !== "pending") return Promise.resolve(current)
return new Promise<State>((resolve) => {
const list = waiters.get(key)
if (!list) {
waiters.set(key, [resolve])
return
}
list.push(resolve)
})
},
}
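
A usage sketch for this module, matching how the layout file above wires it up (`pending` at creation time, `ready`/`failed` from the event stream); the paths and timeout are illustrative:

import { Worktree } from "@/utils/worktree" // alias as imported by the layout

Worktree.pending("/tmp/sandbox/feature-x/") // trailing slash is stripped by normalize
setTimeout(() => Worktree.ready("/tmp/sandbox/feature-x"), 100) // stand-in for the worktree.ready event

const state = await Worktree.wait("/tmp/sandbox/feature-x") // resolves once ready/failed lands
if (state.status === "failed") console.error(state.message)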

View File

@@ -59,4 +59,84 @@
font-size: var(--font-size-sm);
color: var(--color-text-muted);
}
[data-slot="setting-row"] {
display: flex;
align-items: center;
justify-content: space-between;
gap: var(--space-3);
margin-top: var(--space-4);
p {
font-size: var(--font-size-sm);
line-height: 1.5;
color: var(--color-text-secondary);
margin: 0;
}
}
[data-slot="toggle-label"] {
position: relative;
display: inline-block;
width: 2.5rem;
height: 1.5rem;
cursor: pointer;
flex-shrink: 0;
input {
opacity: 0;
width: 0;
height: 0;
}
span {
position: absolute;
inset: 0;
background-color: #ccc;
border: 1px solid #bbb;
border-radius: 1.5rem;
transition: all 0.3s ease;
cursor: pointer;
&::before {
content: "";
position: absolute;
top: 50%;
left: 0.125rem;
width: 1.25rem;
height: 1.25rem;
background-color: white;
border: 1px solid #ddd;
border-radius: 50%;
transform: translateY(-50%);
transition: all 0.3s ease;
}
}
input:checked + span {
background-color: #21ad0e;
border-color: #148605;
&::before {
transform: translateX(1rem) translateY(-50%);
}
}
&:hover span {
box-shadow: 0 0 0 2px rgba(34, 197, 94, 0.2);
}
input:checked:hover + span {
box-shadow: 0 0 0 2px rgba(34, 197, 94, 0.3);
}
&:has(input:disabled) {
cursor: not-allowed;
}
input:disabled + span {
opacity: 0.5;
cursor: not-allowed;
}
}
}

View File

@@ -2,7 +2,7 @@ import { action, useParams, useAction, useSubmission, json, query, createAsync }
import { createStore } from "solid-js/store"
import { Show } from "solid-js"
import { Billing } from "@opencode-ai/console-core/billing.js"
import { Database, eq, and, isNull } from "@opencode-ai/console-core/drizzle/index.js"
import { Database, eq, and, isNull, sql } from "@opencode-ai/console-core/drizzle/index.js"
import { BillingTable, SubscriptionTable } from "@opencode-ai/console-core/schema/billing.sql.js"
import { Actor } from "@opencode-ai/console-core/actor.js"
import { Black } from "@opencode-ai/console-core/black.js"
@@ -32,6 +32,7 @@ const querySubscription = query(async (workspaceID: string) => {
return {
plan: row.subscription.plan,
useBalance: row.subscription.useBalance ?? false,
rollingUsage: Black.analyzeRollingUsage({
plan: row.subscription.plan,
usage: row.rollingUsage ?? 0,
@@ -107,6 +108,30 @@ const createSessionUrl = action(async (workspaceID: string, returnUrl: string) =
)
}, "sessionUrl")
const setUseBalance = action(async (form: FormData) => {
"use server"
const workspaceID = form.get("workspaceID")?.toString()
if (!workspaceID) return { error: "Workspace ID is required" }
const useBalance = form.get("useBalance")?.toString() === "true"
return json(
await withActor(async () => {
await Database.use((tx) =>
tx
.update(BillingTable)
.set({
subscription: useBalance
? sql`JSON_SET(subscription, '$.useBalance', true)`
: sql`JSON_REMOVE(subscription, '$.useBalance')`,
})
.where(eq(BillingTable.workspaceID, workspaceID)),
)
return { error: undefined }
}, workspaceID).catch((e) => ({ error: e.message as string })),
{ revalidate: [queryBillingInfo.key, querySubscription.key] },
)
}, "setUseBalance")
export function BlackSection() {
const params = useParams()
const billing = createAsync(() => queryBillingInfo(params.id!))
@@ -117,6 +142,7 @@ export function BlackSection() {
const cancelSubmission = useSubmission(cancelWaitlist)
const enrollAction = useAction(enroll)
const enrollSubmission = useSubmission(enroll)
const useBalanceSubmission = useSubmission(setUseBalance)
const [store, setStore] = createStore({
sessionRedirecting: false,
cancelled: false,
@@ -185,6 +211,20 @@ export function BlackSection() {
<span data-slot="reset-time">Resets in {formatResetTime(sub().weeklyUsage.resetInSec)}</span>
</div>
</div>
<form action={setUseBalance} method="post" data-slot="setting-row">
<p>Use your available balance after reaching the usage limits</p>
<input type="hidden" name="workspaceID" value={params.id} />
<input type="hidden" name="useBalance" value={sub().useBalance ? "false" : "true"} />
<label data-slot="toggle-label">
<input
type="checkbox"
checked={sub().useBalance}
disabled={useBalanceSubmission.pending}
onChange={(e) => e.currentTarget.form?.requestSubmit()}
/>
<span></span>
</label>
</form>
</section>
)}
</Show>
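
The asymmetry in `setUseBalance` is deliberate: enabling writes the key into the JSON column, while disabling removes it outright, so the reader's `row.subscription.useBalance ?? false` treats an absent key as off. A sketch with illustrative values (not a real row):

const before = { status: "subscribed", seats: 1, plan: "100" } // shape per the billing schema below

// JSON_SET(subscription, '$.useBalance', true)
const enabled = { ...before, useBalance: true }

// JSON_REMOVE(subscription, '$.useBalance') drops the key entirely
const { useBalance: _, ...disabled } = enabled
console.log(disabled) // { status: "subscribed", seats: 1, plan: "100" }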

View File

@@ -43,8 +43,9 @@ export default function () {
</span>
<Show when={userInfo()?.isAdmin}>
<span data-slot="billing-info">
<Switch>
<Match when={!billingInfo()?.customerID}>
<Show
when={billingInfo()?.customerID}
fallback={
<button
data-color="primary"
data-size="sm"
@@ -53,13 +54,12 @@ export default function () {
>
{checkoutSubmission.pending || store.checkoutRedirecting ? "Loading..." : "Enable billing"}
</button>
</Match>
<Match when={!billingInfo()?.subscriptionID}>
<span data-slot="balance">
Current balance <b>${balance()}</b>
</span>
</Match>
</Switch>
}
>
<span data-slot="balance">
Current balance <b>${balance()}</b>
</span>
</Show>
</span>
</Show>
</p>

View File

@@ -110,6 +110,7 @@ export const queryBillingInfo = query(async (workspaceID: string) => {
timeMonthlyUsageUpdated: billing.timeMonthlyUsageUpdated,
reloadError: billing.reloadError,
timeReloadError: billing.timeReloadError,
subscription: billing.subscription,
subscriptionID: billing.subscriptionID,
subscriptionPlan: billing.subscriptionPlan,
timeSubscriptionBooked: billing.timeSubscriptionBooked,

View File

@@ -84,6 +84,7 @@ export async function handler(
const stickyTracker = createStickyTracker(modelInfo.stickyProvider, sessionId)
const stickyProvider = await stickyTracker?.get()
const authInfo = await authenticate(modelInfo)
const billingSource = validateBilling(authInfo, modelInfo)
const retriableRequest = async (retry: RetryOptions = { excludeProviders: [], retryCount: 0 }) => {
const providerInfo = selectProvider(
@@ -96,7 +97,6 @@ export async function handler(
retry,
stickyProvider,
)
validateBilling(authInfo, modelInfo)
validateModelSettings(authInfo)
updateProviderKey(authInfo, providerInfo)
logger.metric({ provider: providerInfo.id })
@@ -183,7 +183,7 @@ export async function handler(
const tokensInfo = providerInfo.normalizeUsage(json.usage)
await trialLimiter?.track(tokensInfo)
await rateLimiter?.track()
const costInfo = await trackUsage(authInfo, modelInfo, providerInfo, tokensInfo)
const costInfo = await trackUsage(authInfo, modelInfo, providerInfo, billingSource, tokensInfo)
await reload(authInfo, costInfo)
return new Response(body, {
status: resStatus,
@@ -219,7 +219,7 @@ export async function handler(
if (usage) {
const tokensInfo = providerInfo.normalizeUsage(usage)
await trialLimiter?.track(tokensInfo)
const costInfo = await trackUsage(authInfo, modelInfo, providerInfo, tokensInfo)
const costInfo = await trackUsage(authInfo, modelInfo, providerInfo, billingSource, tokensInfo)
await reload(authInfo, costInfo)
}
c.close()
@@ -484,54 +484,58 @@ export async function handler(
}
function validateBilling(authInfo: AuthInfo, modelInfo: ModelInfo) {
if (!authInfo) return
if (authInfo.provider?.credentials) return
if (authInfo.isFree) return
if (modelInfo.allowAnonymous) return
if (!authInfo) return "anonymous"
if (authInfo.provider?.credentials) return "free"
if (authInfo.isFree) return "free"
if (modelInfo.allowAnonymous) return "free"
// Validate subscription billing
if (authInfo.billing.subscription && authInfo.subscription) {
const sub = authInfo.subscription
const plan = authInfo.billing.subscription.plan
try {
const sub = authInfo.subscription
const plan = authInfo.billing.subscription.plan
const formatRetryTime = (seconds: number) => {
const days = Math.floor(seconds / 86400)
if (days >= 1) return `${days} day${days > 1 ? "s" : ""}`
const hours = Math.floor(seconds / 3600)
const minutes = Math.ceil((seconds % 3600) / 60)
if (hours >= 1) return `${hours}hr ${minutes}min`
return `${minutes}min`
const formatRetryTime = (seconds: number) => {
const days = Math.floor(seconds / 86400)
if (days >= 1) return `${days} day${days > 1 ? "s" : ""}`
const hours = Math.floor(seconds / 3600)
const minutes = Math.ceil((seconds % 3600) / 60)
if (hours >= 1) return `${hours}hr ${minutes}min`
return `${minutes}min`
}
// Check weekly limit
if (sub.fixedUsage && sub.timeFixedUpdated) {
const result = Black.analyzeWeeklyUsage({
plan,
usage: sub.fixedUsage,
timeUpdated: sub.timeFixedUpdated,
})
if (result.status === "rate-limited")
throw new SubscriptionError(
`Subscription quota exceeded. Retry in ${formatRetryTime(result.resetInSec)}.`,
result.resetInSec,
)
}
// Check rolling limit
if (sub.rollingUsage && sub.timeRollingUpdated) {
const result = Black.analyzeRollingUsage({
plan,
usage: sub.rollingUsage,
timeUpdated: sub.timeRollingUpdated,
})
if (result.status === "rate-limited")
throw new SubscriptionError(
`Subscription quota exceeded. Retry in ${formatRetryTime(result.resetInSec)}.`,
result.resetInSec,
)
}
return "subscription"
} catch (e) {
if (!authInfo.billing.subscription.useBalance) throw e
}
// Check weekly limit
if (sub.fixedUsage && sub.timeFixedUpdated) {
const result = Black.analyzeWeeklyUsage({
plan,
usage: sub.fixedUsage,
timeUpdated: sub.timeFixedUpdated,
})
if (result.status === "rate-limited")
throw new SubscriptionError(
`Subscription quota exceeded. Retry in ${formatRetryTime(result.resetInSec)}.`,
result.resetInSec,
)
}
// Check rolling limit
if (sub.rollingUsage && sub.timeRollingUpdated) {
const result = Black.analyzeRollingUsage({
plan,
usage: sub.rollingUsage,
timeUpdated: sub.timeRollingUpdated,
})
if (result.status === "rate-limited")
throw new SubscriptionError(
`Subscription quota exceeded. Retry in ${formatRetryTime(result.resetInSec)}.`,
result.resetInSec,
)
}
return
}
// Validate pay as you go billing
@@ -571,6 +575,8 @@ export async function handler(
throw new UserLimitError(
`You have reached your monthly spending limit of $${authInfo.user.monthlyLimit}. Manage your limits here: https://opencode.ai/workspace/${authInfo.workspaceID}/members`,
)
return "balance"
}
function validateModelSettings(authInfo: AuthInfo) {
@@ -587,6 +593,7 @@ export async function handler(
authInfo: AuthInfo,
modelInfo: ModelInfo,
providerInfo: ProviderInfo,
billingSource: ReturnType<typeof validateBilling>,
usageInfo: UsageInfo,
) {
const { inputTokens, outputTokens, reasoningTokens, cacheReadTokens, cacheWrite5mTokens, cacheWrite1hTokens } =
@@ -643,7 +650,8 @@ export async function handler(
"cost.total": Math.round(totalCostInCent),
})
if (!authInfo) return
if (billingSource === "anonymous") return
authInfo = authInfo!
const cost = authInfo.provider?.credentials ? 0 : centsToMicroCents(totalCostInCent)
await Database.use((db) =>
@@ -661,13 +669,13 @@ export async function handler(
cacheWrite1hTokens,
cost,
keyID: authInfo.apiKeyId,
enrichment: authInfo.subscription ? { plan: "sub" } : undefined,
enrichment: billingSource === "subscription" ? { plan: "sub" } : undefined,
}),
db
.update(KeyTable)
.set({ timeUsed: sql`now()` })
.where(and(eq(KeyTable.workspaceID, authInfo.workspaceID), eq(KeyTable.id, authInfo.apiKeyId))),
...(authInfo.subscription
...(billingSource === "subscription"
? (() => {
const plan = authInfo.billing.subscription!.plan
const black = BlackData.getLimits({ plan })
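
Two things change in this gateway file: `validateBilling` now runs once before the retry loop and returns a billing source ("anonymous" | "free" | "subscription" | "balance") that `trackUsage` consumes instead of re-deriving it from `authInfo`, and a subscription limit error no longer aborts the request when `useBalance` is set, falling through to balance billing. For reference, a pure trace of the `formatRetryTime` helper, copied from the hunk with example inputs:

const formatRetryTime = (seconds: number) => {
  const days = Math.floor(seconds / 86400)
  if (days >= 1) return `${days} day${days > 1 ? "s" : ""}`
  const hours = Math.floor(seconds / 3600)
  const minutes = Math.ceil((seconds % 3600) / 60)
  if (hours >= 1) return `${hours}hr ${minutes}min`
  return `${minutes}min`
}

console.log(formatRetryTime(90_000)) // "1 day"
console.log(formatRetryTime(5_400)) // "1hr 30min"
console.log(formatRetryTime(120)) // "2min"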

View File

@@ -24,9 +24,10 @@ export const BillingTable = mysqlTable(
timeReloadLockedTill: utc("time_reload_locked_till"),
subscription: json("subscription").$type<{
status: "subscribed"
coupon?: string
seats: number
plan: "20" | "100" | "200"
useBalance?: boolean
coupon?: string
}>(),
subscriptionID: varchar("subscription_id", { length: 28 }),
subscriptionPlan: mysqlEnum("subscription_plan", SubscriptionPlan),

View File

@@ -0,0 +1,10 @@
import { defineConfig } from "drizzle-kit"
export default defineConfig({
dialect: "sqlite",
schema: "./src/**/*.sql.ts",
out: "./migration",
dbCredentials: {
url: "/home/thdxr/.local/share/opencode/opencode.db",
},
})
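
This config wires the schema-to-migration workflow used later in this diff: the schema lives in `./src/**/*.sql.ts`, `bun drizzle-kit generate` emits SQL into `./migration`, and `bun drizzle-kit check` (invoked by `script/check-migrations.ts` below) fails when the two drift. The `dbCredentials.url` shown here points at a developer-local database file.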

View File

@@ -0,0 +1,91 @@
CREATE TABLE `project` (
`id` text PRIMARY KEY NOT NULL,
`worktree` text NOT NULL,
`vcs` text,
`name` text,
`icon_url` text,
`icon_color` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_initialized` integer,
`sandboxes` text NOT NULL
);
--> statement-breakpoint
CREATE TABLE `message` (
`id` text PRIMARY KEY NOT NULL,
`session_id` text NOT NULL,
`role` text NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-breakpoint
CREATE TABLE `part` (
`id` text PRIMARY KEY NOT NULL,
`message_id` text NOT NULL,
`type` text NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `part_message_idx` ON `part` (`message_id`);--> statement-breakpoint
CREATE TABLE `permission` (
`project_id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `session_diff` (
`session_id` text NOT NULL,
`file` text NOT NULL,
`before` text NOT NULL,
`after` text NOT NULL,
`additions` integer NOT NULL,
`deletions` integer NOT NULL,
FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `session_diff_session_idx` ON `session_diff` (`session_id`);--> statement-breakpoint
CREATE TABLE `session` (
`id` text PRIMARY KEY NOT NULL,
`project_id` text NOT NULL,
`parent_id` text,
`slug` text NOT NULL,
`directory` text NOT NULL,
`title` text NOT NULL,
`version` text NOT NULL,
`share_url` text,
`summary_additions` integer,
`summary_deletions` integer,
`summary_files` integer,
`summary_diffs` text,
`revert_message_id` text,
`revert_part_id` text,
`revert_snapshot` text,
`revert_diff` text,
`permission` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_compacting` integer,
`time_archived` integer,
FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `session_project_idx` ON `session` (`project_id`);--> statement-breakpoint
CREATE INDEX `session_parent_idx` ON `session` (`parent_id`);--> statement-breakpoint
CREATE TABLE `todo` (
`session_id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `session_share` (
`session_id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `share` (
`session_id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL
);
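
With sessions, messages, and parts in structured tables, per-session reads become indexed queries instead of directory scans. A sketch using the same `Database` helper and table objects as the CLI commands later in this diff (session ID illustrative; ordering by the primary key mirrors the `localeCompare` sort the client applies):

import { asc, eq } from "drizzle-orm"
import { Database } from "../../storage/db"
import { MessageTable } from "../../session/session.sql"

const rows = Database.use((db) =>
  db
    .select()
    .from(MessageTable)
    .where(eq(MessageTable.sessionID, "ses_123")) // served by message_session_idx
    .orderBy(asc(MessageTable.id))
    .all(),
)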

View File

@@ -0,0 +1,616 @@
{
"version": "6",
"dialect": "sqlite",
"id": "f7bf061b-aa6c-4b68-a29f-c210c54f109d",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"project": {
"name": "project",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"worktree": {
"name": "worktree",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"vcs": {
"name": "vcs",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"icon_url": {
"name": "icon_url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"icon_color": {
"name": "icon_color",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"time_created": {
"name": "time_created",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"time_updated": {
"name": "time_updated",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"time_initialized": {
"name": "time_initialized",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"sandboxes": {
"name": "sandboxes",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"message": {
"name": "message",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"role": {
"name": "role",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {
"message_session_idx": {
"name": "message_session_idx",
"columns": [
"session_id"
],
"isUnique": false
}
},
"foreignKeys": {
"message_session_id_session_id_fk": {
"name": "message_session_id_session_id_fk",
"tableFrom": "message",
"tableTo": "session",
"columnsFrom": [
"session_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"part": {
"name": "part",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"message_id": {
"name": "message_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"type": {
"name": "type",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {
"part_message_idx": {
"name": "part_message_idx",
"columns": [
"message_id"
],
"isUnique": false
}
},
"foreignKeys": {
"part_message_id_message_id_fk": {
"name": "part_message_id_message_id_fk",
"tableFrom": "part",
"tableTo": "message",
"columnsFrom": [
"message_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"permission": {
"name": "permission",
"columns": {
"project_id": {
"name": "project_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"permission_project_id_project_id_fk": {
"name": "permission_project_id_project_id_fk",
"tableFrom": "permission",
"tableTo": "project",
"columnsFrom": [
"project_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"session_diff": {
"name": "session_diff",
"columns": {
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"file": {
"name": "file",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"before": {
"name": "before",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"after": {
"name": "after",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"additions": {
"name": "additions",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"deletions": {
"name": "deletions",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {
"session_diff_session_idx": {
"name": "session_diff_session_idx",
"columns": [
"session_id"
],
"isUnique": false
}
},
"foreignKeys": {
"session_diff_session_id_session_id_fk": {
"name": "session_diff_session_id_session_id_fk",
"tableFrom": "session_diff",
"tableTo": "session",
"columnsFrom": [
"session_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"session": {
"name": "session",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"project_id": {
"name": "project_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"parent_id": {
"name": "parent_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"slug": {
"name": "slug",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"directory": {
"name": "directory",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"version": {
"name": "version",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"share_url": {
"name": "share_url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"summary_additions": {
"name": "summary_additions",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"summary_deletions": {
"name": "summary_deletions",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"summary_files": {
"name": "summary_files",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"summary_diffs": {
"name": "summary_diffs",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"revert_message_id": {
"name": "revert_message_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"revert_part_id": {
"name": "revert_part_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"revert_snapshot": {
"name": "revert_snapshot",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"revert_diff": {
"name": "revert_diff",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"permission": {
"name": "permission",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"time_created": {
"name": "time_created",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"time_updated": {
"name": "time_updated",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"time_compacting": {
"name": "time_compacting",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"time_archived": {
"name": "time_archived",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"session_project_idx": {
"name": "session_project_idx",
"columns": [
"project_id"
],
"isUnique": false
},
"session_parent_idx": {
"name": "session_parent_idx",
"columns": [
"parent_id"
],
"isUnique": false
}
},
"foreignKeys": {
"session_project_id_project_id_fk": {
"name": "session_project_id_project_id_fk",
"tableFrom": "session",
"tableTo": "project",
"columnsFrom": [
"project_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"todo": {
"name": "todo",
"columns": {
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"todo_session_id_session_id_fk": {
"name": "todo_session_id_session_id_fk",
"tableFrom": "todo",
"tableTo": "session",
"columnsFrom": [
"session_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"session_share": {
"name": "session_share",
"columns": {
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"session_share_session_id_session_id_fk": {
"name": "session_share_session_id_session_id_fk",
"tableFrom": "session_share",
"tableTo": "session",
"columnsFrom": [
"session_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"share": {
"name": "share",
"columns": {
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View File

@@ -0,0 +1,13 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1768625754197,
"tag": "0000_normal_wind_dancer",
"breakpoints": true
}
]
}

View File

@@ -41,6 +41,8 @@
"@types/turndown": "5.0.5",
"@types/yargs": "17.0.33",
"@typescript/native-preview": "catalog:",
"better-sqlite3": "12.6.0",
"drizzle-kit": "0.31.8",
"typescript": "catalog:",
"vscode-languageserver-types": "3.17.5",
"why-is-node-running": "3.2.2",
@@ -97,6 +99,7 @@
"clipboardy": "4.0.0",
"decimal.js": "10.5.0",
"diff": "catalog:",
"drizzle-orm": "0.45.1",
"fuzzysort": "3.1.0",
"gray-matter": "4.0.3",
"hono": "catalog:",

View File

@@ -99,6 +99,12 @@ const targets = singleFlag
})
: allTargets
// Check migrations are up to date and generate embedded migrations file
console.log("Checking migrations...")
await $`bun run script/check-migrations.ts`
console.log("Generating migrations embed...")
await $`bun run script/generate-migrations.ts`
await $`rm -rf dist`
const binaries: Record<string, string> = {}

View File

@@ -0,0 +1,16 @@
#!/usr/bin/env bun
import { $ } from "bun"
// drizzle-kit check compares the schema to the migrations and exits non-zero on drift
const result = await $`bun drizzle-kit check`.quiet().nothrow()
if (result.exitCode !== 0) {
console.error("Schema has changes not captured in migrations!")
console.error("Run: bun drizzle-kit generate")
console.error("")
console.error(result.stderr.toString())
process.exit(1)
}
console.log("Migrations are up to date")

View File

@@ -0,0 +1,49 @@
#!/usr/bin/env bun
import { Glob } from "bun"
import path from "path"
import fs from "fs"
const migrationsDir = "./migration"
const outFile = "./src/storage/migrations.generated.ts"
if (!fs.existsSync(migrationsDir)) {
console.log("No migrations directory found, creating empty migrations file")
await Bun.write(
outFile,
`// Auto-generated - do not edit
export const migrations: { name: string; sql: string }[] = []
`,
)
process.exit(0)
}
const files = Array.from(new Glob("*.sql").scanSync({ cwd: migrationsDir })).sort()
if (files.length === 0) {
console.log("No migrations found, creating empty migrations file")
await Bun.write(
outFile,
`// Auto-generated - do not edit
export const migrations: { name: string; sql: string }[] = []
`,
)
process.exit(0)
}
const imports = files.map((f, i) => `import m${i} from "../../migration/${f}" with { type: "text" }`).join("\n")
const entries = files.map((f, i) => ` { name: "${path.basename(f, ".sql")}", sql: m${i} },`).join("\n")
await Bun.write(
outFile,
`// Auto-generated - do not edit
${imports}
export const migrations = [
${entries}
]
`,
)
console.log(`Generated migrations file with ${files.length} migrations`)
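
Given the single migration in this diff (`0000_normal_wind_dancer`, per the journal above), the emitted `src/storage/migrations.generated.ts` would read roughly as follows (formatting approximate):

// Auto-generated - do not edit
import m0 from "../../migration/0000_normal_wind_dancer.sql" with { type: "text" }
export const migrations = [
  { name: "0000_normal_wind_dancer", sql: m0 },
]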

packages/opencode/script/postinstall.mjs: Normal file → Executable file (0 lines changed)
View File

packages/opencode/script/publish-registries.ts: Normal file → Executable file (0 lines changed)
View File

View File

@@ -0,0 +1,147 @@
import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
import { UI } from "../ui"
import { Database } from "../../storage/db"
import { ProjectTable } from "../../project/project.sql"
import { Project } from "../../project/project"
import {
SessionTable,
MessageTable,
PartTable,
SessionDiffTable,
TodoTable,
PermissionTable,
} from "../../session/session.sql"
import { Session } from "../../session"
import { SessionShareTable, ShareTable } from "../../share/share.sql"
import path from "path"
import fs from "fs/promises"
export const DatabaseCommand = cmd({
command: "database",
describe: "database management commands",
builder: (yargs) => yargs.command(ExportCommand).demandCommand(),
async handler() {},
})
const ExportCommand = cmd({
command: "export",
describe: "export database to JSON files",
builder: (yargs: Argv) => {
return yargs.option("output", {
alias: ["o"],
describe: "output directory",
type: "string",
demandOption: true,
})
},
handler: async (args) => {
await bootstrap(process.cwd(), async () => {
const outDir = path.resolve(args.output)
await fs.mkdir(outDir, { recursive: true })
const stats = {
projects: 0,
sessions: 0,
messages: 0,
parts: 0,
diffs: 0,
todos: 0,
permissions: 0,
sessionShares: 0,
shares: 0,
}
// Export projects
const projectDir = path.join(outDir, "project")
await fs.mkdir(projectDir, { recursive: true })
for (const row of Database.use((db) => db.select().from(ProjectTable).all())) {
const project = Project.fromRow(row)
await Bun.write(path.join(projectDir, `${row.id}.json`), JSON.stringify(project, null, 2))
stats.projects++
}
// Export sessions (organized by projectID)
const sessionDir = path.join(outDir, "session")
for (const row of Database.use((db) => db.select().from(SessionTable).all())) {
const dir = path.join(sessionDir, row.projectID)
await fs.mkdir(dir, { recursive: true })
await Bun.write(path.join(dir, `${row.id}.json`), JSON.stringify(Session.fromRow(row), null, 2))
stats.sessions++
}
// Export messages (organized by sessionID)
const messageDir = path.join(outDir, "message")
for (const row of Database.use((db) => db.select().from(MessageTable).all())) {
const dir = path.join(messageDir, row.sessionID)
await fs.mkdir(dir, { recursive: true })
await Bun.write(path.join(dir, `${row.id}.json`), JSON.stringify(row.data, null, 2))
stats.messages++
}
// Export parts (organized by messageID)
const partDir = path.join(outDir, "part")
for (const row of Database.use((db) => db.select().from(PartTable).all())) {
const dir = path.join(partDir, row.messageID)
await fs.mkdir(dir, { recursive: true })
await Bun.write(path.join(dir, `${row.id}.json`), JSON.stringify(row.data, null, 2))
stats.parts++
}
// Export session diffs
const diffDir = path.join(outDir, "session_diff")
await fs.mkdir(diffDir, { recursive: true })
for (const row of Database.use((db) => db.select().from(SessionDiffTable).all())) {
await Bun.write(path.join(diffDir, `${row.sessionID}.json`), JSON.stringify(row, null, 2))
stats.diffs++
}
// Export todos
const todoDir = path.join(outDir, "todo")
await fs.mkdir(todoDir, { recursive: true })
for (const row of Database.use((db) => db.select().from(TodoTable).all())) {
await Bun.write(path.join(todoDir, `${row.sessionID}.json`), JSON.stringify(row.data, null, 2))
stats.todos++
}
// Export permissions
const permDir = path.join(outDir, "permission")
await fs.mkdir(permDir, { recursive: true })
for (const row of Database.use((db) => db.select().from(PermissionTable).all())) {
await Bun.write(path.join(permDir, `${row.projectID}.json`), JSON.stringify(row.data, null, 2))
stats.permissions++
}
// Export session shares
const sessionShareDir = path.join(outDir, "session_share")
await fs.mkdir(sessionShareDir, { recursive: true })
for (const row of Database.use((db) => db.select().from(SessionShareTable).all())) {
await Bun.write(path.join(sessionShareDir, `${row.sessionID}.json`), JSON.stringify(row.data, null, 2))
stats.sessionShares++
}
// Export shares
const shareDir = path.join(outDir, "share")
await fs.mkdir(shareDir, { recursive: true })
for (const row of Database.use((db) => db.select().from(ShareTable).all())) {
await Bun.write(path.join(shareDir, `${row.sessionID}.json`), JSON.stringify(row.data, null, 2))
stats.shares++
}
// Create migration marker so this can be imported back
await Bun.write(path.join(outDir, "migration"), Date.now().toString())
UI.println(`Exported to ${outDir}:`)
UI.println(` ${stats.projects} projects`)
UI.println(` ${stats.sessions} sessions`)
UI.println(` ${stats.messages} messages`)
UI.println(` ${stats.parts} parts`)
UI.println(` ${stats.diffs} session diffs`)
UI.println(` ${stats.todos} todos`)
UI.println(` ${stats.permissions} permissions`)
UI.println(` ${stats.sessionShares} session shares`)
UI.println(` ${stats.shares} shares`)
})
},
})
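
Assuming the installed binary is `opencode`, the new command is invoked as `opencode database export --output ./dump` (or `-o ./dump`). It writes one JSON file per project, session, message, part, session diff, todo, permission, and share, using a directory layout that matches the key scheme of the old `Storage` calls, and drops a `migration` timestamp marker so the tree can be imported back.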

View File

@@ -2,7 +2,8 @@ import type { Argv } from "yargs"
import { Session } from "../../session"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage"
import { Database } from "../../storage/db"
import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
import { Instance } from "../../project/instance"
import { EOL } from "os"
@@ -81,13 +82,63 @@ export const ImportCommand = cmd({
return
}
await Storage.write(["session", Instance.project.id, exportData.info.id], exportData.info)
const info = exportData.info
const row = {
id: info.id,
projectID: Instance.project.id,
parentID: info.parentID,
slug: info.slug,
directory: info.directory,
title: info.title,
version: info.version,
share_url: info.share?.url,
summary_additions: info.summary?.additions,
summary_deletions: info.summary?.deletions,
summary_files: info.summary?.files,
summary_diffs: info.summary?.diffs,
revert_messageID: info.revert?.messageID,
revert_partID: info.revert?.partID,
revert_snapshot: info.revert?.snapshot,
revert_diff: info.revert?.diff,
permission: info.permission,
time_created: info.time.created,
time_updated: info.time.updated,
time_compacting: info.time.compacting,
time_archived: info.time.archived,
}
Database.use((db) =>
db.insert(SessionTable).values(row).onConflictDoUpdate({ target: SessionTable.id, set: row }).run(),
)
for (const msg of exportData.messages) {
await Storage.write(["message", exportData.info.id, msg.info.id], msg.info)
const { id: msgId, sessionID: msgSessionID, role: msgRole, ...msgData } = msg.info
Database.use((db) =>
db
.insert(MessageTable)
.values({
id: msgId,
sessionID: exportData.info.id,
role: msgRole,
data: msgData,
})
.onConflictDoUpdate({ target: MessageTable.id, set: { role: msgRole, data: msgData } })
.run(),
)
for (const part of msg.parts) {
await Storage.write(["part", msg.info.id, part.id], part)
const { id: partId, messageID: _, sessionID: __, type: partType, ...partData } = part
Database.use((db) =>
db
.insert(PartTable)
.values({
id: partId,
messageID: msg.info.id,
sessionID: exportData.info.id,
type: partType,
data: partData,
})
.onConflictDoUpdate({ target: PartTable.id, set: { type: partType, data: partData } })
.run(),
)
}
}

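The import path above mirrors the exporter: identifying columns are lifted out of each JSON blob and the remainder becomes the data column. A minimal round-trip sketch of that pattern (hypothetical values, not from the codebase):

const info = { id: "msg_1", sessionID: "ses_1", role: "user", time: { created: 0 } }
const { id, sessionID, role, ...data } = info // row shape: { id, sessionID, role, data }
const restored = { ...data, id, sessionID, role } // deep-equals the original info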
View File

@@ -2,7 +2,9 @@ import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { Session } from "../../session"
import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage"
import { Database } from "../../storage/db"
import { ProjectTable } from "../../project/project.sql"
import { SessionTable } from "../../session/session.sql"
import { Project } from "../../project/project"
import { Instance } from "../../project/instance"
@@ -83,25 +85,8 @@ async function getCurrentProject(): Promise<Project.Info> {
}
async function getAllSessions(): Promise<Session.Info[]> {
const sessions: Session.Info[] = []
const projectKeys = await Storage.list(["project"])
const projects = await Promise.all(projectKeys.map((key) => Storage.read<Project.Info>(key)))
for (const project of projects) {
if (!project) continue
const sessionKeys = await Storage.list(["session", project.id])
const projectSessions = await Promise.all(sessionKeys.map((key) => Storage.read<Session.Info>(key)))
for (const session of projectSessions) {
if (session) {
sessions.push(session)
}
}
}
return sessions
const sessionRows = Database.use((db) => db.select().from(SessionTable).all())
return sessionRows.map((row) => Session.fromRow(row))
}
export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {

View File

@@ -144,7 +144,7 @@ export function DialogSelect<T>(props: DialogSelectProps<T>) {
let next = store.selected + direction
if (next < 0) next = flat().length - 1
if (next >= flat().length) next = 0
moveTo(next)
moveTo(next, true)
}
function moveTo(next: number, center = false) {

View File

@@ -26,6 +26,7 @@ import { EOL } from "os"
import { WebCommand } from "./cli/cmd/web"
import { PrCommand } from "./cli/cmd/pr"
import { SessionCommand } from "./cli/cmd/session"
import { DatabaseCommand } from "./cli/cmd/database"
process.on("unhandledRejection", (e) => {
Log.Default.error("rejection", {
@@ -97,6 +98,7 @@ const cli = yargs(hideBin(process.argv))
.command(GithubCommand)
.command(PrCommand)
.command(SessionCommand)
.command(DatabaseCommand)
.fail((msg, err) => {
if (
msg?.startsWith("Unknown argument") ||

View File

@@ -3,7 +3,9 @@ import { BusEvent } from "@/bus/bus-event"
import { Config } from "@/config/config"
import { Identifier } from "@/id/id"
import { Instance } from "@/project/instance"
import { Storage } from "@/storage/storage"
import { Database } from "@/storage/db"
import { PermissionTable } from "@/session/session.sql"
import { eq } from "drizzle-orm"
import { fn } from "@/util/fn"
import { Log } from "@/util/log"
import { Wildcard } from "@/util/wildcard"
@@ -107,7 +109,10 @@ export namespace PermissionNext {
const state = Instance.state(async () => {
const projectID = Instance.project.id
const stored = await Storage.read<Ruleset>(["permission", projectID]).catch(() => [] as Ruleset)
const row = Database.use((db) =>
db.select().from(PermissionTable).where(eq(PermissionTable.projectID, projectID)).get(),
)
const stored = row?.data ?? ([] as Ruleset)
const pending: Record<
string,

View File

@@ -0,0 +1,14 @@
import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core"
export const ProjectTable = sqliteTable("project", {
id: text("id").primaryKey(),
worktree: text("worktree").notNull(),
vcs: text("vcs"),
name: text("name"),
icon_url: text("icon_url"),
icon_color: text("icon_color"),
time_created: integer("time_created").notNull(),
time_updated: integer("time_updated").notNull(),
time_initialized: integer("time_initialized"),
sandboxes: text("sandboxes", { mode: "json" }).notNull().$type<string[]>(),
})

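A small usage sketch for the table above, assuming the drizzle/bun:sqlite setup from storage/db: JSON-mode columns such as sandboxes are serialized on write and parsed on read, so they round-trip as typed values rather than strings.

import { eq } from "drizzle-orm"
import { Database } from "../storage/db"
import { ProjectTable } from "./project.sql"

const now = Date.now()
Database.use((db) =>
  db
    .insert(ProjectTable)
    .values({ id: "p1", worktree: "/repo", time_created: now, time_updated: now, sandboxes: ["/repo/sb"] })
    .run(),
)
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "p1")).get())
// row?.sandboxes is string[] (["/repo/sb"]), not a JSON string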
View File

@@ -3,10 +3,13 @@ import fs from "fs/promises"
import { Filesystem } from "../util/filesystem"
import path from "path"
import { $ } from "bun"
import { Storage } from "../storage/storage"
import { Database, db } from "../storage/db"
import { iife } from "../util/iife"
import { ProjectTable } from "./project.sql"
import { SessionTable } from "../session/session.sql"
import { eq } from "drizzle-orm"
import { Log } from "../util/log"
import { Flag } from "@/flag/flag"
import { Session } from "../session"
import { work } from "../util/queue"
import { fn } from "@opencode-ai/util/fn"
import { BusEvent } from "@/bus/bus-event"
@@ -50,6 +53,28 @@ export namespace Project {
Updated: BusEvent.define("project.updated", Info),
}
type Row = typeof ProjectTable.$inferSelect
export function fromRow(row: Row): Info {
const icon =
row.icon_url || row.icon_color
? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined }
: undefined
return {
id: row.id,
worktree: row.worktree,
vcs: row.vcs as Info["vcs"],
name: row.name ?? undefined,
icon,
time: {
created: row.time_created,
updated: row.time_updated,
initialized: row.time_initialized ?? undefined,
},
sandboxes: row.sandboxes,
}
}
export async function fromDirectory(directory: string) {
log.info("fromDirectory", { directory })
@@ -175,9 +200,10 @@ export namespace Project {
}
})
let existing = await Storage.read<Info>(["project", id]).catch(() => undefined)
if (!existing) {
existing = {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
const existing = await iife(async () => {
if (row) return fromRow(row)
const fresh: Info = {
id,
worktree,
vcs: vcs as Info["vcs"],
@@ -190,10 +216,8 @@ export namespace Project {
if (id !== "global") {
await migrateFromGlobal(id, worktree)
}
}
// migrate old projects before sandboxes
if (!existing.sandboxes) existing.sandboxes = []
return fresh
})
if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing)
@@ -208,7 +232,31 @@ export namespace Project {
}
if (sandbox !== result.worktree && !result.sandboxes.includes(sandbox)) result.sandboxes.push(sandbox)
result.sandboxes = result.sandboxes.filter((x) => existsSync(x))
await Storage.write<Info>(["project", id], result)
const insert = {
id: result.id,
worktree: result.worktree,
vcs: result.vcs,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_created: result.time.created,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
}
const update = {
worktree: result.worktree,
vcs: result.vcs,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
}
Database.use((db) =>
db.insert(ProjectTable).values(insert).onConflictDoUpdate({ target: ProjectTable.id, set: update }).run(),
)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
@@ -249,42 +297,48 @@ export namespace Project {
}
async function migrateFromGlobal(newProjectID: string, worktree: string) {
const globalProject = await Storage.read<Info>(["project", "global"]).catch(() => undefined)
if (!globalProject) return
const globalRow = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get())
if (!globalRow) return
const globalSessions = await Storage.list(["session", "global"]).catch(() => [])
const globalSessions = Database.use((db) =>
db.select().from(SessionTable).where(eq(SessionTable.projectID, "global")).all(),
)
if (globalSessions.length === 0) return
log.info("migrating sessions from global", { newProjectID, worktree, count: globalSessions.length })
await work(10, globalSessions, async (key) => {
const sessionID = key[key.length - 1]
const session = await Storage.read<Session.Info>(key).catch(() => undefined)
if (!session) return
if (session.directory && session.directory !== worktree) return
await work(10, globalSessions, async (row) => {
if (row.directory && row.directory !== worktree) return
session.projectID = newProjectID
log.info("migrating session", { sessionID, from: "global", to: newProjectID })
await Storage.write(["session", newProjectID, sessionID], session)
await Storage.remove(key)
log.info("migrating session", { sessionID: row.id, from: "global", to: newProjectID })
Database.use((db) =>
db.update(SessionTable).set({ projectID: newProjectID }).where(eq(SessionTable.id, row.id)).run(),
)
}).catch((error) => {
log.error("failed to migrate sessions from global to project", { error, projectId: newProjectID })
})
}
export async function setInitialized(projectID: string) {
await Storage.update<Info>(["project", projectID], (draft) => {
draft.time.initialized = Date.now()
})
export function setInitialized(projectID: string) {
Database.use((db) =>
db
.update(ProjectTable)
.set({
time_initialized: Date.now(),
})
.where(eq(ProjectTable.id, projectID))
.run(),
)
}
export async function list() {
const keys = await Storage.list(["project"])
const projects = await Promise.all(keys.map((x) => Storage.read<Info>(x)))
return projects.map((project) => ({
...project,
sandboxes: project.sandboxes?.filter((x) => existsSync(x)),
}))
export function list() {
return Database.use((db) =>
db
.select()
.from(ProjectTable)
.all()
.map((row) => fromRow(row)),
)
}
export const update = fn(
@@ -295,61 +349,83 @@ export namespace Project {
commands: Info.shape.commands.optional(),
}),
async (input) => {
const result = await Storage.update<Info>(["project", input.projectID], (draft) => {
if (input.name !== undefined) draft.name = input.name
if (input.icon !== undefined) {
draft.icon = {
...draft.icon,
}
if (input.icon.url !== undefined) draft.icon.url = input.icon.url
if (input.icon.override !== undefined) draft.icon.override = input.icon.override || undefined
if (input.icon.color !== undefined) draft.icon.color = input.icon.color
}
if (input.commands?.start !== undefined) {
const start = input.commands.start || undefined
draft.commands = {
...(draft.commands ?? {}),
}
draft.commands.start = start
if (!draft.commands.start) draft.commands = undefined
}
draft.time.updated = Date.now()
})
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({
name: input.name,
icon_url: input.icon?.url,
icon_color: input.icon?.color,
time_updated: Date.now(),
})
.where(eq(ProjectTable.id, input.projectID))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${input.projectID}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
},
)
export async function sandboxes(projectID: string) {
const project = await Storage.read<Info>(["project", projectID]).catch(() => undefined)
if (!project?.sandboxes) return []
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get())
if (!row) return []
const data = fromRow(row)
const valid: string[] = []
for (const dir of project.sandboxes) {
for (const dir of data.sandboxes) {
const stat = await fs.stat(dir).catch(() => undefined)
if (stat?.isDirectory()) valid.push(dir)
}
return valid
}
export async function removeSandbox(projectID: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => {
const sandboxes = draft.sandboxes ?? []
draft.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory)
draft.time.updated = Date.now()
})
export async function addSandbox(projectID: string, directory: string) {
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
if (!row) throw new Error(`Project not found: ${projectID}`)
const sandboxes = row.sandboxes ?? []
if (!sandboxes.includes(directory)) sandboxes.push(directory)
const result = db()
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, projectID))
.returning()
.get()
if (!result) throw new Error(`Project not found: ${projectID}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
}
export async function removeSandbox(projectID: string, directory: string) {
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
if (!row) throw new Error(`Project not found: ${projectID}`)
const sandboxes = (row.sandboxes ?? []).filter((s) => s !== directory)
const result = db()
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, projectID))
.returning()
.get()
if (!result) throw new Error(`Project not found: ${projectID}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: data,
},
})
return data
}
}

View File

@@ -1,6 +1,6 @@
import { resolver } from "hono-openapi"
import z from "zod"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
export const ERRORS = {
400: {
@@ -25,7 +25,7 @@ export const ERRORS = {
description: "Not found",
content: {
"application/json": {
schema: resolver(Storage.NotFoundError.Schema),
schema: resolver(NotFoundError.Schema),
},
},
},

View File

@@ -31,7 +31,7 @@ import { ExperimentalRoutes } from "./routes/experimental"
import { ProviderRoutes } from "./routes/provider"
import { lazy } from "../util/lazy"
import { InstanceBootstrap } from "../project/bootstrap"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
import type { ContentfulStatusCode } from "hono/utils/http-status"
import { websocket } from "hono/bun"
import { HTTPException } from "hono/http-exception"
@@ -65,7 +65,7 @@ export namespace Server {
})
if (err instanceof NamedError) {
let status: ContentfulStatusCode
if (err instanceof Storage.NotFoundError) status = 404
if (err instanceof NotFoundError) status = 404
else if (err instanceof Provider.ModelNotFoundError) status = 400
else if (err.name.startsWith("Worktree")) status = 400
else status = 500

View File

@@ -2,7 +2,6 @@ import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import { Session } from "."
import { Identifier } from "../id/id"
import { Instance } from "../project/instance"
import { Provider } from "../provider/provider"
import { MessageV2 } from "./message-v2"
import z from "zod"
@@ -109,10 +108,6 @@ export namespace SessionCompaction {
mode: "compaction",
agent: "compaction",
summary: true,
path: {
cwd: Instance.directory,
root: Instance.worktree,
},
cost: 0,
tokens: {
output: 0,

View File

@@ -10,7 +10,10 @@ import { Flag } from "../flag/flag"
import { Identifier } from "../id/id"
import { Installation } from "../installation"
import { Storage } from "../storage/storage"
import { db, NotFoundError } from "../storage/db"
import { SessionTable, MessageTable, PartTable, SessionDiffTable } from "./session.sql"
import { ShareTable } from "../share/share.sql"
import { eq } from "drizzle-orm"
import { Log } from "../util/log"
import { MessageV2 } from "./message-v2"
import { Instance } from "../project/instance"
@@ -39,6 +42,49 @@ export namespace Session {
).test(title)
}
type SessionRow = typeof SessionTable.$inferSelect
export function fromRow(row: SessionRow): Info {
const summary =
row.summary_additions !== null || row.summary_deletions !== null || row.summary_files !== null
? {
additions: row.summary_additions ?? 0,
deletions: row.summary_deletions ?? 0,
files: row.summary_files ?? 0,
diffs: row.summary_diffs ?? undefined,
}
: undefined
const share = row.share_url ? { url: row.share_url } : undefined
const revert =
row.revert_messageID !== null
? {
messageID: row.revert_messageID,
partID: row.revert_partID ?? undefined,
snapshot: row.revert_snapshot ?? undefined,
diff: row.revert_diff ?? undefined,
}
: undefined
return {
id: row.id,
slug: row.slug,
projectID: row.projectID,
directory: row.directory,
parentID: row.parentID ?? undefined,
title: row.title,
version: row.version,
summary,
share,
revert,
permission: row.permission ?? undefined,
time: {
created: row.time_created,
updated: row.time_updated,
compacting: row.time_compacting ?? undefined,
archived: row.time_archived ?? undefined,
},
}
}
export const Info = z
.object({
id: Identifier.schema("session"),
@@ -184,9 +230,10 @@ export namespace Session {
)
export const touch = fn(Identifier.schema("session"), async (sessionID) => {
await update(sessionID, (draft) => {
draft.time.updated = Date.now()
})
const now = Date.now()
db().update(SessionTable).set({ time_updated: now }).where(eq(SessionTable.id, sessionID)).run()
const session = await get(sessionID)
Bus.publish(Event.Updated, { info: session })
})
export async function createNext(input: {
@@ -211,21 +258,29 @@ export namespace Session {
},
}
log.info("created", result)
await Storage.write(["session", Instance.project.id, result.id], result)
db()
.insert(SessionTable)
.values({
id: result.id,
projectID: result.projectID,
parentID: result.parentID,
slug: result.slug,
directory: result.directory,
title: result.title,
version: result.version,
permission: result.permission,
time_created: result.time.created,
time_updated: result.time.updated,
})
.run()
Bus.publish(Event.Created, {
info: result,
})
const cfg = await Config.get()
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
share(result.id)
.then((share) => {
update(result.id, (draft) => {
draft.share = share
})
})
.catch(() => {
// Silently ignore sharing errors during session creation
})
share(result.id).catch(() => {
// Silently ignore sharing errors during session creation
})
Bus.publish(Event.Updated, {
info: result,
})
@@ -240,12 +295,14 @@ export namespace Session {
}
export const get = fn(Identifier.schema("session"), async (id) => {
const read = await Storage.read<Info>(["session", Instance.project.id, id])
return read as Info
const row = db().select().from(SessionTable).where(eq(SessionTable.id, id)).get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
return fromRow(row)
})
export const getShare = fn(Identifier.schema("session"), async (id) => {
return Storage.read<ShareInfo>(["share", id])
const row = db().select().from(ShareTable).where(eq(ShareTable.sessionID, id)).get()
return row?.data
})
export const share = fn(Identifier.schema("session"), async (id) => {
@@ -280,23 +337,24 @@ export namespace Session {
)
})
export async function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
const project = Instance.project
const result = await Storage.update<Info>(["session", project.id, id], (draft) => {
editor(draft)
if (options?.touch !== false) {
draft.time.updated = Date.now()
}
})
export function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
const row = db().select().from(SessionTable).where(eq(SessionTable.id, id)).get()
if (!row) throw new Error(`Session not found: ${id}`)
const data = fromRow(row)
editor(data)
if (options?.touch !== false) {
data.time.updated = Date.now()
}
db().update(SessionTable).set(toRow(data)).where(eq(SessionTable.id, id)).run()
Bus.publish(Event.Updated, {
info: result,
info: data,
})
return result
return data
}
export const diff = fn(Identifier.schema("session"), async (sessionID) => {
const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
return diffs ?? []
const row = db().select().from(SessionDiffTable).where(eq(SessionDiffTable.sessionID, sessionID)).get()
return row?.data ?? []
})
export const messages = fn(
@@ -315,39 +373,28 @@ export namespace Session {
},
)
export async function* list() {
export function* list() {
const project = Instance.project
for (const item of await Storage.list(["session", project.id])) {
yield Storage.read<Info>(item)
const rows = db().select().from(SessionTable).where(eq(SessionTable.projectID, project.id)).all()
for (const row of rows) {
yield fromRow(row)
}
}
export const children = fn(Identifier.schema("session"), async (parentID) => {
const project = Instance.project
const result = [] as Session.Info[]
for (const item of await Storage.list(["session", project.id])) {
const session = await Storage.read<Info>(item)
if (session.parentID !== parentID) continue
result.push(session)
}
return result
const rows = db().select().from(SessionTable).where(eq(SessionTable.parentID, parentID)).all()
return rows.map((row) => fromRow(row))
})
export const remove = fn(Identifier.schema("session"), async (sessionID) => {
const project = Instance.project
try {
const session = await get(sessionID)
for (const child of await children(sessionID)) {
await remove(child.id)
}
await unshare(sessionID).catch(() => {})
for (const msg of await Storage.list(["message", sessionID])) {
for (const part of await Storage.list(["part", msg.at(-1)!])) {
await Storage.remove(part)
}
await Storage.remove(msg)
}
await Storage.remove(["session", project.id, sessionID])
// CASCADE delete handles messages and parts automatically
db().delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
Bus.publish(Event.Deleted, {
info: session,
})
@@ -357,7 +404,15 @@ export namespace Session {
})
export const updateMessage = fn(MessageV2.Info, async (msg) => {
await Storage.write(["message", msg.sessionID, msg.id], msg)
db()
.insert(MessageTable)
.values({
id: msg.id,
sessionID: msg.sessionID,
role: msg.role,
data: msg,
})
.onConflictDoUpdate({ target: MessageTable.id, set: { role: msg.role, data: msg } })
.run()
Bus.publish(MessageV2.Event.Updated, {
info: msg,
})
@@ -370,7 +425,8 @@ export namespace Session {
messageID: Identifier.schema("message"),
}),
async (input) => {
await Storage.remove(["message", input.sessionID, input.messageID])
// CASCADE delete handles parts automatically
db().delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run()
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
@@ -386,7 +442,7 @@ export namespace Session {
partID: Identifier.schema("part"),
}),
async (input) => {
await Storage.remove(["part", input.messageID, input.partID])
db().delete(PartTable).where(eq(PartTable.id, input.partID)).run()
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
@@ -411,7 +467,16 @@ export namespace Session {
export const updatePart = fn(UpdatePartInput, async (input) => {
const part = "delta" in input ? input.part : input
const delta = "delta" in input ? input.delta : undefined
await Storage.write(["part", part.messageID, part.id], part)
db()
.insert(PartTable)
.values({
id: part.id,
messageID: part.messageID,
sessionID: part.sessionID,
type: part.type,
data: part,
})
.onConflictDoUpdate({ target: PartTable.id, set: { type: part.type, data: part } })
.run()
Bus.publish(MessageV2.Event.PartUpdated, {
part,
delta,

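The remove/removeMessage paths above lean on the ON DELETE CASCADE constraints declared in session.sql; this only works because storage/db enables PRAGMA foreign_keys = ON when opening the connection. A minimal sketch of the behavior, using the same imports as the file above and illustrative IDs:

// Deleting a session removes its messages, parts, diffs, todos, and session shares in one statement:
db().delete(SessionTable).where(eq(SessionTable.id, "ses_1")).run()
// Deleting a message removes only that message's parts:
db().delete(MessageTable).where(eq(MessageTable.id, "msg_1")).run()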
View File

@@ -6,7 +6,9 @@ import { Identifier } from "../id/id"
import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot"
import { fn } from "@/util/fn"
import { Storage } from "@/storage/storage"
import { Database } from "@/storage/db"
import { MessageTable, PartTable } from "./session.sql"
import { eq, desc, lt, and, inArray } from "drizzle-orm"
import { ProviderTransform } from "@/provider/transform"
import { STATUS_CODES } from "http"
import { iife } from "@/util/iife"
@@ -38,8 +40,8 @@ export namespace MessageV2 {
const PartBase = z.object({
id: z.string(),
sessionID: z.string(),
messageID: z.string(),
sessionID: z.string(),
})
export const SnapshotPart = PartBase.extend({
@@ -370,10 +372,6 @@ export namespace MessageV2 {
*/
mode: z.string(),
agent: z.string(),
path: z.object({
cwd: z.string(),
root: z.string(),
}),
summary: z.boolean().optional(),
cost: z.number(),
tokens: z.object({
@@ -607,21 +605,92 @@ export namespace MessageV2 {
}
export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
const list = await Array.fromAsync(await Storage.list(["message", sessionID]))
for (let i = list.length - 1; i >= 0; i--) {
yield await get({
sessionID,
messageID: list[i][2],
})
const SIZE = 25
let cursor: string | undefined
while (true) {
const conditions = [eq(MessageTable.sessionID, sessionID)]
if (cursor) conditions.push(lt(MessageTable.id, cursor))
const ids = Database.use((db) =>
db
.select({ id: MessageTable.id })
.from(MessageTable)
.where(and(...conditions))
.orderBy(desc(MessageTable.id))
.limit(SIZE)
.all(),
)
if (ids.length === 0) break
const rows = Database.use((db) =>
db
.select({
message: MessageTable,
part: PartTable,
})
.from(MessageTable)
.leftJoin(PartTable, eq(PartTable.messageID, MessageTable.id))
.where(
inArray(
MessageTable.id,
ids.map((row) => row.id),
),
)
.orderBy(desc(MessageTable.id), PartTable.id)
.all(),
)
const grouped = Map.groupBy(rows, (row) => row.message.id)
for (const id of ids) {
const group = grouped.get(id.id) ?? []
const first = group[0]
if (!first) continue
yield {
info: {
...first.message.data,
role: first.message.role,
id: first.message.id,
sessionID: first.message.sessionID,
} as Info,
parts: group
.filter((row) => row.part)
.map((row) => ({
...row.part!.data,
type: row.part!.type,
id: row.part!.id,
messageID: row.part!.messageID,
sessionID: first.message.sessionID,
})) as Part[],
}
}
cursor = ids[ids.length - 1]?.id
if (ids.length < SIZE) break
}
})
export const parts = fn(Identifier.schema("message"), async (messageID) => {
const result = [] as MessageV2.Part[]
for (const item of await Storage.list(["part", messageID])) {
const read = await Storage.read<MessageV2.Part>(item)
result.push(read)
}
const rows = Database.use((db) =>
db
.select({
id: PartTable.id,
messageID: PartTable.messageID,
sessionID: MessageTable.sessionID,
type: PartTable.type,
data: PartTable.data,
})
.from(PartTable)
.innerJoin(MessageTable, eq(PartTable.messageID, MessageTable.id))
.where(eq(PartTable.messageID, messageID))
.all(),
)
const result = rows.map((row) => ({
...row.data,
type: row.type,
id: row.id,
messageID: row.messageID,
sessionID: row.sessionID,
})) as Part[]
result.sort((a, b) => (a.id > b.id ? 1 : -1))
return result
})
@@ -632,9 +701,36 @@ export namespace MessageV2 {
messageID: Identifier.schema("message"),
}),
async (input) => {
const rows = Database.use((db) =>
db
.select({
message: MessageTable,
part: PartTable,
})
.from(MessageTable)
.leftJoin(PartTable, eq(PartTable.messageID, MessageTable.id))
.where(eq(MessageTable.id, input.messageID))
.orderBy(PartTable.id)
.all(),
)
const first = rows[0]
if (!first) throw new Error(`Message not found: ${input.messageID}`)
return {
info: await Storage.read<MessageV2.Info>(["message", input.sessionID, input.messageID]),
parts: await parts(input.messageID),
info: {
...first.message.data,
role: first.message.role,
id: first.message.id,
sessionID: first.message.sessionID,
} as Info,
parts: rows
.filter((row) => row.part)
.map((row) => ({
...row.part!.data,
type: row.part!.type,
id: row.part!.id,
messageID: row.part!.messageID,
sessionID: first.message.sessionID,
})) as Part[],
}
},
)

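stream above replaces the old directory listing with keyset pagination: it walks message IDs newest-first in pages of 25, using the last ID of each page as the cursor, then hydrates each page's parts with a single join. The core pattern as a minimal sketch (assumes IDs sort lexicographically, as ULID-style identifiers do, and the same imports as the file above):

let cursor: string | undefined
while (true) {
  const where = cursor
    ? and(eq(MessageTable.sessionID, sessionID), lt(MessageTable.id, cursor))
    : eq(MessageTable.sessionID, sessionID)
  const page = Database.use((db) =>
    db.select({ id: MessageTable.id }).from(MessageTable).where(where).orderBy(desc(MessageTable.id)).limit(25).all(),
  )
  if (page.length === 0) break
  // ...join PartTable against the page's ids and yield each message, as above...
  cursor = page[page.length - 1].id
  if (page.length < 25) break
}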
View File

@@ -166,10 +166,13 @@ export namespace SessionPrompt {
})
}
if (permissions.length > 0) {
session.permission = permissions
await Session.update(session.id, (draft) => {
draft.permission = permissions
})
Session.update(
session.id,
(draft) => {
draft.permission = permissions
},
{ touch: false },
)
}
if (input.noReply === true) {
@@ -325,10 +328,6 @@ export namespace SessionPrompt {
sessionID,
mode: task.agent,
agent: task.agent,
path: {
cwd: Instance.directory,
root: Instance.worktree,
},
cost: 0,
tokens: {
input: 0,
@@ -527,10 +526,6 @@ export namespace SessionPrompt {
role: "assistant",
mode: agent.name,
agent: agent.name,
path: {
cwd: Instance.directory,
root: Instance.worktree,
},
cost: 0,
tokens: {
input: 0,
@@ -1384,10 +1379,6 @@ NOTE: At any point in time through this workflow you should feel free to ask the
mode: input.agent,
agent: input.agent,
cost: 0,
path: {
cwd: Instance.directory,
root: Instance.worktree,
},
time: {
created: Date.now(),
},

View File

@@ -5,7 +5,9 @@ import { MessageV2 } from "./message-v2"
import { Session } from "."
import { Log } from "../util/log"
import { splitWhen } from "remeda"
import { Storage } from "../storage/storage"
import { db } from "../storage/db"
import { MessageTable, PartTable, SessionTable, SessionDiffTable } from "./session.sql"
import { eq } from "drizzle-orm"
import { Bus } from "../bus"
import { SessionPrompt } from "./prompt"
import { SessionSummary } from "./summary"
@@ -54,13 +56,17 @@ export namespace SessionRevert {
}
if (revert) {
const session = await Session.get(input.sessionID)
revert.snapshot = session.revert?.snapshot ?? (await Snapshot.track())
const current = await Session.get(input.sessionID)
revert.snapshot = current.revert?.snapshot ?? (await Snapshot.track())
await Snapshot.revert(patches)
if (revert.snapshot) revert.diff = await Snapshot.diff(revert.snapshot)
const rangeMessages = all.filter((msg) => msg.info.id >= revert!.messageID)
const diffs = await SessionSummary.computeDiff({ messages: rangeMessages })
await Storage.write(["session_diff", input.sessionID], diffs)
db()
.insert(SessionDiffTable)
.values({ sessionID: input.sessionID, data: diffs })
.onConflictDoUpdate({ target: SessionDiffTable.sessionID, set: { data: diffs } })
.run()
Bus.publish(Session.Event.Diff, {
sessionID: input.sessionID,
diff: diffs,
@@ -83,10 +89,21 @@ export namespace SessionRevert {
const session = await Session.get(input.sessionID)
if (!session.revert) return session
if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot)
const next = await Session.update(input.sessionID, (draft) => {
draft.revert = undefined
})
return next
const now = Date.now()
db()
.update(SessionTable)
.set({
revert_messageID: null,
revert_partID: null,
revert_snapshot: null,
revert_diff: null,
time_updated: now,
})
.where(eq(SessionTable.id, input.sessionID))
.run()
const updated = await Session.get(input.sessionID)
Bus.publish(Session.Event.Updated, { info: updated })
return updated
}
export async function cleanup(session: Session.Info) {
@@ -97,7 +114,7 @@ export namespace SessionRevert {
const [preserve, remove] = splitWhen(msgs, (x) => x.info.id === messageID)
msgs = preserve
for (const msg of remove) {
await Storage.remove(["message", sessionID, msg.info.id])
db().delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run()
await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id })
}
const last = preserve.at(-1)
@@ -106,7 +123,7 @@ export namespace SessionRevert {
const [preserveParts, removeParts] = splitWhen(last.parts, (x) => x.id === partID)
last.parts = preserveParts
for (const part of removeParts) {
await Storage.remove(["part", last.info.id, part.id])
db().delete(PartTable).where(eq(PartTable.id, part.id)).run()
await Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: sessionID,
messageID: last.info.id,
@@ -114,8 +131,19 @@ export namespace SessionRevert {
})
}
}
await Session.update(sessionID, (draft) => {
draft.revert = undefined
})
const now = Date.now()
db()
.update(SessionTable)
.set({
revert_messageID: null,
revert_partID: null,
revert_snapshot: null,
revert_diff: null,
time_updated: now,
})
.where(eq(SessionTable.id, sessionID))
.run()
const updated = await Session.get(sessionID)
Bus.publish(Session.Event.Updated, { info: updated })
}
}

View File

@@ -0,0 +1,83 @@
import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core"
import { ProjectTable } from "../project/project.sql"
import type { MessageV2 } from "./message-v2"
import type { Snapshot } from "@/snapshot"
import type { Todo } from "./todo"
import type { PermissionNext } from "@/permission/next"
export const SessionTable = sqliteTable(
"session",
{
id: text("id").primaryKey(),
projectID: text("project_id")
.notNull()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
parentID: text("parent_id"),
slug: text("slug").notNull(),
directory: text("directory").notNull(),
title: text("title").notNull(),
version: text("version").notNull(),
share_url: text("share_url"),
summary_additions: integer("summary_additions"),
summary_deletions: integer("summary_deletions"),
summary_files: integer("summary_files"),
summary_diffs: text("summary_diffs", { mode: "json" }).$type<Snapshot.FileDiff[]>(),
revert_messageID: text("revert_message_id"),
revert_partID: text("revert_part_id"),
revert_snapshot: text("revert_snapshot"),
revert_diff: text("revert_diff"),
permission: text("permission", { mode: "json" }).$type<PermissionNext.Ruleset>(),
time_created: integer("time_created").notNull(),
time_updated: integer("time_updated").notNull(),
time_compacting: integer("time_compacting"),
time_archived: integer("time_archived"),
},
(table) => [index("session_project_idx").on(table.projectID), index("session_parent_idx").on(table.parentID)],
)
export const MessageTable = sqliteTable(
"message",
{
id: text("id").primaryKey(),
sessionID: text("session_id")
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
role: text("role").notNull(),
data: text("data", { mode: "json" }).notNull().$type<MessageV2.Info>(),
},
(table) => [index("message_session_idx").on(table.sessionID)],
)
export const PartTable = sqliteTable(
"part",
{
id: text("id").primaryKey(),
messageID: text("message_id")
.notNull()
.references(() => MessageTable.id, { onDelete: "cascade" }),
sessionID: text("session_id").notNull(),
type: text("type").notNull(),
data: text("data", { mode: "json" }).notNull().$type<MessageV2.Part>(),
},
(table) => [index("part_message_idx").on(table.messageID), index("part_session_idx").on(table.sessionID)],
)
export const SessionDiffTable = sqliteTable("session_diff", {
sessionID: text("session_id")
.primaryKey()
.references(() => SessionTable.id, { onDelete: "cascade" }),
data: text("data", { mode: "json" }).notNull().$type<Snapshot.FileDiff[]>(),
})
export const TodoTable = sqliteTable("todo", {
sessionID: text("session_id")
.primaryKey()
.references(() => SessionTable.id, { onDelete: "cascade" }),
data: text("data", { mode: "json" }).notNull().$type<Todo.Info[]>(),
})
export const PermissionTable = sqliteTable("permission", {
projectID: text("project_id")
.primaryKey()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
data: text("data", { mode: "json" }).notNull().$type<PermissionNext.Ruleset>(),
})

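The handwritten fromRow/toRow converters in the session module are typed off these definitions; drizzle derives both row shapes automatically:

type SessionRow = typeof SessionTable.$inferSelect // nullable columns become `T | null`
type SessionInsert = typeof SessionTable.$inferInsert // columns with defaults or NULL allowed become optional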
View File

@@ -11,7 +11,9 @@ import { Snapshot } from "@/snapshot"
import { Log } from "@/util/log"
import path from "path"
import { Instance } from "@/project/instance"
import { Storage } from "@/storage/storage"
import { Database } from "@/storage/db"
import { SessionDiffTable, SessionTable } from "./session.sql"
import { eq } from "drizzle-orm"
import { Bus } from "@/bus"
import { LLM } from "./llm"
@@ -40,21 +42,35 @@ export namespace SessionSummary {
.flatMap((x) => x.parts)
.filter((x) => x.type === "patch")
.flatMap((x) => x.files)
.map((x) => path.relative(Instance.worktree, x)),
.map((x) => path.relative(Instance.worktree, x).replaceAll("\\", "/")),
)
const diffs = await computeDiff({ messages: input.messages }).then((x) =>
x.filter((x) => {
return files.has(x.file)
}),
)
await Session.update(input.sessionID, (draft) => {
draft.summary = {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
}
})
await Storage.write(["session_diff", input.sessionID], diffs)
const now = Date.now()
Database.use((db) =>
db
.update(SessionTable)
.set({
summary_additions: diffs.reduce((sum, x) => sum + x.additions, 0),
summary_deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
summary_files: diffs.length,
time_updated: now,
})
.where(eq(SessionTable.id, input.sessionID))
.run(),
)
const session = await Session.get(input.sessionID)
Bus.publish(Session.Event.Updated, { info: session })
Database.use((db) =>
db
.insert(SessionDiffTable)
.values({ sessionID: input.sessionID, data: diffs })
.onConflictDoUpdate({ target: SessionDiffTable.sessionID, set: { data: diffs } })
.run(),
)
Bus.publish(Session.Event.Diff, {
sessionID: input.sessionID,
diff: diffs,
@@ -116,7 +132,10 @@ export namespace SessionSummary {
messageID: Identifier.schema("message").optional(),
}),
async (input) => {
return Storage.read<Snapshot.FileDiff[]>(["session_diff", input.sessionID]).catch(() => [])
const row = Database.use((db) =>
db.select().from(SessionDiffTable).where(eq(SessionDiffTable.sessionID, input.sessionID)).get(),
)
return row?.data ?? []
},
)

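The .replaceAll("\\", "/") added above is the Windows session-diff filtering fix: on Windows, path.relative returns backslash-separated paths, while the diff entries compared against the files Set use forward slashes, so membership checks silently failed. Illustratively:

const rel = path.relative("C:\\repo", "C:\\repo\\src\\app.ts") // "src\\app.ts" on Windows
rel.replaceAll("\\", "/") // "src/app.ts" — now matches the diff's file key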
View File

@@ -1,7 +1,9 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import z from "zod"
import { Storage } from "../storage/storage"
import { Database } from "../storage/db"
import { TodoTable } from "./session.sql"
import { eq } from "drizzle-orm"
export namespace Todo {
export const Info = z
@@ -24,14 +26,19 @@ export namespace Todo {
),
}
export async function update(input: { sessionID: string; todos: Info[] }) {
await Storage.write(["todo", input.sessionID], input.todos)
export function update(input: { sessionID: string; todos: Info[] }) {
Database.use((db) =>
db
.insert(TodoTable)
.values({ sessionID: input.sessionID, data: input.todos })
.onConflictDoUpdate({ target: TodoTable.sessionID, set: { data: input.todos } })
.run(),
)
Bus.publish(Event.Updated, input)
}
export async function get(sessionID: string) {
return Storage.read<Info[]>(["todo", sessionID])
.then((x) => x || [])
.catch(() => [])
export function get(sessionID: string) {
const row = Database.use((db) => db.select().from(TodoTable).where(eq(TodoTable.sessionID, sessionID)).get())
return row?.data ?? []
}
}

View File

@@ -4,7 +4,9 @@ import { ulid } from "ulid"
import { Provider } from "@/provider/provider"
import { Session } from "@/session"
import { MessageV2 } from "@/session/message-v2"
import { Storage } from "@/storage/storage"
import { Database } from "@/storage/db"
import { SessionShareTable } from "./share.sql"
import { eq } from "drizzle-orm"
import { Log } from "@/util/log"
import type * as SDK from "@opencode-ai/sdk/v2"
@@ -77,17 +79,22 @@ export namespace ShareNext {
})
.then((x) => x.json())
.then((x) => x as { id: string; url: string; secret: string })
await Storage.write(["session_share", sessionID], result)
Database.use((db) =>
db
.insert(SessionShareTable)
.values({ sessionID, data: result })
.onConflictDoUpdate({ target: SessionShareTable.sessionID, set: { data: result } })
.run(),
)
fullSync(sessionID)
return result
}
function get(sessionID: string) {
return Storage.read<{
id: string
secret: string
url: string
}>(["session_share", sessionID])
const row = Database.use((db) =>
db.select().from(SessionShareTable).where(eq(SessionShareTable.sessionID, sessionID)).get(),
)
return row?.data
}
type Data =
@@ -132,7 +139,7 @@ export namespace ShareNext {
const queued = queue.get(sessionID)
if (!queued) return
queue.delete(sessionID)
const share = await get(sessionID).catch(() => undefined)
const share = get(sessionID)
if (!share) return
await fetch(`${await url()}/api/share/${share.id}/sync`, {
@@ -163,7 +170,7 @@ export namespace ShareNext {
secret: share.secret,
}),
})
await Storage.remove(["session_share", sessionID])
Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.sessionID, sessionID)).run())
}
async function fullSync(sessionID: string) {

View File

@@ -0,0 +1,19 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { SessionTable } from "../session/session.sql"
import type { Session } from "../session"
export const SessionShareTable = sqliteTable("session_share", {
sessionID: text("session_id")
.primaryKey()
.references(() => SessionTable.id, { onDelete: "cascade" }),
data: text("data", { mode: "json" }).notNull().$type<{
id: string
secret: string
url: string
}>(),
})
export const ShareTable = sqliteTable("share", {
sessionID: text("session_id").primaryKey(),
data: text("data", { mode: "json" }).notNull().$type<Session.ShareInfo>(),
})

packages/opencode/src/sql.d.ts (vendored, new file, +5)
View File

@@ -0,0 +1,5 @@
// Type declarations for SQL file imports with { type: "text" }
declare module "*.sql" {
const content: string
export default content
}

View File

@@ -0,0 +1,122 @@
import { Database as SqliteDatabase } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import type { BunSQLiteDatabase } from "drizzle-orm/bun-sqlite"
import { lazy } from "../util/lazy"
import { Global } from "../global"
import { Log } from "../util/log"
import { migrations } from "./migrations.generated"
import { migrateFromJson } from "./json-migration"
import { NamedError } from "@opencode-ai/util/error"
import { Context } from "../util/context"
import z from "zod"
import path from "path"
export const NotFoundError = NamedError.create(
"NotFoundError",
z.object({
message: z.string(),
}),
)
const log = Log.create({ service: "db" })
export namespace Database {
export type DB = BunSQLiteDatabase
const connection = lazy(() => {
const dbPath = path.join(Global.Path.data, "opencode.db")
log.info("opening database", { path: dbPath })
const sqlite = new SqliteDatabase(dbPath, { create: true })
sqlite.run("PRAGMA journal_mode = WAL")
sqlite.run("PRAGMA synchronous = NORMAL")
sqlite.run("PRAGMA busy_timeout = 5000")
sqlite.run("PRAGMA cache_size = -64000")
sqlite.run("PRAGMA foreign_keys = ON")
migrate(sqlite)
// Run JSON migration after schema is ready
try {
migrateFromJson(sqlite)
} catch (e) {
log.error("json migration failed", { error: e })
}
return drizzle(sqlite)
})
function migrate(sqlite: SqliteDatabase) {
sqlite.run(`
CREATE TABLE IF NOT EXISTS _migrations (
name TEXT PRIMARY KEY,
applied_at INTEGER NOT NULL
)
`)
const applied = new Set(
sqlite
.query<{ name: string }, []>("SELECT name FROM _migrations")
.all()
.map((r) => r.name),
)
for (const migration of migrations) {
if (applied.has(migration.name)) continue
log.info("applying migration", { name: migration.name })
sqlite.exec(migration.sql)
sqlite.run("INSERT INTO _migrations (name, applied_at) VALUES (?, ?)", [migration.name, Date.now()])
}
}
const TransactionContext = Context.create<{
db: DB
effects: (() => void | Promise<void>)[]
}>("database")
export function use<T>(callback: (db: DB) => T): T {
try {
const ctx = TransactionContext.use()
return callback(ctx.db)
} catch (err) {
if (err instanceof Context.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const result = TransactionContext.provide({ db: connection(), effects }, () => callback(connection()))
for (const fx of effects) fx()
return result
}
throw err
}
}
export function fn<Input, T>(callback: (input: Input, db: DB) => T) {
return (input: Input) => use((db) => callback(input, db))
}
export function effect(fx: () => void | Promise<void>) {
try {
const ctx = TransactionContext.use()
ctx.effects.push(fx)
} catch {
fx()
}
}
export function transaction<T>(callback: (db: DB) => T): T {
try {
const ctx = TransactionContext.use()
return callback(ctx.db)
} catch (err) {
if (err instanceof Context.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const result = connection().transaction((tx) => {
return TransactionContext.provide({ db: tx as unknown as DB, effects }, () => callback(tx as unknown as DB))
})
for (const fx of effects) fx()
return result
}
throw err
}
}
}

// Convenience accessor for modules that import { db } directly (session, revert, etc.);
// assumed equivalent to Database.use with the identity callback.
export const db = () => Database.use((x) => x)

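Database.use and Database.transaction share a context, so nested calls reuse the same handle (inside a transaction, use sees the transaction rather than a fresh connection), and effect defers callbacks until the outermost scope finishes, so events fire only after commit. A usage sketch under those semantics, assuming TodoTable is imported from session.sql:

Database.transaction((db) => {
  db.insert(TodoTable).values({ sessionID: "ses_1", data: [] }).run()
  Database.use((inner) => {
    // `inner` is the same transaction handle as `db` above
  })
  Database.effect(() => console.log("runs only after the transaction commits"))
})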
View File

@@ -0,0 +1,339 @@
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { Global } from "../global"
import { Log } from "../util/log"
import { ProjectTable } from "../project/project.sql"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql"
import { SessionShareTable, ShareTable } from "../share/share.sql"
import path from "path"
import fs from "fs"
const log = Log.create({ service: "json-migration" })
export function migrateFromJson(sqlite: Database, customStorageDir?: string) {
const storageDir = customStorageDir ?? path.join(Global.Path.data, "storage")
const migrationMarker = path.join(storageDir, "sqlite-migrated")
if (fs.existsSync(migrationMarker)) {
log.info("json migration already completed")
return
}
if (!fs.existsSync(path.join(storageDir, "migration"))) {
log.info("no json storage found, skipping migration")
fs.writeFileSync(migrationMarker, Date.now().toString())
return
}
log.info("starting json to sqlite migration", { storageDir })
const db = drizzle(sqlite)
const stats = {
projects: 0,
sessions: 0,
messages: 0,
parts: 0,
diffs: 0,
todos: 0,
permissions: 0,
shares: 0,
errors: [] as string[],
}
// Run entire migration in a single transaction for performance
sqlite.run("BEGIN TRANSACTION")
try {
// Track existing IDs to avoid repeated DB lookups
const projectIDs = new Set<string>()
const sessionIDs = new Set<string>()
const messageIDs = new Set<string>()
// Migrate projects first (no FK deps)
const projectGlob = new Bun.Glob("project/*.json")
const projectFiles = Array.from(projectGlob.scanSync({ cwd: storageDir, absolute: true }))
const projectValues: (typeof ProjectTable.$inferInsert)[] = []
for (const file of projectFiles) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
if (!data.id) {
stats.errors.push(`project missing id: ${file}`)
continue
}
projectIDs.add(data.id)
projectValues.push({
id: data.id,
worktree: data.worktree ?? "/",
vcs: data.vcs,
name: data.name ?? undefined,
icon_url: data.icon?.url,
icon_color: data.icon?.color,
time_created: data.time?.created ?? Date.now(),
time_updated: data.time?.updated ?? Date.now(),
time_initialized: data.time?.initialized,
sandboxes: data.sandboxes ?? [],
})
} catch (e) {
stats.errors.push(`failed to migrate project ${file}: ${e}`)
}
}
if (projectValues.length > 0) {
db.insert(ProjectTable).values(projectValues).onConflictDoNothing().run()
stats.projects = projectValues.length
}
log.info("migrated projects", { count: stats.projects })
// Migrate sessions (depends on projects)
const sessionGlob = new Bun.Glob("session/*/*.json")
const sessionFiles = Array.from(sessionGlob.scanSync({ cwd: storageDir, absolute: true }))
const sessionValues: (typeof SessionTable.$inferInsert)[] = []
for (const file of sessionFiles) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
if (!data.id || !data.projectID) {
stats.errors.push(`session missing id or projectID: ${file}`)
continue
}
if (!projectIDs.has(data.projectID)) {
log.warn("skipping orphaned session", { sessionID: data.id, projectID: data.projectID })
continue
}
sessionIDs.add(data.id)
sessionValues.push({
id: data.id,
projectID: data.projectID,
parentID: data.parentID ?? null,
slug: data.slug ?? "",
directory: data.directory ?? "",
title: data.title ?? "",
version: data.version ?? "",
share_url: data.share?.url ?? null,
summary_additions: data.summary?.additions ?? null,
summary_deletions: data.summary?.deletions ?? null,
summary_files: data.summary?.files ?? null,
summary_diffs: data.summary?.diffs ?? null,
revert_messageID: data.revert?.messageID ?? null,
revert_partID: data.revert?.partID ?? null,
revert_snapshot: data.revert?.snapshot ?? null,
revert_diff: data.revert?.diff ?? null,
permission: data.permission ?? null,
time_created: data.time?.created ?? Date.now(),
time_updated: data.time?.updated ?? Date.now(),
time_compacting: data.time?.compacting ?? null,
time_archived: data.time?.archived ?? null,
})
} catch (e) {
stats.errors.push(`failed to migrate session ${file}: ${e}`)
}
}
if (sessionValues.length > 0) {
db.insert(SessionTable).values(sessionValues).onConflictDoNothing().run()
stats.sessions = sessionValues.length
}
log.info("migrated sessions", { count: stats.sessions })
// Migrate messages (depends on sessions)
const messageGlob = new Bun.Glob("message/*/*.json")
const messageFiles = Array.from(messageGlob.scanSync({ cwd: storageDir, absolute: true }))
const messageValues: (typeof MessageTable.$inferInsert)[] = []
for (const file of messageFiles) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
if (!data.id || !data.sessionID) {
stats.errors.push(`message missing id or sessionID: ${file}`)
continue
}
if (!sessionIDs.has(data.sessionID)) {
log.warn("skipping orphaned message", { messageID: data.id, sessionID: data.sessionID })
continue
}
messageIDs.add(data.id)
const { id, sessionID, role, ...rest } = data
messageValues.push({ id, sessionID, role, data: rest })
} catch (e) {
stats.errors.push(`failed to migrate message ${file}: ${e}`)
}
}
if (messageValues.length > 0) {
db.insert(MessageTable).values(messageValues).onConflictDoNothing().run()
stats.messages = messageValues.length
}
log.info("migrated messages", { count: stats.messages })
// Migrate parts (depends on messages)
const partGlob = new Bun.Glob("part/*/*.json")
const partFiles = Array.from(partGlob.scanSync({ cwd: storageDir, absolute: true }))
const partValues: (typeof PartTable.$inferInsert)[] = []
for (const file of partFiles) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
if (!data.id || !data.messageID) {
stats.errors.push(`part missing id or messageID: ${file}`)
continue
}
if (!messageIDs.has(data.messageID)) {
log.warn("skipping orphaned part", { partID: data.id, messageID: data.messageID })
continue
}
const { id, messageID, sessionID, type, ...rest } = data
partValues.push({ id, messageID, sessionID, type, data: rest })
} catch (e) {
stats.errors.push(`failed to migrate part ${file}: ${e}`)
}
}
if (partValues.length > 0) {
db.insert(PartTable).values(partValues).onConflictDoNothing().run()
stats.parts = partValues.length
}
log.info("migrated parts", { count: stats.parts })
// Migrate session diffs (use prepared statement for batch insert)
const diffGlob = new Bun.Glob("session_diff/*.json")
const diffFiles = Array.from(diffGlob.scanSync({ cwd: storageDir, absolute: true }))
const diffStmt = sqlite.prepare("INSERT OR IGNORE INTO session_diff (session_id, data) VALUES (?, ?)")
for (const file of diffFiles) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
const sessionID = path.basename(file, ".json")
if (!sessionIDs.has(sessionID)) {
log.warn("skipping orphaned session_diff", { sessionID })
continue
}
diffStmt.run(sessionID, JSON.stringify(data))
stats.diffs++
} catch (e) {
stats.errors.push(`failed to migrate session_diff ${file}: ${e}`)
}
}
log.info("migrated session diffs", { count: stats.diffs })
// Migrate todos
const todoGlob = new Bun.Glob("todo/*.json")
const todoFiles = Array.from(todoGlob.scanSync({ cwd: storageDir, absolute: true }))
const todoValues: (typeof TodoTable.$inferInsert)[] = []
for (const file of todoFiles) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
const sessionID = path.basename(file, ".json")
if (!sessionIDs.has(sessionID)) {
log.warn("skipping orphaned todo", { sessionID })
continue
}
todoValues.push({ sessionID, data })
} catch (e) {
stats.errors.push(`failed to migrate todo ${file}: ${e}`)
}
}
if (todoValues.length > 0) {
db.insert(TodoTable).values(todoValues).onConflictDoNothing().run()
stats.todos = todoValues.length
}
log.info("migrated todos", { count: stats.todos })
// Migrate permissions
const permGlob = new Bun.Glob("permission/*.json")
const permFiles = Array.from(permGlob.scanSync({ cwd: storageDir, absolute: true }))
const permValues: (typeof PermissionTable.$inferInsert)[] = []
for (const file of permFiles) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
const projectID = path.basename(file, ".json")
if (!projectIDs.has(projectID)) {
log.warn("skipping orphaned permission", { projectID })
continue
}
permValues.push({ projectID, data })
} catch (e) {
stats.errors.push(`failed to migrate permission ${file}: ${e}`)
}
}
if (permValues.length > 0) {
db.insert(PermissionTable).values(permValues).onConflictDoNothing().run()
stats.permissions = permValues.length
}
log.info("migrated permissions", { count: stats.permissions })
// Migrate session shares
const shareGlob = new Bun.Glob("session_share/*.json")
const shareFiles = Array.from(shareGlob.scanSync({ cwd: storageDir, absolute: true }))
const shareValues: (typeof SessionShareTable.$inferInsert)[] = []
for (const file of shareFiles) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
const sessionID = path.basename(file, ".json")
if (!sessionIDs.has(sessionID)) {
log.warn("skipping orphaned session_share", { sessionID })
continue
}
shareValues.push({ sessionID, data })
} catch (e) {
stats.errors.push(`failed to migrate session_share ${file}: ${e}`)
}
}
if (shareValues.length > 0) {
db.insert(SessionShareTable).values(shareValues).onConflictDoNothing().run()
stats.shares = shareValues.length
}
log.info("migrated session shares", { count: stats.shares })
// Migrate shares (downloaded shared sessions, no FK)
const share2Glob = new Bun.Glob("share/*.json")
const share2Files = Array.from(share2Glob.scanSync({ cwd: storageDir, absolute: true }))
const share2Values: (typeof ShareTable.$inferInsert)[] = []
for (const file of share2Files) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
const sessionID = path.basename(file, ".json")
share2Values.push({ sessionID, data })
} catch (e) {
stats.errors.push(`failed to migrate share ${file}: ${e}`)
}
}
if (share2Values.length > 0) {
db.insert(ShareTable).values(share2Values).onConflictDoNothing().run()
}
sqlite.run("COMMIT")
} catch (e) {
sqlite.run("ROLLBACK")
throw e
}
// Mark migration complete
fs.writeFileSync(migrationMarker, Date.now().toString())
log.info("json migration complete", {
projects: stats.projects,
sessions: stats.sessions,
messages: stats.messages,
parts: stats.parts,
diffs: stats.diffs,
todos: stats.todos,
permissions: stats.permissions,
shares: stats.shares,
errorCount: stats.errors.length,
})
if (stats.errors.length > 0) {
log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
}
return stats
}

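The customStorageDir parameter is what lets the database import command feed an exported tree back through the same code path that handles first-run migration. A hypothetical invocation (the :memory: target and path are illustrative, and the schema migrations must already have been applied so the tables exist):

import { Database as SqliteDatabase } from "bun:sqlite"

const sqlite = new SqliteDatabase(":memory:")
// ...apply schema migrations first (done internally by storage/db on open)...
const stats = migrateFromJson(sqlite, "/tmp/opencode-export")
console.log(stats?.errors.length ?? 0, "migration errors")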
View File

@@ -0,0 +1,6 @@
// Auto-generated - do not edit
import m0 from "../../migration/0000_normal_wind_dancer.sql" with { type: "text" }
export const migrations = [
{ name: "0000_normal_wind_dancer", sql: m0 },
]

View File

@@ -1,227 +0,0 @@
import { Log } from "../util/log"
import path from "path"
import fs from "fs/promises"
import { Global } from "../global"
import { Filesystem } from "../util/filesystem"
import { lazy } from "../util/lazy"
import { Lock } from "../util/lock"
import { $ } from "bun"
import { NamedError } from "@opencode-ai/util/error"
import z from "zod"
export namespace Storage {
const log = Log.create({ service: "storage" })
type Migration = (dir: string) => Promise<void>
export const NotFoundError = NamedError.create(
"NotFoundError",
z.object({
message: z.string(),
}),
)
const MIGRATIONS: Migration[] = [
async (dir) => {
const project = path.resolve(dir, "../project")
if (!(await Filesystem.isDir(project))) return
for await (const projectDir of new Bun.Glob("*").scan({
cwd: project,
onlyFiles: false,
})) {
log.info(`migrating project ${projectDir}`)
let projectID = projectDir
const fullProjectDir = path.join(project, projectDir)
let worktree = "/"
if (projectID !== "global") {
for await (const msgFile of new Bun.Glob("storage/session/message/*/*.json").scan({
cwd: path.join(project, projectDir),
absolute: true,
})) {
const json = await Bun.file(msgFile).json()
worktree = json.path?.root
if (worktree) break
}
if (!worktree) continue
if (!(await Filesystem.isDir(worktree))) continue
const [id] = await $`git rev-list --max-parents=0 --all`
.quiet()
.nothrow()
.cwd(worktree)
.text()
.then((x) =>
x
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted(),
)
if (!id) continue
projectID = id
await Bun.write(
path.join(dir, "project", projectID + ".json"),
JSON.stringify({
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
}),
)
log.info(`migrating sessions for project ${projectID}`)
for await (const sessionFile of new Bun.Glob("storage/session/info/*.json").scan({
cwd: fullProjectDir,
absolute: true,
})) {
const dest = path.join(dir, "session", projectID, path.basename(sessionFile))
log.info("copying", {
sessionFile,
dest,
})
const session = await Bun.file(sessionFile).json()
await Bun.write(dest, JSON.stringify(session))
log.info(`migrating messages for session ${session.id}`)
for await (const msgFile of new Bun.Glob(`storage/session/message/${session.id}/*.json`).scan({
cwd: fullProjectDir,
absolute: true,
})) {
const dest = path.join(dir, "message", session.id, path.basename(msgFile))
log.info("copying", {
msgFile,
dest,
})
const message = await Bun.file(msgFile).json()
await Bun.write(dest, JSON.stringify(message))
log.info(`migrating parts for message ${message.id}`)
for await (const partFile of new Bun.Glob(`storage/session/part/${session.id}/${message.id}/*.json`).scan(
{
cwd: fullProjectDir,
absolute: true,
},
)) {
const dest = path.join(dir, "part", message.id, path.basename(partFile))
const part = await Bun.file(partFile).json()
log.info("copying", {
partFile,
dest,
})
await Bun.write(dest, JSON.stringify(part))
}
}
}
}
}
},
async (dir) => {
for await (const item of new Bun.Glob("session/*/*.json").scan({
cwd: dir,
absolute: true,
})) {
const session = await Bun.file(item).json()
if (!session.projectID) continue
if (!session.summary?.diffs) continue
const { diffs } = session.summary
await Bun.file(path.join(dir, "session_diff", session.id + ".json")).write(JSON.stringify(diffs))
await Bun.file(path.join(dir, "session", session.projectID, session.id + ".json")).write(
JSON.stringify({
...session,
summary: {
additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
},
}),
)
}
},
]
const state = lazy(async () => {
const dir = path.join(Global.Path.data, "storage")
const migration = await Bun.file(path.join(dir, "migration"))
.json()
.then((x) => parseInt(x))
.catch(() => 0)
for (let index = migration; index < MIGRATIONS.length; index++) {
log.info("running migration", { index })
const migration = MIGRATIONS[index]
await migration(dir).catch(() => log.error("failed to run migration", { index }))
await Bun.write(path.join(dir, "migration"), (index + 1).toString())
}
return {
dir,
}
})
export async function remove(key: string[]) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
await fs.unlink(target).catch(() => {})
})
}
export async function read<T>(key: string[]) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.read(target)
const result = await Bun.file(target).json()
return result as T
})
}
export async function update<T>(key: string[], fn: (draft: T) => void) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.write(target)
const content = await Bun.file(target).json()
fn(content)
await Bun.write(target, JSON.stringify(content, null, 2))
return content as T
})
}
export async function write<T>(key: string[], content: T) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.write(target)
await Bun.write(target, JSON.stringify(content, null, 2))
})
}
async function withErrorHandling<T>(body: () => Promise<T>) {
return body().catch((e) => {
if (!(e instanceof Error)) throw e
const errnoException = e as NodeJS.ErrnoException
if (errnoException.code === "ENOENT") {
throw new NotFoundError({ message: `Resource not found: ${errnoException.path}` })
}
throw e
})
}
const glob = new Bun.Glob("**/*")
export async function list(prefix: string[]) {
const dir = await state().then((x) => x.dir)
try {
const result = await Array.fromAsync(
glob.scan({
cwd: path.join(dir, ...prefix),
onlyFiles: true,
}),
).then((results) => results.map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)]))
result.sort()
return result
} catch {
return []
}
}
}
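
For reference, the removed key-based API mapped a key array to a JSON file on disk, `<storage>/<...key>.json`. A hypothetical call looked like this; the project id is illustrative:

// Hypothetical usage of the removed API; "proj_123" is illustrative.
// The key resolved to <data>/storage/project/proj_123.json.
const project = await Storage.read<Project.Info>(["project", "proj_123"])
await Storage.update<Project.Info>(["project", "proj_123"], (draft) => {
  draft.name = "renamed"
})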

View File

@@ -5,12 +5,33 @@ import z from "zod"
import { NamedError } from "@opencode-ai/util/error"
import { Global } from "../global"
import { Instance } from "../project/instance"
import { InstanceBootstrap } from "../project/bootstrap"
import { Project } from "../project/project"
import { Storage } from "../storage/storage"
import { fn } from "../util/fn"
import { Config } from "@/config/config"
import { Log } from "../util/log"
import { BusEvent } from "@/bus/bus-event"
import { GlobalBus } from "@/bus/global"
export namespace Worktree {
const log = Log.create({ service: "worktree" })
export const Event = {
Ready: BusEvent.define(
"worktree.ready",
z.object({
name: z.string(),
branch: z.string(),
}),
),
Failed: BusEvent.define(
"worktree.failed",
z.object({
message: z.string(),
}),
),
}
export const Info = z
.object({
name: z.string(),
@@ -234,7 +255,7 @@ export namespace Worktree {
const base = input?.name ? slug(input.name) : ""
const info = await candidate(root, base || undefined)
const created = await $`git worktree add -b ${info.branch} ${info.directory}`
const created = await $`git worktree add --no-checkout -b ${info.branch} ${info.directory}`
.quiet()
.nothrow()
.cwd(Instance.worktree)
@@ -242,24 +263,88 @@ export namespace Worktree {
throw new CreateFailedError({ message: errorText(created) || "Failed to create git worktree" })
}
const project = await Storage.read<Project.Info>(["project", Instance.project.id]).catch(() => Instance.project)
const startup = project.commands?.start?.trim()
if (startup) {
const ran = await runStartCommand(info.directory, startup)
if (ran.exitCode !== 0) {
throw new StartCommandFailedError({
message: errorText(ran) || "Project start command failed",
})
}
}
await Project.addSandbox(Instance.project.id, info.directory).catch(() => undefined)
const projectID = Instance.project.id
const extra = input?.startCommand?.trim()
if (extra) {
const ran = await runStartCommand(info.directory, extra)
if (ran.exitCode !== 0) {
throw new StartCommandFailedError({ message: errorText(ran) || "Worktree start command failed" })
setTimeout(() => {
const start = async () => {
const populated = await $`git reset --hard`.quiet().nothrow().cwd(info.directory)
if (populated.exitCode !== 0) {
const message = errorText(populated) || "Failed to populate worktree"
log.error("worktree checkout failed", { directory: info.directory, message })
GlobalBus.emit("event", {
directory: info.directory,
payload: {
type: Event.Failed.type,
properties: {
message,
},
},
})
return
}
const booted = await Instance.provide({
directory: info.directory,
init: InstanceBootstrap,
fn: () => undefined,
})
.then(() => true)
.catch((error) => {
const message = error instanceof Error ? error.message : String(error)
log.error("worktree bootstrap failed", { directory: info.directory, message })
GlobalBus.emit("event", {
directory: info.directory,
payload: {
type: Event.Failed.type,
properties: {
message,
},
},
})
return false
})
if (!booted) return
GlobalBus.emit("event", {
directory: info.directory,
payload: {
type: Event.Ready.type,
properties: {
name: info.name,
branch: info.branch,
},
},
})
const project = await Storage.read<Project.Info>(["project", projectID]).catch(() => undefined)
const startup = project?.commands?.start?.trim() ?? ""
const run = async (cmd: string, kind: "project" | "worktree") => {
const ran = await runStartCommand(info.directory, cmd)
if (ran.exitCode === 0) return true
log.error("worktree start command failed", {
kind,
directory: info.directory,
message: errorText(ran),
})
return false
}
if (startup) {
const ok = await run(startup, "project")
if (!ok) return
}
if (extra) {
await run(extra, "worktree")
}
}
}
void start().catch((error) => {
log.error("worktree start task failed", { directory: info.directory, error })
})
}, 0)
return info
})
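
The design choice above splits creation in two: `--no-checkout` registers the worktree without touching the filesystem, so the caller gets its `info` back immediately, and a deferred task populates the files, boots the instance, and emits the ready/failed events. A minimal standalone sketch of that pattern, with illustrative arguments rather than values from the source:

// Sketch of the deferred-population pattern; root, branch, and directory
// are illustrative, not taken from the source.
import { $ } from "bun"

async function createDeferred(root: string, branch: string, directory: string) {
  // Register the worktree without checking out files (fast path).
  const created = await $`git worktree add --no-checkout -b ${branch} ${directory}`.quiet().nothrow().cwd(root)
  if (created.exitCode !== 0) throw new Error("git worktree add failed")
  // Populate the working tree in the background.
  setTimeout(() => {
    void $`git reset --hard`.quiet().nothrow().cwd(directory)
  }, 0)
  return { branch, directory }
}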

View File

@@ -2,7 +2,6 @@ import { test, expect } from "bun:test"
import os from "os"
import { PermissionNext } from "../../src/permission/next"
import { Instance } from "../../src/project/instance"
import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture"
// fromConfig tests

View File

@@ -1,7 +1,9 @@
import { describe, expect, test } from "bun:test"
import { Project } from "../../src/project/project"
import { Log } from "../../src/util/log"
import { Storage } from "../../src/storage/storage"
import { Database } from "../../src/storage/db"
import { ProjectTable } from "../../src/project/project.sql"
import { eq } from "drizzle-orm"
import { $ } from "bun"
import path from "path"
import { tmpdir } from "../fixture/fixture"
@@ -99,11 +101,12 @@ describe("Project.discover", () => {
await Project.discover(project)
const updated = await Storage.read<Project.Info>(["project", project.id])
expect(updated.icon).toBeDefined()
expect(updated.icon?.url).toStartWith("data:")
expect(updated.icon?.url).toContain("base64")
expect(updated.icon?.color).toBeUndefined()
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, project.id)).get())
const updated = row ? Project.fromRow(row) : undefined
expect(updated?.icon).toBeDefined()
expect(updated?.icon?.url).toStartWith("data:")
expect(updated?.icon?.url).toContain("base64")
expect(updated?.icon?.color).toBeUndefined()
})
test("should not discover non-image files", async () => {
@@ -114,7 +117,8 @@ describe("Project.discover", () => {
await Project.discover(project)
const updated = await Storage.read<Project.Info>(["project", project.id])
expect(updated.icon).toBeUndefined()
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, project.id)).get())
const updated = row ? Project.fromRow(row) : undefined
expect(updated?.icon).toBeUndefined()
})
})

View File

@@ -1,8 +1,7 @@
import { describe, expect, test, beforeEach, afterEach } from "bun:test"
import { describe, expect, test } from "bun:test"
import path from "path"
import { Session } from "../../src/session"
import { SessionRevert } from "../../src/session/revert"
import { SessionCompaction } from "../../src/session/compaction"
import { MessageV2 } from "../../src/session/message-v2"
import { Log } from "../../src/util/log"
import { Instance } from "../../src/project/instance"
@@ -53,10 +52,6 @@ describe("revert + compact workflow", () => {
sessionID,
mode: "default",
agent: "default",
path: {
cwd: tmp.path,
root: tmp.path,
},
cost: 0,
tokens: {
output: 0,
@@ -113,10 +108,6 @@ describe("revert + compact workflow", () => {
sessionID,
mode: "default",
agent: "default",
path: {
cwd: tmp.path,
root: tmp.path,
},
cost: 0,
tokens: {
output: 0,
@@ -227,10 +218,6 @@ describe("revert + compact workflow", () => {
sessionID,
mode: "default",
agent: "default",
path: {
cwd: tmp.path,
root: tmp.path,
},
cost: 0,
tokens: {
output: 0,

View File

@@ -0,0 +1,628 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { eq } from "drizzle-orm"
import path from "path"
import fs from "fs/promises"
import os from "os"
import { migrateFromJson } from "../../src/storage/json-migration"
import { ProjectTable } from "../../src/project/project.sql"
import { Project } from "../../src/project/project"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../../src/session/session.sql"
import { SessionShareTable, ShareTable } from "../../src/share/share.sql"
import { migrations } from "../../src/storage/migrations.generated"
// Test fixtures
const fixtures = {
project: {
id: "proj_test123abc",
name: "Test Project",
worktree: "/test/path",
vcs: "git" as const,
sandboxes: [],
},
session: {
id: "ses_test456def",
projectID: "proj_test123abc",
slug: "test-session",
directory: "/test/path",
title: "Test Session",
version: "1.0.0",
time: { created: 1700000000000, updated: 1700000001000 },
},
message: {
id: "msg_test789ghi",
sessionID: "ses_test456def",
role: "user" as const,
agent: "default",
model: { providerID: "openai", modelID: "gpt-4" },
time: { created: 1700000000000 },
},
part: {
id: "prt_testabc123",
messageID: "msg_test789ghi",
sessionID: "ses_test456def",
type: "text" as const,
text: "Hello, world!",
},
}
// Helper to create test storage directory structure
async function setupStorageDir(baseDir: string) {
const storageDir = path.join(baseDir, "storage")
await fs.mkdir(path.join(storageDir, "project"), { recursive: true })
await fs.mkdir(path.join(storageDir, "session", "proj_test123abc"), { recursive: true })
await fs.mkdir(path.join(storageDir, "message", "ses_test456def"), { recursive: true })
await fs.mkdir(path.join(storageDir, "part", "msg_test789ghi"), { recursive: true })
await fs.mkdir(path.join(storageDir, "session_diff"), { recursive: true })
await fs.mkdir(path.join(storageDir, "todo"), { recursive: true })
await fs.mkdir(path.join(storageDir, "permission"), { recursive: true })
await fs.mkdir(path.join(storageDir, "session_share"), { recursive: true })
await fs.mkdir(path.join(storageDir, "share"), { recursive: true })
// Create legacy marker to indicate JSON storage exists
await Bun.write(path.join(storageDir, "migration"), "1")
return storageDir
}
// Helper to create in-memory test database with schema
function createTestDb() {
const sqlite = new Database(":memory:")
sqlite.exec("PRAGMA foreign_keys = ON")
// Apply schema migrations
for (const migration of migrations) {
const statements = migration.sql.split("--> statement-breakpoint")
for (const stmt of statements) {
const trimmed = stmt.trim()
if (trimmed) sqlite.exec(trimmed)
}
}
return sqlite
}
describe("JSON to SQLite migration", () => {
let tmpDir: string
let storageDir: string
let sqlite: Database
beforeEach(async () => {
tmpDir = path.join(os.tmpdir(), "opencode-migration-test-" + Math.random().toString(36).slice(2))
await fs.mkdir(tmpDir, { recursive: true })
storageDir = await setupStorageDir(tmpDir)
sqlite = createTestDb()
})
afterEach(async () => {
sqlite.close()
await fs.rm(tmpDir, { recursive: true, force: true })
})
describe("basic functionality", () => {
test("migrates all entity types successfully", async () => {
// Write test fixtures
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
await Bun.write(
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
JSON.stringify(fixtures.message),
)
await Bun.write(
path.join(storageDir, "part", fixtures.message.id, `${fixtures.part.id}.json`),
JSON.stringify(fixtures.part),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.projects).toBe(1)
expect(stats?.sessions).toBe(1)
expect(stats?.messages).toBe(1)
expect(stats?.parts).toBe(1)
expect(stats?.errors.length).toBe(0)
// Verify data in database
const db = drizzle(sqlite)
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
expect(projects[0].id).toBe(fixtures.project.id)
const sessions = db.select().from(SessionTable).all()
expect(sessions.length).toBe(1)
expect(sessions[0].id).toBe(fixtures.session.id)
const messages = db.select().from(MessageTable).all()
expect(messages.length).toBe(1)
expect(messages[0].id).toBe(fixtures.message.id)
const parts = db.select().from(PartTable).all()
expect(parts.length).toBe(1)
expect(parts[0].id).toBe(fixtures.part.id)
})
test("skips migration when marker file exists", async () => {
// Create marker file
await Bun.write(path.join(storageDir, "sqlite-migrated"), Date.now().toString())
// Write project that should NOT be migrated
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats).toBeUndefined()
// Verify nothing was migrated
const db = drizzle(sqlite)
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(0)
})
test("skips migration when no JSON storage exists", async () => {
// Remove the legacy migration marker
await fs.rm(path.join(storageDir, "migration"))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats).toBeUndefined()
// Marker file should be created
const marker = await Bun.file(path.join(storageDir, "sqlite-migrated")).exists()
expect(marker).toBe(true)
})
test("creates marker file after successful migration", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await migrateFromJson(sqlite, storageDir)
const marker = await Bun.file(path.join(storageDir, "sqlite-migrated")).exists()
expect(marker).toBe(true)
})
})
describe("project migration", () => {
test("migrates project with all fields", async () => {
const project = { ...fixtures.project, icon: { url: "data:image/png;base64,..." } }
await Bun.write(path.join(storageDir, "project", `${project.id}.json`), JSON.stringify(project))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.projects).toBe(1)
const db = drizzle(sqlite)
const row = db.select().from(ProjectTable).where(eq(ProjectTable.id, project.id)).get()
const migrated = row ? Project.fromRow(row) : undefined
expect(migrated?.id).toBe(project.id)
expect(migrated?.icon?.url).toBe(project.icon.url)
})
test("skips project with missing id field", async () => {
const invalidProject = { name: "No ID Project" }
await Bun.write(path.join(storageDir, "project", "invalid.json"), JSON.stringify(invalidProject))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.projects).toBe(0)
expect(stats?.errors.length).toBe(1)
expect(stats?.errors[0]).toContain("missing id")
})
test("skips project with invalid JSON", async () => {
await Bun.write(path.join(storageDir, "project", "bad.json"), "{ invalid json }")
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.projects).toBe(0)
expect(stats?.errors.length).toBe(1)
expect(stats?.errors[0]).toContain("failed to migrate project")
})
})
describe("session migration", () => {
test("migrates session with valid projectID", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.sessions).toBe(1)
const db = drizzle(sqlite)
const row = db.select().from(SessionTable).where(eq(SessionTable.id, fixtures.session.id)).get()
expect(row?.id).toBe(fixtures.session.id)
expect(row?.projectID).toBe(fixtures.project.id)
expect(row?.time_created).toBe(fixtures.session.time.created)
expect(row?.time_updated).toBe(fixtures.session.time.updated)
})
test("migrates session with parentID", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
const childSession = { ...fixtures.session, id: "ses_child123", parentID: fixtures.session.id }
// Create parent session first
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${childSession.id}.json`),
JSON.stringify(childSession),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.sessions).toBe(2)
const db = drizzle(sqlite)
const row = db.select().from(SessionTable).where(eq(SessionTable.id, childSession.id)).get()
expect(row?.parentID).toBe(fixtures.session.id)
})
test("skips orphaned session (missing project)", async () => {
// Don't create the project, just the session
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.sessions).toBe(0)
// Orphaned sessions are logged as warnings, not errors
expect(stats?.errors.length).toBe(0)
})
test("handles missing time fields with Date.now() fallback", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
const sessionNoTime = { ...fixtures.session, time: undefined }
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(sessionNoTime),
)
const before = Date.now()
const stats = await migrateFromJson(sqlite, storageDir)
const after = Date.now()
expect(stats?.sessions).toBe(1)
const db = drizzle(sqlite)
const row = db.select().from(SessionTable).where(eq(SessionTable.id, fixtures.session.id)).get()
expect(row?.time_created).toBeGreaterThanOrEqual(before)
expect(row?.time_created).toBeLessThanOrEqual(after)
})
test("skips session with missing required fields", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
const invalidSession = { id: "ses_noproj" } // missing projectID
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, "invalid.json"),
JSON.stringify(invalidSession),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.sessions).toBe(0)
expect(stats?.errors.length).toBe(1)
expect(stats?.errors[0]).toContain("missing id or projectID")
})
})
describe("message migration", () => {
test("migrates message with valid sessionID", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
await Bun.write(
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
JSON.stringify(fixtures.message),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.messages).toBe(1)
const db = drizzle(sqlite)
const row = db.select().from(MessageTable).where(eq(MessageTable.id, fixtures.message.id)).get()
expect(row?.id).toBe(fixtures.message.id)
expect(row?.sessionID).toBe(fixtures.session.id)
})
test("skips orphaned message (missing session)", async () => {
// Don't create the session, just the message
await Bun.write(
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
JSON.stringify(fixtures.message),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.messages).toBe(0)
})
test("skips message with missing required fields", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
const invalidMessage = { id: "msg_nosess" } // missing sessionID
await Bun.write(
path.join(storageDir, "message", fixtures.session.id, "invalid.json"),
JSON.stringify(invalidMessage),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.messages).toBe(0)
expect(stats?.errors.length).toBe(1)
expect(stats?.errors[0]).toContain("missing id or sessionID")
})
})
describe("part migration", () => {
test("migrates part with valid messageID", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
await Bun.write(
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
JSON.stringify(fixtures.message),
)
await Bun.write(
path.join(storageDir, "part", fixtures.message.id, `${fixtures.part.id}.json`),
JSON.stringify(fixtures.part),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.parts).toBe(1)
const db = drizzle(sqlite)
const row = db.select().from(PartTable).where(eq(PartTable.id, fixtures.part.id)).get()
expect(row?.id).toBe(fixtures.part.id)
expect(row?.message_id).toBe(fixtures.message.id)
})
test("skips orphaned part (missing message)", async () => {
await Bun.write(
path.join(storageDir, "part", fixtures.message.id, `${fixtures.part.id}.json`),
JSON.stringify(fixtures.part),
)
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.parts).toBe(0)
})
test("skips part with missing required fields", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
await Bun.write(
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
JSON.stringify(fixtures.message),
)
const invalidPart = { id: "prt_nomsg" } // missing messageID and sessionID
await Bun.write(path.join(storageDir, "part", fixtures.message.id, "invalid.json"), JSON.stringify(invalidPart))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.parts).toBe(0)
expect(stats?.errors.length).toBe(1)
expect(stats?.errors[0]).toContain("missing id or messageID")
})
})
describe("auxiliary tables", () => {
test("migrates session_diff correctly", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
const diff = [{ file: "test.ts", before: "", after: "console.log('hello')", additions: 10, deletions: 5 }]
await Bun.write(path.join(storageDir, "session_diff", `${fixtures.session.id}.json`), JSON.stringify(diff))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.diffs).toBe(1)
// Query raw since TypeScript schema doesn't match migration
const row = sqlite
.query<{ data: string }, [string]>("SELECT data FROM session_diff WHERE session_id = ?")
.get(fixtures.session.id)
expect(row?.data).toBeDefined()
const data = JSON.parse(row!.data)
expect(data[0].file).toBe("test.ts")
})
test("migrates todo correctly", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
const todo = [{ id: "1", content: "Test todo", status: "pending", priority: "high" }]
await Bun.write(path.join(storageDir, "todo", `${fixtures.session.id}.json`), JSON.stringify(todo))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.todos).toBe(1)
const db = drizzle(sqlite)
const row = db.select().from(TodoTable).where(eq(TodoTable.sessionID, fixtures.session.id)).get()
expect(row?.data).toBeDefined()
})
test("migrates permission correctly", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
const permission = [{ permission: "bash", pattern: "*", action: "allow" as const }]
await Bun.write(path.join(storageDir, "permission", `${fixtures.project.id}.json`), JSON.stringify(permission))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.permissions).toBe(1)
const db = drizzle(sqlite)
const row = db.select().from(PermissionTable).where(eq(PermissionTable.projectID, fixtures.project.id)).get()
expect(row?.data).toBeDefined()
})
test("migrates session_share correctly", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
JSON.stringify(fixtures.session),
)
const share = { id: "share_123", secret: "abc123", url: "https://share.example.com/abc123" }
await Bun.write(path.join(storageDir, "session_share", `${fixtures.session.id}.json`), JSON.stringify(share))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.shares).toBe(1)
const db = drizzle(sqlite)
const row = db.select().from(SessionShareTable).where(eq(SessionShareTable.sessionID, fixtures.session.id)).get()
expect(row?.data.secret).toBe("abc123")
})
test("migrates share correctly (no FK check)", async () => {
// Share table has no FK, so we can create without project/session
const share = { secret: "test_secret", url: "https://example.com/share" }
const shareID = "ses_shared123"
await Bun.write(path.join(storageDir, "share", `${shareID}.json`), JSON.stringify(share))
const stats = await migrateFromJson(sqlite, storageDir)
// Note: shares count is tracked under stats.shares but share table is migrated separately
const db = drizzle(sqlite)
const row = db.select().from(ShareTable).where(eq(ShareTable.sessionID, shareID)).get()
expect(row?.data.secret).toBe("test_secret")
})
test("skips orphaned session_diff", async () => {
const diff = { files: [] }
await Bun.write(path.join(storageDir, "session_diff", "ses_nonexistent.json"), JSON.stringify(diff))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.diffs).toBe(0)
})
test("skips orphaned todo", async () => {
const todo = { items: [] }
await Bun.write(path.join(storageDir, "todo", "ses_nonexistent.json"), JSON.stringify(todo))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.todos).toBe(0)
})
test("skips orphaned permission", async () => {
const permission = { rules: [] }
await Bun.write(path.join(storageDir, "permission", "proj_nonexistent.json"), JSON.stringify(permission))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.permissions).toBe(0)
})
test("skips orphaned session_share", async () => {
const share = { secret: "test" }
await Bun.write(path.join(storageDir, "session_share", "ses_nonexistent.json"), JSON.stringify(share))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.shares).toBe(0)
})
})
describe("error handling", () => {
test("continues migration after single file error", async () => {
// Write one valid and one invalid project
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
await Bun.write(path.join(storageDir, "project", "invalid.json"), "{ invalid json }")
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.projects).toBe(1) // Valid one was migrated
expect(stats?.errors.length).toBe(1) // Error was recorded
})
test("collects all errors in stats.errors array", async () => {
// Write multiple invalid files
await Bun.write(path.join(storageDir, "project", "bad1.json"), "{ invalid }")
await Bun.write(path.join(storageDir, "project", "bad2.json"), "not json at all")
await Bun.write(path.join(storageDir, "project", "bad3.json"), JSON.stringify({ name: "no id" }))
const stats = await migrateFromJson(sqlite, storageDir)
expect(stats?.projects).toBe(0)
expect(stats?.errors.length).toBe(3)
})
})
describe("data integrity", () => {
test("preserves all JSON data fields in data column", async () => {
const fullProject = {
id: "proj_full",
name: "Full Project",
worktree: "/path/to/project",
vcs: "git" as const,
sandboxes: ["/path/one", "/path/two"],
time: { created: 1700000000000, updated: 1700000001000 },
icon: { url: "data:image/png;base64,abc", color: "#ff0000" },
}
await Bun.write(path.join(storageDir, "project", `${fullProject.id}.json`), JSON.stringify(fullProject))
await migrateFromJson(sqlite, storageDir)
const db = drizzle(sqlite)
const row = db.select().from(ProjectTable).where(eq(ProjectTable.id, fullProject.id)).get()
const data = row ? Project.fromRow(row) : undefined
expect(data?.id).toBe(fullProject.id)
expect(data?.name).toBe(fullProject.name)
expect(data?.sandboxes).toEqual(fullProject.sandboxes)
expect(data?.icon?.color).toBe("#ff0000")
})
test("handles unicode in text fields", async () => {
const unicodeProject = {
id: "proj_unicode",
name: "Проект с юникодом 🚀",
worktree: "/path/测试",
vcs: "git" as const,
sandboxes: [],
}
await Bun.write(path.join(storageDir, "project", `${unicodeProject.id}.json`), JSON.stringify(unicodeProject))
await migrateFromJson(sqlite, storageDir)
const db = drizzle(sqlite)
const row = db.select().from(ProjectTable).where(eq(ProjectTable.id, unicodeProject.id)).get()
const data = row ? Project.fromRow(row) : undefined
expect(data?.name).toBe("Проект с юникодом 🚀")
expect(data?.worktree).toBe("/path/测试")
})
test("migration is idempotent with onConflictDoNothing", async () => {
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
// Run migration twice (manually, since marker file would block second run)
const stats1 = await migrateFromJson(sqlite, storageDir)
expect(stats1?.projects).toBe(1)
// Remove marker and run again
await fs.rm(path.join(storageDir, "sqlite-migrated"))
const stats2 = await migrateFromJson(sqlite, storageDir)
expect(stats2?.projects).toBe(1) // Would be 1 even though already exists (onConflictDoNothing)
// Verify only one record exists
const db = drizzle(sqlite)
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
})
})
})

View File

@@ -25,4 +25,4 @@
"typescript": "catalog:",
"@typescript/native-preview": "catalog:"
}
}
}

View File

@@ -30,4 +30,4 @@
"publishConfig": {
"directory": "dist"
}
}
}

View File

@@ -866,6 +866,21 @@ export type EventPtyDeleted = {
}
}
export type EventWorktreeReady = {
type: "worktree.ready"
properties: {
name: string
branch: string
}
}
export type EventWorktreeFailed = {
type: "worktree.failed"
properties: {
message: string
}
}
export type Event =
| EventInstallationUpdated
| EventInstallationUpdateAvailable
@@ -907,6 +922,8 @@ export type Event =
| EventPtyUpdated
| EventPtyExited
| EventPtyDeleted
| EventWorktreeReady
| EventWorktreeFailed
export type GlobalEvent = {
directory: string

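A hedged sketch of consuming the new union members; `event` is assumed to be a value of the `Event` union generated above.

// Sketch: narrowing the Event union on the new worktree members.
function onEvent(event: Event) {
  switch (event.type) {
    case "worktree.ready":
      console.log(`worktree ${event.properties.name} ready on ${event.properties.branch}`)
      break
    case "worktree.failed":
      console.error(`worktree failed: ${event.properties.message}`)
      break
  }
}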
View File

@@ -8121,6 +8121,47 @@
},
"required": ["type", "properties"]
},
"Event.worktree.ready": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "worktree.ready"
},
"properties": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"branch": {
"type": "string"
}
},
"required": ["name", "branch"]
}
},
"required": ["type", "properties"]
},
"Event.worktree.failed": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "worktree.failed"
},
"properties": {
"type": "object",
"properties": {
"message": {
"type": "string"
}
},
"required": ["message"]
}
},
"required": ["type", "properties"]
},
"Event": {
"anyOf": [
{
@@ -8242,6 +8283,12 @@
},
{
"$ref": "#/components/schemas/Event.pty.deleted"
},
{
"$ref": "#/components/schemas/Event.worktree.ready"
},
{
"$ref": "#/components/schemas/Event.worktree.failed"
}
]
},

View File

@@ -20,6 +20,7 @@
display: flex;
align-items: center;
justify-content: center;
pointer-events: none;
[data-slot="dialog-container"] {
position: relative;
@@ -41,6 +42,7 @@
max-height: 100%;
min-height: 280px;
overflow: auto;
pointer-events: auto;
/* Hide scrollbar */
scrollbar-width: none;

View File

@@ -53,7 +53,7 @@ function init() {
}}
>
<Kobalte.Portal>
<Kobalte.Overlay data-component="dialog-overlay" />
<Kobalte.Overlay data-component="dialog-overlay" onClick={close} />
{element()}
</Kobalte.Portal>
</Kobalte>

View File

@@ -0,0 +1,21 @@
import { AsyncLocalStorage } from "node:async_hooks"
export namespace Context {
export class NotFound extends Error {}
export function create<T>() {
const storage = new AsyncLocalStorage<T>()
return {
use() {
const result = storage.getStore()
if (!result) {
throw new NotFound()
}
return result
},
provide<R>(value: T, fn: () => R) {
return storage.run(value, fn)
},
}
}
}
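
A hypothetical usage sketch for this helper: `use()` throws `Context.NotFound` when called outside `provide()`, which is the intended guard. The `{ user: string }` shape is illustrative.

// Hypothetical usage; the context shape is illustrative.
const RequestContext = Context.create<{ user: string }>()

function greet() {
  const { user } = RequestContext.use() // throws NotFound outside provide()
  return `hello ${user}`
}

RequestContext.provide({ user: "dax" }, () => greet())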

View File

@@ -63,6 +63,7 @@ You can also check out [awesome-opencode](https://github.com/awesome-opencode/aw
| [OpenCode-Obsidian](https://github.com/mtymek/opencode-obsidian) | Obsidian plugin that embeds OpenCode in Obsidian's UI |
| [OpenWork](https://github.com/different-ai/openwork) | An open-source alternative to Claude Cowork, powered by OpenCode |
| [ocx](https://github.com/kdcokenny/ocx) | OpenCode extension manager with portable, isolated profiles. |
| [CodeNomad](https://github.com/NeuralNomadsAI/CodeNomad) | Desktop, Web, Mobile and Remote Client App for OpenCode |
---

View File

@@ -86,7 +86,6 @@ You can also access our models through the following API endpoints.
| Kimi K2 | kimi-k2 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Kimi K2 Thinking | kimi-k2-thinking | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Qwen3 Coder 480B | qwen3-coder | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Grok Code Fast 1 | grok-code | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Big Pickle | big-pickle | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
The [model id](/docs/config/#models) in your OpenCode config
@@ -112,7 +111,6 @@ We support a pay-as-you-go model. Below are the prices **per 1M tokens**.
| Model | Input | Output | Cached Read | Cached Write |
| --------------------------------- | ------ | ------ | ----------- | ------------ |
| Big Pickle | Free | Free | Free | - |
| Grok Code Fast 1 | Free | Free | Free | - |
| GLM 4.7 | $0.60 | $2.20 | $0.10 | - |
| GLM 4.6 | $0.60 | $2.20 | $0.10 | - |
| Kimi K2 | $0.40 | $2.50 | - | - |
@@ -147,7 +145,6 @@ Credit card fees are passed along at cost (4.4% + $0.30 per transaction); we don
The free models:
- Grok Code Fast 1 is currently free on OpenCode for a limited time. The xAI team is using this time to collect feedback and improve Grok Code.
- Big Pickle is a stealth model that's free on OpenCode for a limited time. The team is using this time to collect feedback and improve the model.
<a href={email}>Contact us</a> if you have any questions.
@@ -177,7 +174,6 @@ charging you more than $20 if your balance goes below $5.
All our models are hosted in the US. Our providers follow a zero-retention policy and do not use your data for model training, with the following exceptions:
- Grok Code Fast 1: During its free period, collected data may be used to improve Grok Code.
- Big Pickle: During its free period, collected data may be used to improve the model.
- OpenAI APIs: Requests are retained for 30 days in accordance with [OpenAI's Data Policies](https://platform.openai.com/docs/guides/your-data).
- Anthropic APIs: Requests are retained for 30 days in accordance with [Anthropic's Data Policies](https://docs.anthropic.com/en/docs/claude-code/data-usage).

View File

@@ -0,0 +1,2 @@
// Auto-generated - do not edit
export const migrations: { name: string; sql: string }[] = []