Mirror of https://github.com/anomalyco/opencode.git, synced 2026-02-10 10:54:28 +00:00

Compare commits: github-v1. ... sqlite (12 commits)
| SHA1 |
|---|
| fcd5ff7ebe |
| c2e234ec4d |
| 38f735bfc6 |
| a4183c3b2c |
| 2c234b8d62 |
| 9f96d8aa78 |
| 4007e57c52 |
| d472512eba |
| f6b28b61c7 |
| 0bf9d66da5 |
| eabd78cab6 |
| 7bc8851fc4 |
bun.lock (117 changed lines)
@@ -311,6 +311,7 @@
 "clipboardy": "4.0.0",
 "decimal.js": "10.5.0",
 "diff": "catalog:",
+"drizzle-orm": "0.45.1",
 "fuzzysort": "3.1.0",
 "gray-matter": "4.0.3",
 "hono": "catalog:",
@@ -352,6 +353,8 @@
 "@types/turndown": "5.0.5",
 "@types/yargs": "17.0.33",
 "@typescript/native-preview": "catalog:",
+"better-sqlite3": "12.6.0",
+"drizzle-kit": "0.31.8",
 "typescript": "catalog:",
 "vscode-languageserver-types": "3.17.5",
 "why-is-node-running": "3.2.2",
@@ -2042,12 +2045,18 @@
|
||||
|
||||
"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],
|
||||
|
||||
"better-sqlite3": ["better-sqlite3@12.6.0", "", { "dependencies": { "bindings": "^1.5.0", "prebuild-install": "^7.1.1" } }, "sha512-FXI191x+D6UPWSze5IzZjhz+i9MK9nsuHsmTX9bXVl52k06AfZ2xql0lrgIUuzsMsJ7Vgl5kIptvDgBLIV3ZSQ=="],
|
||||
|
||||
"bignumber.js": ["bignumber.js@9.3.1", "", {}, "sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ=="],
|
||||
|
||||
"binary": ["binary@0.3.0", "", { "dependencies": { "buffers": "~0.1.1", "chainsaw": "~0.1.0" } }, "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg=="],
|
||||
|
||||
"binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="],
|
||||
|
||||
"bindings": ["bindings@1.5.0", "", { "dependencies": { "file-uri-to-path": "1.0.0" } }, "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ=="],
|
||||
|
||||
"bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="],
|
||||
|
||||
"blake3-wasm": ["blake3-wasm@2.1.5", "", {}, "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g=="],
|
||||
|
||||
"blob-to-buffer": ["blob-to-buffer@1.2.9", "", {}, "sha512-BF033y5fN6OCofD3vgHmNtwZWRcq9NLyyxyILx9hfMy1sXYy4ojFl765hJ2lP0YaN2fuxPaLO2Vzzoxy0FLFFA=="],
|
||||
@@ -2254,6 +2263,10 @@
|
||||
|
||||
"decode-named-character-reference": ["decode-named-character-reference@1.2.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q=="],
|
||||
|
||||
"decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="],
|
||||
|
||||
"deep-extend": ["deep-extend@0.6.0", "", {}, "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="],
|
||||
|
||||
"deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],
|
||||
|
||||
"default-browser": ["default-browser@5.4.0", "", { "dependencies": { "bundle-name": "^4.1.0", "default-browser-id": "^5.0.0" } }, "sha512-XDuvSq38Hr1MdN47EDvYtx3U0MTqpCEn+F6ft8z2vYDzMrvQhVp0ui9oQdqW3MvK3vqUETglt1tVGgjLuJ5izg=="],
|
||||
@@ -2346,6 +2359,8 @@
|
||||
|
||||
"encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="],
|
||||
|
||||
"end-of-stream": ["end-of-stream@1.4.5", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg=="],
|
||||
|
||||
"engine.io-client": ["engine.io-client@6.6.4", "", { "dependencies": { "@socket.io/component-emitter": "~3.1.0", "debug": "~4.4.1", "engine.io-parser": "~5.2.1", "ws": "~8.18.3", "xmlhttprequest-ssl": "~2.1.1" } }, "sha512-+kjUJnZGwzewFDw951CDWcwj35vMNf2fcj7xQWOctq1F2i1jkDdVvdFG9kM/BEChymCH36KgjnW0NsL58JYRxw=="],
|
||||
|
||||
"engine.io-parser": ["engine.io-parser@5.2.3", "", {}, "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q=="],
|
||||
@@ -2430,6 +2445,8 @@
|
||||
|
||||
"exit-hook": ["exit-hook@2.2.1", "", {}, "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw=="],
|
||||
|
||||
"expand-template": ["expand-template@2.0.3", "", {}, "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg=="],
|
||||
|
||||
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
|
||||
|
||||
"express": ["express@4.21.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.19.0", "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA=="],
|
||||
@@ -2464,6 +2481,8 @@
|
||||
|
||||
"file-type": ["file-type@16.5.4", "", { "dependencies": { "readable-web-to-node-stream": "^3.0.0", "strtok3": "^6.2.4", "token-types": "^4.1.1" } }, "sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw=="],
|
||||
|
||||
"file-uri-to-path": ["file-uri-to-path@1.0.0", "", {}, "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="],
|
||||
|
||||
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
|
||||
|
||||
"finalhandler": ["finalhandler@1.3.1", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" } }, "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ=="],
|
||||
@@ -2502,6 +2521,8 @@
|
||||
|
||||
"fresh": ["fresh@0.5.2", "", {}, "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="],
|
||||
|
||||
"fs-constants": ["fs-constants@1.0.0", "", {}, "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="],
|
||||
|
||||
"fs-extra": ["fs-extra@10.1.0", "", { "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ=="],
|
||||
|
||||
"fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="],
|
||||
@@ -2552,6 +2573,8 @@
|
||||
|
||||
"giget": ["giget@2.0.0", "", { "dependencies": { "citty": "^0.1.6", "consola": "^3.4.0", "defu": "^6.1.4", "node-fetch-native": "^1.6.6", "nypm": "^0.6.0", "pathe": "^2.0.3" }, "bin": { "giget": "dist/cli.mjs" } }, "sha512-L5bGsVkxJbJgdnwyuheIunkGatUF/zssUoxxjACCseZYAVbaqdh9Tsmmlkl8vYan09H7sbvKt4pS8GqKLBrEzA=="],
|
||||
|
||||
"github-from-package": ["github-from-package@0.0.0", "", {}, "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="],
|
||||
|
||||
"github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="],
|
||||
|
||||
"glob": ["glob@11.1.0", "", { "dependencies": { "foreground-child": "^3.3.1", "jackspeak": "^4.1.1", "minimatch": "^10.1.1", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw=="],
|
||||
@@ -3090,6 +3113,8 @@
|
||||
|
||||
"mimic-fn": ["mimic-fn@4.0.0", "", {}, "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw=="],
|
||||
|
||||
"mimic-response": ["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="],
|
||||
|
||||
"miniflare": ["miniflare@4.20251118.1", "", { "dependencies": { "@cspotcode/source-map-support": "0.8.1", "acorn": "8.14.0", "acorn-walk": "8.3.2", "exit-hook": "2.2.1", "glob-to-regexp": "0.4.1", "sharp": "^0.33.5", "stoppable": "1.1.0", "undici": "7.14.0", "workerd": "1.20251118.0", "ws": "8.18.0", "youch": "4.1.0-beta.10", "zod": "3.22.3" }, "bin": { "miniflare": "bootstrap.js" } }, "sha512-uLSAE/DvOm392fiaig4LOaatxLjM7xzIniFRG5Y3yF9IduOYLLK/pkCPQNCgKQH3ou0YJRHnTN+09LPfqYNTQQ=="],
|
||||
|
||||
"minimatch": ["minimatch@10.0.3", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw=="],
|
||||
@@ -3102,6 +3127,8 @@
|
||||
|
||||
"mkdirp": ["mkdirp@0.5.6", "", { "dependencies": { "minimist": "^1.2.6" }, "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw=="],
|
||||
|
||||
"mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="],
|
||||
|
||||
"mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
@@ -3120,6 +3147,8 @@
|
||||
|
||||
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
|
||||
|
||||
"napi-build-utils": ["napi-build-utils@2.0.0", "", {}, "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA=="],
|
||||
|
||||
"negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="],
|
||||
|
||||
"neotraverse": ["neotraverse@0.6.18", "", {}, "sha512-Z4SmBUweYa09+o6pG+eASabEpP6QkQ70yHj351pQoEXIs8uHbaU2DWVmzBANKgflPa47A50PtB2+NgRpQvr7vA=="],
|
||||
@@ -3132,6 +3161,8 @@
|
||||
|
||||
"no-case": ["no-case@3.0.4", "", { "dependencies": { "lower-case": "^2.0.2", "tslib": "^2.0.3" } }, "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg=="],
|
||||
|
||||
"node-abi": ["node-abi@3.85.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-zsFhmbkAzwhTft6nd3VxcG0cvJsT70rL+BIGHWVq5fi6MwGrHwzqKaxXE+Hl2GmnGItnDKPPkO5/LQqjVkIdFg=="],
|
||||
|
||||
"node-addon-api": ["node-addon-api@7.1.1", "", {}, "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ=="],
|
||||
|
||||
"node-domexception": ["node-domexception@1.0.0", "", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="],
|
||||
@@ -3328,6 +3359,8 @@
|
||||
|
||||
"powershell-utils": ["powershell-utils@0.1.0", "", {}, "sha512-dM0jVuXJPsDN6DvRpea484tCUaMiXWjuCn++HGTqUWzGDjv5tZkEZldAJ/UMlqRYGFrD/etByo4/xOuC/snX2A=="],
|
||||
|
||||
"prebuild-install": ["prebuild-install@7.1.3", "", { "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", "github-from-package": "0.0.0", "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^2.0.0", "node-abi": "^3.3.0", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", "tar-fs": "^2.0.0", "tunnel-agent": "^0.6.0" }, "bin": { "prebuild-install": "bin.js" } }, "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug=="],
|
||||
|
||||
"prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],
|
||||
|
||||
"pretty": ["pretty@2.0.0", "", { "dependencies": { "condense-newlines": "^0.2.1", "extend-shallow": "^2.0.1", "js-beautify": "^1.6.12" } }, "sha512-G9xUchgTEiNpormdYBl+Pha50gOUovT18IvAe7EYMZ1/f9W/WWMPRn+xI68yXNMUk3QXHDwo/1wV/4NejVNe1w=="],
|
||||
@@ -3350,6 +3383,8 @@
|
||||
|
||||
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
|
||||
|
||||
"pump": ["pump@3.0.3", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA=="],
|
||||
|
||||
"punycode": ["punycode@1.3.2", "", {}, "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw=="],
|
||||
|
||||
"qs": ["qs@6.14.0", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w=="],
|
||||
@@ -3366,6 +3401,8 @@
|
||||
|
||||
"raw-body": ["raw-body@2.5.2", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA=="],
|
||||
|
||||
"rc": ["rc@1.2.8", "", { "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "./cli.js" } }, "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw=="],
|
||||
|
||||
"rc9": ["rc9@2.1.2", "", { "dependencies": { "defu": "^6.1.4", "destr": "^2.0.3" } }, "sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg=="],
|
||||
|
||||
"react": ["react@18.2.0", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ=="],
|
||||
@@ -3552,6 +3589,10 @@
|
||||
|
||||
"signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="],
|
||||
|
||||
"simple-concat": ["simple-concat@1.0.1", "", {}, "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q=="],
|
||||
|
||||
"simple-get": ["simple-get@4.0.1", "", { "dependencies": { "decompress-response": "^6.0.0", "once": "^1.3.1", "simple-concat": "^1.0.0" } }, "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA=="],
|
||||
|
||||
"simple-swizzle": ["simple-swizzle@0.2.4", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw=="],
|
||||
|
||||
"simple-xml-to-json": ["simple-xml-to-json@1.2.3", "", {}, "sha512-kWJDCr9EWtZ+/EYYM5MareWj2cRnZGF93YDNpH4jQiHB+hBIZnfPFSQiVMzZOdk+zXWqTZ/9fTeQNu2DqeiudA=="],
|
||||
@@ -3656,6 +3697,8 @@
|
||||
|
||||
"strip-final-newline": ["strip-final-newline@3.0.0", "", {}, "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw=="],
|
||||
|
||||
"strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="],
|
||||
|
||||
"stripe": ["stripe@18.0.0", "", { "dependencies": { "@types/node": ">=8.1.0", "qs": "^6.11.0" } }, "sha512-3Fs33IzKUby//9kCkCa1uRpinAoTvj6rJgQ2jrBEysoxEvfsclvXdna1amyEYbA2EKkjynuB4+L/kleCCaWTpA=="],
|
||||
|
||||
"strnum": ["strnum@1.1.2", "", {}, "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA=="],
|
||||
@@ -3682,6 +3725,8 @@
|
||||
|
||||
"tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],
|
||||
|
||||
"tar-fs": ["tar-fs@2.1.4", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ=="],
|
||||
|
||||
"tar-stream": ["tar-stream@3.1.7", "", { "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ=="],
|
||||
|
||||
"terracotta": ["terracotta@1.0.6", "", { "dependencies": { "solid-use": "^0.9.0" }, "peerDependencies": { "solid-js": "^1.8" } }, "sha512-yVrmT/Lg6a3tEbeYEJH8ksb1PYkR5FA9k5gr1TchaSNIiA2ZWs5a+koEbePXwlBP0poaV7xViZ/v50bQFcMgqw=="],
|
||||
@@ -3746,6 +3791,8 @@
|
||||
|
||||
"tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="],
|
||||
|
||||
"tunnel-agent": ["tunnel-agent@0.6.0", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w=="],
|
||||
|
||||
"turbo": ["turbo@2.5.6", "", { "optionalDependencies": { "turbo-darwin-64": "2.5.6", "turbo-darwin-arm64": "2.5.6", "turbo-linux-64": "2.5.6", "turbo-linux-arm64": "2.5.6", "turbo-windows-64": "2.5.6", "turbo-windows-arm64": "2.5.6" }, "bin": { "turbo": "bin/turbo" } }, "sha512-gxToHmi9oTBNB05UjUsrWf0OyN5ZXtD0apOarC1KIx232Vp3WimRNy3810QzeNSgyD5rsaIDXlxlbnOzlouo+w=="],
|
||||
|
||||
"turbo-darwin-64": ["turbo-darwin-64@2.5.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-3C1xEdo4aFwMJAPvtlPqz1Sw/+cddWIOmsalHFMrsqqydcptwBfu26WW2cDm3u93bUzMbBJ8k3zNKFqxJ9ei2A=="],
|
||||
@@ -4310,6 +4357,10 @@
|
||||
|
||||
"babel-plugin-module-resolver/glob": ["glob@9.3.5", "", { "dependencies": { "fs.realpath": "^1.0.0", "minimatch": "^8.0.2", "minipass": "^4.2.4", "path-scurry": "^1.6.1" } }, "sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q=="],
|
||||
|
||||
"bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="],
|
||||
|
||||
"bl/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
|
||||
|
||||
"body-parser/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="],
|
||||
|
||||
"body-parser/iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="],
|
||||
@@ -4414,6 +4465,10 @@
|
||||
|
||||
"opencode/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@1.0.30", "", { "dependencies": { "@ai-sdk/provider": "2.0.1", "@ai-sdk/provider-utils": "3.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-thubwhRtv9uicAxSWwNpinM7hiL/0CkhL/ymPaHuKvI494J7HIzn8KQZQ2ymRz284WTIZnI7VMyyejxW4RMM6w=="],
|
||||
|
||||
"opencode/drizzle-kit": ["drizzle-kit@0.31.8", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.4", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-O9EC/miwdnRDY10qRxM8P3Pg8hXe3LyU4ZipReKOgTwn4OqANmftj8XJz1UPUAS6NMHf0E2htjsbQujUTkncCg=="],
|
||||
|
||||
"opencode/drizzle-orm": ["drizzle-orm@0.45.1", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@upstash/redis", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-Te0FOdKIistGNPMq2jscdqngBRfBpC8uMFVwqjf6gtTVJHIQ/dosgV/CLBU2N4ZJBsXL5savCba9b0YJskKdcA=="],
|
||||
|
||||
"opencontrol/@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="],
|
||||
|
||||
"opencontrol/@tsconfig/bun": ["@tsconfig/bun@1.0.7", "", {}, "sha512-udGrGJBNQdXGVulehc1aWT73wkR9wdaGBtB6yL70RJsqwW/yJhIg6ZbRlPOfIUiFNrnBuYLBi9CSmMKfDC7dvA=="],
|
||||
@@ -4444,6 +4499,8 @@
|
||||
|
||||
"postcss-load-config/lilconfig": ["lilconfig@3.1.3", "", {}, "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw=="],
|
||||
|
||||
"prebuild-install/detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
||||
|
||||
"prompts/kleur": ["kleur@3.0.3", "", {}, "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w=="],
|
||||
|
||||
"raw-body/iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="],
|
||||
@@ -4492,6 +4549,10 @@
|
||||
|
||||
"tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
|
||||
|
||||
"tar-fs/chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="],
|
||||
|
||||
"tar-fs/tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="],
|
||||
|
||||
"terser/commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="],
|
||||
|
||||
"token-types/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
|
||||
@@ -4946,6 +5007,8 @@
|
||||
|
||||
"babel-plugin-module-resolver/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="],
|
||||
|
||||
"bl/buffer/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
|
||||
|
||||
"body-parser/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
|
||||
|
||||
"c12/chokidar/readdirp": ["readdirp@5.0.0", "", {}, "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ=="],
|
||||
@@ -5020,6 +5083,8 @@
|
||||
|
||||
"lazystream/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild": ["esbuild@0.25.12", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.12", "@esbuild/android-arm": "0.25.12", "@esbuild/android-arm64": "0.25.12", "@esbuild/android-x64": "0.25.12", "@esbuild/darwin-arm64": "0.25.12", "@esbuild/darwin-x64": "0.25.12", "@esbuild/freebsd-arm64": "0.25.12", "@esbuild/freebsd-x64": "0.25.12", "@esbuild/linux-arm": "0.25.12", "@esbuild/linux-arm64": "0.25.12", "@esbuild/linux-ia32": "0.25.12", "@esbuild/linux-loong64": "0.25.12", "@esbuild/linux-mips64el": "0.25.12", "@esbuild/linux-ppc64": "0.25.12", "@esbuild/linux-riscv64": "0.25.12", "@esbuild/linux-s390x": "0.25.12", "@esbuild/linux-x64": "0.25.12", "@esbuild/netbsd-arm64": "0.25.12", "@esbuild/netbsd-x64": "0.25.12", "@esbuild/openbsd-arm64": "0.25.12", "@esbuild/openbsd-x64": "0.25.12", "@esbuild/openharmony-arm64": "0.25.12", "@esbuild/sunos-x64": "0.25.12", "@esbuild/win32-arm64": "0.25.12", "@esbuild/win32-ia32": "0.25.12", "@esbuild/win32-x64": "0.25.12" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg=="],
|
||||
|
||||
"opencontrol/@modelcontextprotocol/sdk/express": ["express@5.1.0", "", { "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.0", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA=="],
|
||||
|
||||
"opencontrol/@modelcontextprotocol/sdk/pkce-challenge": ["pkce-challenge@4.1.0", "", {}, "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ=="],
|
||||
@@ -5046,6 +5111,8 @@
|
||||
|
||||
"string-width-cjs/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
|
||||
|
||||
"tar-fs/tar-stream/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
|
||||
|
||||
"tw-to-css/tailwindcss/chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="],
|
||||
|
||||
"tw-to-css/tailwindcss/glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
|
||||
@@ -5192,6 +5259,56 @@
|
||||
|
||||
"js-beautify/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.12", "", { "os": "android", "cpu": "arm64" }, "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/android-x64": ["@esbuild/android-x64@0.25.12", "", { "os": "android", "cpu": "x64" }, "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.12", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.12", "", { "os": "freebsd", "cpu": "x64" }, "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.12", "", { "os": "linux", "cpu": "arm" }, "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.12", "", { "os": "linux", "cpu": "ia32" }, "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.12", "", { "os": "linux", "cpu": "ppc64" }, "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.12", "", { "os": "linux", "cpu": "s390x" }, "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.12", "", { "os": "linux", "cpu": "x64" }, "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.12", "", { "os": "none", "cpu": "arm64" }, "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.12", "", { "os": "none", "cpu": "x64" }, "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.12", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.12", "", { "os": "openbsd", "cpu": "x64" }, "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.12", "", { "os": "sunos", "cpu": "x64" }, "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.12", "", { "os": "win32", "cpu": "ia32" }, "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ=="],
|
||||
|
||||
"opencode/drizzle-kit/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.12", "", { "os": "win32", "cpu": "x64" }, "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA=="],
|
||||
|
||||
"opencontrol/@modelcontextprotocol/sdk/express/accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="],
|
||||
|
||||
"opencontrol/@modelcontextprotocol/sdk/express/body-parser": ["body-parser@2.2.0", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.0", "http-errors": "^2.0.0", "iconv-lite": "^0.6.3", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.0", "type-is": "^2.0.0" } }, "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg=="],
|
||||
|
||||
@@ -6,7 +6,7 @@ export const domain = (() => {
 export const zoneID = "430ba34c138cfb5360826c4909f99be8"

-new cloudflare.RegionalHostname("RegionalHostname", {
+new cloudflxare.RegionalHostname("RegionalHostname", {
   hostname: domain,
   regionKey: "us",
   zoneId: zoneID,
@@ -1,8 +1,17 @@
 {
   "nodeModules": {
-    "x86_64-linux": "sha256-wSkJcUnS0ODOYkbkjRnxnjfWYKQOVXwkDNB8qrikuLA=",
+<<<<<<< HEAD
+    "x86_64-linux": "sha256-H8QVUC5shGI97Ut/wDSYsSuprHpwssJ1MHSHojn+zNI=",
     "aarch64-linux": "sha256-4BlpH/oIXRJEjkQydXDv1oi1Yx7li3k1dKHUy2/Gb10=",
-    "aarch64-darwin": "sha256-awW0ooZo/QfB2xmRdZ9XLNzQ9sP/mbN+rTg215id6nc=",
+    "aarch64-darwin": "sha256-IOgZ/LP4lvFX3OlalaFuQFYAEFwP+lxz3BRwvu4Hmj4=",
     "x86_64-darwin": "sha256-CHrE2z+LqY2WXTQeGWG5LNMF1AY4UGSwViJAy4IwIVw="
+=======
+    "x86_64-linux": "sha256-9QHW6Ue9VO1VKsu6sg4gRtxgifQGNJlfVVXaa0Uc0XQ=",
+<<<<<<< HEAD
+    "aarch64-darwin": "sha256-IOgZ/LP4lvFX3OlalaFuQFYAEFwP+lxz3BRwvu4Hmj4="
+>>>>>>> 6e0a58c50 (Update Nix flake.lock and x86_64-linux hash)
+=======
+    "aarch64-darwin": "sha256-G8tTkuUSFQNOmjbu6cIi6qeyNWtGogtUVNi2CSgcgX0="
+>>>>>>> 8a0e3e909 (Update aarch64-darwin hash)
   }
 }
packages/opencode/drizzle.config.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { defineConfig } from "drizzle-kit"

export default defineConfig({
  dialect: "sqlite",
  schema: "./src/**/*.sql.ts",
  out: "./migration",
  dbCredentials: {
    url: "/home/thdxr/.local/share/opencode/opencode.db",
  },
})
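With this config, drizzle-kit picks the table definitions up from the *.sql.ts files under src/, writes generated SQL into ./migration (the directory the build scripts further down embed into the binary), and uses dbCredentials.url, a hardcoded local opencode.db path, whenever it needs a live database.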
packages/opencode/migration/0000_normal_wind_dancer.sql (new file, 91 lines)
@@ -0,0 +1,91 @@
CREATE TABLE `project` (
  `id` text PRIMARY KEY NOT NULL,
  `worktree` text NOT NULL,
  `vcs` text,
  `name` text,
  `icon_url` text,
  `icon_color` text,
  `time_created` integer NOT NULL,
  `time_updated` integer NOT NULL,
  `time_initialized` integer,
  `sandboxes` text NOT NULL
);
--> statement-breakpoint
CREATE TABLE `message` (
  `id` text PRIMARY KEY NOT NULL,
  `session_id` text NOT NULL,
  `role` text NOT NULL,
  `data` text NOT NULL,
  FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-breakpoint
CREATE TABLE `part` (
  `id` text PRIMARY KEY NOT NULL,
  `message_id` text NOT NULL,
  `type` text NOT NULL,
  `data` text NOT NULL,
  FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `part_message_idx` ON `part` (`message_id`);--> statement-breakpoint
CREATE TABLE `permission` (
  `project_id` text PRIMARY KEY NOT NULL,
  `data` text NOT NULL,
  FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `session_diff` (
  `session_id` text NOT NULL,
  `file` text NOT NULL,
  `before` text NOT NULL,
  `after` text NOT NULL,
  `additions` integer NOT NULL,
  `deletions` integer NOT NULL,
  FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `session_diff_session_idx` ON `session_diff` (`session_id`);--> statement-breakpoint
CREATE TABLE `session` (
  `id` text PRIMARY KEY NOT NULL,
  `project_id` text NOT NULL,
  `parent_id` text,
  `slug` text NOT NULL,
  `directory` text NOT NULL,
  `title` text NOT NULL,
  `version` text NOT NULL,
  `share_url` text,
  `summary_additions` integer,
  `summary_deletions` integer,
  `summary_files` integer,
  `summary_diffs` text,
  `revert_message_id` text,
  `revert_part_id` text,
  `revert_snapshot` text,
  `revert_diff` text,
  `permission` text,
  `time_created` integer NOT NULL,
  `time_updated` integer NOT NULL,
  `time_compacting` integer,
  `time_archived` integer,
  FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `session_project_idx` ON `session` (`project_id`);--> statement-breakpoint
CREATE INDEX `session_parent_idx` ON `session` (`parent_id`);--> statement-breakpoint
CREATE TABLE `todo` (
  `session_id` text PRIMARY KEY NOT NULL,
  `data` text NOT NULL,
  FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `session_share` (
  `session_id` text PRIMARY KEY NOT NULL,
  `data` text NOT NULL,
  FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `share` (
  `session_id` text PRIMARY KEY NOT NULL,
  `data` text NOT NULL
);
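In this schema the child tables (message, part, permission, todo, session_share, share) keep their payload in a serialized `data` text column next to a key column, while project and session spell out their columns; every foreign key cascades deletes from the parent, and the session_id, message_id, project_id and parent_id indexes cover the common lookups.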
packages/opencode/migration/meta/0000_snapshot.json (new file, 616 lines)
@@ -0,0 +1,616 @@
|
||||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "f7bf061b-aa6c-4b68-a29f-c210c54f109d",
|
||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
||||
"tables": {
|
||||
"project": {
|
||||
"name": "project",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"worktree": {
|
||||
"name": "worktree",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"vcs": {
|
||||
"name": "vcs",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"name": {
|
||||
"name": "name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"icon_url": {
|
||||
"name": "icon_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"icon_color": {
|
||||
"name": "icon_color",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"time_created": {
|
||||
"name": "time_created",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"time_updated": {
|
||||
"name": "time_updated",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"time_initialized": {
|
||||
"name": "time_initialized",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"sandboxes": {
|
||||
"name": "sandboxes",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"message": {
|
||||
"name": "message",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"session_id": {
|
||||
"name": "session_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"role": {
|
||||
"name": "role",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"data": {
|
||||
"name": "data",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"message_session_idx": {
|
||||
"name": "message_session_idx",
|
||||
"columns": [
|
||||
"session_id"
|
||||
],
|
||||
"isUnique": false
|
||||
}
|
||||
},
|
||||
"foreignKeys": {
|
||||
"message_session_id_session_id_fk": {
|
||||
"name": "message_session_id_session_id_fk",
|
||||
"tableFrom": "message",
|
||||
"tableTo": "session",
|
||||
"columnsFrom": [
|
||||
"session_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"part": {
|
||||
"name": "part",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"message_id": {
|
||||
"name": "message_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"type": {
|
||||
"name": "type",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"data": {
|
||||
"name": "data",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"part_message_idx": {
|
||||
"name": "part_message_idx",
|
||||
"columns": [
|
||||
"message_id"
|
||||
],
|
||||
"isUnique": false
|
||||
}
|
||||
},
|
||||
"foreignKeys": {
|
||||
"part_message_id_message_id_fk": {
|
||||
"name": "part_message_id_message_id_fk",
|
||||
"tableFrom": "part",
|
||||
"tableTo": "message",
|
||||
"columnsFrom": [
|
||||
"message_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"permission": {
|
||||
"name": "permission",
|
||||
"columns": {
|
||||
"project_id": {
|
||||
"name": "project_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"data": {
|
||||
"name": "data",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {
|
||||
"permission_project_id_project_id_fk": {
|
||||
"name": "permission_project_id_project_id_fk",
|
||||
"tableFrom": "permission",
|
||||
"tableTo": "project",
|
||||
"columnsFrom": [
|
||||
"project_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"session_diff": {
|
||||
"name": "session_diff",
|
||||
"columns": {
|
||||
"session_id": {
|
||||
"name": "session_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"file": {
|
||||
"name": "file",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"before": {
|
||||
"name": "before",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"after": {
|
||||
"name": "after",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"additions": {
|
||||
"name": "additions",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"deletions": {
|
||||
"name": "deletions",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"session_diff_session_idx": {
|
||||
"name": "session_diff_session_idx",
|
||||
"columns": [
|
||||
"session_id"
|
||||
],
|
||||
"isUnique": false
|
||||
}
|
||||
},
|
||||
"foreignKeys": {
|
||||
"session_diff_session_id_session_id_fk": {
|
||||
"name": "session_diff_session_id_session_id_fk",
|
||||
"tableFrom": "session_diff",
|
||||
"tableTo": "session",
|
||||
"columnsFrom": [
|
||||
"session_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"session": {
|
||||
"name": "session",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"project_id": {
|
||||
"name": "project_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"parent_id": {
|
||||
"name": "parent_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"slug": {
|
||||
"name": "slug",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"directory": {
|
||||
"name": "directory",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"title": {
|
||||
"name": "title",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"version": {
|
||||
"name": "version",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"share_url": {
|
||||
"name": "share_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"summary_additions": {
|
||||
"name": "summary_additions",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"summary_deletions": {
|
||||
"name": "summary_deletions",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"summary_files": {
|
||||
"name": "summary_files",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"summary_diffs": {
|
||||
"name": "summary_diffs",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"revert_message_id": {
|
||||
"name": "revert_message_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"revert_part_id": {
|
||||
"name": "revert_part_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"revert_snapshot": {
|
||||
"name": "revert_snapshot",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"revert_diff": {
|
||||
"name": "revert_diff",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"permission": {
|
||||
"name": "permission",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"time_created": {
|
||||
"name": "time_created",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"time_updated": {
|
||||
"name": "time_updated",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"time_compacting": {
|
||||
"name": "time_compacting",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"time_archived": {
|
||||
"name": "time_archived",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"session_project_idx": {
|
||||
"name": "session_project_idx",
|
||||
"columns": [
|
||||
"project_id"
|
||||
],
|
||||
"isUnique": false
|
||||
},
|
||||
"session_parent_idx": {
|
||||
"name": "session_parent_idx",
|
||||
"columns": [
|
||||
"parent_id"
|
||||
],
|
||||
"isUnique": false
|
||||
}
|
||||
},
|
||||
"foreignKeys": {
|
||||
"session_project_id_project_id_fk": {
|
||||
"name": "session_project_id_project_id_fk",
|
||||
"tableFrom": "session",
|
||||
"tableTo": "project",
|
||||
"columnsFrom": [
|
||||
"project_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"todo": {
|
||||
"name": "todo",
|
||||
"columns": {
|
||||
"session_id": {
|
||||
"name": "session_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"data": {
|
||||
"name": "data",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {
|
||||
"todo_session_id_session_id_fk": {
|
||||
"name": "todo_session_id_session_id_fk",
|
||||
"tableFrom": "todo",
|
||||
"tableTo": "session",
|
||||
"columnsFrom": [
|
||||
"session_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"session_share": {
|
||||
"name": "session_share",
|
||||
"columns": {
|
||||
"session_id": {
|
||||
"name": "session_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"data": {
|
||||
"name": "data",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {
|
||||
"session_share_session_id_session_id_fk": {
|
||||
"name": "session_share_session_id_session_id_fk",
|
||||
"tableFrom": "session_share",
|
||||
"tableTo": "session",
|
||||
"columnsFrom": [
|
||||
"session_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"share": {
|
||||
"name": "share",
|
||||
"columns": {
|
||||
"session_id": {
|
||||
"name": "session_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"data": {
|
||||
"name": "data",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
packages/opencode/migration/meta/_journal.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
  "version": "7",
  "dialect": "sqlite",
  "entries": [
    {
      "idx": 0,
      "version": "6",
      "when": 1768625754197,
      "tag": "0000_normal_wind_dancer",
      "breakpoints": true
    }
  ]
}
@@ -41,6 +41,8 @@
 "@types/turndown": "5.0.5",
 "@types/yargs": "17.0.33",
 "@typescript/native-preview": "catalog:",
+"better-sqlite3": "12.6.0",
+"drizzle-kit": "0.31.8",
 "typescript": "catalog:",
 "vscode-languageserver-types": "3.17.5",
 "why-is-node-running": "3.2.2",
@@ -97,6 +99,7 @@
 "clipboardy": "4.0.0",
 "decimal.js": "10.5.0",
 "diff": "catalog:",
+"drizzle-orm": "0.45.1",
 "fuzzysort": "3.1.0",
 "gray-matter": "4.0.3",
 "hono": "catalog:",
@@ -99,6 +99,12 @@ const targets = singleFlag
     })
   : allTargets

+// Check migrations are up to date and generate embedded migrations file
+console.log("Checking migrations...")
+await $`bun run script/check-migrations.ts`
+console.log("Generating migrations embed...")
+await $`bun run script/generate-migrations.ts`
+
 await $`rm -rf dist`

 const binaries: Record<string, string> = {}
16  packages/opencode/script/check-migrations.ts  Executable file
@@ -0,0 +1,16 @@
#!/usr/bin/env bun

import { $ } from "bun"

// drizzle-kit check compares schema to migrations, exits non-zero if drift
const result = await $`bun drizzle-kit check`.quiet().nothrow()

if (result.exitCode !== 0) {
  console.error("Schema has changes not captured in migrations!")
  console.error("Run: bun drizzle-kit generate")
  console.error("")
  console.error(result.stderr.toString())
  process.exit(1)
}

console.log("Migrations are up to date")
49  packages/opencode/script/generate-migrations.ts  Executable file
@@ -0,0 +1,49 @@
#!/usr/bin/env bun

import { Glob } from "bun"
import path from "path"
import fs from "fs"

const migrationsDir = "./migration"
const outFile = "./src/storage/migrations.generated.ts"

if (!fs.existsSync(migrationsDir)) {
  console.log("No migrations directory found, creating empty migrations file")
  await Bun.write(
    outFile,
    `// Auto-generated - do not edit
export const migrations: { name: string; sql: string }[] = []
`,
  )
  process.exit(0)
}

const files = Array.from(new Glob("*.sql").scanSync({ cwd: migrationsDir })).sort()

if (files.length === 0) {
  console.log("No migrations found, creating empty migrations file")
  await Bun.write(
    outFile,
    `// Auto-generated - do not edit
export const migrations: { name: string; sql: string }[] = []
`,
  )
  process.exit(0)
}

const imports = files.map((f, i) => `import m${i} from "../../migration/${f}" with { type: "text" }`).join("\n")

const entries = files.map((f, i) => ` { name: "${path.basename(f, ".sql")}", sql: m${i} },`).join("\n")

await Bun.write(
  outFile,
  `// Auto-generated - do not edit
${imports}

export const migrations = [
${entries}
]
`,
)

console.log(`Generated migrations file with ${files.length} migrations`)
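The embedded migrations.generated.ts module is consumed by src/storage/db.ts, which does not appear in this part of the diff. As a rough sketch of the intended flow (the migrate function name and the _migration bookkeeping table are assumptions, not code from this change), the generated array can be applied in order against a bun:sqlite handle at startup:

import { Database as SQLite } from "bun:sqlite"
import { migrations } from "./migrations.generated"

// Sketch only: apply any embedded migrations that have not run yet.
export function migrate(sqlite: SQLite) {
  sqlite.run("CREATE TABLE IF NOT EXISTS _migration (name TEXT PRIMARY KEY)")
  const applied = new Set(sqlite.query("SELECT name FROM _migration").all().map((row: any) => row.name))
  for (const migration of migrations) {
    if (applied.has(migration.name)) continue
    sqlite.exec(migration.sql) // a generated file may contain several statements
    sqlite.run("INSERT INTO _migration (name) VALUES (?)", [migration.name])
  }
}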
0  packages/opencode/script/postinstall.mjs  Normal file → Executable file
0  packages/opencode/script/publish-registries.ts  Normal file → Executable file
147  packages/opencode/src/cli/cmd/database.ts  Normal file
@@ -0,0 +1,147 @@
import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
import { UI } from "../ui"
import { Database } from "../../storage/db"
import { ProjectTable } from "../../project/project.sql"
import { Project } from "../../project/project"
import {
  SessionTable,
  MessageTable,
  PartTable,
  SessionDiffTable,
  TodoTable,
  PermissionTable,
} from "../../session/session.sql"
import { Session } from "../../session"
import { SessionShareTable, ShareTable } from "../../share/share.sql"
import path from "path"
import fs from "fs/promises"

export const DatabaseCommand = cmd({
  command: "database",
  describe: "database management commands",
  builder: (yargs) => yargs.command(ExportCommand).demandCommand(),
  async handler() {},
})

const ExportCommand = cmd({
  command: "export",
  describe: "export database to JSON files",
  builder: (yargs: Argv) => {
    return yargs.option("output", {
      alias: ["o"],
      describe: "output directory",
      type: "string",
      demandOption: true,
    })
  },
  handler: async (args) => {
    await bootstrap(process.cwd(), async () => {
      const outDir = path.resolve(args.output)
      await fs.mkdir(outDir, { recursive: true })

      const stats = {
        projects: 0,
        sessions: 0,
        messages: 0,
        parts: 0,
        diffs: 0,
        todos: 0,
        permissions: 0,
        sessionShares: 0,
        shares: 0,
      }

      // Export projects
      const projectDir = path.join(outDir, "project")
      await fs.mkdir(projectDir, { recursive: true })
      for (const row of Database.use((db) => db.select().from(ProjectTable).all())) {
        const project = Project.fromRow(row)
        await Bun.write(path.join(projectDir, `${row.id}.json`), JSON.stringify(project, null, 2))
        stats.projects++
      }

      // Export sessions (organized by projectID)
      const sessionDir = path.join(outDir, "session")
      for (const row of Database.use((db) => db.select().from(SessionTable).all())) {
        const dir = path.join(sessionDir, row.projectID)
        await fs.mkdir(dir, { recursive: true })
        await Bun.write(path.join(dir, `${row.id}.json`), JSON.stringify(Session.fromRow(row), null, 2))
        stats.sessions++
      }

      // Export messages (organized by sessionID)
      const messageDir = path.join(outDir, "message")
      for (const row of Database.use((db) => db.select().from(MessageTable).all())) {
        const dir = path.join(messageDir, row.sessionID)
        await fs.mkdir(dir, { recursive: true })
        await Bun.write(path.join(dir, `${row.id}.json`), JSON.stringify(row.data, null, 2))
        stats.messages++
      }

      // Export parts (organized by messageID)
      const partDir = path.join(outDir, "part")
      for (const row of Database.use((db) => db.select().from(PartTable).all())) {
        const dir = path.join(partDir, row.messageID)
        await fs.mkdir(dir, { recursive: true })
        await Bun.write(path.join(dir, `${row.id}.json`), JSON.stringify(row.data, null, 2))
        stats.parts++
      }

      // Export session diffs
      const diffDir = path.join(outDir, "session_diff")
      await fs.mkdir(diffDir, { recursive: true })
      for (const row of Database.use((db) => db.select().from(SessionDiffTable).all())) {
        await Bun.write(path.join(diffDir, `${row.sessionID}.json`), JSON.stringify(row, null, 2))
        stats.diffs++
      }

      // Export todos
      const todoDir = path.join(outDir, "todo")
      await fs.mkdir(todoDir, { recursive: true })
      for (const row of Database.use((db) => db.select().from(TodoTable).all())) {
        await Bun.write(path.join(todoDir, `${row.sessionID}.json`), JSON.stringify(row.data, null, 2))
        stats.todos++
      }

      // Export permissions
      const permDir = path.join(outDir, "permission")
      await fs.mkdir(permDir, { recursive: true })
      for (const row of Database.use((db) => db.select().from(PermissionTable).all())) {
        await Bun.write(path.join(permDir, `${row.projectID}.json`), JSON.stringify(row.data, null, 2))
        stats.permissions++
      }

      // Export session shares
      const sessionShareDir = path.join(outDir, "session_share")
      await fs.mkdir(sessionShareDir, { recursive: true })
      for (const row of Database.use((db) => db.select().from(SessionShareTable).all())) {
        await Bun.write(path.join(sessionShareDir, `${row.sessionID}.json`), JSON.stringify(row.data, null, 2))
        stats.sessionShares++
      }

      // Export shares
      const shareDir = path.join(outDir, "share")
      await fs.mkdir(shareDir, { recursive: true })
      for (const row of Database.use((db) => db.select().from(ShareTable).all())) {
        await Bun.write(path.join(shareDir, `${row.sessionID}.json`), JSON.stringify(row.data, null, 2))
        stats.shares++
      }

      // Create migration marker so this can be imported back
      await Bun.write(path.join(outDir, "migration"), Date.now().toString())

      UI.println(`Exported to ${outDir}:`)
      UI.println(` ${stats.projects} projects`)
      UI.println(` ${stats.sessions} sessions`)
      UI.println(` ${stats.messages} messages`)
      UI.println(` ${stats.parts} parts`)
      UI.println(` ${stats.diffs} session diffs`)
      UI.println(` ${stats.todos} todos`)
      UI.println(` ${stats.permissions} permissions`)
      UI.println(` ${stats.sessionShares} session shares`)
      UI.println(` ${stats.shares} shares`)
    })
  },
})
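Once registered on the CLI (see the index.ts hunk further down), the export subcommand would be invoked roughly like this, using the binary name from the rest of the repository and the --output flag defined above:

opencode database export --output ./opencode-export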
@@ -2,7 +2,8 @@ import type { Argv } from "yargs"
|
||||
import { Session } from "../../session"
|
||||
import { cmd } from "./cmd"
|
||||
import { bootstrap } from "../bootstrap"
|
||||
import { Storage } from "../../storage/storage"
|
||||
import { Database } from "../../storage/db"
|
||||
import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { EOL } from "os"
|
||||
|
||||
@@ -81,13 +82,63 @@ export const ImportCommand = cmd({
|
||||
return
|
||||
}
|
||||
|
||||
await Storage.write(["session", Instance.project.id, exportData.info.id], exportData.info)
|
||||
const info = exportData.info
|
||||
const row = {
|
||||
id: info.id,
|
||||
projectID: Instance.project.id,
|
||||
parentID: info.parentID,
|
||||
slug: info.slug,
|
||||
directory: info.directory,
|
||||
title: info.title,
|
||||
version: info.version,
|
||||
share_url: info.share?.url,
|
||||
summary_additions: info.summary?.additions,
|
||||
summary_deletions: info.summary?.deletions,
|
||||
summary_files: info.summary?.files,
|
||||
summary_diffs: info.summary?.diffs,
|
||||
revert_messageID: info.revert?.messageID,
|
||||
revert_partID: info.revert?.partID,
|
||||
revert_snapshot: info.revert?.snapshot,
|
||||
revert_diff: info.revert?.diff,
|
||||
permission: info.permission,
|
||||
time_created: info.time.created,
|
||||
time_updated: info.time.updated,
|
||||
time_compacting: info.time.compacting,
|
||||
time_archived: info.time.archived,
|
||||
}
|
||||
Database.use((db) =>
|
||||
db.insert(SessionTable).values(row).onConflictDoUpdate({ target: SessionTable.id, set: row }).run(),
|
||||
)
|
||||
|
||||
for (const msg of exportData.messages) {
|
||||
await Storage.write(["message", exportData.info.id, msg.info.id], msg.info)
|
||||
const { id: msgId, sessionID: msgSessionID, role: msgRole, ...msgData } = msg.info
|
||||
Database.use((db) =>
|
||||
db
|
||||
.insert(MessageTable)
|
||||
.values({
|
||||
id: msgId,
|
||||
sessionID: exportData.info.id,
|
||||
role: msgRole,
|
||||
data: msgData,
|
||||
})
|
||||
.onConflictDoUpdate({ target: MessageTable.id, set: { role: msgRole, data: msgData } })
|
||||
.run(),
|
||||
)
|
||||
|
||||
for (const part of msg.parts) {
|
||||
await Storage.write(["part", msg.info.id, part.id], part)
|
||||
const { id: partId, messageID: _, sessionID: __, type: partType, ...partData } = part
|
||||
Database.use((db) =>
|
||||
db
|
||||
.insert(PartTable)
|
||||
.values({
|
||||
id: partId,
|
||||
messageID: msg.info.id,
|
||||
type: partType,
|
||||
data: partData,
|
||||
})
|
||||
.onConflictDoUpdate({ target: PartTable.id, set: { type: partType, data: partData } })
|
||||
.run(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,9 @@ import type { Argv } from "yargs"
|
||||
import { cmd } from "./cmd"
|
||||
import { Session } from "../../session"
|
||||
import { bootstrap } from "../bootstrap"
|
||||
import { Storage } from "../../storage/storage"
|
||||
import { Database } from "../../storage/db"
|
||||
import { ProjectTable } from "../../project/project.sql"
|
||||
import { SessionTable } from "../../session/session.sql"
|
||||
import { Project } from "../../project/project"
|
||||
import { Instance } from "../../project/instance"
|
||||
|
||||
@@ -83,25 +85,8 @@ async function getCurrentProject(): Promise<Project.Info> {
|
||||
}
|
||||
|
||||
async function getAllSessions(): Promise<Session.Info[]> {
|
||||
const sessions: Session.Info[] = []
|
||||
|
||||
const projectKeys = await Storage.list(["project"])
|
||||
const projects = await Promise.all(projectKeys.map((key) => Storage.read<Project.Info>(key)))
|
||||
|
||||
for (const project of projects) {
|
||||
if (!project) continue
|
||||
|
||||
const sessionKeys = await Storage.list(["session", project.id])
|
||||
const projectSessions = await Promise.all(sessionKeys.map((key) => Storage.read<Session.Info>(key)))
|
||||
|
||||
for (const session of projectSessions) {
|
||||
if (session) {
|
||||
sessions.push(session)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return sessions
|
||||
const sessionRows = Database.use((db) => db.select().from(SessionTable).all())
|
||||
return sessionRows.map((row) => Session.fromRow(row))
|
||||
}
|
||||
|
||||
export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {
|
||||
|
||||
@@ -26,6 +26,7 @@ import { EOL } from "os"
import { WebCommand } from "./cli/cmd/web"
import { PrCommand } from "./cli/cmd/pr"
import { SessionCommand } from "./cli/cmd/session"
import { DatabaseCommand } from "./cli/cmd/database"

process.on("unhandledRejection", (e) => {
  Log.Default.error("rejection", {
@@ -97,6 +98,7 @@ const cli = yargs(hideBin(process.argv))
  .command(GithubCommand)
  .command(PrCommand)
  .command(SessionCommand)
  .command(DatabaseCommand)
  .fail((msg, err) => {
    if (
      msg?.startsWith("Unknown argument") ||
@@ -3,7 +3,9 @@ import { BusEvent } from "@/bus/bus-event"
import { Config } from "@/config/config"
import { Identifier } from "@/id/id"
import { Instance } from "@/project/instance"
import { Storage } from "@/storage/storage"
import { Database } from "@/storage/db"
import { PermissionTable } from "@/session/session.sql"
import { eq } from "drizzle-orm"
import { fn } from "@/util/fn"
import { Log } from "@/util/log"
import { Wildcard } from "@/util/wildcard"
@@ -107,7 +109,10 @@ export namespace PermissionNext {

  const state = Instance.state(async () => {
    const projectID = Instance.project.id
    const stored = await Storage.read<Ruleset>(["permission", projectID]).catch(() => [] as Ruleset)
    const row = Database.use((db) =>
      db.select().from(PermissionTable).where(eq(PermissionTable.projectID, projectID)).get(),
    )
    const stored = row?.data ?? ([] as Ruleset)

    const pending: Record<
      string,
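This hunk only swaps the read path for the ruleset; the corresponding write is not visible here. Following the drizzle upsert pattern used elsewhere in this change, persisting the ruleset back would presumably look like the following sketch (projectID and ruleset are assumed to be in scope):

Database.use((db) =>
  db
    .insert(PermissionTable)
    .values({ projectID, data: ruleset })
    .onConflictDoUpdate({ target: PermissionTable.projectID, set: { data: ruleset } })
    .run(),
)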
14  packages/opencode/src/project/project.sql.ts  Normal file
@@ -0,0 +1,14 @@
import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core"

export const ProjectTable = sqliteTable("project", {
  id: text("id").primaryKey(),
  worktree: text("worktree").notNull(),
  vcs: text("vcs"),
  name: text("name"),
  icon_url: text("icon_url"),
  icon_color: text("icon_color"),
  time_created: integer("time_created").notNull(),
  time_updated: integer("time_updated").notNull(),
  time_initialized: integer("time_initialized"),
  sandboxes: text("sandboxes", { mode: "json" }).notNull().$type<string[]>(),
})
@@ -3,10 +3,13 @@ import fs from "fs/promises"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import path from "path"
|
||||
import { $ } from "bun"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { Database } from "../storage/db"
|
||||
import { ProjectTable } from "./project.sql"
|
||||
import { SessionTable } from "../session/session.sql"
|
||||
import { eq } from "drizzle-orm"
|
||||
import { Log } from "../util/log"
|
||||
import { Flag } from "@/flag/flag"
|
||||
import { Session } from "../session"
|
||||
|
||||
import { work } from "../util/queue"
|
||||
import { fn } from "@opencode-ai/util/fn"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
@@ -50,6 +53,28 @@ export namespace Project {
|
||||
Updated: BusEvent.define("project.updated", Info),
|
||||
}
|
||||
|
||||
type Row = typeof ProjectTable.$inferSelect
|
||||
|
||||
export function fromRow(row: Row): Info {
|
||||
const icon =
|
||||
row.icon_url || row.icon_color
|
||||
? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined }
|
||||
: undefined
|
||||
return {
|
||||
id: row.id,
|
||||
worktree: row.worktree,
|
||||
vcs: row.vcs as Info["vcs"],
|
||||
name: row.name ?? undefined,
|
||||
icon,
|
||||
time: {
|
||||
created: row.time_created,
|
||||
updated: row.time_updated,
|
||||
initialized: row.time_initialized ?? undefined,
|
||||
},
|
||||
sandboxes: row.sandboxes,
|
||||
}
|
||||
}
|
||||
|
||||
export async function fromDirectory(directory: string) {
|
||||
log.info("fromDirectory", { directory })
|
||||
|
||||
@@ -175,9 +200,10 @@ export namespace Project {
|
||||
}
|
||||
})
|
||||
|
||||
let existing = await Storage.read<Info>(["project", id]).catch(() => undefined)
|
||||
if (!existing) {
|
||||
existing = {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
const existing = await iife(async () => {
|
||||
if (row) return fromRow(row)
|
||||
const fresh: Info = {
|
||||
id,
|
||||
worktree,
|
||||
vcs: vcs as Info["vcs"],
|
||||
@@ -190,10 +216,8 @@ export namespace Project {
|
||||
if (id !== "global") {
|
||||
await migrateFromGlobal(id, worktree)
|
||||
}
|
||||
}
|
||||
|
||||
// migrate old projects before sandboxes
|
||||
if (!existing.sandboxes) existing.sandboxes = []
|
||||
return fresh
|
||||
})
|
||||
|
||||
if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing)
|
||||
|
||||
@@ -208,7 +232,31 @@ export namespace Project {
|
||||
}
|
||||
if (sandbox !== result.worktree && !result.sandboxes.includes(sandbox)) result.sandboxes.push(sandbox)
|
||||
result.sandboxes = result.sandboxes.filter((x) => existsSync(x))
|
||||
await Storage.write<Info>(["project", id], result)
|
||||
const insert = {
|
||||
id: result.id,
|
||||
worktree: result.worktree,
|
||||
vcs: result.vcs,
|
||||
name: result.name,
|
||||
icon_url: result.icon?.url,
|
||||
icon_color: result.icon?.color,
|
||||
time_created: result.time.created,
|
||||
time_updated: result.time.updated,
|
||||
time_initialized: result.time.initialized,
|
||||
sandboxes: result.sandboxes,
|
||||
}
|
||||
const update = {
|
||||
worktree: result.worktree,
|
||||
vcs: result.vcs,
|
||||
name: result.name,
|
||||
icon_url: result.icon?.url,
|
||||
icon_color: result.icon?.color,
|
||||
time_updated: result.time.updated,
|
||||
time_initialized: result.time.initialized,
|
||||
sandboxes: result.sandboxes,
|
||||
}
|
||||
Database.use((db) =>
|
||||
db.insert(ProjectTable).values(insert).onConflictDoUpdate({ target: ProjectTable.id, set: update }).run(),
|
||||
)
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
@@ -249,42 +297,48 @@ export namespace Project {
|
||||
}
|
||||
|
||||
async function migrateFromGlobal(newProjectID: string, worktree: string) {
|
||||
const globalProject = await Storage.read<Info>(["project", "global"]).catch(() => undefined)
|
||||
if (!globalProject) return
|
||||
const globalRow = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get())
|
||||
if (!globalRow) return
|
||||
|
||||
const globalSessions = await Storage.list(["session", "global"]).catch(() => [])
|
||||
const globalSessions = Database.use((db) =>
|
||||
db.select().from(SessionTable).where(eq(SessionTable.projectID, "global")).all(),
|
||||
)
|
||||
if (globalSessions.length === 0) return
|
||||
|
||||
log.info("migrating sessions from global", { newProjectID, worktree, count: globalSessions.length })
|
||||
|
||||
await work(10, globalSessions, async (key) => {
|
||||
const sessionID = key[key.length - 1]
|
||||
const session = await Storage.read<Session.Info>(key).catch(() => undefined)
|
||||
if (!session) return
|
||||
if (session.directory && session.directory !== worktree) return
|
||||
await work(10, globalSessions, async (row) => {
|
||||
if (row.directory && row.directory !== worktree) return
|
||||
|
||||
session.projectID = newProjectID
|
||||
log.info("migrating session", { sessionID, from: "global", to: newProjectID })
|
||||
await Storage.write(["session", newProjectID, sessionID], session)
|
||||
await Storage.remove(key)
|
||||
log.info("migrating session", { sessionID: row.id, from: "global", to: newProjectID })
|
||||
Database.use((db) =>
|
||||
db.update(SessionTable).set({ projectID: newProjectID }).where(eq(SessionTable.id, row.id)).run(),
|
||||
)
|
||||
}).catch((error) => {
|
||||
log.error("failed to migrate sessions from global to project", { error, projectId: newProjectID })
|
||||
})
|
||||
}
|
||||
|
||||
export async function setInitialized(projectID: string) {
|
||||
await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
draft.time.initialized = Date.now()
|
||||
})
|
||||
export function setInitialized(projectID: string) {
|
||||
Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({
|
||||
time_initialized: Date.now(),
|
||||
})
|
||||
.where(eq(ProjectTable.id, projectID))
|
||||
.run(),
|
||||
)
|
||||
}
|
||||
|
||||
export async function list() {
|
||||
const keys = await Storage.list(["project"])
|
||||
const projects = await Promise.all(keys.map((x) => Storage.read<Info>(x)))
|
||||
return projects.map((project) => ({
|
||||
...project,
|
||||
sandboxes: project.sandboxes?.filter((x) => existsSync(x)),
|
||||
}))
|
||||
export function list() {
|
||||
return Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(ProjectTable)
|
||||
.all()
|
||||
.map((row) => fromRow(row)),
|
||||
)
|
||||
}
|
||||
|
||||
export const update = fn(
|
||||
@@ -295,43 +349,37 @@ export namespace Project {
|
||||
commands: Info.shape.commands.optional(),
|
||||
}),
|
||||
async (input) => {
|
||||
const result = await Storage.update<Info>(["project", input.projectID], (draft) => {
|
||||
if (input.name !== undefined) draft.name = input.name
|
||||
if (input.icon !== undefined) {
|
||||
draft.icon = {
|
||||
...draft.icon,
|
||||
}
|
||||
if (input.icon.url !== undefined) draft.icon.url = input.icon.url
|
||||
if (input.icon.override !== undefined) draft.icon.override = input.icon.override || undefined
|
||||
if (input.icon.color !== undefined) draft.icon.color = input.icon.color
|
||||
}
|
||||
|
||||
if (input.commands?.start !== undefined) {
|
||||
const start = input.commands.start || undefined
|
||||
draft.commands = {
|
||||
...(draft.commands ?? {}),
|
||||
}
|
||||
draft.commands.start = start
|
||||
if (!draft.commands.start) draft.commands = undefined
|
||||
}
|
||||
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
const result = Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({
|
||||
name: input.name,
|
||||
icon_url: input.icon?.url,
|
||||
icon_color: input.icon?.color,
|
||||
time_updated: Date.now(),
|
||||
})
|
||||
.where(eq(ProjectTable.id, input.projectID))
|
||||
.returning()
|
||||
.get(),
|
||||
)
|
||||
if (!result) throw new Error(`Project not found: ${input.projectID}`)
|
||||
const data = fromRow(result)
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: result,
|
||||
properties: data,
|
||||
},
|
||||
})
|
||||
return result
|
||||
return data
|
||||
},
|
||||
)
|
||||
|
||||
export async function sandboxes(projectID: string) {
|
||||
const project = await Storage.read<Info>(["project", projectID]).catch(() => undefined)
|
||||
if (!project?.sandboxes) return []
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get())
|
||||
if (!row) return []
|
||||
const data = fromRow(row)
|
||||
const valid: string[] = []
|
||||
for (const dir of project.sandboxes) {
|
||||
for (const dir of data.sandboxes) {
|
||||
const stat = await fs.stat(dir).catch(() => undefined)
|
||||
if (stat?.isDirectory()) valid.push(dir)
|
||||
}
|
||||
@@ -339,33 +387,45 @@ export namespace Project {
|
||||
}
|
||||
|
||||
export async function addSandbox(projectID: string, directory: string) {
|
||||
const result = await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
const sandboxes = draft.sandboxes ?? []
|
||||
if (!sandboxes.includes(directory)) sandboxes.push(directory)
|
||||
draft.sandboxes = sandboxes
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
|
||||
if (!row) throw new Error(`Project not found: ${projectID}`)
|
||||
const sandboxes = row.sandboxes ?? []
|
||||
if (!sandboxes.includes(directory)) sandboxes.push(directory)
|
||||
const result = db()
|
||||
.update(ProjectTable)
|
||||
.set({ sandboxes, time_updated: Date.now() })
|
||||
.where(eq(ProjectTable.id, projectID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!result) throw new Error(`Project not found: ${projectID}`)
|
||||
const data = fromRow(result)
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: result,
|
||||
properties: data,
|
||||
},
|
||||
})
|
||||
return result
|
||||
return data
|
||||
}
|
||||
|
||||
export async function removeSandbox(projectID: string, directory: string) {
|
||||
const result = await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
const sandboxes = draft.sandboxes ?? []
|
||||
draft.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory)
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
|
||||
if (!row) throw new Error(`Project not found: ${projectID}`)
|
||||
const sandboxes = (row.sandboxes ?? []).filter((s) => s !== directory)
|
||||
const result = db()
|
||||
.update(ProjectTable)
|
||||
.set({ sandboxes, time_updated: Date.now() })
|
||||
.where(eq(ProjectTable.id, projectID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!result) throw new Error(`Project not found: ${projectID}`)
|
||||
const data = fromRow(result)
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: result,
|
||||
properties: data,
|
||||
},
|
||||
})
|
||||
return result
|
||||
return data
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
import { resolver } from "hono-openapi"
import z from "zod"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"

export const ERRORS = {
  400: {
@@ -25,7 +25,7 @@ export const ERRORS = {
    description: "Not found",
    content: {
      "application/json": {
        schema: resolver(Storage.NotFoundError.Schema),
        schema: resolver(NotFoundError.Schema),
      },
    },
  },
@@ -31,7 +31,7 @@ import { ExperimentalRoutes } from "./routes/experimental"
import { ProviderRoutes } from "./routes/provider"
import { lazy } from "../util/lazy"
import { InstanceBootstrap } from "../project/bootstrap"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
import type { ContentfulStatusCode } from "hono/utils/http-status"
import { websocket } from "hono/bun"
import { HTTPException } from "hono/http-exception"
@@ -65,7 +65,7 @@ export namespace Server {
      })
      if (err instanceof NamedError) {
        let status: ContentfulStatusCode
        if (err instanceof Storage.NotFoundError) status = 404
        if (err instanceof NotFoundError) status = 404
        else if (err instanceof Provider.ModelNotFoundError) status = 400
        else if (err.name.startsWith("Worktree")) status = 400
        else status = 500
@@ -2,7 +2,6 @@ import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import { Session } from "."
import { Identifier } from "../id/id"
import { Instance } from "../project/instance"
import { Provider } from "../provider/provider"
import { MessageV2 } from "./message-v2"
import z from "zod"
@@ -109,10 +108,6 @@ export namespace SessionCompaction {
      mode: "compaction",
      agent: "compaction",
      summary: true,
      path: {
        cwd: Instance.directory,
        root: Instance.worktree,
      },
      cost: 0,
      tokens: {
        output: 0,
@@ -10,7 +10,10 @@ import { Flag } from "../flag/flag"
|
||||
import { Identifier } from "../id/id"
|
||||
import { Installation } from "../installation"
|
||||
|
||||
import { Storage } from "../storage/storage"
|
||||
import { db, NotFoundError } from "../storage/db"
|
||||
import { SessionTable, MessageTable, PartTable, SessionDiffTable } from "./session.sql"
|
||||
import { ShareTable } from "../share/share.sql"
|
||||
import { eq } from "drizzle-orm"
|
||||
import { Log } from "../util/log"
|
||||
import { MessageV2 } from "./message-v2"
|
||||
import { Instance } from "../project/instance"
|
||||
@@ -39,6 +42,49 @@ export namespace Session {
|
||||
).test(title)
|
||||
}
|
||||
|
||||
type SessionRow = typeof SessionTable.$inferSelect
|
||||
|
||||
export function fromRow(row: SessionRow): Info {
|
||||
const summary =
|
||||
row.summary_additions !== null || row.summary_deletions !== null || row.summary_files !== null
|
||||
? {
|
||||
additions: row.summary_additions ?? 0,
|
||||
deletions: row.summary_deletions ?? 0,
|
||||
files: row.summary_files ?? 0,
|
||||
diffs: row.summary_diffs ?? undefined,
|
||||
}
|
||||
: undefined
|
||||
const share = row.share_url ? { url: row.share_url } : undefined
|
||||
const revert =
|
||||
row.revert_messageID !== null
|
||||
? {
|
||||
messageID: row.revert_messageID,
|
||||
partID: row.revert_partID ?? undefined,
|
||||
snapshot: row.revert_snapshot ?? undefined,
|
||||
diff: row.revert_diff ?? undefined,
|
||||
}
|
||||
: undefined
|
||||
return {
|
||||
id: row.id,
|
||||
slug: row.slug,
|
||||
projectID: row.projectID,
|
||||
directory: row.directory,
|
||||
parentID: row.parentID ?? undefined,
|
||||
title: row.title,
|
||||
version: row.version,
|
||||
summary,
|
||||
share,
|
||||
revert,
|
||||
permission: row.permission ?? undefined,
|
||||
time: {
|
||||
created: row.time_created,
|
||||
updated: row.time_updated,
|
||||
compacting: row.time_compacting ?? undefined,
|
||||
archived: row.time_archived ?? undefined,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
id: Identifier.schema("session"),
|
||||
@@ -184,9 +230,10 @@ export namespace Session {
|
||||
)
|
||||
|
||||
export const touch = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
await update(sessionID, (draft) => {
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
const now = Date.now()
|
||||
db().update(SessionTable).set({ time_updated: now }).where(eq(SessionTable.id, sessionID)).run()
|
||||
const session = await get(sessionID)
|
||||
Bus.publish(Event.Updated, { info: session })
|
||||
})
|
||||
|
||||
export async function createNext(input: {
|
||||
@@ -211,21 +258,29 @@ export namespace Session {
|
||||
},
|
||||
}
|
||||
log.info("created", result)
|
||||
await Storage.write(["session", Instance.project.id, result.id], result)
|
||||
db()
|
||||
.insert(SessionTable)
|
||||
.values({
|
||||
id: result.id,
|
||||
projectID: result.projectID,
|
||||
parentID: result.parentID,
|
||||
slug: result.slug,
|
||||
directory: result.directory,
|
||||
title: result.title,
|
||||
version: result.version,
|
||||
permission: result.permission,
|
||||
time_created: result.time.created,
|
||||
time_updated: result.time.updated,
|
||||
})
|
||||
.run()
|
||||
Bus.publish(Event.Created, {
|
||||
info: result,
|
||||
})
|
||||
const cfg = await Config.get()
|
||||
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
|
||||
share(result.id)
|
||||
.then((share) => {
|
||||
update(result.id, (draft) => {
|
||||
draft.share = share
|
||||
})
|
||||
})
|
||||
.catch(() => {
|
||||
// Silently ignore sharing errors during session creation
|
||||
})
|
||||
share(result.id).catch(() => {
|
||||
// Silently ignore sharing errors during session creation
|
||||
})
|
||||
Bus.publish(Event.Updated, {
|
||||
info: result,
|
||||
})
|
||||
@@ -240,12 +295,14 @@ export namespace Session {
|
||||
}
|
||||
|
||||
export const get = fn(Identifier.schema("session"), async (id) => {
|
||||
const read = await Storage.read<Info>(["session", Instance.project.id, id])
|
||||
return read as Info
|
||||
const row = db().select().from(SessionTable).where(eq(SessionTable.id, id)).get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
|
||||
return fromRow(row)
|
||||
})
|
||||
|
||||
export const getShare = fn(Identifier.schema("session"), async (id) => {
|
||||
return Storage.read<ShareInfo>(["share", id])
|
||||
const row = db().select().from(ShareTable).where(eq(ShareTable.sessionID, id)).get()
|
||||
return row?.data
|
||||
})
|
||||
|
||||
export const share = fn(Identifier.schema("session"), async (id) => {
|
||||
@@ -280,23 +337,24 @@ export namespace Session {
|
||||
)
|
||||
})
|
||||
|
||||
export async function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
|
||||
const project = Instance.project
|
||||
const result = await Storage.update<Info>(["session", project.id, id], (draft) => {
|
||||
editor(draft)
|
||||
if (options?.touch !== false) {
|
||||
draft.time.updated = Date.now()
|
||||
}
|
||||
})
|
||||
export function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
|
||||
const row = db().select().from(SessionTable).where(eq(SessionTable.id, id)).get()
|
||||
if (!row) throw new Error(`Session not found: ${id}`)
|
||||
const data = fromRow(row)
|
||||
editor(data)
|
||||
if (options?.touch !== false) {
|
||||
data.time.updated = Date.now()
|
||||
}
|
||||
db().update(SessionTable).set(toRow(data)).where(eq(SessionTable.id, id)).run()
|
||||
Bus.publish(Event.Updated, {
|
||||
info: result,
|
||||
info: data,
|
||||
})
|
||||
return result
|
||||
return data
|
||||
}
|
||||
|
||||
export const diff = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
|
||||
return diffs ?? []
|
||||
const row = db().select().from(SessionDiffTable).where(eq(SessionDiffTable.sessionID, sessionID)).get()
|
||||
return row?.data ?? []
|
||||
})
|
||||
|
||||
export const messages = fn(
|
||||
@@ -315,39 +373,28 @@ export namespace Session {
|
||||
},
|
||||
)
|
||||
|
||||
export async function* list() {
|
||||
export function* list() {
|
||||
const project = Instance.project
|
||||
for (const item of await Storage.list(["session", project.id])) {
|
||||
yield Storage.read<Info>(item)
|
||||
const rows = db().select().from(SessionTable).where(eq(SessionTable.projectID, project.id)).all()
|
||||
for (const row of rows) {
|
||||
yield fromRow(row)
|
||||
}
|
||||
}
|
||||
|
||||
export const children = fn(Identifier.schema("session"), async (parentID) => {
|
||||
const project = Instance.project
|
||||
const result = [] as Session.Info[]
|
||||
for (const item of await Storage.list(["session", project.id])) {
|
||||
const session = await Storage.read<Info>(item)
|
||||
if (session.parentID !== parentID) continue
|
||||
result.push(session)
|
||||
}
|
||||
return result
|
||||
const rows = db().select().from(SessionTable).where(eq(SessionTable.parentID, parentID)).all()
|
||||
return rows.map((row) => fromRow(row))
|
||||
})
|
||||
|
||||
export const remove = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
const project = Instance.project
|
||||
try {
|
||||
const session = await get(sessionID)
|
||||
for (const child of await children(sessionID)) {
|
||||
await remove(child.id)
|
||||
}
|
||||
await unshare(sessionID).catch(() => {})
|
||||
for (const msg of await Storage.list(["message", sessionID])) {
|
||||
for (const part of await Storage.list(["part", msg.at(-1)!])) {
|
||||
await Storage.remove(part)
|
||||
}
|
||||
await Storage.remove(msg)
|
||||
}
|
||||
await Storage.remove(["session", project.id, sessionID])
|
||||
// CASCADE delete handles messages and parts automatically
|
||||
db().delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
|
||||
Bus.publish(Event.Deleted, {
|
||||
info: session,
|
||||
})
|
||||
@@ -357,7 +404,15 @@ export namespace Session {
|
||||
})
|
||||
|
||||
export const updateMessage = fn(MessageV2.Info, async (msg) => {
|
||||
await Storage.write(["message", msg.sessionID, msg.id], msg)
|
||||
db()
|
||||
.insert(MessageTable)
|
||||
.values({
|
||||
id: msg.id,
|
||||
sessionID: msg.sessionID,
|
||||
data: msg,
|
||||
})
|
||||
.onConflictDoUpdate({ target: MessageTable.id, set: { data: msg } })
|
||||
.run()
|
||||
Bus.publish(MessageV2.Event.Updated, {
|
||||
info: msg,
|
||||
})
|
||||
@@ -370,7 +425,8 @@ export namespace Session {
|
||||
messageID: Identifier.schema("message"),
|
||||
}),
|
||||
async (input) => {
|
||||
await Storage.remove(["message", input.sessionID, input.messageID])
|
||||
// CASCADE delete handles parts automatically
|
||||
db().delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run()
|
||||
Bus.publish(MessageV2.Event.Removed, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
@@ -386,7 +442,7 @@ export namespace Session {
|
||||
partID: Identifier.schema("part"),
|
||||
}),
|
||||
async (input) => {
|
||||
await Storage.remove(["part", input.messageID, input.partID])
|
||||
db().delete(PartTable).where(eq(PartTable.id, input.partID)).run()
|
||||
Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
@@ -411,7 +467,16 @@ export namespace Session {
|
||||
export const updatePart = fn(UpdatePartInput, async (input) => {
|
||||
const part = "delta" in input ? input.part : input
|
||||
const delta = "delta" in input ? input.delta : undefined
|
||||
await Storage.write(["part", part.messageID, part.id], part)
|
||||
db()
|
||||
.insert(PartTable)
|
||||
.values({
|
||||
id: part.id,
|
||||
messageID: part.messageID,
|
||||
sessionID: part.sessionID,
|
||||
data: part,
|
||||
})
|
||||
.onConflictDoUpdate({ target: PartTable.id, set: { data: part } })
|
||||
.run()
|
||||
Bus.publish(MessageV2.Event.PartUpdated, {
|
||||
part,
|
||||
delta,
|
||||
|
||||
@@ -6,7 +6,9 @@ import { Identifier } from "../id/id"
|
||||
import { LSP } from "../lsp"
|
||||
import { Snapshot } from "@/snapshot"
|
||||
import { fn } from "@/util/fn"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { Database } from "@/storage/db"
|
||||
import { MessageTable, PartTable } from "./session.sql"
|
||||
import { eq, desc, lt, and, inArray } from "drizzle-orm"
|
||||
import { ProviderTransform } from "@/provider/transform"
|
||||
import { STATUS_CODES } from "http"
|
||||
import { iife } from "@/util/iife"
|
||||
@@ -38,8 +40,8 @@ export namespace MessageV2 {
|
||||
|
||||
const PartBase = z.object({
|
||||
id: z.string(),
|
||||
sessionID: z.string(),
|
||||
messageID: z.string(),
|
||||
sessionID: z.string(),
|
||||
})
|
||||
|
||||
export const SnapshotPart = PartBase.extend({
|
||||
@@ -370,10 +372,6 @@ export namespace MessageV2 {
|
||||
*/
|
||||
mode: z.string(),
|
||||
agent: z.string(),
|
||||
path: z.object({
|
||||
cwd: z.string(),
|
||||
root: z.string(),
|
||||
}),
|
||||
summary: z.boolean().optional(),
|
||||
cost: z.number(),
|
||||
tokens: z.object({
|
||||
@@ -607,21 +605,92 @@ export namespace MessageV2 {
|
||||
}
|
||||
|
||||
export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
|
||||
const list = await Array.fromAsync(await Storage.list(["message", sessionID]))
|
||||
for (let i = list.length - 1; i >= 0; i--) {
|
||||
yield await get({
|
||||
sessionID,
|
||||
messageID: list[i][2],
|
||||
})
|
||||
const SIZE = 25
|
||||
let cursor: string | undefined
|
||||
while (true) {
|
||||
const conditions = [eq(MessageTable.sessionID, sessionID)]
|
||||
if (cursor) conditions.push(lt(MessageTable.id, cursor))
|
||||
|
||||
const ids = Database.use((db) =>
|
||||
db
|
||||
.select({ id: MessageTable.id })
|
||||
.from(MessageTable)
|
||||
.where(and(...conditions))
|
||||
.orderBy(desc(MessageTable.id))
|
||||
.limit(SIZE)
|
||||
.all(),
|
||||
)
|
||||
if (ids.length === 0) break
|
||||
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select({
|
||||
message: MessageTable,
|
||||
part: PartTable,
|
||||
})
|
||||
.from(MessageTable)
|
||||
.leftJoin(PartTable, eq(PartTable.message_id, MessageTable.id))
|
||||
.where(
|
||||
inArray(
|
||||
MessageTable.id,
|
||||
ids.map((row) => row.id),
|
||||
),
|
||||
)
|
||||
.orderBy(desc(MessageTable.id), PartTable.id)
|
||||
.all(),
|
||||
)
|
||||
|
||||
const grouped = Map.groupBy(rows, (row) => row.message.id)
|
||||
for (const id of ids) {
|
||||
const group = grouped.get(id.id) ?? []
|
||||
const first = group[0]
|
||||
if (!first) continue
|
||||
yield {
|
||||
info: {
|
||||
...first.message.data,
|
||||
role: first.message.role,
|
||||
id: first.message.id,
|
||||
sessionID: first.message.sessionID,
|
||||
} as Info,
|
||||
parts: group
|
||||
.filter((row) => row.part)
|
||||
.map((row) => ({
|
||||
...row.part!.data,
|
||||
type: row.part!.type,
|
||||
id: row.part!.id,
|
||||
messageID: row.part!.message_id,
|
||||
sessionID: first.message.sessionID,
|
||||
})) as Part[],
|
||||
}
|
||||
}
|
||||
|
||||
cursor = ids[ids.length - 1]?.id
|
||||
if (ids.length < SIZE) break
|
||||
}
|
||||
})
|
||||
|
||||
export const parts = fn(Identifier.schema("message"), async (messageID) => {
|
||||
const result = [] as MessageV2.Part[]
|
||||
for (const item of await Storage.list(["part", messageID])) {
|
||||
const read = await Storage.read<MessageV2.Part>(item)
|
||||
result.push(read)
|
||||
}
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select({
|
||||
id: PartTable.id,
|
||||
messageID: PartTable.message_id,
|
||||
sessionID: MessageTable.sessionID,
|
||||
type: PartTable.type,
|
||||
data: PartTable.data,
|
||||
})
|
||||
.from(PartTable)
|
||||
.innerJoin(MessageTable, eq(PartTable.message_id, MessageTable.id))
|
||||
.where(eq(PartTable.message_id, messageID))
|
||||
.all(),
|
||||
)
|
||||
const result = rows.map((row) => ({
|
||||
...row.data,
|
||||
type: row.type,
|
||||
id: row.id,
|
||||
messageID: row.messageID,
|
||||
sessionID: row.sessionID,
|
||||
})) as Part[]
|
||||
result.sort((a, b) => (a.id > b.id ? 1 : -1))
|
||||
return result
|
||||
})
|
||||
@@ -632,9 +701,36 @@ export namespace MessageV2 {
|
||||
messageID: Identifier.schema("message"),
|
||||
}),
|
||||
async (input) => {
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select({
|
||||
message: MessageTable,
|
||||
part: PartTable,
|
||||
})
|
||||
.from(MessageTable)
|
||||
.leftJoin(PartTable, eq(PartTable.message_id, MessageTable.id))
|
||||
.where(eq(MessageTable.id, input.messageID))
|
||||
.orderBy(PartTable.id)
|
||||
.all(),
|
||||
)
|
||||
const first = rows[0]
|
||||
if (!first) throw new Error(`Message not found: ${input.messageID}`)
|
||||
return {
|
||||
info: await Storage.read<MessageV2.Info>(["message", input.sessionID, input.messageID]),
|
||||
parts: await parts(input.messageID),
|
||||
info: {
|
||||
...first.message.data,
|
||||
role: first.message.role,
|
||||
id: first.message.id,
|
||||
sessionID: first.message.sessionID,
|
||||
} as Info,
|
||||
parts: rows
|
||||
.filter((row) => row.part)
|
||||
.map((row) => ({
|
||||
...row.part!.data,
|
||||
type: row.part!.type,
|
||||
id: row.part!.id,
|
||||
messageID: row.part!.message_id,
|
||||
sessionID: first.message.sessionID,
|
||||
})) as Part[],
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
@@ -166,10 +166,13 @@ export namespace SessionPrompt {
|
||||
})
|
||||
}
|
||||
if (permissions.length > 0) {
|
||||
session.permission = permissions
|
||||
await Session.update(session.id, (draft) => {
|
||||
draft.permission = permissions
|
||||
})
|
||||
Session.update(
|
||||
session.id,
|
||||
(draft) => {
|
||||
draft.permission = permissions
|
||||
},
|
||||
{ touch: false },
|
||||
)
|
||||
}
|
||||
|
||||
if (input.noReply === true) {
|
||||
@@ -325,10 +328,6 @@ export namespace SessionPrompt {
|
||||
sessionID,
|
||||
mode: task.agent,
|
||||
agent: task.agent,
|
||||
path: {
|
||||
cwd: Instance.directory,
|
||||
root: Instance.worktree,
|
||||
},
|
||||
cost: 0,
|
||||
tokens: {
|
||||
input: 0,
|
||||
@@ -527,10 +526,6 @@ export namespace SessionPrompt {
|
||||
role: "assistant",
|
||||
mode: agent.name,
|
||||
agent: agent.name,
|
||||
path: {
|
||||
cwd: Instance.directory,
|
||||
root: Instance.worktree,
|
||||
},
|
||||
cost: 0,
|
||||
tokens: {
|
||||
input: 0,
|
||||
@@ -1384,10 +1379,6 @@ NOTE: At any point in time through this workflow you should feel free to ask the
|
||||
mode: input.agent,
|
||||
agent: input.agent,
|
||||
cost: 0,
|
||||
path: {
|
||||
cwd: Instance.directory,
|
||||
root: Instance.worktree,
|
||||
},
|
||||
time: {
|
||||
created: Date.now(),
|
||||
},
|
||||
|
||||
@@ -5,7 +5,9 @@ import { MessageV2 } from "./message-v2"
|
||||
import { Session } from "."
|
||||
import { Log } from "../util/log"
|
||||
import { splitWhen } from "remeda"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { db } from "../storage/db"
|
||||
import { MessageTable, PartTable, SessionTable, SessionDiffTable } from "./session.sql"
|
||||
import { eq } from "drizzle-orm"
|
||||
import { Bus } from "../bus"
|
||||
import { SessionPrompt } from "./prompt"
|
||||
import { SessionSummary } from "./summary"
|
||||
@@ -54,13 +56,17 @@ export namespace SessionRevert {
|
||||
}
|
||||
|
||||
if (revert) {
|
||||
const session = await Session.get(input.sessionID)
|
||||
revert.snapshot = session.revert?.snapshot ?? (await Snapshot.track())
|
||||
const current = await Session.get(input.sessionID)
|
||||
revert.snapshot = current.revert?.snapshot ?? (await Snapshot.track())
|
||||
await Snapshot.revert(patches)
|
||||
if (revert.snapshot) revert.diff = await Snapshot.diff(revert.snapshot)
|
||||
const rangeMessages = all.filter((msg) => msg.info.id >= revert!.messageID)
|
||||
const diffs = await SessionSummary.computeDiff({ messages: rangeMessages })
|
||||
await Storage.write(["session_diff", input.sessionID], diffs)
|
||||
db()
|
||||
.insert(SessionDiffTable)
|
||||
.values({ sessionID: input.sessionID, data: diffs })
|
||||
.onConflictDoUpdate({ target: SessionDiffTable.sessionID, set: { data: diffs } })
|
||||
.run()
|
||||
Bus.publish(Session.Event.Diff, {
|
||||
sessionID: input.sessionID,
|
||||
diff: diffs,
|
||||
@@ -83,10 +89,21 @@ export namespace SessionRevert {
|
||||
const session = await Session.get(input.sessionID)
|
||||
if (!session.revert) return session
|
||||
if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot)
|
||||
const next = await Session.update(input.sessionID, (draft) => {
|
||||
draft.revert = undefined
|
||||
})
|
||||
return next
|
||||
const now = Date.now()
|
||||
db()
|
||||
.update(SessionTable)
|
||||
.set({
|
||||
revert_messageID: null,
|
||||
revert_partID: null,
|
||||
revert_snapshot: null,
|
||||
revert_diff: null,
|
||||
time_updated: now,
|
||||
})
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.run()
|
||||
const updated = await Session.get(input.sessionID)
|
||||
Bus.publish(Session.Event.Updated, { info: updated })
|
||||
return updated
|
||||
}
|
||||
|
||||
export async function cleanup(session: Session.Info) {
|
||||
@@ -97,7 +114,7 @@ export namespace SessionRevert {
|
||||
const [preserve, remove] = splitWhen(msgs, (x) => x.info.id === messageID)
|
||||
msgs = preserve
|
||||
for (const msg of remove) {
|
||||
await Storage.remove(["message", sessionID, msg.info.id])
|
||||
db().delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run()
|
||||
await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id })
|
||||
}
|
||||
const last = preserve.at(-1)
|
||||
@@ -106,7 +123,7 @@ export namespace SessionRevert {
|
||||
const [preserveParts, removeParts] = splitWhen(last.parts, (x) => x.id === partID)
|
||||
last.parts = preserveParts
|
||||
for (const part of removeParts) {
|
||||
await Storage.remove(["part", last.info.id, part.id])
|
||||
db().delete(PartTable).where(eq(PartTable.id, part.id)).run()
|
||||
await Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: sessionID,
|
||||
messageID: last.info.id,
|
||||
@@ -114,8 +131,19 @@ export namespace SessionRevert {
|
||||
})
|
||||
}
|
||||
}
|
||||
await Session.update(sessionID, (draft) => {
|
||||
draft.revert = undefined
|
||||
})
|
||||
const now = Date.now()
|
||||
db()
|
||||
.update(SessionTable)
|
||||
.set({
|
||||
revert_messageID: null,
|
||||
revert_partID: null,
|
||||
revert_snapshot: null,
|
||||
revert_diff: null,
|
||||
time_updated: now,
|
||||
})
|
||||
.where(eq(SessionTable.id, sessionID))
|
||||
.run()
|
||||
const updated = await Session.get(sessionID)
|
||||
Bus.publish(Session.Event.Updated, { info: updated })
|
||||
}
|
||||
}
|
||||
|
||||
83  packages/opencode/src/session/session.sql.ts  Normal file
@@ -0,0 +1,83 @@
import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core"
import { ProjectTable } from "../project/project.sql"
import type { MessageV2 } from "./message-v2"
import type { Snapshot } from "@/snapshot"
import type { Todo } from "./todo"
import type { PermissionNext } from "@/permission/next"

export const SessionTable = sqliteTable(
  "session",
  {
    id: text("id").primaryKey(),
    projectID: text("project_id")
      .notNull()
      .references(() => ProjectTable.id, { onDelete: "cascade" }),
    parentID: text("parent_id"),
    slug: text("slug").notNull(),
    directory: text("directory").notNull(),
    title: text("title").notNull(),
    version: text("version").notNull(),
    share_url: text("share_url"),
    summary_additions: integer("summary_additions"),
    summary_deletions: integer("summary_deletions"),
    summary_files: integer("summary_files"),
    summary_diffs: text("summary_diffs", { mode: "json" }).$type<Snapshot.FileDiff[]>(),
    revert_messageID: text("revert_message_id"),
    revert_partID: text("revert_part_id"),
    revert_snapshot: text("revert_snapshot"),
    revert_diff: text("revert_diff"),
    permission: text("permission", { mode: "json" }).$type<PermissionNext.Ruleset>(),
    time_created: integer("time_created").notNull(),
    time_updated: integer("time_updated").notNull(),
    time_compacting: integer("time_compacting"),
    time_archived: integer("time_archived"),
  },
  (table) => [index("session_project_idx").on(table.projectID), index("session_parent_idx").on(table.parentID)],
)

export const MessageTable = sqliteTable(
  "message",
  {
    id: text("id").primaryKey(),
    sessionID: text("session_id")
      .notNull()
      .references(() => SessionTable.id, { onDelete: "cascade" }),
    createdAt: integer("created_at").notNull(),
    data: text("data", { mode: "json" }).notNull().$type<MessageV2.Info>(),
  },
  (table) => [index("message_session_idx").on(table.sessionID)],
)

export const PartTable = sqliteTable(
  "part",
  {
    id: text("id").primaryKey(),
    messageID: text("message_id")
      .notNull()
      .references(() => MessageTable.id, { onDelete: "cascade" }),
    sessionID: text("session_id").notNull(),
    data: text("data", { mode: "json" }).notNull().$type<MessageV2.Part>(),
  },
  (table) => [index("part_message_idx").on(table.messageID), index("part_session_idx").on(table.sessionID)],
)

export const SessionDiffTable = sqliteTable("session_diff", {
  sessionID: text("session_id")
    .primaryKey()
    .references(() => SessionTable.id, { onDelete: "cascade" }),
  data: text("data", { mode: "json" }).notNull().$type<Snapshot.FileDiff[]>(),
})

export const TodoTable = sqliteTable("todo", {
  sessionID: text("session_id")
    .primaryKey()
    .references(() => SessionTable.id, { onDelete: "cascade" }),
  data: text("data", { mode: "json" }).notNull().$type<Todo.Info[]>(),
})

export const PermissionTable = sqliteTable("permission", {
  projectID: text("project_id")
    .primaryKey()
    .references(() => ProjectTable.id, { onDelete: "cascade" }),
  data: text("data", { mode: "json" }).notNull().$type<PermissionNext.Ruleset>(),
})
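The child tables above all rely on onDelete: "cascade" references, and the session delete path later in this diff notes that CASCADE removes messages and parts. SQLite only enforces those references when foreign keys are switched on for the connection, so the connection setup in src/storage/db.ts (not shown in this diff) presumably does something along these lines; the file path and WAL pragma here are illustrative assumptions:

import { Database as SQLite } from "bun:sqlite"

const sqlite = new SQLite("opencode.db") // path is illustrative
sqlite.run("PRAGMA foreign_keys = ON") // required for the cascade deletes above to fire
sqlite.run("PRAGMA journal_mode = WAL") // common pairing, not strictly required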
@@ -11,7 +11,9 @@ import { Snapshot } from "@/snapshot"
|
||||
import { Log } from "@/util/log"
|
||||
import path from "path"
|
||||
import { Instance } from "@/project/instance"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { Database } from "@/storage/db"
|
||||
import { SessionDiffTable, SessionTable } from "./session.sql"
|
||||
import { eq } from "drizzle-orm"
|
||||
import { Bus } from "@/bus"
|
||||
|
||||
import { LLM } from "./llm"
|
||||
@@ -47,14 +49,28 @@ export namespace SessionSummary {
|
||||
return files.has(x.file)
|
||||
}),
|
||||
)
|
||||
await Session.update(input.sessionID, (draft) => {
|
||||
draft.summary = {
|
||||
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
|
||||
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
|
||||
files: diffs.length,
|
||||
}
|
||||
})
|
||||
await Storage.write(["session_diff", input.sessionID], diffs)
|
||||
const now = Date.now()
|
||||
Database.use((db) =>
|
||||
db
|
||||
.update(SessionTable)
|
||||
.set({
|
||||
summary_additions: diffs.reduce((sum, x) => sum + x.additions, 0),
|
||||
summary_deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
|
||||
summary_files: diffs.length,
|
||||
time_updated: now,
|
||||
})
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.run(),
|
||||
)
|
||||
const session = await Session.get(input.sessionID)
|
||||
Bus.publish(Session.Event.Updated, { info: session })
|
||||
Database.use((db) =>
|
||||
db
|
||||
.insert(SessionDiffTable)
|
||||
.values({ sessionID: input.sessionID, data: diffs })
|
||||
.onConflictDoUpdate({ target: SessionDiffTable.sessionID, set: { data: diffs } })
|
||||
.run(),
|
||||
)
|
||||
Bus.publish(Session.Event.Diff, {
|
||||
sessionID: input.sessionID,
|
||||
diff: diffs,
|
||||
@@ -116,7 +132,10 @@ export namespace SessionSummary {
|
||||
messageID: Identifier.schema("message").optional(),
|
||||
}),
|
||||
async (input) => {
|
||||
return Storage.read<Snapshot.FileDiff[]>(["session_diff", input.sessionID]).catch(() => [])
|
||||
const row = Database.use((db) =>
|
||||
db.select().from(SessionDiffTable).where(eq(SessionDiffTable.sessionID, input.sessionID)).get(),
|
||||
)
|
||||
return row?.data ?? []
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -1,7 +1,9 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import z from "zod"
import { Storage } from "../storage/storage"
import { Database } from "../storage/db"
import { TodoTable } from "./session.sql"
import { eq } from "drizzle-orm"

export namespace Todo {
  export const Info = z
@@ -24,14 +26,19 @@ export namespace Todo {
    ),
  }

  export async function update(input: { sessionID: string; todos: Info[] }) {
    await Storage.write(["todo", input.sessionID], input.todos)
  export function update(input: { sessionID: string; todos: Info[] }) {
    Database.use((db) =>
      db
        .insert(TodoTable)
        .values({ sessionID: input.sessionID, data: input.todos })
        .onConflictDoUpdate({ target: TodoTable.sessionID, set: { data: input.todos } })
        .run(),
    )
    Bus.publish(Event.Updated, input)
  }

  export async function get(sessionID: string) {
    return Storage.read<Info[]>(["todo", sessionID])
      .then((x) => x || [])
      .catch(() => [])
  export function get(sessionID: string) {
    const row = Database.use((db) => db.select().from(TodoTable).where(eq(TodoTable.sessionID, sessionID)).get())
    return row?.data ?? []
  }
}
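The hunks above replace async JSON file writes with synchronous SQLite upserts: `.insert(...).values(...).onConflictDoUpdate(...)` keeps each write idempotent, so repeating it for the same session simply overwrites the stored row. A minimal usage sketch of the new Todo API defined above (the import path, session ID, and todo item shape are abbreviated for illustration, not taken from this changeset):

// Sketch only: exercises the synchronous Todo namespace shown above.
import { Todo } from "@/session/todo" // path assumed from the surrounding diff

const sessionID = "ses_example" // placeholder ID
Todo.update({ sessionID, todos: [{ id: "1", content: "wire up sqlite", status: "pending", priority: "high" }] })
Todo.update({ sessionID, todos: [{ id: "1", content: "wire up sqlite", status: "completed", priority: "high" }] })
// The second call hits the onConflictDoUpdate branch, so exactly one row remains.
const todos = Todo.get(sessionID) // returns the "completed" list, or [] if nothing was stored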
@@ -4,7 +4,9 @@ import { ulid } from "ulid"
import { Provider } from "@/provider/provider"
import { Session } from "@/session"
import { MessageV2 } from "@/session/message-v2"
import { Storage } from "@/storage/storage"
import { Database } from "@/storage/db"
import { SessionShareTable } from "./share.sql"
import { eq } from "drizzle-orm"
import { Log } from "@/util/log"
import type * as SDK from "@opencode-ai/sdk/v2"

@@ -77,17 +79,22 @@ export namespace ShareNext {
    })
      .then((x) => x.json())
      .then((x) => x as { id: string; url: string; secret: string })
    await Storage.write(["session_share", sessionID], result)
    Database.use((db) =>
      db
        .insert(SessionShareTable)
        .values({ sessionID, data: result })
        .onConflictDoUpdate({ target: SessionShareTable.sessionID, set: { data: result } })
        .run(),
    )
    fullSync(sessionID)
    return result
  }

  function get(sessionID: string) {
    return Storage.read<{
      id: string
      secret: string
      url: string
    }>(["session_share", sessionID])
    const row = Database.use((db) =>
      db.select().from(SessionShareTable).where(eq(SessionShareTable.sessionID, sessionID)).get(),
    )
    return row?.data
  }

  type Data =
@@ -132,7 +139,7 @@ export namespace ShareNext {
    const queued = queue.get(sessionID)
    if (!queued) return
    queue.delete(sessionID)
    const share = await get(sessionID).catch(() => undefined)
    const share = get(sessionID)
    if (!share) return

    await fetch(`${await url()}/api/share/${share.id}/sync`, {
@@ -163,7 +170,7 @@ export namespace ShareNext {
        secret: share.secret,
      }),
    })
    await Storage.remove(["session_share", sessionID])
    Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.sessionID, sessionID)).run())
  }

  async function fullSync(sessionID: string) {
packages/opencode/src/share/share.sql.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { SessionTable } from "../session/session.sql"
import type { Session } from "../session"

export const SessionShareTable = sqliteTable("session_share", {
  sessionID: text("session_id")
    .primaryKey()
    .references(() => SessionTable.id, { onDelete: "cascade" }),
  data: text("data", { mode: "json" }).notNull().$type<{
    id: string
    secret: string
    url: string
  }>(),
})

export const ShareTable = sqliteTable("share", {
  sessionID: text("session_id").primaryKey(),
  data: text("data", { mode: "json" }).notNull().$type<Session.ShareInfo>(),
})
packages/opencode/src/sql.d.ts (new file, vendored, 5 lines)
@@ -0,0 +1,5 @@
// Type declarations for SQL file imports with { type: "text" }
declare module "*.sql" {
  const content: string
  export default content
}
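This ambient declaration exists so TypeScript accepts Bun's text imports of raw .sql files, the mechanism the generated migrations module further down relies on. A minimal sketch of what it enables (the file name here is an example, not one from this changeset):

// Sketch: with the "*.sql" declaration above, Bun's import attribute
// { type: "text" } yields the file contents as a plain string.
import schema from "../migration/0000_example.sql" with { type: "text" } // hypothetical file

console.log(typeof schema) // "string" — ready to hand to sqlite.exec(...)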
packages/opencode/src/storage/db.ts (new file, 122 lines)
@@ -0,0 +1,122 @@
import { Database as SqliteDatabase } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import type { BunSQLiteDatabase } from "drizzle-orm/bun-sqlite"
import { lazy } from "../util/lazy"
import { Global } from "../global"
import { Log } from "../util/log"
import { migrations } from "./migrations.generated"
import { migrateFromJson } from "./json-migration"
import { NamedError } from "@opencode-ai/util/error"
import { Context } from "../util/context"
import z from "zod"
import path from "path"

export const NotFoundError = NamedError.create(
  "NotFoundError",
  z.object({
    message: z.string(),
  }),
)

const log = Log.create({ service: "db" })

export namespace Database {
  export type DB = BunSQLiteDatabase

  const connection = lazy(() => {
    const dbPath = path.join(Global.Path.data, "opencode.db")
    log.info("opening database", { path: dbPath })

    const sqlite = new SqliteDatabase(dbPath, { create: true })

    sqlite.run("PRAGMA journal_mode = WAL")
    sqlite.run("PRAGMA synchronous = NORMAL")
    sqlite.run("PRAGMA busy_timeout = 5000")
    sqlite.run("PRAGMA cache_size = -64000")
    sqlite.run("PRAGMA foreign_keys = ON")

    migrate(sqlite)

    // Run JSON migration after schema is ready
    try {
      migrateFromJson(sqlite)
    } catch (e) {
      log.error("json migration failed", { error: e })
    }

    return drizzle(sqlite)
  })

  function migrate(sqlite: SqliteDatabase) {
    sqlite.run(`
      CREATE TABLE IF NOT EXISTS _migrations (
        name TEXT PRIMARY KEY,
        applied_at INTEGER NOT NULL
      )
    `)

    const applied = new Set(
      sqlite
        .query<{ name: string }, []>("SELECT name FROM _migrations")
        .all()
        .map((r) => r.name),
    )

    for (const migration of migrations) {
      if (applied.has(migration.name)) continue
      log.info("applying migration", { name: migration.name })
      sqlite.exec(migration.sql)
      sqlite.run("INSERT INTO _migrations (name, applied_at) VALUES (?, ?)", [migration.name, Date.now()])
    }
  }

  const TransactionContext = Context.create<{
    db: DB
    effects: (() => void | Promise<void>)[]
  }>("database")

  export function use<T>(callback: (db: DB) => T): T {
    try {
      const ctx = TransactionContext.use()
      return callback(ctx.db)
    } catch (err) {
      if (err instanceof Context.NotFound) {
        const effects: (() => void | Promise<void>)[] = []
        const result = TransactionContext.provide({ db: connection(), effects }, () => callback(connection()))
        for (const fx of effects) fx()
        return result
      }
      throw err
    }
  }

  export function fn<Input, T>(callback: (input: Input, db: DB) => T) {
    return (input: Input) => use((db) => callback(input, db))
  }

  export function effect(fx: () => void | Promise<void>) {
    try {
      const ctx = TransactionContext.use()
      ctx.effects.push(fx)
    } catch {
      fx()
    }
  }

  export function transaction<T>(callback: (db: DB) => T): T {
    try {
      const ctx = TransactionContext.use()
      return callback(ctx.db)
    } catch (err) {
      if (err instanceof Context.NotFound) {
        const effects: (() => void | Promise<void>)[] = []
        const result = connection().transaction((tx) => {
          return TransactionContext.provide({ db: tx as unknown as DB, effects }, () => callback(tx as unknown as DB))
        })
        for (const fx of effects) fx()
        return result
      }
      throw err
    }
  }
}
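`Database.use` runs a callback against the lazily opened connection, `Database.transaction` makes nested `use` calls share one SQLite transaction through the context, and `Database.effect` defers side effects until the surrounding transaction body has finished. A rough usage sketch, assuming the session schema shown elsewhere in this change (import aliases and the session values are placeholders):

// Sketch only: how callers are expected to combine use/transaction/effect.
import { Database } from "@/storage/db"
import { SessionTable } from "@/session/session.sql" // path assumed
import { eq } from "drizzle-orm"

Database.transaction((db) => {
  // Statements inside the callback share one connection and one transaction.
  db.update(SessionTable).set({ title: "renamed" }).where(eq(SessionTable.id, "ses_example")).run()

  // Queued, then run after the transaction callback completes rather than immediately.
  Database.effect(() => console.log("session updated"))

  // Nested helpers reuse the ambient transaction instead of opening a new one.
  return Database.use((d) => d.select().from(SessionTable).where(eq(SessionTable.id, "ses_example")).get())
})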
packages/opencode/src/storage/json-migration.ts (new file, 339 lines)
@@ -0,0 +1,339 @@
import { Database } from "bun:sqlite"
|
||||
import { drizzle } from "drizzle-orm/bun-sqlite"
|
||||
import { Global } from "../global"
|
||||
import { Log } from "../util/log"
|
||||
import { ProjectTable } from "../project/project.sql"
|
||||
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql"
|
||||
import { SessionShareTable, ShareTable } from "../share/share.sql"
|
||||
import path from "path"
|
||||
import fs from "fs"
|
||||
|
||||
const log = Log.create({ service: "json-migration" })
|
||||
|
||||
export function migrateFromJson(sqlite: Database, customStorageDir?: string) {
|
||||
const storageDir = customStorageDir ?? path.join(Global.Path.data, "storage")
|
||||
const migrationMarker = path.join(storageDir, "sqlite-migrated")
|
||||
|
||||
if (fs.existsSync(migrationMarker)) {
|
||||
log.info("json migration already completed")
|
||||
return
|
||||
}
|
||||
|
||||
if (!fs.existsSync(path.join(storageDir, "migration"))) {
|
||||
log.info("no json storage found, skipping migration")
|
||||
fs.writeFileSync(migrationMarker, Date.now().toString())
|
||||
return
|
||||
}
|
||||
|
||||
log.info("starting json to sqlite migration", { storageDir })
|
||||
|
||||
const db = drizzle(sqlite)
|
||||
const stats = {
|
||||
projects: 0,
|
||||
sessions: 0,
|
||||
messages: 0,
|
||||
parts: 0,
|
||||
diffs: 0,
|
||||
todos: 0,
|
||||
permissions: 0,
|
||||
shares: 0,
|
||||
errors: [] as string[],
|
||||
}
|
||||
|
||||
// Run entire migration in a single transaction for performance
|
||||
sqlite.run("BEGIN TRANSACTION")
|
||||
|
||||
try {
|
||||
// Track existing IDs to avoid repeated DB lookups
|
||||
const projectIDs = new Set<string>()
|
||||
const sessionIDs = new Set<string>()
|
||||
const messageIDs = new Set<string>()
|
||||
|
||||
// Migrate projects first (no FK deps)
|
||||
const projectGlob = new Bun.Glob("project/*.json")
|
||||
const projectFiles = Array.from(projectGlob.scanSync({ cwd: storageDir, absolute: true }))
|
||||
const projectValues: (typeof ProjectTable.$inferInsert)[] = []
|
||||
|
||||
for (const file of projectFiles) {
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
|
||||
if (!data.id) {
|
||||
stats.errors.push(`project missing id: ${file}`)
|
||||
continue
|
||||
}
|
||||
projectIDs.add(data.id)
|
||||
projectValues.push({
|
||||
id: data.id,
|
||||
worktree: data.worktree ?? "/",
|
||||
vcs: data.vcs,
|
||||
name: data.name ?? undefined,
|
||||
icon_url: data.icon?.url,
|
||||
icon_color: data.icon?.color,
|
||||
time_created: data.time?.created ?? Date.now(),
|
||||
time_updated: data.time?.updated ?? Date.now(),
|
||||
time_initialized: data.time?.initialized,
|
||||
sandboxes: data.sandboxes ?? [],
|
||||
})
|
||||
} catch (e) {
|
||||
stats.errors.push(`failed to migrate project ${file}: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (projectValues.length > 0) {
|
||||
db.insert(ProjectTable).values(projectValues).onConflictDoNothing().run()
|
||||
stats.projects = projectValues.length
|
||||
}
|
||||
log.info("migrated projects", { count: stats.projects })
|
||||
|
||||
// Migrate sessions (depends on projects)
|
||||
const sessionGlob = new Bun.Glob("session/*/*.json")
|
||||
const sessionFiles = Array.from(sessionGlob.scanSync({ cwd: storageDir, absolute: true }))
|
||||
const sessionValues: (typeof SessionTable.$inferInsert)[] = []
|
||||
|
||||
for (const file of sessionFiles) {
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
|
||||
if (!data.id || !data.projectID) {
|
||||
stats.errors.push(`session missing id or projectID: ${file}`)
|
||||
continue
|
||||
}
|
||||
if (!projectIDs.has(data.projectID)) {
|
||||
log.warn("skipping orphaned session", { sessionID: data.id, projectID: data.projectID })
|
||||
continue
|
||||
}
|
||||
sessionIDs.add(data.id)
|
||||
sessionValues.push({
|
||||
id: data.id,
|
||||
projectID: data.projectID,
|
||||
parentID: data.parentID ?? null,
|
||||
slug: data.slug ?? "",
|
||||
directory: data.directory ?? "",
|
||||
title: data.title ?? "",
|
||||
version: data.version ?? "",
|
||||
share_url: data.share?.url ?? null,
|
||||
summary_additions: data.summary?.additions ?? null,
|
||||
summary_deletions: data.summary?.deletions ?? null,
|
||||
summary_files: data.summary?.files ?? null,
|
||||
summary_diffs: data.summary?.diffs ?? null,
|
||||
revert_messageID: data.revert?.messageID ?? null,
|
||||
revert_partID: data.revert?.partID ?? null,
|
||||
revert_snapshot: data.revert?.snapshot ?? null,
|
||||
revert_diff: data.revert?.diff ?? null,
|
||||
permission: data.permission ?? null,
|
||||
time_created: data.time?.created ?? Date.now(),
|
||||
time_updated: data.time?.updated ?? Date.now(),
|
||||
time_compacting: data.time?.compacting ?? null,
|
||||
time_archived: data.time?.archived ?? null,
|
||||
})
|
||||
} catch (e) {
|
||||
stats.errors.push(`failed to migrate session ${file}: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (sessionValues.length > 0) {
|
||||
db.insert(SessionTable).values(sessionValues).onConflictDoNothing().run()
|
||||
stats.sessions = sessionValues.length
|
||||
}
|
||||
log.info("migrated sessions", { count: stats.sessions })
|
||||
|
||||
// Migrate messages (depends on sessions)
|
||||
const messageGlob = new Bun.Glob("message/*/*.json")
|
||||
const messageFiles = Array.from(messageGlob.scanSync({ cwd: storageDir, absolute: true }))
|
||||
const messageValues: (typeof MessageTable.$inferInsert)[] = []
|
||||
|
||||
for (const file of messageFiles) {
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
|
||||
if (!data.id || !data.sessionID) {
|
||||
stats.errors.push(`message missing id or sessionID: ${file}`)
|
||||
continue
|
||||
}
|
||||
if (!sessionIDs.has(data.sessionID)) {
|
||||
log.warn("skipping orphaned message", { messageID: data.id, sessionID: data.sessionID })
|
||||
continue
|
||||
}
|
||||
messageIDs.add(data.id)
|
||||
const { id, sessionID, role, ...rest } = data
|
||||
messageValues.push({ id, sessionID, role, data: rest })
|
||||
} catch (e) {
|
||||
stats.errors.push(`failed to migrate message ${file}: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (messageValues.length > 0) {
|
||||
db.insert(MessageTable).values(messageValues).onConflictDoNothing().run()
|
||||
stats.messages = messageValues.length
|
||||
}
|
||||
log.info("migrated messages", { count: stats.messages })
|
||||
|
||||
// Migrate parts (depends on messages)
|
||||
const partGlob = new Bun.Glob("part/*/*.json")
|
||||
const partFiles = Array.from(partGlob.scanSync({ cwd: storageDir, absolute: true }))
|
||||
const partValues: (typeof PartTable.$inferInsert)[] = []
|
||||
|
||||
for (const file of partFiles) {
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
|
||||
if (!data.id || !data.messageID) {
|
||||
stats.errors.push(`part missing id or messageID: ${file}`)
|
||||
continue
|
||||
}
|
||||
if (!messageIDs.has(data.messageID)) {
|
||||
log.warn("skipping orphaned part", { partID: data.id, messageID: data.messageID })
|
||||
continue
|
||||
}
|
||||
const { id, messageID, sessionID: _, type, ...rest } = data
|
||||
partValues.push({ id, message_id: messageID, type, data: rest })
|
||||
} catch (e) {
|
||||
stats.errors.push(`failed to migrate part ${file}: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (partValues.length > 0) {
|
||||
db.insert(PartTable).values(partValues).onConflictDoNothing().run()
|
||||
stats.parts = partValues.length
|
||||
}
|
||||
log.info("migrated parts", { count: stats.parts })
|
||||
|
||||
// Migrate session diffs (use prepared statement for batch insert)
|
||||
const diffGlob = new Bun.Glob("session_diff/*.json")
|
||||
const diffFiles = Array.from(diffGlob.scanSync({ cwd: storageDir, absolute: true }))
|
||||
const diffStmt = sqlite.prepare("INSERT OR IGNORE INTO session_diff (session_id, data) VALUES (?, ?)")
|
||||
|
||||
for (const file of diffFiles) {
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
|
||||
const sessionID = path.basename(file, ".json")
|
||||
if (!sessionIDs.has(sessionID)) {
|
||||
log.warn("skipping orphaned session_diff", { sessionID })
|
||||
continue
|
||||
}
|
||||
diffStmt.run(sessionID, JSON.stringify(data))
|
||||
stats.diffs++
|
||||
} catch (e) {
|
||||
stats.errors.push(`failed to migrate session_diff ${file}: ${e}`)
|
||||
}
|
||||
}
|
||||
log.info("migrated session diffs", { count: stats.diffs })
|
||||
|
||||
// Migrate todos
|
||||
const todoGlob = new Bun.Glob("todo/*.json")
|
||||
const todoFiles = Array.from(todoGlob.scanSync({ cwd: storageDir, absolute: true }))
|
||||
const todoValues: (typeof TodoTable.$inferInsert)[] = []
|
||||
|
||||
for (const file of todoFiles) {
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
|
||||
const sessionID = path.basename(file, ".json")
|
||||
if (!sessionIDs.has(sessionID)) {
|
||||
log.warn("skipping orphaned todo", { sessionID })
|
||||
continue
|
||||
}
|
||||
todoValues.push({ sessionID, data })
|
||||
} catch (e) {
|
||||
stats.errors.push(`failed to migrate todo ${file}: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (todoValues.length > 0) {
|
||||
db.insert(TodoTable).values(todoValues).onConflictDoNothing().run()
|
||||
stats.todos = todoValues.length
|
||||
}
|
||||
log.info("migrated todos", { count: stats.todos })
|
||||
|
||||
// Migrate permissions
|
||||
const permGlob = new Bun.Glob("permission/*.json")
|
||||
const permFiles = Array.from(permGlob.scanSync({ cwd: storageDir, absolute: true }))
|
||||
const permValues: (typeof PermissionTable.$inferInsert)[] = []
|
||||
|
||||
for (const file of permFiles) {
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
|
||||
const projectID = path.basename(file, ".json")
|
||||
if (!projectIDs.has(projectID)) {
|
||||
log.warn("skipping orphaned permission", { projectID })
|
||||
continue
|
||||
}
|
||||
permValues.push({ projectID, data })
|
||||
} catch (e) {
|
||||
stats.errors.push(`failed to migrate permission ${file}: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (permValues.length > 0) {
|
||||
db.insert(PermissionTable).values(permValues).onConflictDoNothing().run()
|
||||
stats.permissions = permValues.length
|
||||
}
|
||||
log.info("migrated permissions", { count: stats.permissions })
|
||||
|
||||
// Migrate session shares
|
||||
const shareGlob = new Bun.Glob("session_share/*.json")
|
||||
const shareFiles = Array.from(shareGlob.scanSync({ cwd: storageDir, absolute: true }))
|
||||
const shareValues: (typeof SessionShareTable.$inferInsert)[] = []
|
||||
|
||||
for (const file of shareFiles) {
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
|
||||
const sessionID = path.basename(file, ".json")
|
||||
if (!sessionIDs.has(sessionID)) {
|
||||
log.warn("skipping orphaned session_share", { sessionID })
|
||||
continue
|
||||
}
|
||||
shareValues.push({ sessionID, data })
|
||||
} catch (e) {
|
||||
stats.errors.push(`failed to migrate session_share ${file}: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (shareValues.length > 0) {
|
||||
db.insert(SessionShareTable).values(shareValues).onConflictDoNothing().run()
|
||||
stats.shares = shareValues.length
|
||||
}
|
||||
log.info("migrated session shares", { count: stats.shares })
|
||||
|
||||
// Migrate shares (downloaded shared sessions, no FK)
|
||||
const share2Glob = new Bun.Glob("share/*.json")
|
||||
const share2Files = Array.from(share2Glob.scanSync({ cwd: storageDir, absolute: true }))
|
||||
const share2Values: (typeof ShareTable.$inferInsert)[] = []
|
||||
|
||||
for (const file of share2Files) {
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(file, "utf-8"))
|
||||
const sessionID = path.basename(file, ".json")
|
||||
share2Values.push({ sessionID, data })
|
||||
} catch (e) {
|
||||
stats.errors.push(`failed to migrate share ${file}: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (share2Values.length > 0) {
|
||||
db.insert(ShareTable).values(share2Values).onConflictDoNothing().run()
|
||||
}
|
||||
|
||||
sqlite.run("COMMIT")
|
||||
} catch (e) {
|
||||
sqlite.run("ROLLBACK")
|
||||
throw e
|
||||
}
|
||||
|
||||
// Mark migration complete
|
||||
fs.writeFileSync(migrationMarker, Date.now().toString())
|
||||
|
||||
log.info("json migration complete", {
|
||||
projects: stats.projects,
|
||||
sessions: stats.sessions,
|
||||
messages: stats.messages,
|
||||
parts: stats.parts,
|
||||
diffs: stats.diffs,
|
||||
todos: stats.todos,
|
||||
permissions: stats.permissions,
|
||||
shares: stats.shares,
|
||||
errorCount: stats.errors.length,
|
||||
})
|
||||
|
||||
if (stats.errors.length > 0) {
|
||||
log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
|
||||
}
|
||||
|
||||
return stats
|
||||
}
|
||||
packages/opencode/src/storage/migrations.generated.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
// Auto-generated - do not edit
import m0 from "../../migration/0000_normal_wind_dancer.sql" with { type: "text" }

export const migrations = [
  { name: "0000_normal_wind_dancer", sql: m0 },
]
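Each drizzle-kit migration file is imported as raw text and registered here by name; the `migrate` function in db.ts above applies any entry whose name is missing from the `_migrations` table. A hedged sketch of how this generated file might look after a second migration exists (the `0001_add_index` name is invented for illustration; the real names come from drizzle-kit):

// Hypothetical next revision of this generated file.
import m0 from "../../migration/0000_normal_wind_dancer.sql" with { type: "text" }
import m1 from "../../migration/0001_add_index.sql" with { type: "text" } // hypothetical migration

export const migrations = [
  { name: "0000_normal_wind_dancer", sql: m0 },
  { name: "0001_add_index", sql: m1 },
]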
@@ -1,227 +0,0 @@
|
||||
import { Log } from "../util/log"
|
||||
import path from "path"
|
||||
import fs from "fs/promises"
|
||||
import { Global } from "../global"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import { lazy } from "../util/lazy"
|
||||
import { Lock } from "../util/lock"
|
||||
import { $ } from "bun"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import z from "zod"
|
||||
|
||||
export namespace Storage {
|
||||
const log = Log.create({ service: "storage" })
|
||||
|
||||
type Migration = (dir: string) => Promise<void>
|
||||
|
||||
export const NotFoundError = NamedError.create(
|
||||
"NotFoundError",
|
||||
z.object({
|
||||
message: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
const MIGRATIONS: Migration[] = [
|
||||
async (dir) => {
|
||||
const project = path.resolve(dir, "../project")
|
||||
if (!(await Filesystem.isDir(project))) return
|
||||
for await (const projectDir of new Bun.Glob("*").scan({
|
||||
cwd: project,
|
||||
onlyFiles: false,
|
||||
})) {
|
||||
log.info(`migrating project ${projectDir}`)
|
||||
let projectID = projectDir
|
||||
const fullProjectDir = path.join(project, projectDir)
|
||||
let worktree = "/"
|
||||
|
||||
if (projectID !== "global") {
|
||||
for await (const msgFile of new Bun.Glob("storage/session/message/*/*.json").scan({
|
||||
cwd: path.join(project, projectDir),
|
||||
absolute: true,
|
||||
})) {
|
||||
const json = await Bun.file(msgFile).json()
|
||||
worktree = json.path?.root
|
||||
if (worktree) break
|
||||
}
|
||||
if (!worktree) continue
|
||||
if (!(await Filesystem.isDir(worktree))) continue
|
||||
const [id] = await $`git rev-list --max-parents=0 --all`
|
||||
.quiet()
|
||||
.nothrow()
|
||||
.cwd(worktree)
|
||||
.text()
|
||||
.then((x) =>
|
||||
x
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((x) => x.trim())
|
||||
.toSorted(),
|
||||
)
|
||||
if (!id) continue
|
||||
projectID = id
|
||||
|
||||
await Bun.write(
|
||||
path.join(dir, "project", projectID + ".json"),
|
||||
JSON.stringify({
|
||||
id,
|
||||
vcs: "git",
|
||||
worktree,
|
||||
time: {
|
||||
created: Date.now(),
|
||||
initialized: Date.now(),
|
||||
},
|
||||
}),
|
||||
)
|
||||
|
||||
log.info(`migrating sessions for project ${projectID}`)
|
||||
for await (const sessionFile of new Bun.Glob("storage/session/info/*.json").scan({
|
||||
cwd: fullProjectDir,
|
||||
absolute: true,
|
||||
})) {
|
||||
const dest = path.join(dir, "session", projectID, path.basename(sessionFile))
|
||||
log.info("copying", {
|
||||
sessionFile,
|
||||
dest,
|
||||
})
|
||||
const session = await Bun.file(sessionFile).json()
|
||||
await Bun.write(dest, JSON.stringify(session))
|
||||
log.info(`migrating messages for session ${session.id}`)
|
||||
for await (const msgFile of new Bun.Glob(`storage/session/message/${session.id}/*.json`).scan({
|
||||
cwd: fullProjectDir,
|
||||
absolute: true,
|
||||
})) {
|
||||
const dest = path.join(dir, "message", session.id, path.basename(msgFile))
|
||||
log.info("copying", {
|
||||
msgFile,
|
||||
dest,
|
||||
})
|
||||
const message = await Bun.file(msgFile).json()
|
||||
await Bun.write(dest, JSON.stringify(message))
|
||||
|
||||
log.info(`migrating parts for message ${message.id}`)
|
||||
for await (const partFile of new Bun.Glob(`storage/session/part/${session.id}/${message.id}/*.json`).scan(
|
||||
{
|
||||
cwd: fullProjectDir,
|
||||
absolute: true,
|
||||
},
|
||||
)) {
|
||||
const dest = path.join(dir, "part", message.id, path.basename(partFile))
|
||||
const part = await Bun.file(partFile).json()
|
||||
log.info("copying", {
|
||||
partFile,
|
||||
dest,
|
||||
})
|
||||
await Bun.write(dest, JSON.stringify(part))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
async (dir) => {
|
||||
for await (const item of new Bun.Glob("session/*/*.json").scan({
|
||||
cwd: dir,
|
||||
absolute: true,
|
||||
})) {
|
||||
const session = await Bun.file(item).json()
|
||||
if (!session.projectID) continue
|
||||
if (!session.summary?.diffs) continue
|
||||
const { diffs } = session.summary
|
||||
await Bun.file(path.join(dir, "session_diff", session.id + ".json")).write(JSON.stringify(diffs))
|
||||
await Bun.file(path.join(dir, "session", session.projectID, session.id + ".json")).write(
|
||||
JSON.stringify({
|
||||
...session,
|
||||
summary: {
|
||||
additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
|
||||
deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
|
||||
},
|
||||
}),
|
||||
)
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
const state = lazy(async () => {
|
||||
const dir = path.join(Global.Path.data, "storage")
|
||||
const migration = await Bun.file(path.join(dir, "migration"))
|
||||
.json()
|
||||
.then((x) => parseInt(x))
|
||||
.catch(() => 0)
|
||||
for (let index = migration; index < MIGRATIONS.length; index++) {
|
||||
log.info("running migration", { index })
|
||||
const migration = MIGRATIONS[index]
|
||||
await migration(dir).catch(() => log.error("failed to run migration", { index }))
|
||||
await Bun.write(path.join(dir, "migration"), (index + 1).toString())
|
||||
}
|
||||
return {
|
||||
dir,
|
||||
}
|
||||
})
|
||||
|
||||
export async function remove(key: string[]) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
const target = path.join(dir, ...key) + ".json"
|
||||
return withErrorHandling(async () => {
|
||||
await fs.unlink(target).catch(() => {})
|
||||
})
|
||||
}
|
||||
|
||||
export async function read<T>(key: string[]) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
const target = path.join(dir, ...key) + ".json"
|
||||
return withErrorHandling(async () => {
|
||||
using _ = await Lock.read(target)
|
||||
const result = await Bun.file(target).json()
|
||||
return result as T
|
||||
})
|
||||
}
|
||||
|
||||
export async function update<T>(key: string[], fn: (draft: T) => void) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
const target = path.join(dir, ...key) + ".json"
|
||||
return withErrorHandling(async () => {
|
||||
using _ = await Lock.write(target)
|
||||
const content = await Bun.file(target).json()
|
||||
fn(content)
|
||||
await Bun.write(target, JSON.stringify(content, null, 2))
|
||||
return content as T
|
||||
})
|
||||
}
|
||||
|
||||
export async function write<T>(key: string[], content: T) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
const target = path.join(dir, ...key) + ".json"
|
||||
return withErrorHandling(async () => {
|
||||
using _ = await Lock.write(target)
|
||||
await Bun.write(target, JSON.stringify(content, null, 2))
|
||||
})
|
||||
}
|
||||
|
||||
async function withErrorHandling<T>(body: () => Promise<T>) {
|
||||
return body().catch((e) => {
|
||||
if (!(e instanceof Error)) throw e
|
||||
const errnoException = e as NodeJS.ErrnoException
|
||||
if (errnoException.code === "ENOENT") {
|
||||
throw new NotFoundError({ message: `Resource not found: ${errnoException.path}` })
|
||||
}
|
||||
throw e
|
||||
})
|
||||
}
|
||||
|
||||
const glob = new Bun.Glob("**/*")
|
||||
export async function list(prefix: string[]) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
try {
|
||||
const result = await Array.fromAsync(
|
||||
glob.scan({
|
||||
cwd: path.join(dir, ...prefix),
|
||||
onlyFiles: true,
|
||||
}),
|
||||
).then((results) => results.map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)]))
|
||||
result.sort()
|
||||
return result
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,6 @@ import { test, expect } from "bun:test"
import os from "os"
import { PermissionNext } from "../../src/permission/next"
import { Instance } from "../../src/project/instance"
import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture"

// fromConfig tests
@@ -1,7 +1,9 @@
import { describe, expect, test } from "bun:test"
import { Project } from "../../src/project/project"
import { Log } from "../../src/util/log"
import { Storage } from "../../src/storage/storage"
import { Database } from "../../src/storage/db"
import { ProjectTable } from "../../src/project/project.sql"
import { eq } from "drizzle-orm"
import { $ } from "bun"
import path from "path"
import { tmpdir } from "../fixture/fixture"
@@ -99,11 +101,12 @@ describe("Project.discover", () => {

    await Project.discover(project)

    const updated = await Storage.read<Project.Info>(["project", project.id])
    expect(updated.icon).toBeDefined()
    expect(updated.icon?.url).toStartWith("data:")
    expect(updated.icon?.url).toContain("base64")
    expect(updated.icon?.color).toBeUndefined()
    const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, project.id)).get())
    const updated = row ? Project.fromRow(row) : undefined
    expect(updated?.icon).toBeDefined()
    expect(updated?.icon?.url).toStartWith("data:")
    expect(updated?.icon?.url).toContain("base64")
    expect(updated?.icon?.color).toBeUndefined()
  })

  test("should not discover non-image files", async () => {
@@ -114,7 +117,8 @@ describe("Project.discover", () => {

    await Project.discover(project)

    const updated = await Storage.read<Project.Info>(["project", project.id])
    expect(updated.icon).toBeUndefined()
    const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, project.id)).get())
    const updated = row ? Project.fromRow(row) : undefined
    expect(updated?.icon).toBeUndefined()
  })
})
@@ -1,8 +1,7 @@
import { describe, expect, test, beforeEach, afterEach } from "bun:test"
import { describe, expect, test } from "bun:test"
import path from "path"
import { Session } from "../../src/session"
import { SessionRevert } from "../../src/session/revert"
import { SessionCompaction } from "../../src/session/compaction"
import { MessageV2 } from "../../src/session/message-v2"
import { Log } from "../../src/util/log"
import { Instance } from "../../src/project/instance"
@@ -53,10 +52,6 @@ describe("revert + compact workflow", () => {
        sessionID,
        mode: "default",
        agent: "default",
        path: {
          cwd: tmp.path,
          root: tmp.path,
        },
        cost: 0,
        tokens: {
          output: 0,
@@ -113,10 +108,6 @@ describe("revert + compact workflow", () => {
        sessionID,
        mode: "default",
        agent: "default",
        path: {
          cwd: tmp.path,
          root: tmp.path,
        },
        cost: 0,
        tokens: {
          output: 0,
@@ -227,10 +218,6 @@ describe("revert + compact workflow", () => {
        sessionID,
        mode: "default",
        agent: "default",
        path: {
          cwd: tmp.path,
          root: tmp.path,
        },
        cost: 0,
        tokens: {
          output: 0,
packages/opencode/test/storage/json-migration.test.ts (new file, 628 lines)
@@ -0,0 +1,628 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
|
||||
import { Database } from "bun:sqlite"
|
||||
import { drizzle } from "drizzle-orm/bun-sqlite"
|
||||
import { eq } from "drizzle-orm"
|
||||
import path from "path"
|
||||
import fs from "fs/promises"
|
||||
import os from "os"
|
||||
import { migrateFromJson } from "../../src/storage/json-migration"
|
||||
import { ProjectTable } from "../../src/project/project.sql"
|
||||
import { Project } from "../../src/project/project"
|
||||
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../../src/session/session.sql"
|
||||
import { SessionShareTable, ShareTable } from "../../src/share/share.sql"
|
||||
import { migrations } from "../../src/storage/migrations.generated"
|
||||
|
||||
// Test fixtures
|
||||
const fixtures = {
|
||||
project: {
|
||||
id: "proj_test123abc",
|
||||
name: "Test Project",
|
||||
worktree: "/test/path",
|
||||
vcs: "git" as const,
|
||||
sandboxes: [],
|
||||
},
|
||||
session: {
|
||||
id: "ses_test456def",
|
||||
projectID: "proj_test123abc",
|
||||
slug: "test-session",
|
||||
directory: "/test/path",
|
||||
title: "Test Session",
|
||||
version: "1.0.0",
|
||||
time: { created: 1700000000000, updated: 1700000001000 },
|
||||
},
|
||||
message: {
|
||||
id: "msg_test789ghi",
|
||||
sessionID: "ses_test456def",
|
||||
role: "user" as const,
|
||||
agent: "default",
|
||||
model: { providerID: "openai", modelID: "gpt-4" },
|
||||
time: { created: 1700000000000 },
|
||||
},
|
||||
part: {
|
||||
id: "prt_testabc123",
|
||||
messageID: "msg_test789ghi",
|
||||
sessionID: "ses_test456def",
|
||||
type: "text" as const,
|
||||
text: "Hello, world!",
|
||||
},
|
||||
}
|
||||
|
||||
// Helper to create test storage directory structure
|
||||
async function setupStorageDir(baseDir: string) {
|
||||
const storageDir = path.join(baseDir, "storage")
|
||||
await fs.mkdir(path.join(storageDir, "project"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "session", "proj_test123abc"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "message", "ses_test456def"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "part", "msg_test789ghi"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "session_diff"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "todo"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "permission"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "session_share"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "share"), { recursive: true })
|
||||
// Create legacy marker to indicate JSON storage exists
|
||||
await Bun.write(path.join(storageDir, "migration"), "1")
|
||||
return storageDir
|
||||
}
|
||||
|
||||
// Helper to create in-memory test database with schema
|
||||
function createTestDb() {
|
||||
const sqlite = new Database(":memory:")
|
||||
sqlite.exec("PRAGMA foreign_keys = ON")
|
||||
|
||||
// Apply schema migrations
|
||||
for (const migration of migrations) {
|
||||
const statements = migration.sql.split("--> statement-breakpoint")
|
||||
for (const stmt of statements) {
|
||||
const trimmed = stmt.trim()
|
||||
if (trimmed) sqlite.exec(trimmed)
|
||||
}
|
||||
}
|
||||
|
||||
return sqlite
|
||||
}
|
||||
|
||||
describe("JSON to SQLite migration", () => {
|
||||
let tmpDir: string
|
||||
let storageDir: string
|
||||
let sqlite: Database
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = path.join(os.tmpdir(), "opencode-migration-test-" + Math.random().toString(36).slice(2))
|
||||
await fs.mkdir(tmpDir, { recursive: true })
|
||||
storageDir = await setupStorageDir(tmpDir)
|
||||
sqlite = createTestDb()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
sqlite.close()
|
||||
await fs.rm(tmpDir, { recursive: true, force: true })
|
||||
})
|
||||
|
||||
describe("basic functionality", () => {
|
||||
test("migrates all entity types successfully", async () => {
|
||||
// Write test fixtures
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
|
||||
JSON.stringify(fixtures.message),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "part", fixtures.message.id, `${fixtures.part.id}.json`),
|
||||
JSON.stringify(fixtures.part),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.projects).toBe(1)
|
||||
expect(stats?.sessions).toBe(1)
|
||||
expect(stats?.messages).toBe(1)
|
||||
expect(stats?.parts).toBe(1)
|
||||
expect(stats?.errors.length).toBe(0)
|
||||
|
||||
// Verify data in database
|
||||
const db = drizzle(sqlite)
|
||||
const projects = db.select().from(ProjectTable).all()
|
||||
expect(projects.length).toBe(1)
|
||||
expect(projects[0].id).toBe(fixtures.project.id)
|
||||
|
||||
const sessions = db.select().from(SessionTable).all()
|
||||
expect(sessions.length).toBe(1)
|
||||
expect(sessions[0].id).toBe(fixtures.session.id)
|
||||
|
||||
const messages = db.select().from(MessageTable).all()
|
||||
expect(messages.length).toBe(1)
|
||||
expect(messages[0].id).toBe(fixtures.message.id)
|
||||
|
||||
const parts = db.select().from(PartTable).all()
|
||||
expect(parts.length).toBe(1)
|
||||
expect(parts[0].id).toBe(fixtures.part.id)
|
||||
})
|
||||
|
||||
test("skips migration when marker file exists", async () => {
|
||||
// Create marker file
|
||||
await Bun.write(path.join(storageDir, "sqlite-migrated"), Date.now().toString())
|
||||
|
||||
// Write project that should NOT be migrated
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats).toBeUndefined()
|
||||
|
||||
// Verify nothing was migrated
|
||||
const db = drizzle(sqlite)
|
||||
const projects = db.select().from(ProjectTable).all()
|
||||
expect(projects.length).toBe(0)
|
||||
})
|
||||
|
||||
test("skips migration when no JSON storage exists", async () => {
|
||||
// Remove the legacy migration marker
|
||||
await fs.rm(path.join(storageDir, "migration"))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats).toBeUndefined()
|
||||
|
||||
// Marker file should be created
|
||||
const marker = await Bun.file(path.join(storageDir, "sqlite-migrated")).exists()
|
||||
expect(marker).toBe(true)
|
||||
})
|
||||
|
||||
test("creates marker file after successful migration", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
|
||||
await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
const marker = await Bun.file(path.join(storageDir, "sqlite-migrated")).exists()
|
||||
expect(marker).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe("project migration", () => {
|
||||
test("migrates project with all fields", async () => {
|
||||
const project = { ...fixtures.project, icon: { url: "data:image/png;base64,..." } }
|
||||
await Bun.write(path.join(storageDir, "project", `${project.id}.json`), JSON.stringify(project))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.projects).toBe(1)
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(ProjectTable).where(eq(ProjectTable.id, project.id)).get()
|
||||
const migrated = row ? Project.fromRow(row) : undefined
|
||||
expect(migrated?.id).toBe(project.id)
|
||||
expect(migrated?.icon?.url).toBe(project.icon.url)
|
||||
})
|
||||
|
||||
test("skips project with missing id field", async () => {
|
||||
const invalidProject = { name: "No ID Project" }
|
||||
await Bun.write(path.join(storageDir, "project", "invalid.json"), JSON.stringify(invalidProject))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.projects).toBe(0)
|
||||
expect(stats?.errors.length).toBe(1)
|
||||
expect(stats?.errors[0]).toContain("missing id")
|
||||
})
|
||||
|
||||
test("skips project with invalid JSON", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", "bad.json"), "{ invalid json }")
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.projects).toBe(0)
|
||||
expect(stats?.errors.length).toBe(1)
|
||||
expect(stats?.errors[0]).toContain("failed to migrate project")
|
||||
})
|
||||
})
|
||||
|
||||
describe("session migration", () => {
|
||||
test("migrates session with valid projectID", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.sessions).toBe(1)
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(SessionTable).where(eq(SessionTable.id, fixtures.session.id)).get()
|
||||
expect(row?.id).toBe(fixtures.session.id)
|
||||
expect(row?.projectID).toBe(fixtures.project.id)
|
||||
expect(row?.time_created).toBe(fixtures.session.time.created)
|
||||
expect(row?.time_updated).toBe(fixtures.session.time.updated)
|
||||
})
|
||||
|
||||
test("migrates session with parentID", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
const childSession = { ...fixtures.session, id: "ses_child123", parentID: fixtures.session.id }
|
||||
|
||||
// Create parent session first
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${childSession.id}.json`),
|
||||
JSON.stringify(childSession),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.sessions).toBe(2)
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(SessionTable).where(eq(SessionTable.id, childSession.id)).get()
|
||||
expect(row?.parentID).toBe(fixtures.session.id)
|
||||
})
|
||||
|
||||
test("skips orphaned session (missing project)", async () => {
|
||||
// Don't create the project, just the session
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.sessions).toBe(0)
|
||||
// Orphaned sessions are logged as warnings, not errors
|
||||
expect(stats?.errors.length).toBe(0)
|
||||
})
|
||||
|
||||
test("handles missing time fields with Date.now() fallback", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
const sessionNoTime = { ...fixtures.session, time: undefined }
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(sessionNoTime),
|
||||
)
|
||||
|
||||
const before = Date.now()
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
const after = Date.now()
|
||||
|
||||
expect(stats?.sessions).toBe(1)
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(SessionTable).where(eq(SessionTable.id, fixtures.session.id)).get()
|
||||
expect(row?.time_created).toBeGreaterThanOrEqual(before)
|
||||
expect(row?.time_created).toBeLessThanOrEqual(after)
|
||||
})
|
||||
|
||||
test("skips session with missing required fields", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
const invalidSession = { id: "ses_noproj" } // missing projectID
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, "invalid.json"),
|
||||
JSON.stringify(invalidSession),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.sessions).toBe(0)
|
||||
expect(stats?.errors.length).toBe(1)
|
||||
expect(stats?.errors[0]).toContain("missing id or projectID")
|
||||
})
|
||||
})
|
||||
|
||||
describe("message migration", () => {
|
||||
test("migrates message with valid sessionID", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
|
||||
JSON.stringify(fixtures.message),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.messages).toBe(1)
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(MessageTable).where(eq(MessageTable.id, fixtures.message.id)).get()
|
||||
expect(row?.id).toBe(fixtures.message.id)
|
||||
expect(row?.sessionID).toBe(fixtures.session.id)
|
||||
})
|
||||
|
||||
test("skips orphaned message (missing session)", async () => {
|
||||
// Don't create the session, just the message
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
|
||||
JSON.stringify(fixtures.message),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.messages).toBe(0)
|
||||
})
|
||||
|
||||
test("skips message with missing required fields", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
const invalidMessage = { id: "msg_nosess" } // missing sessionID
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", fixtures.session.id, "invalid.json"),
|
||||
JSON.stringify(invalidMessage),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.messages).toBe(0)
|
||||
expect(stats?.errors.length).toBe(1)
|
||||
expect(stats?.errors[0]).toContain("missing id or sessionID")
|
||||
})
|
||||
})
|
||||
|
||||
describe("part migration", () => {
|
||||
test("migrates part with valid messageID", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
|
||||
JSON.stringify(fixtures.message),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "part", fixtures.message.id, `${fixtures.part.id}.json`),
|
||||
JSON.stringify(fixtures.part),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.parts).toBe(1)
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(PartTable).where(eq(PartTable.id, fixtures.part.id)).get()
|
||||
expect(row?.id).toBe(fixtures.part.id)
|
||||
expect(row?.message_id).toBe(fixtures.message.id)
|
||||
})
|
||||
|
||||
test("skips orphaned part (missing message)", async () => {
|
||||
await Bun.write(
|
||||
path.join(storageDir, "part", fixtures.message.id, `${fixtures.part.id}.json`),
|
||||
JSON.stringify(fixtures.part),
|
||||
)
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.parts).toBe(0)
|
||||
})
|
||||
|
||||
test("skips part with missing required fields", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", fixtures.session.id, `${fixtures.message.id}.json`),
|
||||
JSON.stringify(fixtures.message),
|
||||
)
|
||||
const invalidPart = { id: "prt_nomsg" } // missing messageID and sessionID
|
||||
await Bun.write(path.join(storageDir, "part", fixtures.message.id, "invalid.json"), JSON.stringify(invalidPart))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.parts).toBe(0)
|
||||
expect(stats?.errors.length).toBe(1)
|
||||
expect(stats?.errors[0]).toContain("missing id or messageID")
|
||||
})
|
||||
})
|
||||
|
||||
describe("auxiliary tables", () => {
|
||||
test("migrates session_diff correctly", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
const diff = [{ file: "test.ts", before: "", after: "console.log('hello')", additions: 10, deletions: 5 }]
|
||||
await Bun.write(path.join(storageDir, "session_diff", `${fixtures.session.id}.json`), JSON.stringify(diff))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.diffs).toBe(1)
|
||||
// Query raw since TypeScript schema doesn't match migration
|
||||
const row = sqlite
|
||||
.query<{ data: string }, [string]>("SELECT data FROM session_diff WHERE session_id = ?")
|
||||
.get(fixtures.session.id)
|
||||
expect(row?.data).toBeDefined()
|
||||
const data = JSON.parse(row!.data)
|
||||
expect(data[0].file).toBe("test.ts")
|
||||
})
|
||||
|
||||
test("migrates todo correctly", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
const todo = [{ id: "1", content: "Test todo", status: "pending", priority: "high" }]
|
||||
await Bun.write(path.join(storageDir, "todo", `${fixtures.session.id}.json`), JSON.stringify(todo))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.todos).toBe(1)
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(TodoTable).where(eq(TodoTable.sessionID, fixtures.session.id)).get()
|
||||
expect(row?.data).toBeDefined()
|
||||
})
|
||||
|
||||
test("migrates permission correctly", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
const permission = [{ permission: "bash", pattern: "*", action: "allow" as const }]
|
||||
await Bun.write(path.join(storageDir, "permission", `${fixtures.project.id}.json`), JSON.stringify(permission))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.permissions).toBe(1)
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(PermissionTable).where(eq(PermissionTable.projectID, fixtures.project.id)).get()
|
||||
expect(row?.data).toBeDefined()
|
||||
})
|
||||
|
||||
test("migrates session_share correctly", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", fixtures.project.id, `${fixtures.session.id}.json`),
|
||||
JSON.stringify(fixtures.session),
|
||||
)
|
||||
const share = { id: "share_123", secret: "abc123", url: "https://share.example.com/abc123" }
|
||||
await Bun.write(path.join(storageDir, "session_share", `${fixtures.session.id}.json`), JSON.stringify(share))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.shares).toBe(1)
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(SessionShareTable).where(eq(SessionShareTable.sessionID, fixtures.session.id)).get()
|
||||
expect(row?.data.secret).toBe("abc123")
|
||||
})
|
||||
|
||||
test("migrates share correctly (no FK check)", async () => {
|
||||
// Share table has no FK, so we can create without project/session
|
||||
const share = { secret: "test_secret", url: "https://example.com/share" }
|
||||
const shareID = "ses_shared123"
|
||||
await Bun.write(path.join(storageDir, "share", `${shareID}.json`), JSON.stringify(share))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
// Note: shares count is tracked under stats.shares but share table is migrated separately
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(ShareTable).where(eq(ShareTable.sessionID, shareID)).get()
|
||||
expect(row?.data.secret).toBe("test_secret")
|
||||
})
|
||||
|
||||
test("skips orphaned session_diff", async () => {
|
||||
const diff = { files: [] }
|
||||
await Bun.write(path.join(storageDir, "session_diff", "ses_nonexistent.json"), JSON.stringify(diff))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.diffs).toBe(0)
|
||||
})
|
||||
|
||||
test("skips orphaned todo", async () => {
|
||||
const todo = { items: [] }
|
||||
await Bun.write(path.join(storageDir, "todo", "ses_nonexistent.json"), JSON.stringify(todo))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.todos).toBe(0)
|
||||
})
|
||||
|
||||
test("skips orphaned permission", async () => {
|
||||
const permission = { rules: [] }
|
||||
await Bun.write(path.join(storageDir, "permission", "proj_nonexistent.json"), JSON.stringify(permission))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.permissions).toBe(0)
|
||||
})
|
||||
|
||||
test("skips orphaned session_share", async () => {
|
||||
const share = { secret: "test" }
|
||||
await Bun.write(path.join(storageDir, "session_share", "ses_nonexistent.json"), JSON.stringify(share))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.shares).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("error handling", () => {
|
||||
test("continues migration after single file error", async () => {
|
||||
// Write one valid and one invalid project
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
await Bun.write(path.join(storageDir, "project", "invalid.json"), "{ invalid json }")
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.projects).toBe(1) // Valid one was migrated
|
||||
expect(stats?.errors.length).toBe(1) // Error was recorded
|
||||
})
|
||||
|
||||
test("collects all errors in stats.errors array", async () => {
|
||||
// Write multiple invalid files
|
||||
await Bun.write(path.join(storageDir, "project", "bad1.json"), "{ invalid }")
|
||||
await Bun.write(path.join(storageDir, "project", "bad2.json"), "not json at all")
|
||||
await Bun.write(path.join(storageDir, "project", "bad3.json"), JSON.stringify({ name: "no id" }))
|
||||
|
||||
const stats = await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
expect(stats?.projects).toBe(0)
|
||||
expect(stats?.errors.length).toBe(3)
|
||||
})
|
||||
})
|
||||
|
||||
describe("data integrity", () => {
|
||||
test("preserves all JSON data fields in data column", async () => {
|
||||
const fullProject = {
|
||||
id: "proj_full",
|
||||
name: "Full Project",
|
||||
worktree: "/path/to/project",
|
||||
vcs: "git" as const,
|
||||
sandboxes: ["/path/one", "/path/two"],
|
||||
time: { created: 1700000000000, updated: 1700000001000 },
|
||||
icon: { url: "data:image/png;base64,abc", color: "#ff0000" },
|
||||
}
|
||||
await Bun.write(path.join(storageDir, "project", `${fullProject.id}.json`), JSON.stringify(fullProject))
|
||||
|
||||
await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(ProjectTable).where(eq(ProjectTable.id, fullProject.id)).get()
|
||||
const data = row ? Project.fromRow(row) : undefined
|
||||
expect(data?.id).toBe(fullProject.id)
|
||||
expect(data?.name).toBe(fullProject.name)
|
||||
expect(data?.sandboxes).toEqual(fullProject.sandboxes)
|
||||
expect(data?.icon?.color).toBe("#ff0000")
|
||||
})
|
||||
|
||||
test("handles unicode in text fields", async () => {
|
||||
const unicodeProject = {
|
||||
id: "proj_unicode",
|
||||
name: "Проект с юникодом 🚀",
|
||||
worktree: "/path/测试",
|
||||
vcs: "git" as const,
|
||||
sandboxes: [],
|
||||
}
|
||||
await Bun.write(path.join(storageDir, "project", `${unicodeProject.id}.json`), JSON.stringify(unicodeProject))
|
||||
|
||||
await migrateFromJson(sqlite, storageDir)
|
||||
|
||||
const db = drizzle(sqlite)
|
||||
const row = db.select().from(ProjectTable).where(eq(ProjectTable.id, unicodeProject.id)).get()
|
||||
const data = row ? Project.fromRow(row) : undefined
|
||||
expect(data?.name).toBe("Проект с юникодом 🚀")
|
||||
expect(data?.worktree).toBe("/path/测试")
|
||||
})
|
||||
|
||||
test("migration is idempotent with onConflictDoNothing", async () => {
|
||||
await Bun.write(path.join(storageDir, "project", `${fixtures.project.id}.json`), JSON.stringify(fixtures.project))
|
||||
|
||||
// Run migration twice (manually, since marker file would block second run)
|
||||
const stats1 = await migrateFromJson(sqlite, storageDir)
|
||||
expect(stats1?.projects).toBe(1)
|
||||
|
||||
// Remove marker and run again
|
||||
await fs.rm(path.join(storageDir, "sqlite-migrated"))
|
||||
const stats2 = await migrateFromJson(sqlite, storageDir)
|
||||
expect(stats2?.projects).toBe(1) // Would be 1 even though already exists (onConflictDoNothing)
|
||||
|
||||
// Verify only one record exists
|
||||
const db = drizzle(sqlite)
|
||||
const projects = db.select().from(ProjectTable).all()
|
||||
expect(projects.length).toBe(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
packages/util/src/context.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import { AsyncLocalStorage } from "node:async_hooks"

export namespace Context {
  export class NotFound extends Error {}

  export function create<T>() {
    const storage = new AsyncLocalStorage<T>()
    return {
      use() {
        const result = storage.getStore()
        if (!result) {
          throw new NotFound()
        }
        return result
      },
      provide<R>(value: T, fn: () => R) {
        return storage.run(value, fn)
      },
    }
  }
}
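This is the AsyncLocalStorage wrapper that db.ts builds its TransactionContext on: `provide` scopes a value to a call tree and `use` retrieves it or throws `NotFound`, letting callers fall back to a default path. A small standalone sketch (the request-ID context and import alias are made up for illustration):

// Sketch only: scoping a value with Context.create and reading it downstream.
import { Context } from "@opencode-ai/util/context" // path assumed from the package layout

const RequestContext = Context.create<{ requestID: string }>()

function handler() {
  // Throws Context.NotFound if called outside provide()
  const { requestID } = RequestContext.use()
  return `handled ${requestID}`
}

RequestContext.provide({ requestID: "req_123" }, () => handler()) // "handled req_123"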
src/storage/migrations.generated.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
// Auto-generated - do not edit
export const migrations: { name: string; sql: string }[] = []