Mirror of https://github.com/anomalyco/opencode.git (synced 2026-02-11 03:14:29 +00:00)

Compare commits: snapshot-s... → sqlite2 (68 commits)
Commits (SHA1):

4666daa581
caf1316116
1de66812bf
94d0c9940a
5952891b1e
d7c8a3f50d
ce353819e8
2dae94e5a3
c6adc19e41
ce56166510
5911e4c06a
42fb840f22
4dcfdf6572
25f3d6d5a9
e19a9e9614
fcc903489b
949e69a9bf
8c30f551e2
cb721497c1
4ec6293054
b7a323355c
d4f053042c
5f552534c7
ad5b790bb3
ed87341c4f
794ecab028
eeb235724b
61084e7f6f
200aef2eb3
f6e375a555
db908deee5
7b72cc3a48
b8cbfd48ec
498cbb2c26
d6fbd255b6
2de1c82bf7
34ebb3d051
9c3e3c1ab5
3ea499f04e
ab13c1d1c4
53b610c331
e3519356f2
2619acc0ff
1bc45dc266
2e8feb1c78
00e60899cc
30a918e9d4
ac16068140
19a41ab297
cd174d8cba
246e901e42
0ccef1b31f
7706f5b6a8
63e38555c9
f40685ab13
a48a5a3462
5e1639de2b
2b05833c32
acdcf7fa88
bf0754caeb
4d50a32979
57edb0ddc5
a614b78c6d
b9f5a34247
81b47a44e2
0c1c07467e
105688bf90
1e7b4768b1
@@ -16,15 +16,12 @@ wip:

For anything in packages/web, use the docs: prefix.
For anything in packages/app, use the ignore: prefix.

Prefer to explain WHY something was done from an end user perspective instead of WHAT was done.

Do not write generic messages like "improved agent experience"; be very specific about what user-facing changes were made.

If there are changes, do a git pull --rebase.
If there are conflicts, DO NOT FIX THEM; notify me and I will fix them.

## GIT DIFF
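A hypothetical commit message following these rules (the change it describes is invented purely for illustration):

```
docs: document that session exports now include todo items
```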
@@ -110,3 +110,4 @@ const table = sqliteTable("session", {

- Avoid mocks as much as possible
- Test actual implementation, do not duplicate logic into tests
- Tests cannot run from repo root (guard: `do-not-run-tests-from-root`); run from package dirs like `packages/opencode`.
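A minimal sketch of a test in that spirit, run from `packages/opencode` with `bun test`; it exercises the real `Project.fromRow` mapping shown later in this diff instead of duplicating its logic (the import path and the row literal are assumptions):

```ts
import { expect, test } from "bun:test"
import { Project } from "../src/project/project" // real implementation, no mocks

test("Project.fromRow maps a bare row to Info", () => {
  const row = {
    id: "global",
    worktree: "/tmp/repo",
    vcs: "git",
    name: null,
    icon_url: null,
    icon_color: null,
    time_created: 1,
    time_updated: 2,
    time_initialized: null,
    sandboxes: [],
  }
  const info = Project.fromRow(row as any) // literal stands in for a drizzle $inferSelect row
  expect(info.icon).toBeUndefined() // no icon_url/icon_color, so no icon object
  expect(info.time.initialized).toBeUndefined()
})
```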
@@ -40,6 +40,8 @@
   "@tailwindcss/vite": "4.1.11",
   "diff": "8.0.2",
   "dompurify": "3.3.1",
+  "drizzle-kit": "1.0.0-beta.12-a5629fb",
+  "drizzle-orm": "1.0.0-beta.12-a5629fb",
   "ai": "5.0.124",
   "hono": "4.10.7",
   "hono-openapi": "1.1.2",
@@ -231,6 +231,24 @@ export function applyDirectoryEvent(input: {
       }
       break
     }
+    case "message.part.delta": {
+      const props = event.properties as { messageID: string; partID: string; field: string; delta: string }
+      const parts = input.store.part[props.messageID]
+      if (!parts) break
+      const result = Binary.search(parts, props.partID, (p) => p.id)
+      if (!result.found) break
+      input.setStore(
+        "part",
+        props.messageID,
+        produce((draft) => {
+          const part = draft[result.index]
+          const field = props.field as keyof typeof part
+          const existing = part[field] as string | undefined
+          ;(part[field] as string) = (existing ?? "") + props.delta
+        }),
+      )
+      break
+    }
     case "vcs.branch.updated": {
       const props = event.properties as { branch: string }
       const next = { branch: props.branch }
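`Binary.search` is an internal helper that is not part of this diff; judging by how it is called above, it presumably behaves like the following sketch over parts kept sorted by `id`:

```ts
// hypothetical sketch of the internal Binary.search helper
export function search<T>(
  items: T[],
  target: string,
  key: (item: T) => string,
): { found: boolean; index: number } {
  let lo = 0
  let hi = items.length - 1
  while (lo <= hi) {
    const mid = (lo + hi) >> 1
    const k = key(items[mid])
    if (k === target) return { found: true, index: mid }
    if (k < target) lo = mid + 1
    else hi = mid - 1
  }
  // not found: index is where the target would be inserted
  return { found: false, index: lo }
}
```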
@@ -12,7 +12,7 @@
   "@opencode-ai/console-resource": "workspace:*",
   "@planetscale/database": "1.19.0",
   "aws4fetch": "1.0.20",
-  "drizzle-orm": "0.41.0",
+  "drizzle-orm": "catalog:",
   "postgres": "3.4.7",
   "stripe": "18.0.0",
   "ulid": "catalog:",

@@ -43,7 +43,7 @@
   "@tsconfig/node22": "22.0.2",
   "@types/bun": "1.3.0",
   "@types/node": "catalog:",
-  "drizzle-kit": "0.30.5",
+  "drizzle-kit": "catalog:",
   "mysql2": "3.14.4",
   "typescript": "catalog:",
   "@typescript/native-preview": "catalog:"
@@ -4,7 +4,6 @@ export * from "drizzle-orm"
 import { Client } from "@planetscale/database"

 import { MySqlTransaction, type MySqlTransactionConfig } from "drizzle-orm/mysql-core"
-import type { ExtractTablesWithRelations } from "drizzle-orm"
 import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from "drizzle-orm/planetscale-serverless"
 import { Context } from "../context"
 import { memo } from "../util/memo"

@@ -14,7 +13,7 @@ export namespace Database {
     PlanetscaleQueryResultHKT,
     PlanetScalePreparedQueryHKT,
     Record<string, never>,
-    ExtractTablesWithRelations<Record<string, never>>
+    any
   >

   const client = memo(() => {

@@ -23,7 +22,7 @@ export namespace Database {
       username: Resource.Database.username,
       password: Resource.Database.password,
     })
-    const db = drizzle(result, {})
+    const db = drizzle({ client: result })
     return db
   })
@@ -1,27 +1,10 @@
-# opencode agent guidelines
+# opencode database guide

-## Build/Test Commands
+## Database

-- **Install**: `bun install`
-- **Run**: `bun run --conditions=browser ./src/index.ts`
-- **Typecheck**: `bun run typecheck` (npm run typecheck)
-- **Test**: `bun test` (runs all tests)
-- **Single test**: `bun test test/tool/tool.test.ts` (specific test file)
-
-## Code Style
-
-- **Runtime**: Bun with TypeScript ESM modules
-- **Imports**: Use relative imports for local modules, named imports preferred
-- **Types**: Zod schemas for validation, TypeScript interfaces for structure
-- **Naming**: camelCase for variables/functions, PascalCase for classes/namespaces
-- **Error handling**: Use Result patterns, avoid throwing exceptions in tools
-- **File structure**: Namespace-based organization (e.g., `Tool.define()`, `Session.create()`)
-
-## Architecture
-
-- **Tools**: Implement `Tool.Info` interface with `execute()` method
-- **Context**: Pass `sessionID` in tool context, use `App.provide()` for DI
-- **Validation**: All inputs validated with Zod schemas
-- **Logging**: Use `Log.create({ service: "name" })` pattern
-- **Storage**: Use `Storage` namespace for persistence
-- **API Client**: The TypeScript TUI (built with SolidJS + OpenTUI) communicates with the OpenCode server using `@opencode-ai/sdk`. When adding/modifying server endpoints in `packages/opencode/src/server/server.ts`, run `./script/generate.ts` to regenerate the SDK and related files.
+- **Schema**: Drizzle schema lives in `src/**/*.sql.ts`.
+- **Naming**: tables and columns use snake_case; join columns are `<entity>_id`; indexes are `<table>_<column>_idx`.
+- **Migrations**: generated by Drizzle Kit using `drizzle.config.ts` (schema: `./src/**/*.sql.ts`, output: `./migration`).
+- **Command**: `bun run db generate --name <slug>`.
+- **Output**: creates `migration/<timestamp>_<slug>/migration.sql` and `snapshot.json`.
+- **Tests**: migration tests should read the per-folder layout (no `_journal.json`).
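A minimal schema sketch following those conventions (the `note` table and its columns are hypothetical; the index name follows the style of the generated migration later in this diff):

```ts
import { index, sqliteTable, text, integer } from "drizzle-orm/sqlite-core"

// hypothetical table: snake_case names, `<entity>_id` join column,
// index named after table and column
export const NoteTable = sqliteTable(
  "note",
  {
    id: text().primaryKey(),
    session_id: text().notNull(),
    body: text().notNull(),
    time_created: integer().notNull(),
  },
  (table) => [index("note_session_idx").on(table.session_id)],
)
```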
packages/opencode/drizzle.config.ts (new file, 10 lines)

@@ -0,0 +1,10 @@
import { defineConfig } from "drizzle-kit"

export default defineConfig({
  dialect: "sqlite",
  schema: "./src/**/*.sql.ts",
  out: "./migration",
  dbCredentials: {
    url: "/home/thdxr/.local/share/opencode/opencode.db",
  },
})
migration.sql (new file, 90 lines)

@@ -0,0 +1,90 @@
CREATE TABLE `project` (
  `id` text PRIMARY KEY,
  `worktree` text NOT NULL,
  `vcs` text,
  `name` text,
  `icon_url` text,
  `icon_color` text,
  `time_created` integer NOT NULL,
  `time_updated` integer NOT NULL,
  `time_initialized` integer,
  `sandboxes` text NOT NULL
);
--> statement-breakpoint
CREATE TABLE `message` (
  `id` text PRIMARY KEY,
  `session_id` text NOT NULL,
  `time_created` integer NOT NULL,
  `time_updated` integer NOT NULL,
  `data` text NOT NULL,
  CONSTRAINT `fk_message_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `part` (
  `id` text PRIMARY KEY,
  `message_id` text NOT NULL,
  `session_id` text NOT NULL,
  `time_created` integer NOT NULL,
  `time_updated` integer NOT NULL,
  `data` text NOT NULL,
  CONSTRAINT `fk_part_message_id_message_id_fk` FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `permission` (
  `project_id` text PRIMARY KEY,
  `time_created` integer NOT NULL,
  `time_updated` integer NOT NULL,
  `data` text NOT NULL,
  CONSTRAINT `fk_permission_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session` (
  `id` text PRIMARY KEY,
  `project_id` text NOT NULL,
  `parent_id` text,
  `slug` text NOT NULL,
  `directory` text NOT NULL,
  `title` text NOT NULL,
  `version` text NOT NULL,
  `share_url` text,
  `summary_additions` integer,
  `summary_deletions` integer,
  `summary_files` integer,
  `summary_diffs` text,
  `revert` text,
  `permission` text,
  `time_created` integer NOT NULL,
  `time_updated` integer NOT NULL,
  `time_compacting` integer,
  `time_archived` integer,
  CONSTRAINT `fk_session_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `todo` (
  `session_id` text NOT NULL,
  `content` text NOT NULL,
  `status` text NOT NULL,
  `priority` text NOT NULL,
  `position` integer NOT NULL,
  `time_created` integer NOT NULL,
  `time_updated` integer NOT NULL,
  CONSTRAINT `todo_pk` PRIMARY KEY(`session_id`, `position`),
  CONSTRAINT `fk_todo_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session_share` (
  `session_id` text PRIMARY KEY,
  `id` text NOT NULL,
  `secret` text NOT NULL,
  `url` text NOT NULL,
  `time_created` integer NOT NULL,
  `time_updated` integer NOT NULL,
  CONSTRAINT `fk_session_share_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-breakpoint
CREATE INDEX `part_message_idx` ON `part` (`message_id`);--> statement-breakpoint
CREATE INDEX `part_session_idx` ON `part` (`session_id`);--> statement-breakpoint
CREATE INDEX `session_project_idx` ON `session` (`project_id`);--> statement-breakpoint
CREATE INDEX `session_parent_idx` ON `session` (`parent_id`);--> statement-breakpoint
CREATE INDEX `todo_session_idx` ON `todo` (`session_id`);
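With this schema, the `data` columns hold full JSON documents; a quick read of one session's messages might look like the sketch below (the session id is hypothetical, and the db path is the default from `drizzle.config.ts` above):

```ts
import { Database } from "bun:sqlite"

const db = new Database("/home/thdxr/.local/share/opencode/opencode.db")
// this lookup is served by message_session_idx from the migration above
const rows = db
  .query("SELECT data FROM message WHERE session_id = ? ORDER BY id")
  .all("ses_example") as { data: string }[]
const messages = rows.map((row) => JSON.parse(row.data))
console.log(`loaded ${messages.length} messages`)
```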
snapshot.json (new file, 796 lines)

@@ -0,0 +1,796 @@
{
  "version": "7",
  "dialect": "sqlite",
  "id": "068758ed-a97a-46f6-8a59-6c639ae7c20c",
  "prevIds": ["00000000-0000-0000-0000-000000000000"],
  "ddl": [
    { "name": "project", "entityType": "tables" },
    { "name": "message", "entityType": "tables" },
    { "name": "part", "entityType": "tables" },
    { "name": "permission", "entityType": "tables" },
    { "name": "session", "entityType": "tables" },
    { "name": "todo", "entityType": "tables" },
    { "name": "session_share", "entityType": "tables" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "id", "entityType": "columns", "table": "project" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "worktree", "entityType": "columns", "table": "project" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "vcs", "entityType": "columns", "table": "project" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "name", "entityType": "columns", "table": "project" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "icon_url", "entityType": "columns", "table": "project" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "icon_color", "entityType": "columns", "table": "project" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_created", "entityType": "columns", "table": "project" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_updated", "entityType": "columns", "table": "project" },
    { "type": "integer", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "time_initialized", "entityType": "columns", "table": "project" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "sandboxes", "entityType": "columns", "table": "project" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "id", "entityType": "columns", "table": "message" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "session_id", "entityType": "columns", "table": "message" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_created", "entityType": "columns", "table": "message" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_updated", "entityType": "columns", "table": "message" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "data", "entityType": "columns", "table": "message" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "id", "entityType": "columns", "table": "part" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "message_id", "entityType": "columns", "table": "part" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "session_id", "entityType": "columns", "table": "part" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_created", "entityType": "columns", "table": "part" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_updated", "entityType": "columns", "table": "part" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "data", "entityType": "columns", "table": "part" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "project_id", "entityType": "columns", "table": "permission" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_created", "entityType": "columns", "table": "permission" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_updated", "entityType": "columns", "table": "permission" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "data", "entityType": "columns", "table": "permission" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "id", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "project_id", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "parent_id", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "slug", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "directory", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "title", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "version", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "share_url", "entityType": "columns", "table": "session" },
    { "type": "integer", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "summary_additions", "entityType": "columns", "table": "session" },
    { "type": "integer", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "summary_deletions", "entityType": "columns", "table": "session" },
    { "type": "integer", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "summary_files", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "summary_diffs", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "revert", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "permission", "entityType": "columns", "table": "session" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_created", "entityType": "columns", "table": "session" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_updated", "entityType": "columns", "table": "session" },
    { "type": "integer", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "time_compacting", "entityType": "columns", "table": "session" },
    { "type": "integer", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "time_archived", "entityType": "columns", "table": "session" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "session_id", "entityType": "columns", "table": "todo" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "content", "entityType": "columns", "table": "todo" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "status", "entityType": "columns", "table": "todo" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "priority", "entityType": "columns", "table": "todo" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "position", "entityType": "columns", "table": "todo" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_created", "entityType": "columns", "table": "todo" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_updated", "entityType": "columns", "table": "todo" },
    { "type": "text", "notNull": false, "autoincrement": false, "default": null, "generated": null, "name": "session_id", "entityType": "columns", "table": "session_share" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "id", "entityType": "columns", "table": "session_share" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "secret", "entityType": "columns", "table": "session_share" },
    { "type": "text", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "url", "entityType": "columns", "table": "session_share" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_created", "entityType": "columns", "table": "session_share" },
    { "type": "integer", "notNull": true, "autoincrement": false, "default": null, "generated": null, "name": "time_updated", "entityType": "columns", "table": "session_share" },
    { "columns": ["session_id"], "tableTo": "session", "columnsTo": ["id"], "onUpdate": "NO ACTION", "onDelete": "CASCADE", "nameExplicit": false, "name": "fk_message_session_id_session_id_fk", "entityType": "fks", "table": "message" },
    { "columns": ["message_id"], "tableTo": "message", "columnsTo": ["id"], "onUpdate": "NO ACTION", "onDelete": "CASCADE", "nameExplicit": false, "name": "fk_part_message_id_message_id_fk", "entityType": "fks", "table": "part" },
    { "columns": ["project_id"], "tableTo": "project", "columnsTo": ["id"], "onUpdate": "NO ACTION", "onDelete": "CASCADE", "nameExplicit": false, "name": "fk_permission_project_id_project_id_fk", "entityType": "fks", "table": "permission" },
    { "columns": ["project_id"], "tableTo": "project", "columnsTo": ["id"], "onUpdate": "NO ACTION", "onDelete": "CASCADE", "nameExplicit": false, "name": "fk_session_project_id_project_id_fk", "entityType": "fks", "table": "session" },
    { "columns": ["session_id"], "tableTo": "session", "columnsTo": ["id"], "onUpdate": "NO ACTION", "onDelete": "CASCADE", "nameExplicit": false, "name": "fk_todo_session_id_session_id_fk", "entityType": "fks", "table": "todo" },
    { "columns": ["session_id"], "tableTo": "session", "columnsTo": ["id"], "onUpdate": "NO ACTION", "onDelete": "CASCADE", "nameExplicit": false, "name": "fk_session_share_session_id_session_id_fk", "entityType": "fks", "table": "session_share" },
    { "columns": ["session_id", "position"], "nameExplicit": false, "name": "todo_pk", "entityType": "pks", "table": "todo" },
    { "columns": ["id"], "nameExplicit": false, "name": "project_pk", "entityType": "pks", "table": "project" },
    { "columns": ["id"], "nameExplicit": false, "name": "message_pk", "entityType": "pks", "table": "message" },
    { "columns": ["id"], "nameExplicit": false, "name": "part_pk", "entityType": "pks", "table": "part" },
    { "columns": ["project_id"], "nameExplicit": false, "name": "permission_pk", "entityType": "pks", "table": "permission" },
    { "columns": ["id"], "nameExplicit": false, "name": "session_pk", "entityType": "pks", "table": "session" },
    { "columns": ["session_id"], "nameExplicit": false, "name": "session_share_pk", "entityType": "pks", "table": "session_share" },
    { "columns": [{ "value": "session_id", "isExpression": false }], "isUnique": false, "where": null, "origin": "manual", "name": "message_session_idx", "entityType": "indexes", "table": "message" },
    { "columns": [{ "value": "message_id", "isExpression": false }], "isUnique": false, "where": null, "origin": "manual", "name": "part_message_idx", "entityType": "indexes", "table": "part" },
    { "columns": [{ "value": "session_id", "isExpression": false }], "isUnique": false, "where": null, "origin": "manual", "name": "part_session_idx", "entityType": "indexes", "table": "part" },
    { "columns": [{ "value": "project_id", "isExpression": false }], "isUnique": false, "where": null, "origin": "manual", "name": "session_project_idx", "entityType": "indexes", "table": "session" },
    { "columns": [{ "value": "parent_id", "isExpression": false }], "isUnique": false, "where": null, "origin": "manual", "name": "session_parent_idx", "entityType": "indexes", "table": "session" },
    { "columns": [{ "value": "session_id", "isExpression": false }], "isUnique": false, "where": null, "origin": "manual", "name": "todo_session_idx", "entityType": "indexes", "table": "todo" }
  ],
  "renames": []
}
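The snapshot's `id`/`prevIds` fields suggest each migration folder chains to its predecessor; a sketch of a consistency check over the per-folder layout (assuming `prevIds` references the prior snapshot's `id`, which is an inference from the fields above):

```ts
import fs from "fs/promises"
import path from "path"

const root = "migration"
const dirs = (await fs.readdir(root)).sort()
let prev = "00000000-0000-0000-0000-000000000000" // root of the chain
for (const dir of dirs) {
  const file = path.join(root, dir, "snapshot.json")
  const snap = JSON.parse(await fs.readFile(file, "utf8"))
  if (!snap.prevIds.includes(prev)) throw new Error(`broken snapshot chain at ${dir}`)
  prev = snap.id
}
console.log(`snapshot chain ok across ${dirs.length} migrations`)
```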
@@ -15,7 +15,8 @@
   "lint": "echo 'Running lint checks...' && bun test --coverage",
   "format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts",
   "docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;",
-  "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'"
+  "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'",
+  "db": "bun drizzle-kit"
 },
 "bin": {
   "opencode": "./bin/opencode"

@@ -42,6 +43,8 @@
   "@types/turndown": "5.0.5",
   "@types/yargs": "17.0.33",
   "@typescript/native-preview": "catalog:",
+  "drizzle-kit": "1.0.0-beta.12-a5629fb",
+  "drizzle-orm": "1.0.0-beta.12-a5629fb",
   "typescript": "catalog:",
   "vscode-languageserver-types": "3.17.5",
   "why-is-node-running": "3.2.2",

@@ -100,6 +103,7 @@
   "clipboardy": "4.0.0",
   "decimal.js": "10.5.0",
   "diff": "catalog:",
+  "drizzle-orm": "1.0.0-beta.12-a5629fb",
   "fuzzysort": "3.1.0",
   "gray-matter": "4.0.3",
   "hono": "catalog:",

@@ -122,5 +126,8 @@
   "yargs": "18.0.0",
   "zod": "catalog:",
   "zod-to-json-schema": "3.24.5"
 },
+"overrides": {
+  "drizzle-orm": "1.0.0-beta.12-a5629fb"
+}
 }
@@ -25,6 +25,32 @@ await Bun.write(
 )
 console.log("Generated models-snapshot.ts")

+// Load migrations from migration directories
+const migrationDirs = (await fs.promises.readdir(path.join(dir, "migration"), { withFileTypes: true }))
+  .filter((entry) => entry.isDirectory() && /^\d{4}\d{2}\d{2}\d{2}\d{2}\d{2}/.test(entry.name))
+  .map((entry) => entry.name)
+  .sort()
+
+const migrations = await Promise.all(
+  migrationDirs.map(async (name) => {
+    const file = path.join(dir, "migration", name, "migration.sql")
+    const sql = await Bun.file(file).text()
+    const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(name)
+    const timestamp = match
+      ? Date.UTC(
+          Number(match[1]),
+          Number(match[2]) - 1,
+          Number(match[3]),
+          Number(match[4]),
+          Number(match[5]),
+          Number(match[6]),
+        )
+      : 0
+    return { sql, timestamp }
+  }),
+)
+console.log(`Loaded ${migrations.length} migrations`)
+
 const singleFlag = process.argv.includes("--single")
 const baselineFlag = process.argv.includes("--baseline")
 const skipInstall = process.argv.includes("--skip-install")
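The regex assumes folder names that start with a 14-digit UTC timestamp, e.g. `20240101123045_add-sessions` (a hypothetical slug); checking the arithmetic:

```ts
const name = "20240101123045_add-sessions" // hypothetical folder name
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(name)!
// the month is converted to 0-based before being passed to Date.UTC
const ts = Date.UTC(2024, 0, 1, 12, 30, 45)
console.log(ts) // 1704112245000, i.e. Jan 1 2024, 12:30:45 UTC
```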
@@ -156,6 +182,7 @@ for (const item of targets) {
   entrypoints: ["./src/index.ts", parserWorker, workerPath],
   define: {
     OPENCODE_VERSION: `'${Script.version}'`,
+    OPENCODE_MIGRATIONS: JSON.stringify(migrations),
     OTUI_TREE_SITTER_WORKER_PATH: bunfsRoot + workerRelativePath,
     OPENCODE_WORKER_PATH: workerPath,
     OPENCODE_CHANNEL: `'${Script.channel}'`,
packages/opencode/script/check-migrations.ts (new file, 16 lines)

@@ -0,0 +1,16 @@
#!/usr/bin/env bun

import { $ } from "bun"

// drizzle-kit check compares schema to migrations, exits non-zero if drift
const result = await $`bun drizzle-kit check`.quiet().nothrow()

if (result.exitCode !== 0) {
  console.error("Schema has changes not captured in migrations!")
  console.error("Run: bun drizzle-kit generate")
  console.error("")
  console.error(result.stderr.toString())
  process.exit(1)
}

console.log("Migrations are up to date")
@@ -435,46 +435,68 @@ export namespace ACP {
           return
         }
       }
       return
     }

-    if (part.type === "text") {
-      const delta = props.delta
-      if (delta && part.ignored !== true) {
-        await this.connection
-          .sessionUpdate({
-            sessionId,
-            update: {
-              sessionUpdate: "agent_message_chunk",
-              content: {
-                type: "text",
-                text: delta,
-              },
-            },
-          })
-          .catch((error) => {
-            log.error("failed to send text delta to ACP", { error })
-          })
-      }
-      return
-    }
-
-    if (part.type === "reasoning") {
-      const delta = props.delta
-      if (delta) {
-        await this.connection
-          .sessionUpdate({
-            sessionId,
-            update: {
-              sessionUpdate: "agent_thought_chunk",
-              content: {
-                type: "text",
-                text: delta,
-              },
-            },
-          })
-          .catch((error) => {
-            log.error("failed to send reasoning delta to ACP", { error })
-          })
-      }
-      return
-    }
+    case "message.part.delta": {
+      const props = event.properties
+      const session = this.sessionManager.tryGet(props.sessionID)
+      if (!session) return
+      const sessionId = session.id
+
+      const message = await this.sdk.session
+        .message(
+          {
+            sessionID: props.sessionID,
+            messageID: props.messageID,
+            directory: session.cwd,
+          },
+          { throwOnError: true },
+        )
+        .then((x) => x.data)
+        .catch((error) => {
+          log.error("unexpected error when fetching message", { error })
+          return undefined
+        })
+
+      if (!message || message.info.role !== "assistant") return
+
+      const part = message.parts.find((p) => p.id === props.partID)
+      if (!part) return
+
+      if (part.type === "text" && props.field === "text" && part.ignored !== true) {
+        await this.connection
+          .sessionUpdate({
+            sessionId,
+            update: {
+              sessionUpdate: "agent_message_chunk",
+              content: {
+                type: "text",
+                text: props.delta,
+              },
+            },
+          })
+          .catch((error) => {
+            log.error("failed to send text to ACP", { error })
+          })
+      }
+
+      if (part.type === "reasoning" && props.field === "text") {
+        await this.connection
+          .sessionUpdate({
+            sessionId,
+            update: {
+              sessionUpdate: "agent_thought_chunk",
+              content: {
+                type: "text",
+                text: props.delta,
+              },
+            },
+          })
+          .catch((error) => {
+            log.error("failed to send reasoning to ACP", { error })
+          })
+      }
+      return
+    }
@@ -3,7 +3,8 @@ import type { Session as SDKSession, Message, Part } from "@opencode-ai/sdk/v2"
 import { Session } from "../../session"
 import { cmd } from "./cmd"
 import { bootstrap } from "../bootstrap"
-import { Storage } from "../../storage/storage"
+import { Database } from "../../storage/db"
+import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
 import { Instance } from "../../project/instance"
 import { ShareNext } from "../../share/share-next"
 import { EOL } from "os"

@@ -130,13 +131,35 @@ export const ImportCommand = cmd({
     return
   }

-  await Storage.write(["session", Instance.project.id, exportData.info.id], exportData.info)
+  Database.use((db) => db.insert(SessionTable).values(Session.toRow(exportData.info)).onConflictDoNothing().run())

   for (const msg of exportData.messages) {
-    await Storage.write(["message", exportData.info.id, msg.info.id], msg.info)
+    Database.use((db) =>
+      db
+        .insert(MessageTable)
+        .values({
+          id: msg.info.id,
+          session_id: exportData.info.id,
+          time_created: msg.info.time?.created ?? Date.now(),
+          data: msg.info,
+        })
+        .onConflictDoNothing()
+        .run(),
+    )

     for (const part of msg.parts) {
-      await Storage.write(["part", msg.info.id, part.id], part)
+      Database.use((db) =>
+        db
+          .insert(PartTable)
+          .values({
+            id: part.id,
+            message_id: msg.info.id,
+            session_id: exportData.info.id,
+            data: part,
+          })
+          .onConflictDoNothing()
+          .run(),
+      )
     }
   }
@@ -2,7 +2,8 @@ import type { Argv } from "yargs"
 import { cmd } from "./cmd"
 import { Session } from "../../session"
 import { bootstrap } from "../bootstrap"
-import { Storage } from "../../storage/storage"
+import { Database } from "../../storage/db"
+import { SessionTable } from "../../session/session.sql"
 import { Project } from "../../project/project"
 import { Instance } from "../../project/instance"

@@ -87,25 +88,8 @@ async function getCurrentProject(): Promise<Project.Info> {
 }

 async function getAllSessions(): Promise<Session.Info[]> {
-  const sessions: Session.Info[] = []
-
-  const projectKeys = await Storage.list(["project"])
-  const projects = await Promise.all(projectKeys.map((key) => Storage.read<Project.Info>(key)))
-
-  for (const project of projects) {
-    if (!project) continue
-
-    const sessionKeys = await Storage.list(["session", project.id])
-    const projectSessions = await Promise.all(sessionKeys.map((key) => Storage.read<Session.Info>(key)))
-
-    for (const session of projectSessions) {
-      if (session) {
-        sessions.push(session)
-      }
-    }
-  }
-
-  return sessions
+  const rows = Database.use((db) => db.select().from(SessionTable).all())
+  return rows.map((row) => Session.fromRow(row))
 }

 export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {
@@ -299,6 +299,24 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
     break
   }

+  case "message.part.delta": {
+    const parts = store.part[event.properties.messageID]
+    if (!parts) break
+    const result = Binary.search(parts, event.properties.partID, (p) => p.id)
+    if (!result.found) break
+    setStore(
+      "part",
+      event.properties.messageID,
+      produce((draft) => {
+        const part = draft[result.index]
+        const field = event.properties.field as keyof typeof part
+        const existing = part[field] as string | undefined
+        ;(part[field] as string) = (existing ?? "") + event.properties.delta
+      }),
+    )
+    break
+  }
+
   case "message.part.removed": {
     const parts = store.part[event.properties.messageID]
     const result = Binary.search(parts, event.properties.partID, (p) => p.id)
@@ -2027,8 +2027,8 @@ function ApplyPatch(props: ToolProps<typeof ApplyPatchTool>) {
       </For>
     </Match>
     <Match when={true}>
-      <InlineTool icon="%" pending="Preparing apply_patch..." complete={false} part={props.part}>
-        apply_patch
+      <InlineTool icon="%" pending="Preparing patch..." complete={false} part={props.part}>
+        Patch
       </InlineTool>
     </Match>
   </Switch>
@@ -26,6 +26,10 @@ import { EOL } from "os"
 import { WebCommand } from "./cli/cmd/web"
 import { PrCommand } from "./cli/cmd/pr"
 import { SessionCommand } from "./cli/cmd/session"
+import path from "path"
+import { Global } from "./global"
+import { JsonMigration } from "./storage/json-migration"
+import { Database } from "./storage/db"

 process.on("unhandledRejection", (e) => {
   Log.Default.error("rejection", {
@@ -74,6 +78,37 @@ const cli = yargs(hideBin(process.argv))
     version: Installation.VERSION,
     args: process.argv.slice(2),
   })

+  const marker = path.join(Global.Path.data, "opencode.db")
+  if (!(await Bun.file(marker).exists())) {
+    console.log("Performing one time database migration, may take a few minutes...")
+    const tty = process.stdout.isTTY
+    const width = 36
+    const orange = "\x1b[38;5;214m"
+    const muted = "\x1b[0;2m"
+    const reset = "\x1b[0m"
+    let last = -1
+    if (tty) process.stdout.write("\x1b[?25l")
+    try {
+      await JsonMigration.run(Database.Client().$client, {
+        progress: (event) => {
+          if (!tty) return
+          const percent = Math.floor((event.current / event.total) * 100)
+          if (percent === last && event.current !== event.total) return
+          last = percent
+          const fill = Math.round((percent / 100) * width)
+          const bar = `${"■".repeat(fill)}${"・".repeat(width - fill)}`
+          process.stdout.write(
+            `\r${orange}${bar} ${percent.toString().padStart(3)}%${reset} ${muted}${event.label.padEnd(12)} ${event.current}/${event.total}${reset}`,
+          )
+          if (event.current === event.total) process.stdout.write("\n")
+        },
+      })
+    } finally {
+      if (tty) process.stdout.write("\x1b[?25h")
+    }
+    console.log("Database migration complete.")
+  }
 })
 .usage("\n" + UI.logo())
 .completion("completion", "generate shell completion script")
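The bar rendering above maps the percentage onto a fixed-width track and redraws it in place via the `\r` prefix; for instance, at 50% with `width = 36`:

```ts
const width = 36
const percent = 50
const fill = Math.round((percent / 100) * width) // 18 filled cells
const bar = "■".repeat(fill) + "・".repeat(width - fill)
// 18 "■" characters followed by 18 "・" characters
```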
@@ -3,7 +3,8 @@ import { BusEvent } from "@/bus/bus-event"
 import { Config } from "@/config/config"
 import { Identifier } from "@/id/id"
 import { Instance } from "@/project/instance"
-import { Storage } from "@/storage/storage"
+import { Database, eq } from "@/storage/db"
+import { PermissionTable } from "@/session/session.sql"
 import { fn } from "@/util/fn"
 import { Log } from "@/util/log"
 import { Wildcard } from "@/util/wildcard"

@@ -105,9 +106,12 @@ export namespace PermissionNext {
     ),
   }

-  const state = Instance.state(async () => {
+  const state = Instance.state(() => {
     const projectID = Instance.project.id
-    const stored = await Storage.read<Ruleset>(["permission", projectID]).catch(() => [] as Ruleset)
+    const row = Database.use((db) =>
+      db.select().from(PermissionTable).where(eq(PermissionTable.project_id, projectID)).get(),
+    )
+    const stored = row?.data ?? ([] as Ruleset)

     const pending: Record<
       string,

@@ -222,7 +226,8 @@ export namespace PermissionNext {
       // TODO: we don't save the permission ruleset to disk yet until there's
       // UI to manage it
-      // await Storage.write(["permission", Instance.project.id], s.approved)
+      // db().insert(PermissionTable).values({ projectID: Instance.project.id, data: s.approved })
+      //   .onConflictDoUpdate({ target: PermissionTable.projectID, set: { data: s.approved } }).run()
       return
     }
   },

@@ -275,6 +280,7 @@ export namespace PermissionNext {
   }

   export async function list() {
-    return state().then((x) => Object.values(x.pending).map((x) => x.info))
+    const s = await state()
+    return Object.values(s.pending).map((x) => x.info)
   }
 }
@@ -1,5 +1,4 @@
 import { Plugin } from "../plugin"
-import { Share } from "../share/share"
 import { Format } from "../format"
 import { LSP } from "../lsp"
 import { FileWatcher } from "../file/watcher"

@@ -17,7 +16,6 @@ import { Truncate } from "../tool/truncation"
 export async function InstanceBootstrap() {
   Log.Default.info("bootstrapping", { directory: Instance.directory })
   await Plugin.init()
-  Share.init()
   ShareNext.init()
   Format.init()
   await LSP.init()
packages/opencode/src/project/project.sql.ts (new file, 14 lines)

@@ -0,0 +1,14 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"
import { Timestamps } from "@/storage/schema.sql"

export const ProjectTable = sqliteTable("project", {
  id: text().primaryKey(),
  worktree: text().notNull(),
  vcs: text(),
  name: text(),
  icon_url: text(),
  icon_color: text(),
  ...Timestamps,
  time_initialized: integer(),
  sandboxes: text({ mode: "json" }).notNull().$type<string[]>(),
})
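The shared `Timestamps` helper from `@/storage/schema.sql` is not part of this diff; judging from the `time_created`/`time_updated` columns in the generated SQL above, it presumably expands to something like:

```ts
// hypothetical sketch of the shared Timestamps columns in @/storage/schema.sql
import { integer } from "drizzle-orm/sqlite-core"

export const Timestamps = {
  time_created: integer().notNull(),
  time_updated: integer().notNull(),
}
```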
@@ -1,18 +1,17 @@
 import z from "zod"
 import fs from "fs/promises"
 import { Filesystem } from "../util/filesystem"
 import path from "path"
 import { $ } from "bun"
-import { Storage } from "../storage/storage"
+import { Database, eq } from "../storage/db"
+import { ProjectTable } from "./project.sql"
+import { SessionTable } from "../session/session.sql"
 import { Log } from "../util/log"
 import { Flag } from "@/flag/flag"
 import { Session } from "../session"
 import { work } from "../util/queue"
 import { fn } from "@opencode-ai/util/fn"
 import { BusEvent } from "@/bus/bus-event"
 import { iife } from "@/util/iife"
 import { GlobalBus } from "@/bus/global"
-import { existsSync } from "fs"

 export namespace Project {
   const log = Log.create({ service: "project" })
@@ -50,66 +49,85 @@ export namespace Project {
   Updated: BusEvent.define("project.updated", Info),
 }

+type Row = typeof ProjectTable.$inferSelect
+
+export function fromRow(row: Row): Info {
+  const icon =
+    row.icon_url || row.icon_color
+      ? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined }
+      : undefined
+  return {
+    id: row.id,
+    worktree: row.worktree,
+    vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined,
+    name: row.name ?? undefined,
+    icon,
+    time: {
+      created: row.time_created,
+      updated: row.time_updated,
+      initialized: row.time_initialized ?? undefined,
+    },
+    sandboxes: row.sandboxes,
+  }
+}
+
 export async function fromDirectory(directory: string) {
   log.info("fromDirectory", { directory })

-  const { id, sandbox, worktree, vcs } = await iife(async () => {
+  const data = await iife(async () => {
     const matches = Filesystem.up({ targets: [".git"], start: directory })
     const git = await matches.next().then((x) => x.value)
     await matches.return()
     if (git) {
-      let sandbox = path.dirname(git)
-      const gitBinary = Bun.which("git")
+      const sandbox = path.dirname(git)
+      const bin = Bun.which("git")

       // cached id calculation
-      let id = await Bun.file(path.join(git, "opencode"))
+      const cached = await Bun.file(path.join(git, "opencode"))
         .text()
         .then((x) => x.trim())
         .catch(() => undefined)

-      if (!gitBinary) {
+      if (!bin) {
         return {
-          id: id ?? "global",
+          id: cached ?? "global",
           worktree: sandbox,
           sandbox: sandbox,
           vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
         }
       }

-      // generate id from root commit
-      if (!id) {
-        const roots = await $`git rev-list --max-parents=0 --all`
-          .quiet()
-          .nothrow()
-          .cwd(sandbox)
-          .text()
-          .then((x) =>
-            x
-              .split("\n")
-              .filter(Boolean)
-              .map((x) => x.trim())
-              .toSorted(),
-          )
-          .catch(() => undefined)
-
-        if (!roots) {
-          return {
-            id: "global",
-            worktree: sandbox,
-            sandbox: sandbox,
-            vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
-          }
-        }
-
-        id = roots[0]
-        if (id) {
-          void Bun.file(path.join(git, "opencode"))
-            .write(id)
+      // generate id from root commit
+      const roots = cached
+        ? undefined
+        : await $`git rev-list --max-parents=0 --all`
+            .quiet()
+            .nothrow()
+            .cwd(sandbox)
+            .text()
+            .then((x) =>
+              x
+                .split("\n")
+                .filter(Boolean)
+                .map((x) => x.trim())
+                .toSorted(),
+            )
+            .catch(() => undefined)
+
+      if (!cached && !roots) {
+        return {
+          id: "global",
+          worktree: sandbox,
+          sandbox: sandbox,
+          vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
+        }
+      }
+
+      const id = cached ?? roots?.[0]
+      if (!cached && id) {
+        void Bun.file(path.join(git, "opencode"))
+          .write(id)
+          .catch(() => undefined)
       }

       if (!id) {
         return {
           id: "global",
@@ -136,33 +154,31 @@
       }
     }

-    sandbox = top
-
-    const worktree = await $`git rev-parse --git-common-dir`
+    const tree = await $`git rev-parse --git-common-dir`
       .quiet()
       .nothrow()
-      .cwd(sandbox)
+      .cwd(top)
       .text()
       .then((x) => {
         const dirname = path.dirname(x.trim())
-        if (dirname === ".") return sandbox
+        if (dirname === ".") return top
         return dirname
       })
       .catch(() => undefined)

-    if (!worktree) {
+    if (!tree) {
       return {
         id,
-        sandbox,
-        worktree: sandbox,
+        sandbox: top,
+        worktree: top,
         vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
       }
     }

     return {
       id,
-      sandbox,
-      worktree,
+      sandbox: top,
+      worktree: tree,
       vcs: "git",
     }
   }
@@ -175,47 +191,78 @@
     }
   })

-  let existing = await Storage.read<Info>(["project", id]).catch(() => undefined)
-  if (!existing) {
-    existing = {
-      id,
-      worktree,
-      vcs: vcs as Info["vcs"],
+  const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, data.id)).get())
+  const existing = await iife(async () => {
+    if (row) return fromRow(row)
+    const fresh: Info = {
+      id: data.id,
+      worktree: data.worktree,
+      vcs: data.vcs as Info["vcs"],
       sandboxes: [],
       time: {
         created: Date.now(),
         updated: Date.now(),
       },
     }
-    if (id !== "global") {
-      await migrateFromGlobal(id, worktree)
+    if (data.id !== "global") {
+      await migrateFromGlobal(data.id, data.worktree)
     }
-  }
-
-  // migrate old projects before sandboxes
-  if (!existing.sandboxes) existing.sandboxes = []
+    return fresh
+  })

   if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing)

   const result: Info = {
     ...existing,
-    worktree,
-    vcs: vcs as Info["vcs"],
+    worktree: data.worktree,
+    vcs: data.vcs as Info["vcs"],
     time: {
       ...existing.time,
       updated: Date.now(),
     },
   }
-  if (sandbox !== result.worktree && !result.sandboxes.includes(sandbox)) result.sandboxes.push(sandbox)
-  result.sandboxes = result.sandboxes.filter((x) => existsSync(x))
-  await Storage.write<Info>(["project", id], result)
+  if (data.sandbox !== result.worktree && !result.sandboxes.includes(data.sandbox))
+    result.sandboxes.push(data.sandbox)
+  const sandboxes: string[] = []
+  for (const x of result.sandboxes) {
+    const stat = await Bun.file(x)
+      .stat()
+      .catch(() => undefined)
+    if (stat) sandboxes.push(x)
+  }
+  result.sandboxes = sandboxes
+  const insert = {
+    id: result.id,
+    worktree: result.worktree,
+    vcs: result.vcs ?? null,
+    name: result.name,
+    icon_url: result.icon?.url,
+    icon_color: result.icon?.color,
+    time_created: result.time.created,
+    time_updated: result.time.updated,
+    time_initialized: result.time.initialized,
+    sandboxes: result.sandboxes,
+  }
+  const updateSet = {
+    worktree: result.worktree,
+    vcs: result.vcs ?? null,
+    name: result.name,
+    icon_url: result.icon?.url,
+    icon_color: result.icon?.color,
+    time_updated: result.time.updated,
+    time_initialized: result.time.initialized,
+    sandboxes: result.sandboxes,
+  }
+  Database.use((db) =>
+    db.insert(ProjectTable).values(insert).onConflictDoUpdate({ target: ProjectTable.id, set: updateSet }).run(),
+  )
   GlobalBus.emit("event", {
     payload: {
       type: Event.Updated.type,
       properties: result,
     },
   })
-  return { project: result, sandbox }
+  return { project: result, sandbox: data.sandbox }
 }

 export async function discover(input: Info) {
@@ -248,43 +295,54 @@ export namespace Project {
|
||||
return
|
||||
}
|
||||
|
||||
async function migrateFromGlobal(newProjectID: string, worktree: string) {
|
||||
const globalProject = await Storage.read<Info>(["project", "global"]).catch(() => undefined)
|
||||
if (!globalProject) return
|
||||
async function migrateFromGlobal(id: string, worktree: string) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get())
|
||||
if (!row) return
|
||||
|
||||
const globalSessions = await Storage.list(["session", "global"]).catch(() => [])
|
||||
if (globalSessions.length === 0) return
|
||||
const sessions = Database.use((db) =>
|
||||
db.select().from(SessionTable).where(eq(SessionTable.project_id, "global")).all(),
|
||||
)
|
||||
if (sessions.length === 0) return
|
||||
|
||||
log.info("migrating sessions from global", { newProjectID, worktree, count: globalSessions.length })
|
||||
log.info("migrating sessions from global", { newProjectID: id, worktree, count: sessions.length })
|
||||
|
||||
await work(10, globalSessions, async (key) => {
|
||||
const sessionID = key[key.length - 1]
|
||||
const session = await Storage.read<Session.Info>(key).catch(() => undefined)
|
||||
if (!session) return
|
||||
if (session.directory && session.directory !== worktree) return
|
||||
await work(10, sessions, async (row) => {
|
||||
// Skip sessions that belong to a different directory
|
||||
if (row.directory && row.directory !== worktree) return
|
||||
|
||||
session.projectID = newProjectID
|
||||
log.info("migrating session", { sessionID, from: "global", to: newProjectID })
|
||||
await Storage.write(["session", newProjectID, sessionID], session)
|
||||
await Storage.remove(key)
|
||||
log.info("migrating session", { sessionID: row.id, from: "global", to: id })
|
||||
Database.use((db) => db.update(SessionTable).set({ project_id: id }).where(eq(SessionTable.id, row.id)).run())
|
||||
}).catch((error) => {
|
||||
log.error("failed to migrate sessions from global to project", { error, projectId: newProjectID })
|
||||
log.error("failed to migrate sessions from global to project", { error, projectId: id })
|
||||
})
|
||||
}
|
||||
|
||||
export async function setInitialized(projectID: string) {
|
||||
await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
draft.time.initialized = Date.now()
|
||||
})
|
||||
export function setInitialized(id: string) {
|
||||
Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({
|
||||
time_initialized: Date.now(),
|
||||
})
|
||||
.where(eq(ProjectTable.id, id))
|
||||
.run(),
|
||||
)
|
||||
}
|
||||
|
||||
export async function list() {
|
||||
const keys = await Storage.list(["project"])
|
||||
const projects = await Promise.all(keys.map((x) => Storage.read<Info>(x)))
|
||||
return projects.map((project) => ({
|
||||
...project,
|
||||
sandboxes: project.sandboxes?.filter((x) => existsSync(x)),
|
||||
}))
|
||||
export function list() {
|
||||
return Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(ProjectTable)
|
||||
.all()
|
||||
.map((row) => fromRow(row)),
|
||||
)
|
||||
}
|
||||
|
||||
export function get(id: string): Info | undefined {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
if (!row) return undefined
|
||||
return fromRow(row)
|
||||
}
|
||||
|
||||
export const update = fn(

@@ -295,77 +353,89 @@ export namespace Project {
commands: Info.shape.commands.optional(),
}),
async (input) => {
const result = await Storage.update<Info>(["project", input.projectID], (draft) => {
if (input.name !== undefined) draft.name = input.name
if (input.icon !== undefined) {
draft.icon = {
...draft.icon,
}
if (input.icon.url !== undefined) draft.icon.url = input.icon.url
if (input.icon.override !== undefined) draft.icon.override = input.icon.override || undefined
if (input.icon.color !== undefined) draft.icon.color = input.icon.color
}

if (input.commands?.start !== undefined) {
const start = input.commands.start || undefined
draft.commands = {
...(draft.commands ?? {}),
}
draft.commands.start = start
if (!draft.commands.start) draft.commands = undefined
}

draft.time.updated = Date.now()
})
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({
name: input.name,
icon_url: input.icon?.url,
icon_color: input.icon?.color,
time_updated: Date.now(),
})
.where(eq(ProjectTable.id, input.projectID))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${input.projectID}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
},
)

export async function sandboxes(projectID: string) {
const project = await Storage.read<Info>(["project", projectID]).catch(() => undefined)
if (!project?.sandboxes) return []
export async function sandboxes(id: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return []
const data = fromRow(row)
const valid: string[] = []
for (const dir of project.sandboxes) {
const stat = await fs.stat(dir).catch(() => undefined)
for (const dir of data.sandboxes) {
const stat = await Bun.file(dir)
.stat()
.catch(() => undefined)
if (stat?.isDirectory()) valid.push(dir)
}
return valid
}

export async function addSandbox(projectID: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => {
const sandboxes = draft.sandboxes ?? []
if (!sandboxes.includes(directory)) sandboxes.push(directory)
draft.sandboxes = sandboxes
draft.time.updated = Date.now()
})
export async function addSandbox(id: string, directory: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sandboxes = [...row.sandboxes]
if (!sandboxes.includes(directory)) sandboxes.push(directory)
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
}

export async function removeSandbox(projectID: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => {
const sandboxes = draft.sandboxes ?? []
draft.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory)
draft.time.updated = Date.now()
})
export async function removeSandbox(id: string, directory: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sandboxes = row.sandboxes.filter((s) => s !== directory)
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
}
}

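Aside (editorial, not part of the diff): the project save above uses drizzle's insert-or-update form, and `updateSet` deliberately omits `time_created`, so re-saving an existing project refreshes its metadata without clobbering its creation time. The shape of the call:

// insert when the id is new; overwrite only the listed columns when it exists
db.insert(ProjectTable)
.values(insert)
.onConflictDoUpdate({ target: ProjectTable.id, set: updateSet })
.run()
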
@@ -1,6 +1,6 @@
import { resolver } from "hono-openapi"
import z from "zod"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"

export const ERRORS = {
400: {

@@ -25,7 +25,7 @@ export const ERRORS = {
description: "Not found",
content: {
"application/json": {
schema: resolver(Storage.NotFoundError.Schema),
schema: resolver(NotFoundError.Schema),
},
},
},

@@ -3,7 +3,7 @@ import { describeRoute, validator, resolver } from "hono-openapi"
import { upgradeWebSocket } from "hono/bun"
import z from "zod"
import { Pty } from "@/pty"
import { Storage } from "../../storage/storage"
import { NotFoundError } from "../../storage/db"
import { errors } from "../error"
import { lazy } from "../../util/lazy"

@@ -76,7 +76,7 @@ export const PtyRoutes = lazy(() =>
async (c) => {
const info = Pty.get(c.req.valid("param").ptyID)
if (!info) {
throw new Storage.NotFoundError({ message: "Session not found" })
throw new NotFoundError({ message: "Session not found" })
}
return c.json(info)
},

@@ -276,18 +276,15 @@ export const SessionRoutes = lazy(() =>
const sessionID = c.req.valid("param").sessionID
const updates = c.req.valid("json")

const updatedSession = await Session.update(
sessionID,
(session) => {
if (updates.title !== undefined) {
session.title = updates.title
}
if (updates.time?.archived !== undefined) session.time.archived = updates.time.archived
},
{ touch: false },
)
let session = await Session.get(sessionID)
if (updates.title !== undefined) {
session = await Session.setTitle({ sessionID, title: updates.title })
}
if (updates.time?.archived !== undefined) {
session = await Session.setArchived({ sessionID, time: updates.time.archived })
}

return c.json(updatedSession)
return c.json(session)
},
)
.post(

@@ -31,7 +31,7 @@ import { ExperimentalRoutes } from "./routes/experimental"
import { ProviderRoutes } from "./routes/provider"
import { lazy } from "../util/lazy"
import { InstanceBootstrap } from "../project/bootstrap"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
import type { ContentfulStatusCode } from "hono/utils/http-status"
import { websocket } from "hono/bun"
import { HTTPException } from "hono/http-exception"

@@ -65,7 +65,7 @@ export namespace Server {
})
if (err instanceof NamedError) {
let status: ContentfulStatusCode
if (err instanceof Storage.NotFoundError) status = 404
if (err instanceof NotFoundError) status = 404
else if (err instanceof Provider.ModelNotFoundError) status = 400
else if (err.name.startsWith("Worktree")) status = 400
else status = 500

@@ -10,7 +10,9 @@ import { Flag } from "../flag/flag"
import { Identifier } from "../id/id"
import { Installation } from "../installation"

import { Storage } from "../storage/storage"
import { Database, NotFoundError, eq, and, or, like } from "../storage/db"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import { Storage } from "@/storage/storage"
import { Log } from "../util/log"
import { MessageV2 } from "./message-v2"
import { Instance } from "../project/instance"

@@ -39,6 +41,64 @@ export namespace Session {
).test(title)
}

type SessionRow = typeof SessionTable.$inferSelect

export function fromRow(row: SessionRow): Info {
const summary =
row.summary_additions !== null || row.summary_deletions !== null || row.summary_files !== null
? {
additions: row.summary_additions ?? 0,
deletions: row.summary_deletions ?? 0,
files: row.summary_files ?? 0,
diffs: row.summary_diffs ?? undefined,
}
: undefined
const share = row.share_url ? { url: row.share_url } : undefined
const revert = row.revert ?? undefined
return {
id: row.id,
slug: row.slug,
projectID: row.project_id,
directory: row.directory,
parentID: row.parent_id ?? undefined,
title: row.title,
version: row.version,
summary,
share,
revert,
permission: row.permission ?? undefined,
time: {
created: row.time_created,
updated: row.time_updated,
compacting: row.time_compacting ?? undefined,
archived: row.time_archived ?? undefined,
},
}
}

export function toRow(info: Info) {
return {
id: info.id,
project_id: info.projectID,
parent_id: info.parentID,
slug: info.slug,
directory: info.directory,
title: info.title,
version: info.version,
share_url: info.share?.url,
summary_additions: info.summary?.additions,
summary_deletions: info.summary?.deletions,
summary_files: info.summary?.files,
summary_diffs: info.summary?.diffs,
revert: info.revert ?? null,
permission: info.permission,
time_created: info.time.created,
time_updated: info.time.updated,
time_compacting: info.time.compacting,
time_archived: info.time.archived,
}
}

function getForkedTitle(title: string): string {
const match = title.match(/^(.+) \(fork #(\d+)\)$/)
if (match) {

@@ -92,16 +152,6 @@ export namespace Session {
})
export type Info = z.output<typeof Info>

export const ShareInfo = z
.object({
secret: z.string(),
url: z.string(),
})
.meta({
ref: "SessionShare",
})
export type ShareInfo = z.output<typeof ShareInfo>

export const Event = {
Created: BusEvent.define(
"session.created",

@@ -198,8 +248,17 @@ export namespace Session {
)

export const touch = fn(Identifier.schema("session"), async (sessionID) => {
await update(sessionID, (draft) => {
draft.time.updated = Date.now()
const now = Date.now()
Database.use((db) => {
const row = db
.update(SessionTable)
.set({ time_updated: now })
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
})

@@ -225,21 +284,19 @@ export namespace Session {
},
}
log.info("created", result)
await Storage.write(["session", Instance.project.id, result.id], result)
Bus.publish(Event.Created, {
info: result,
Database.use((db) => {
db.insert(SessionTable).values(toRow(result)).run()
Database.effect(() =>
Bus.publish(Event.Created, {
info: result,
}),
)
})
const cfg = await Config.get()
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
share(result.id)
.then((share) => {
update(result.id, (draft) => {
draft.share = share
})
})
.catch(() => {
// Silently ignore sharing errors during session creation
})
share(result.id).catch(() => {
// Silently ignore sharing errors during session creation
})
Bus.publish(Event.Updated, {
info: result,
})

@@ -254,12 +311,9 @@ export namespace Session {
}

export const get = fn(Identifier.schema("session"), async (id) => {
const read = await Storage.read<Info>(["session", Instance.project.id, id])
return read as Info
})

export const getShare = fn(Identifier.schema("session"), async (id) => {
return Storage.read<ShareInfo>(["share", id])
const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get())
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
return fromRow(row)
})

export const share = fn(Identifier.schema("session"), async (id) => {

@@ -269,15 +323,12 @@ export namespace Session {
}
const { ShareNext } = await import("@/share/share-next")
const share = await ShareNext.create(id)
await update(
id,
(draft) => {
draft.share = {
url: share.url,
}
},
{ touch: false },
)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: share.url }).where(eq(SessionTable.id, id)).returning().get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
return share
})

@@ -285,32 +336,155 @@ export namespace Session {
// Use ShareNext to remove the share (same as share function uses ShareNext to create)
const { ShareNext } = await import("@/share/share-next")
await ShareNext.remove(id)
await update(
id,
(draft) => {
draft.share = undefined
},
{ touch: false },
)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: null }).where(eq(SessionTable.id, id)).returning().get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
})

export async function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
const project = Instance.project
const result = await Storage.update<Info>(["session", project.id, id], (draft) => {
editor(draft)
if (options?.touch !== false) {
draft.time.updated = Date.now()
}
export const setTitle = fn(
z.object({
sessionID: Identifier.schema("session"),
title: z.string(),
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({ title: input.title })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)

export const setArchived = fn(
z.object({
sessionID: Identifier.schema("session"),
time: z.number().optional(),
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({ time_archived: input.time })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)

export const setPermission = fn(
z.object({
sessionID: Identifier.schema("session"),
permission: PermissionNext.Ruleset,
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({ permission: input.permission, time_updated: Date.now() })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)

export const setRevert = fn(
z.object({
sessionID: Identifier.schema("session"),
revert: Info.shape.revert,
summary: Info.shape.summary,
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
revert: input.revert ?? null,
summary_additions: input.summary?.additions,
summary_deletions: input.summary?.deletions,
summary_files: input.summary?.files,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)

export const clearRevert = fn(Identifier.schema("session"), async (sessionID) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
revert: null,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
Bus.publish(Event.Updated, {
info: result,
})
return result
}
})

export const setSummary = fn(
z.object({
sessionID: Identifier.schema("session"),
summary: Info.shape.summary,
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
summary_additions: input.summary?.additions,
summary_deletions: input.summary?.deletions,
summary_files: input.summary?.files,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)

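Editorial note: setTitle, setArchived, setPermission, setRevert, setSummary, and clearRevert above all share one shape. A condensed sketch of it (the helper name is hypothetical and does not appear in the diff):

// Hypothetical helper: update one row, fail loudly if it is missing, convert
// to the public Info shape, and defer the bus event so it fires only after any
// enclosing transaction commits.
function set(sessionID: string, values: Partial<typeof SessionTable.$inferInsert>) {
return Database.use((db) => {
const row = db.update(SessionTable).set(values).where(eq(SessionTable.id, sessionID)).returning().get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
}
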
export const diff = fn(Identifier.schema("session"), async (sessionID) => {
const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
return diffs ?? []
try {
return await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
} catch {
return []
}
})

export const messages = fn(

@@ -329,25 +503,37 @@ export namespace Session {
},
)

export async function* list() {
export function* list() {
const project = Instance.project
for (const item of await Storage.list(["session", project.id])) {
const session = await Storage.read<Info>(item).catch(() => undefined)
if (!session) continue
yield session
const rel = path.relative(Instance.worktree, Instance.directory)
const suffix = path.sep + rel
const rows = Database.use((db) =>
db
.select()
.from(SessionTable)
.where(
and(
eq(SessionTable.project_id, project.id),
or(eq(SessionTable.directory, Instance.directory), like(SessionTable.directory, `%${suffix}`)),
),
)
.all(),
)
for (const row of rows) {
yield fromRow(row)
}
}

export const children = fn(Identifier.schema("session"), async (parentID) => {
const project = Instance.project
const result = [] as Session.Info[]
for (const item of await Storage.list(["session", project.id])) {
const session = await Storage.read<Info>(item).catch(() => undefined)
if (!session) continue
if (session.parentID !== parentID) continue
result.push(session)
}
return result
const rows = Database.use((db) =>
db
.select()
.from(SessionTable)
.where(and(eq(SessionTable.project_id, project.id), eq(SessionTable.parent_id, parentID)))
.all(),
)
return rows.map(fromRow)
})

export const remove = fn(Identifier.schema("session"), async (sessionID) => {

@@ -358,15 +544,14 @@ export namespace Session {
await remove(child.id)
}
await unshare(sessionID).catch(() => {})
for (const msg of await Storage.list(["message", sessionID])) {
for (const part of await Storage.list(["part", msg.at(-1)!])) {
await Storage.remove(part)
}
await Storage.remove(msg)
}
await Storage.remove(["session", project.id, sessionID])
Bus.publish(Event.Deleted, {
info: session,
// CASCADE delete handles messages and parts automatically
Database.use((db) => {
db.delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
Database.effect(() =>
Bus.publish(Event.Deleted, {
info: session,
}),
)
})
} catch (e) {
log.error(e)

@@ -374,9 +559,23 @@ export namespace Session {
})

export const updateMessage = fn(MessageV2.Info, async (msg) => {
await Storage.write(["message", msg.sessionID, msg.id], msg)
Bus.publish(MessageV2.Event.Updated, {
info: msg,
const time_created = msg.time.created
const { id, sessionID, ...data } = msg
Database.use((db) => {
db.insert(MessageTable)
.values({
id,
session_id: sessionID,
time_created,
data,
})
.onConflictDoUpdate({ target: MessageTable.id, set: { data } })
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Updated, {
info: msg,
}),
)
})
return msg
})

@@ -387,10 +586,15 @@ export namespace Session {
messageID: Identifier.schema("message"),
}),
async (input) => {
await Storage.remove(["message", input.sessionID, input.messageID])
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
// CASCADE delete handles parts automatically
Database.use((db) => {
db.delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
}),
)
})
return input.messageID
},

@@ -403,39 +607,58 @@ export namespace Session {
partID: Identifier.schema("part"),
}),
async (input) => {
await Storage.remove(["part", input.messageID, input.partID])
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
Database.use((db) => {
db.delete(PartTable).where(eq(PartTable.id, input.partID)).run()
Database.effect(() =>
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
}),
)
})
return input.partID
},
)

const UpdatePartInput = z.union([
MessageV2.Part,
z.object({
part: MessageV2.TextPart,
delta: z.string(),
}),
z.object({
part: MessageV2.ReasoningPart,
delta: z.string(),
}),
])
const UpdatePartInput = MessageV2.Part

export const updatePart = fn(UpdatePartInput, async (input) => {
const part = "delta" in input ? input.part : input
const delta = "delta" in input ? input.delta : undefined
await Storage.write(["part", part.messageID, part.id], part)
Bus.publish(MessageV2.Event.PartUpdated, {
part,
delta,
export const updatePart = fn(UpdatePartInput, async (part) => {
const { id, messageID, sessionID, ...data } = part
const time = Date.now()
Database.use((db) => {
db.insert(PartTable)
.values({
id,
message_id: messageID,
session_id: sessionID,
time_created: time,
data,
})
.onConflictDoUpdate({ target: PartTable.id, set: { data } })
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.PartUpdated, {
part,
}),
)
})
return part
})

export const updatePartDelta = fn(
z.object({
sessionID: z.string(),
messageID: z.string(),
partID: z.string(),
field: z.string(),
delta: z.string(),
}),
async (input) => {
Bus.publish(MessageV2.Event.PartDelta, input)
},
)

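Editorial note: updatePartDelta only publishes an event; nothing touches the database until the full part is flushed through updatePart. A sketch of a subscriber applying deltas to an in-memory copy (the cache is hypothetical; the subscriber shape follows the Bus usage elsewhere in this diff):

Bus.subscribe(MessageV2.Event.PartDelta, async (evt) => {
const { partID, field, delta } = evt.properties
const part = cache.get(partID) // hypothetical local store of parts
if (!part) return
// streaming is append-only: each delta extends the named string field
;(part as any)[field] = ((part as any)[field] ?? "") + delta
})
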
export const getUsage = fn(
z.object({
model: z.custom<Provider.Model>(),

@@ -6,6 +6,10 @@ import { Identifier } from "../id/id"
import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot"
import { fn } from "@/util/fn"
import { Database, eq, desc, inArray } from "@/storage/db"
import { MessageTable, PartTable } from "./session.sql"
import { ProviderTransform } from "@/provider/transform"
import { STATUS_CODES } from "http"
import { Storage } from "@/storage/storage"
import { ProviderError } from "@/provider/error"
import { iife } from "@/util/iife"

@@ -421,7 +425,16 @@ export namespace MessageV2 {
"message.part.updated",
z.object({
part: Part,
delta: z.string().optional(),
}),
),
PartDelta: BusEvent.define(
"message.part.delta",
z.object({
sessionID: z.string(),
messageID: z.string(),
partID: z.string(),
field: z.string(),
delta: z.string(),
}),
),
PartRemoved: BusEvent.define(

@@ -666,23 +679,65 @@ export namespace MessageV2 {
}

export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
const list = await Array.fromAsync(await Storage.list(["message", sessionID]))
for (let i = list.length - 1; i >= 0; i--) {
yield await get({
sessionID,
messageID: list[i][2],
})
const size = 50
let offset = 0
while (true) {
const rows = Database.use((db) =>
db
.select()
.from(MessageTable)
.where(eq(MessageTable.session_id, sessionID))
.orderBy(desc(MessageTable.time_created))
.limit(size)
.offset(offset)
.all(),
)
if (rows.length === 0) break

const ids = rows.map((row) => row.id)
const partsByMessage = new Map<string, MessageV2.Part[]>()
if (ids.length > 0) {
const partRows = Database.use((db) =>
db
.select()
.from(PartTable)
.where(inArray(PartTable.message_id, ids))
.orderBy(PartTable.message_id, PartTable.id)
.all(),
)
for (const row of partRows) {
const part = {
...row.data,
id: row.id,
sessionID: row.session_id,
messageID: row.message_id,
} as MessageV2.Part
const list = partsByMessage.get(row.message_id)
if (list) list.push(part)
else partsByMessage.set(row.message_id, [part])
}
}

for (const row of rows) {
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
yield {
info,
parts: partsByMessage.get(row.id) ?? [],
}
}

offset += rows.length
if (rows.length < size) break
}
})

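Editorial note: the rewritten stream pages newest-first, 50 messages at a time, and batches the part lookups for each page into a single inArray query. Assuming the fn wrapper passes the generator through unchanged, consumption looks like:

for await (const { info, parts } of MessageV2.stream(sessionID)) {
// newest message arrives first; a consumer can stop iterating early
// once it has rendered enough history
}
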
export const parts = fn(Identifier.schema("message"), async (messageID) => {
const result = [] as MessageV2.Part[]
for (const item of await Storage.list(["part", messageID])) {
const read = await Storage.read<MessageV2.Part>(item)
result.push(read)
}
result.sort((a, b) => (a.id > b.id ? 1 : -1))
return result
export const parts = fn(Identifier.schema("message"), async (message_id) => {
const rows = Database.use((db) =>
db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(),
)
return rows.map(
(row) => ({ ...row.data, id: row.id, sessionID: row.session_id, messageID: row.message_id }) as MessageV2.Part,
)
})

export const get = fn(

@@ -691,8 +746,11 @@ export namespace MessageV2 {
messageID: Identifier.schema("message"),
}),
async (input): Promise<WithParts> => {
const row = Database.use((db) => db.select().from(MessageTable).where(eq(MessageTable.id, input.messageID)).get())
if (!row) throw new Error(`Message not found: ${input.messageID}`)
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
return {
info: await Storage.read<MessageV2.Info>(["message", input.sessionID, input.messageID]),
info,
parts: await parts(input.messageID),
}
},

@@ -63,17 +63,19 @@ export namespace SessionProcessor {
if (value.id in reasoningMap) {
continue
}
reasoningMap[value.id] = {
const reasoningPart = {
id: Identifier.ascending("part"),
messageID: input.assistantMessage.id,
sessionID: input.assistantMessage.sessionID,
type: "reasoning",
type: "reasoning" as const,
text: "",
time: {
start: Date.now(),
},
metadata: value.providerMetadata,
}
reasoningMap[value.id] = reasoningPart
await Session.updatePart(reasoningPart)
break

case "reasoning-delta":

@@ -81,7 +83,13 @@ export namespace SessionProcessor {
const part = reasoningMap[value.id]
part.text += value.text
if (value.providerMetadata) part.metadata = value.providerMetadata
if (part.text) await Session.updatePart({ part, delta: value.text })
await Session.updatePartDelta({
sessionID: part.sessionID,
messageID: part.messageID,
partID: part.id,
field: "text",
delta: value.text,
})
}
break

@@ -288,17 +296,20 @@ export namespace SessionProcessor {
},
metadata: value.providerMetadata,
}
await Session.updatePart(currentText)
break

case "text-delta":
if (currentText) {
currentText.text += value.text
if (value.providerMetadata) currentText.metadata = value.providerMetadata
if (currentText.text)
await Session.updatePart({
part: currentText,
delta: value.text,
})
await Session.updatePartDelta({
sessionID: currentText.sessionID,
messageID: currentText.messageID,
partID: currentText.id,
field: "text",
delta: value.text,
})
}
break

@@ -165,9 +165,7 @@ export namespace SessionPrompt {
}
if (permissions.length > 0) {
session.permission = permissions
await Session.update(session.id, (draft) => {
draft.permission = permissions
})
await Session.setPermission({ sessionID: session.id, permission: permissions })
}

if (input.noReply === true) {

@@ -1854,21 +1852,16 @@ NOTE: At any point in time through this workflow you should feel free to ask the
],
})
const text = await result.text.catch((err) => log.error("failed to generate title", { error: err }))
if (text)
return Session.update(
input.session.id,
(draft) => {
const cleaned = text
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
.split("\n")
.map((line) => line.trim())
.find((line) => line.length > 0)
if (!cleaned) return
if (text) {
const cleaned = text
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
.split("\n")
.map((line) => line.trim())
.find((line) => line.length > 0)
if (!cleaned) return

const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
draft.title = title
},
{ touch: false },
)
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
return Session.setTitle({ sessionID: input.session.id, title })
}
}
}

@@ -4,8 +4,9 @@ import { Snapshot } from "../snapshot"
import { MessageV2 } from "./message-v2"
import { Session } from "."
import { Log } from "../util/log"
import { splitWhen } from "remeda"
import { Storage } from "../storage/storage"
import { Database, eq } from "../storage/db"
import { MessageTable, PartTable } from "./session.sql"
import { Storage } from "@/storage/storage"
import { Bus } from "../bus"
import { SessionPrompt } from "./prompt"
import { SessionSummary } from "./summary"

@@ -65,13 +66,14 @@ export namespace SessionRevert {
sessionID: input.sessionID,
diff: diffs,
})
return Session.update(input.sessionID, (draft) => {
draft.revert = revert
draft.summary = {
return Session.setRevert({
sessionID: input.sessionID,
revert,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
}
},
})
}
return session

@@ -83,39 +85,54 @@ export namespace SessionRevert {
const session = await Session.get(input.sessionID)
if (!session.revert) return session
if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot)
const next = await Session.update(input.sessionID, (draft) => {
draft.revert = undefined
})
return next
return Session.clearRevert(input.sessionID)
}

export async function cleanup(session: Session.Info) {
if (!session.revert) return
const sessionID = session.id
let msgs = await Session.messages({ sessionID })
const msgs = await Session.messages({ sessionID })
const messageID = session.revert.messageID
const [preserve, remove] = splitWhen(msgs, (x) => x.info.id === messageID)
msgs = preserve
const preserve = [] as MessageV2.WithParts[]
const remove = [] as MessageV2.WithParts[]
let target: MessageV2.WithParts | undefined
for (const msg of msgs) {
if (msg.info.id < messageID) {
preserve.push(msg)
continue
}
if (msg.info.id > messageID) {
remove.push(msg)
continue
}
if (session.revert.partID) {
preserve.push(msg)
target = msg
continue
}
remove.push(msg)
}
for (const msg of remove) {
await Storage.remove(["message", sessionID, msg.info.id])
Database.use((db) => db.delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run())
await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id })
}
const last = preserve.at(-1)
if (session.revert.partID && last) {
if (session.revert.partID && target) {
const partID = session.revert.partID
const [preserveParts, removeParts] = splitWhen(last.parts, (x) => x.id === partID)
last.parts = preserveParts
for (const part of removeParts) {
await Storage.remove(["part", last.info.id, part.id])
await Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: sessionID,
messageID: last.info.id,
partID: part.id,
})
const removeStart = target.parts.findIndex((part) => part.id === partID)
if (removeStart >= 0) {
const preserveParts = target.parts.slice(0, removeStart)
const removeParts = target.parts.slice(removeStart)
target.parts = preserveParts
for (const part of removeParts) {
Database.use((db) => db.delete(PartTable).where(eq(PartTable.id, part.id)).run())
await Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: sessionID,
messageID: target.info.id,
partID: part.id,
})
}
}
}
await Session.update(sessionID, (draft) => {
draft.revert = undefined
})
await Session.clearRevert(sessionID)
}
}

88 packages/opencode/src/session/session.sql.ts Normal file
@@ -0,0 +1,88 @@
import { sqliteTable, text, integer, index, primaryKey } from "drizzle-orm/sqlite-core"
import { ProjectTable } from "../project/project.sql"
import type { MessageV2 } from "./message-v2"
import type { Snapshot } from "@/snapshot"
import type { PermissionNext } from "@/permission/next"
import { Timestamps } from "@/storage/schema.sql"

type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID">
type InfoData = Omit<MessageV2.Info, "id" | "sessionID">

export const SessionTable = sqliteTable(
"session",
{
id: text().primaryKey(),
project_id: text()
.notNull()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
parent_id: text(),
slug: text().notNull(),
directory: text().notNull(),
title: text().notNull(),
version: text().notNull(),
share_url: text(),
summary_additions: integer(),
summary_deletions: integer(),
summary_files: integer(),
summary_diffs: text({ mode: "json" }).$type<Snapshot.FileDiff[]>(),
revert: text({ mode: "json" }).$type<{ messageID: string; partID?: string; snapshot?: string; diff?: string }>(),
permission: text({ mode: "json" }).$type<PermissionNext.Ruleset>(),
...Timestamps,
time_compacting: integer(),
time_archived: integer(),
},
(table) => [index("session_project_idx").on(table.project_id), index("session_parent_idx").on(table.parent_id)],
)

export const MessageTable = sqliteTable(
"message",
{
id: text().primaryKey(),
session_id: text()
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<InfoData>(),
},
(table) => [index("message_session_idx").on(table.session_id)],
)

export const PartTable = sqliteTable(
"part",
{
id: text().primaryKey(),
message_id: text()
.notNull()
.references(() => MessageTable.id, { onDelete: "cascade" }),
session_id: text().notNull(),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<PartData>(),
},
(table) => [index("part_message_idx").on(table.message_id), index("part_session_idx").on(table.session_id)],
)

export const TodoTable = sqliteTable(
"todo",
{
session_id: text()
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
content: text().notNull(),
status: text().notNull(),
priority: text().notNull(),
position: integer().notNull(),
...Timestamps,
},
(table) => [
primaryKey({ columns: [table.session_id, table.position] }),
index("todo_session_idx").on(table.session_id),
],
)

export const PermissionTable = sqliteTable("permission", {
project_id: text()
.primaryKey()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<PermissionNext.Ruleset>(),
})
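
Editorial note on the schema above: hot columns (ids, foreign keys, timestamps) stay relational and indexable, while the remainder of each message or part lives in the JSON data column. Rebuilding the full object is a spread plus the key columns, exactly as the reads elsewhere in this diff do:

// row.data is typed Omit<MessageV2.Part, "id" | "sessionID" | "messageID">,
// so merging the key columns back in yields the complete part
const part = { ...row.data, id: row.id, sessionID: row.session_id, messageID: row.message_id } as MessageV2.Part
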
@@ -90,12 +90,13 @@ export namespace SessionSummary {

async function summarizeSession(input: { sessionID: string; messages: MessageV2.WithParts[] }) {
const diffs = await computeDiff({ messages: input.messages })
await Session.update(input.sessionID, (draft) => {
draft.summary = {
await Session.setSummary({
sessionID: input.sessionID,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
}
},
})
await Storage.write(["session_diff", input.sessionID], diffs)
Bus.publish(Session.Event.Diff, {

@@ -1,7 +1,8 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import z from "zod"
import { Storage } from "../storage/storage"
import { Database, eq, asc } from "../storage/db"
import { TodoTable } from "./session.sql"

export namespace Todo {
export const Info = z

@@ -9,7 +10,6 @@ export namespace Todo {
content: z.string().describe("Brief description of the task"),
status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"),
priority: z.string().describe("Priority level of the task: high, medium, low"),
id: z.string().describe("Unique identifier for the todo item"),
})
.meta({ ref: "Todo" })
export type Info = z.infer<typeof Info>

@@ -24,14 +24,33 @@ export namespace Todo {
),
}

export async function update(input: { sessionID: string; todos: Info[] }) {
await Storage.write(["todo", input.sessionID], input.todos)
export function update(input: { sessionID: string; todos: Info[] }) {
Database.transaction((db) => {
db.delete(TodoTable).where(eq(TodoTable.session_id, input.sessionID)).run()
if (input.todos.length === 0) return
db.insert(TodoTable)
.values(
input.todos.map((todo, position) => ({
session_id: input.sessionID,
content: todo.content,
status: todo.status,
priority: todo.priority,
position,
})),
)
.run()
})
Bus.publish(Event.Updated, input)
}

export async function get(sessionID: string) {
return Storage.read<Info[]>(["todo", sessionID])
.then((x) => x || [])
.catch(() => [])
export function get(sessionID: string) {
const rows = Database.use((db) =>
db.select().from(TodoTable).where(eq(TodoTable.session_id, sessionID)).orderBy(asc(TodoTable.position)).all(),
)
return rows.map((row) => ({
content: row.content,
status: row.status,
priority: row.priority,
}))
}
}

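Editorial note: Todo.update above replaces rather than merges; each call deletes the session's rows and reinserts the whole list inside one transaction, with array order captured in position. A usage sketch (values illustrative):

Todo.update({
sessionID,
todos: [
{ content: "write tests", status: "pending", priority: "high" },
{ content: "update docs", status: "pending", priority: "low" },
],
})
// get() returns rows ordered by position, so the order given above is preserved
const todos = Todo.get(sessionID)
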
@@ -4,7 +4,8 @@ import { ulid } from "ulid"
import { Provider } from "@/provider/provider"
import { Session } from "@/session"
import { MessageV2 } from "@/session/message-v2"
import { Storage } from "@/storage/storage"
import { Database, eq } from "@/storage/db"
import { SessionShareTable } from "./share.sql"
import { Log } from "@/util/log"
import type * as SDK from "@opencode-ai/sdk/v2"

@@ -77,17 +78,26 @@ export namespace ShareNext {
})
.then((x) => x.json())
.then((x) => x as { id: string; url: string; secret: string })
await Storage.write(["session_share", sessionID], result)
Database.use((db) =>
db
.insert(SessionShareTable)
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
.onConflictDoUpdate({
target: SessionShareTable.session_id,
set: { id: result.id, secret: result.secret, url: result.url },
})
.run(),
)
fullSync(sessionID)
return result
}

function get(sessionID: string) {
return Storage.read<{
id: string
secret: string
url: string
}>(["session_share", sessionID])
const row = Database.use((db) =>
db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
)
if (!row) return
return { id: row.id, secret: row.secret, url: row.url }
}

type Data =

@@ -132,7 +142,7 @@ export namespace ShareNext {
const queued = queue.get(sessionID)
if (!queued) return
queue.delete(sessionID)
const share = await get(sessionID).catch(() => undefined)
const share = get(sessionID)
if (!share) return

await fetch(`${await url()}/api/share/${share.id}/sync`, {

@@ -152,7 +162,7 @@ export namespace ShareNext {
export async function remove(sessionID: string) {
if (disabled) return
log.info("removing share", { sessionID })
const share = await get(sessionID)
const share = get(sessionID)
if (!share) return
await fetch(`${await url()}/api/share/${share.id}`, {
method: "DELETE",

@@ -163,7 +173,7 @@ export namespace ShareNext {
secret: share.secret,
}),
})
await Storage.remove(["session_share", sessionID])
Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
}

async function fullSync(sessionID: string) {

13 packages/opencode/src/share/share.sql.ts Normal file
@@ -0,0 +1,13 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { SessionTable } from "../session/session.sql"
import { Timestamps } from "@/storage/schema.sql"

export const SessionShareTable = sqliteTable("session_share", {
session_id: text()
.primaryKey()
.references(() => SessionTable.id, { onDelete: "cascade" }),
id: text().notNull(),
secret: text().notNull(),
url: text().notNull(),
...Timestamps,
})

@@ -1,92 +0,0 @@
import { Bus } from "../bus"
import { Installation } from "../installation"
import { Session } from "../session"
import { MessageV2 } from "../session/message-v2"
import { Log } from "../util/log"

export namespace Share {
const log = Log.create({ service: "share" })

let queue: Promise<void> = Promise.resolve()
const pending = new Map<string, any>()

export async function sync(key: string, content: any) {
if (disabled) return
const [root, ...splits] = key.split("/")
if (root !== "session") return
const [sub, sessionID] = splits
if (sub === "share") return
const share = await Session.getShare(sessionID).catch(() => {})
if (!share) return
const { secret } = share
pending.set(key, content)
queue = queue
.then(async () => {
const content = pending.get(key)
if (content === undefined) return
pending.delete(key)

return fetch(`${URL}/share_sync`, {
method: "POST",
body: JSON.stringify({
sessionID: sessionID,
secret,
key: key,
content,
}),
})
})
.then((x) => {
if (x) {
log.info("synced", {
key: key,
status: x.status,
})
}
})
}

export function init() {
Bus.subscribe(Session.Event.Updated, async (evt) => {
await sync("session/info/" + evt.properties.info.id, evt.properties.info)
})
Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
await sync("session/message/" + evt.properties.info.sessionID + "/" + evt.properties.info.id, evt.properties.info)
})
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
await sync(
"session/part/" +
evt.properties.part.sessionID +
"/" +
evt.properties.part.messageID +
"/" +
evt.properties.part.id,
evt.properties.part,
)
})
}

export const URL =
process.env["OPENCODE_API"] ??
(Installation.isPreview() || Installation.isLocal() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")

const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"

export async function create(sessionID: string) {
if (disabled) return { url: "", secret: "" }
return fetch(`${URL}/share_create`, {
method: "POST",
body: JSON.stringify({ sessionID: sessionID }),
})
.then((x) => x.json())
.then((x) => x as { url: string; secret: string })
}

export async function remove(sessionID: string, secret: string) {
if (disabled) return {}
return fetch(`${URL}/share_delete`, {
method: "POST",
body: JSON.stringify({ sessionID, secret }),
}).then((x) => x.json())
}
}
4 packages/opencode/src/sql.d.ts vendored Normal file
@@ -0,0 +1,4 @@
declare module "*.sql" {
const content: string
export default content
}
140 packages/opencode/src/storage/db.ts Normal file
@@ -0,0 +1,140 @@
|
||||
import { Database as BunDatabase } from "bun:sqlite"
|
||||
import { drizzle, type SQLiteBunDatabase } from "drizzle-orm/bun-sqlite"
|
||||
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
|
||||
import { type SQLiteTransaction } from "drizzle-orm/sqlite-core"
|
||||
export * from "drizzle-orm"
|
||||
import { Context } from "../util/context"
|
||||
import { lazy } from "../util/lazy"
|
||||
import { Global } from "../global"
|
||||
import { Log } from "../util/log"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import z from "zod"
|
||||
import path from "path"
|
||||
import { readFileSync, readdirSync } from "fs"
|
||||
import fs from "fs/promises"
|
||||
import { Instance } from "@/project/instance"
|
||||
|
||||
declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined
|
||||
|
||||
export const NotFoundError = NamedError.create(
  "NotFoundError",
  z.object({
    message: z.string(),
  }),
)

const log = Log.create({ service: "db" })

export namespace Database {
  export type Transaction = SQLiteTransaction<"sync", void, Record<string, never>, Record<string, never>>

  type Client = SQLiteBunDatabase

  type Journal = { sql: string; timestamp: number }[]

  function time(tag: string) {
    const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(tag)
    if (!match) return 0
    return Date.UTC(
      Number(match[1]),
      Number(match[2]) - 1,
      Number(match[3]),
      Number(match[4]),
      Number(match[5]),
      Number(match[6]),
    )
  }

  function migrations(dir: string): Journal {
    const dirs = readdirSync(dir, { withFileTypes: true })
      .filter((entry) => entry.isDirectory())
      .map((entry) => entry.name)

    const sql = dirs
      .map((name) => {
        const file = path.join(dir, name, "migration.sql")
        if (!Bun.file(file).size) return
        return {
          sql: readFileSync(file, "utf-8"),
          timestamp: time(name),
        }
      })
      .filter(Boolean) as Journal

    return sql.sort((a, b) => a.timestamp - b.timestamp)
  }

  export const Client = lazy(() => {
    log.info("opening database", { path: path.join(Global.Path.data, "opencode.db") })

    const sqlite = new BunDatabase(path.join(Global.Path.data, "opencode.db"), { create: true })

    sqlite.run("PRAGMA journal_mode = WAL")
    sqlite.run("PRAGMA synchronous = NORMAL")
    sqlite.run("PRAGMA busy_timeout = 5000")
    sqlite.run("PRAGMA cache_size = -64000")
    sqlite.run("PRAGMA foreign_keys = ON")

    const db = drizzle({ client: sqlite })

    // Apply schema migrations
    const entries =
      typeof OPENCODE_MIGRATIONS !== "undefined"
        ? OPENCODE_MIGRATIONS
        : migrations(path.join(import.meta.dirname, "../../migration"))
    if (entries.length > 0) {
      log.info("applying migrations", {
        count: entries.length,
        mode: typeof OPENCODE_MIGRATIONS !== "undefined" ? "bundled" : "dev",
      })
      migrate(db, entries)
    }

    return db
  })

  export type TxOrDb = Transaction | Client

  const ctx = Context.create<{
    tx: TxOrDb
    effects: (() => void | Promise<void>)[]
  }>("database")

  export function use<T>(callback: (trx: TxOrDb) => T): T {
    try {
      return callback(ctx.use().tx)
    } catch (err) {
      if (err instanceof Context.NotFound) {
        const effects: (() => void | Promise<void>)[] = []
        const result = ctx.provide({ effects, tx: Client() }, () => callback(Client()))
        for (const effect of effects) effect()
        return result
      }
      throw err
    }
  }

  export function effect(fn: () => any | Promise<any>) {
    try {
      ctx.use().effects.push(fn)
    } catch {
      fn()
    }
  }

  export function transaction<T>(callback: (tx: TxOrDb) => T): T {
    try {
      return callback(ctx.use().tx)
    } catch (err) {
      if (err instanceof Context.NotFound) {
        const effects: (() => void | Promise<void>)[] = []
        const result = Client().transaction((tx) => {
          return ctx.provide({ tx, effects }, () => callback(tx))
        })
        for (const effect of effects) effect()
        return result
      }
      throw err
    }
  }
}
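To make the context plumbing above concrete, here is a minimal usage sketch; the call site, `renameSession`, and the `Bus` event helper are illustrative stand-ins, not code from this change set:

// Hypothetical usage of Database.transaction + Database.effect.
// Assumes SessionTable from session.sql; Bus is a placeholder event bus.
import { eq } from "drizzle-orm"
import { Database } from "./db"
import { SessionTable } from "../session/session.sql"

declare const Bus: { publish(type: string, props: unknown): void }

function renameSession(id: string, title: string) {
  return Database.transaction((tx) => {
    tx.update(SessionTable).set({ title }).where(eq(SessionTable.id, id)).run()
    // Queued on the transaction context: runs only after the outer transaction commits.
    // Outside a transaction, Database.effect runs the callback immediately.
    Database.effect(() => Bus.publish("session.updated", { id }))
  })
}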
packages/opencode/src/storage/json-migration.ts (new file, 436 lines)
@@ -0,0 +1,436 @@
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { Global } from "../global"
import { Log } from "../util/log"
import { ProjectTable } from "../project/project.sql"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql"
import { SessionShareTable } from "../share/share.sql"
import path from "path"
import { existsSync } from "fs"

export namespace JsonMigration {
  const log = Log.create({ service: "json-migration" })

  export type Progress = {
    current: number
    total: number
    label: string
  }

  type Options = {
    progress?: (event: Progress) => void
  }

  export async function run(sqlite: Database, options?: Options) {
    const storageDir = path.join(Global.Path.data, "storage")

    if (!existsSync(storageDir)) {
      log.info("storage directory does not exist, skipping migration")
      return {
        projects: 0,
        sessions: 0,
        messages: 0,
        parts: 0,
        todos: 0,
        permissions: 0,
        shares: 0,
        errors: [] as string[],
      }
    }

    log.info("starting json to sqlite migration", { storageDir })
    const start = performance.now()

    const db = drizzle({ client: sqlite })

    // Optimize SQLite for bulk inserts
    sqlite.exec("PRAGMA journal_mode = WAL")
    sqlite.exec("PRAGMA synchronous = OFF")
    sqlite.exec("PRAGMA cache_size = 10000")
    sqlite.exec("PRAGMA temp_store = MEMORY")
    const stats = {
      projects: 0,
      sessions: 0,
      messages: 0,
      parts: 0,
      todos: 0,
      permissions: 0,
      shares: 0,
      errors: [] as string[],
    }
    const orphans = {
      sessions: 0,
      todos: 0,
      permissions: 0,
      shares: 0,
    }
    const errs = stats.errors

    const batchSize = 1000
    const now = Date.now()

    async function list(pattern: string) {
      const items: string[] = []
      const scan = new Bun.Glob(pattern)
      for await (const file of scan.scan({ cwd: storageDir, absolute: true })) {
        items.push(file)
      }
      return items
    }

    async function read(files: string[], start: number, end: number) {
      const count = end - start
      const tasks = new Array(count)
      for (let i = 0; i < count; i++) {
        tasks[i] = Bun.file(files[start + i]).json()
      }
      const results = await Promise.allSettled(tasks)
      const items = new Array(count)
      for (let i = 0; i < results.length; i++) {
        const result = results[i]
        if (result.status === "fulfilled") {
          items[i] = result.value
          continue
        }
        errs.push(`failed to read ${files[start + i]}: ${result.reason}`)
      }
      return items
    }

    function insert(values: any[], table: any, label: string) {
      if (values.length === 0) return 0
      try {
        db.insert(table).values(values).onConflictDoNothing().run()
        return values.length
      } catch (e) {
        errs.push(`failed to migrate ${label} batch: ${e}`)
        return 0
      }
    }

    // Pre-scan all files upfront to avoid repeated glob operations
    log.info("scanning files...")
    const [projectFiles, sessionFiles, messageFiles, partFiles, todoFiles, permFiles, shareFiles] = await Promise.all([
      list("project/*.json"),
      list("session/*/*.json"),
      list("message/*/*.json"),
      list("part/*/*.json"),
      list("todo/*.json"),
      list("permission/*.json"),
      list("session_share/*.json"),
    ])

    log.info("file scan complete", {
      projects: projectFiles.length,
      sessions: sessionFiles.length,
      messages: messageFiles.length,
      parts: partFiles.length,
      todos: todoFiles.length,
      permissions: permFiles.length,
      shares: shareFiles.length,
    })

    const total = Math.max(
      1,
      projectFiles.length +
        sessionFiles.length +
        messageFiles.length +
        partFiles.length +
        todoFiles.length +
        permFiles.length +
        shareFiles.length,
    )
    const progress = options?.progress
    let current = 0
    const step = (label: string, count: number) => {
      current = Math.min(total, current + count)
      progress?.({ current, total, label })
    }

    progress?.({ current, total, label: "starting" })

    sqlite.exec("BEGIN TRANSACTION")

    // Migrate projects first (no FK deps)
    const projectIds = new Set<string>()
    const projectValues = [] as any[]
    for (let i = 0; i < projectFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, projectFiles.length)
      const batch = await read(projectFiles, i, end)
      projectValues.length = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        if (!data?.id) {
          errs.push(`project missing id: ${projectFiles[i + j]}`)
          continue
        }
        projectIds.add(data.id)
        projectValues.push({
          id: data.id,
          worktree: data.worktree ?? "/",
          vcs: data.vcs,
          name: data.name ?? undefined,
          icon_url: data.icon?.url,
          icon_color: data.icon?.color,
          time_created: data.time?.created ?? now,
          time_updated: data.time?.updated ?? now,
          time_initialized: data.time?.initialized,
          sandboxes: data.sandboxes ?? [],
        })
      }
      stats.projects += insert(projectValues, ProjectTable, "project")
      step("projects", end - i)
    }
    log.info("migrated projects", { count: stats.projects, duration: Math.round(performance.now() - start) })

    // Migrate sessions (depends on projects)
    const sessionIds = new Set<string>()
    const sessionValues = [] as any[]
    for (let i = 0; i < sessionFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, sessionFiles.length)
      const batch = await read(sessionFiles, i, end)
      sessionValues.length = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        if (!data?.id || !data?.projectID) {
          errs.push(`session missing id or projectID: ${sessionFiles[i + j]}`)
          continue
        }
        if (!projectIds.has(data.projectID)) {
          orphans.sessions++
          continue
        }
        sessionIds.add(data.id)
        sessionValues.push({
          id: data.id,
          project_id: data.projectID,
          parent_id: data.parentID ?? null,
          slug: data.slug ?? "",
          directory: data.directory ?? "",
          title: data.title ?? "",
          version: data.version ?? "",
          share_url: data.share?.url ?? null,
          summary_additions: data.summary?.additions ?? null,
          summary_deletions: data.summary?.deletions ?? null,
          summary_files: data.summary?.files ?? null,
          summary_diffs: data.summary?.diffs ?? null,
          revert: data.revert ?? null,
          permission: data.permission ?? null,
          time_created: data.time?.created ?? now,
          time_updated: data.time?.updated ?? now,
          time_compacting: data.time?.compacting ?? null,
          time_archived: data.time?.archived ?? null,
        })
      }
      stats.sessions += insert(sessionValues, SessionTable, "session")
      step("sessions", end - i)
    }
    log.info("migrated sessions", { count: stats.sessions })
    if (orphans.sessions > 0) {
      log.warn("skipped orphaned sessions", { count: orphans.sessions })
    }

    // Migrate messages using pre-scanned file map
    const allMessageFiles = [] as string[]
    const allMessageSessions = [] as string[]
    const messageSessions = new Map<string, string>()
    for (const file of messageFiles) {
      const sessionID = path.basename(path.dirname(file))
      if (!sessionIds.has(sessionID)) continue
      allMessageFiles.push(file)
      allMessageSessions.push(sessionID)
    }

    for (let i = 0; i < allMessageFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, allMessageFiles.length)
      const batch = await read(allMessageFiles, i, end)
      const values = new Array(batch.length)
      let count = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const file = allMessageFiles[i + j]
        const id = data.id ?? path.basename(file, ".json")
        if (!id) {
          errs.push(`message missing id: ${file}`)
          continue
        }
        const sessionID = allMessageSessions[i + j]
        messageSessions.set(id, sessionID)
        const rest = data
        delete rest.id
        delete rest.sessionID
        values[count++] = {
          id,
          session_id: sessionID,
          time_created: data.time?.created ?? now,
          time_updated: data.time?.updated ?? now,
          data: rest,
        }
      }
      values.length = count
      stats.messages += insert(values, MessageTable, "message")
      step("messages", end - i)
    }
    log.info("migrated messages", { count: stats.messages })

    // Migrate parts using pre-scanned file map
    for (let i = 0; i < partFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, partFiles.length)
      const batch = await read(partFiles, i, end)
      const values = new Array(batch.length)
      let count = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const file = partFiles[i + j]
        const id = data.id ?? path.basename(file, ".json")
        const messageID = data.messageID ?? path.basename(path.dirname(file))
        if (!id || !messageID) {
          errs.push(`part missing id/messageID/sessionID: ${file}`)
          continue
        }
        const sessionID = messageSessions.get(messageID)
        if (!sessionID) {
          errs.push(`part missing message session: ${file}`)
          continue
        }
        if (!sessionIds.has(sessionID)) continue
        const rest = data
        delete rest.id
        delete rest.messageID
        delete rest.sessionID
        values[count++] = {
          id,
          message_id: messageID,
          session_id: sessionID,
          time_created: data.time?.created ?? now,
          time_updated: data.time?.updated ?? now,
          data: rest,
        }
      }
      values.length = count
      stats.parts += insert(values, PartTable, "part")
      step("parts", end - i)
    }
    log.info("migrated parts", { count: stats.parts })

    // Migrate todos
    const todoSessions = todoFiles.map((file) => path.basename(file, ".json"))
    for (let i = 0; i < todoFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, todoFiles.length)
      const batch = await read(todoFiles, i, end)
      const values = [] as any[]
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const sessionID = todoSessions[i + j]
        if (!sessionIds.has(sessionID)) {
          orphans.todos++
          continue
        }
        if (!Array.isArray(data)) {
          errs.push(`todo not an array: ${todoFiles[i + j]}`)
          continue
        }
        for (let position = 0; position < data.length; position++) {
          const todo = data[position]
          if (!todo?.content || !todo?.status || !todo?.priority) continue
          values.push({
            session_id: sessionID,
            content: todo.content,
            status: todo.status,
            priority: todo.priority,
            position,
            time_created: now,
            time_updated: now,
          })
        }
      }
      stats.todos += insert(values, TodoTable, "todo")
      step("todos", end - i)
    }
    log.info("migrated todos", { count: stats.todos })
    if (orphans.todos > 0) {
      log.warn("skipped orphaned todos", { count: orphans.todos })
    }

    // Migrate permissions
    const permProjects = permFiles.map((file) => path.basename(file, ".json"))
    const permValues = [] as any[]
    for (let i = 0; i < permFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, permFiles.length)
      const batch = await read(permFiles, i, end)
      permValues.length = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const projectID = permProjects[i + j]
        if (!projectIds.has(projectID)) {
          orphans.permissions++
          continue
        }
        permValues.push({ project_id: projectID, data })
      }
      stats.permissions += insert(permValues, PermissionTable, "permission")
      step("permissions", end - i)
    }
    log.info("migrated permissions", { count: stats.permissions })
    if (orphans.permissions > 0) {
      log.warn("skipped orphaned permissions", { count: orphans.permissions })
    }

    // Migrate session shares
    const shareSessions = shareFiles.map((file) => path.basename(file, ".json"))
    const shareValues = [] as any[]
    for (let i = 0; i < shareFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, shareFiles.length)
      const batch = await read(shareFiles, i, end)
      shareValues.length = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const sessionID = shareSessions[i + j]
        if (!sessionIds.has(sessionID)) {
          orphans.shares++
          continue
        }
        if (!data?.id || !data?.secret || !data?.url) {
          errs.push(`session_share missing id/secret/url: ${shareFiles[i + j]}`)
          continue
        }
        shareValues.push({ session_id: sessionID, id: data.id, secret: data.secret, url: data.url })
      }
      stats.shares += insert(shareValues, SessionShareTable, "session_share")
      step("shares", end - i)
    }
    log.info("migrated session shares", { count: stats.shares })
    if (orphans.shares > 0) {
      log.warn("skipped orphaned session shares", { count: orphans.shares })
    }

    sqlite.exec("COMMIT")

    log.info("json migration complete", {
      projects: stats.projects,
      sessions: stats.sessions,
      messages: stats.messages,
      parts: stats.parts,
      todos: stats.todos,
      permissions: stats.permissions,
      shares: stats.shares,
      errorCount: stats.errors.length,
      duration: Math.round(performance.now() - start),
    })

    if (stats.errors.length > 0) {
      log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
    }

    progress?.({ current: total, total, label: "complete" })

    return stats
  }
}
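A short sketch of how this entry point might be driven, for example to report progress during startup; the wiring below is hypothetical and the real caller would pass the opencode.db handle after schema migrations have been applied:

// Hypothetical driver for JsonMigration.run with a progress callback.
import { Database } from "bun:sqlite"
import { JsonMigration } from "./json-migration"

const sqlite = new Database(":memory:") // placeholder; real callers pass the migrated opencode.db
const stats = await JsonMigration.run(sqlite, {
  progress: ({ current, total, label }) => {
    console.log(`migrating ${label}: ${current}/${total}`)
  },
})
if (stats.errors.length) console.warn(`migration finished with ${stats.errors.length} errors`)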
packages/opencode/src/storage/schema.sql.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { integer } from "drizzle-orm/sqlite-core"

export const Timestamps = {
  time_created: integer()
    .notNull()
    .$default(() => Date.now()),
  time_updated: integer()
    .notNull()
    .$onUpdate(() => Date.now()),
}
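This helper is meant to be spread into table definitions so every table shares the same created/updated columns; a minimal sketch, where the `example` table is illustrative rather than part of the schema:

import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { Timestamps } from "./schema.sql"

// Spreading Timestamps adds time_created/time_updated with the defaults above.
const ExampleTable = sqliteTable("example", {
  id: text().primaryKey(),
  ...Timestamps,
})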
@@ -4,9 +4,14 @@ export function lazy<T>(fn: () => T) {

  const result = (): T => {
    if (loaded) return value as T
    loaded = true
    value = fn()
    return value as T
    try {
      value = fn()
      loaded = true
      return value as T
    } catch (e) {
      // Don't mark as loaded if initialization failed
      throw e
    }
  }

  result.reset = () => {
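The hunk above changes `lazy` so `loaded` is only set after `fn()` succeeds, which means a failed initialization can be retried instead of caching the failure forever. A small sketch of the resulting contract, assuming `lazy` is imported from the util shown:

import { lazy } from "./lazy"

let attempts = 0
const flaky = lazy(() => {
  attempts++
  if (attempts === 1) throw new Error("boom")
  return "ready"
})

try {
  flaky() // throws, and the failure is NOT cached
} catch {}
flaky() // second call re-runs fn() and now returns "ready"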
@@ -7,7 +7,8 @@ import { Global } from "../global"
import { Instance } from "../project/instance"
import { InstanceBootstrap } from "../project/bootstrap"
import { Project } from "../project/project"
import { Storage } from "../storage/storage"
import { Database, eq } from "../storage/db"
import { ProjectTable } from "../project/project.sql"
import { fn } from "../util/fn"
import { Log } from "../util/log"
import { BusEvent } from "@/bus/bus-event"

@@ -307,7 +308,8 @@ export namespace Worktree {
  }

  async function runStartScripts(directory: string, input: { projectID: string; extra?: string }) {
    const project = await Storage.read<Project.Info>(["project", input.projectID]).catch(() => undefined)
    const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get())
    const project = row ? Project.fromRow(row) : undefined
    const startup = project?.commands?.start?.trim() ?? ""
    const ok = await runStartScript(directory, startup, "project")
    if (!ok) return false
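The same read-path swap, from a `Storage.read` JSON blob to a Drizzle select plus a row mapper, recurs throughout this change set. A generic sketch with placeholder names (`SomeTable` and `fromRow` are illustrative, not the exact call sites):

import { eq } from "drizzle-orm"
import { Database } from "../storage/db"

// Placeholder table and mapper; each domain module supplies its own.
declare const SomeTable: any
declare function fromRow(row: unknown): { id: string } | undefined

function load(id: string) {
  const row = Database.use((db) => db.select().from(SomeTable).where(eq(SomeTable.id, id)).get())
  return row ? fromRow(row) : undefined
}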
@@ -122,12 +122,20 @@ function createFakeAgent() {
    messages: async () => {
      return { data: [] }
    },
    message: async () => {
    message: async (params?: any) => {
      // Return a message with parts that can be looked up by partID
      return {
        data: {
          info: {
            role: "assistant",
          },
          parts: [
            {
              id: params?.messageID ? `${params.messageID}_part` : "part_1",
              type: "text",
              text: "",
            },
          ],
        },
      }
    },

@@ -193,7 +201,7 @@ function createFakeAgent() {
}

describe("acp.agent event subscription", () => {
  test("routes message.part.updated by the event sessionID (no cross-session pollution)", async () => {
  test("routes message.part.delta by the event sessionID (no cross-session pollution)", async () => {
    await using tmp = await tmpdir()
    await Instance.provide({
      directory: tmp.path,

@@ -207,14 +215,12 @@ describe("acp.agent event subscription", () => {
        controller.push({
          directory: cwd,
          payload: {
            type: "message.part.updated",
            type: "message.part.delta",
            properties: {
              part: {
                sessionID: sessionB,
                messageID: "msg_1",
                type: "text",
                synthetic: false,
              },
              sessionID: sessionB,
              messageID: "msg_1",
              partID: "msg_1_part",
              field: "text",
              delta: "hello",
            },
          },

@@ -230,7 +236,7 @@ describe("acp.agent event subscription", () => {
    })
  })

  test("keeps concurrent sessions isolated when message.part.updated events are interleaved", async () => {
  test("keeps concurrent sessions isolated when message.part.delta events are interleaved", async () => {
    await using tmp = await tmpdir()
    await Instance.provide({
      directory: tmp.path,

@@ -248,14 +254,12 @@ describe("acp.agent event subscription", () => {
        controller.push({
          directory: cwd,
          payload: {
            type: "message.part.updated",
            type: "message.part.delta",
            properties: {
              part: {
                sessionID: sessionId,
                messageID,
                type: "text",
                synthetic: false,
              },
              sessionID: sessionId,
              messageID,
              partID: `${messageID}_part`,
              field: "text",
              delta,
            },
          },

@@ -402,14 +406,12 @@ describe("acp.agent event subscription", () => {
        controller.push({
          directory: cwd,
          payload: {
            type: "message.part.updated",
            type: "message.part.delta",
            properties: {
              part: {
                sessionID: sessionB,
                messageID: "msg_b",
                type: "text",
                synthetic: false,
              },
              sessionID: sessionB,
              messageID: "msg_b",
              partID: "msg_b_part",
              field: "text",
              delta: "session_b_message",
            },
          },
@@ -2,7 +2,6 @@ import { test, expect } from "bun:test"
import os from "os"
import { PermissionNext } from "../../src/permission/next"
import { Instance } from "../../src/project/instance"
import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture"

// fromConfig tests
@@ -1,63 +1,70 @@
// IMPORTANT: Set env vars BEFORE any imports from src/ directory
// xdg-basedir reads env vars at import time, so we must set these first
import os from "os"
import path from "path"
import fs from "fs/promises"
import fsSync from "fs"
import { afterAll } from "bun:test"
import os from "os";
import path from "path";
import fs from "fs/promises";
import fsSync from "fs";
import { afterAll } from "bun:test";

const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid)
await fs.mkdir(dir, { recursive: true })
// Set XDG env vars FIRST, before any src/ imports
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid);
await fs.mkdir(dir, { recursive: true });
afterAll(() => {
  fsSync.rmSync(dir, { recursive: true, force: true })
})
  fsSync.rmSync(dir, { recursive: true, force: true });
});

process.env["XDG_DATA_HOME"] = path.join(dir, "share");
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache");
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config");
process.env["XDG_STATE_HOME"] = path.join(dir, "state");
process.env["OPENCODE_MODELS_PATH"] = path.join(
  import.meta.dir,
  "tool",
  "fixtures",
  "models-api.json",
);

// Set test home directory to isolate tests from user's actual home directory
// This prevents tests from picking up real user configs/skills from ~/.claude/skills
const testHome = path.join(dir, "home")
await fs.mkdir(testHome, { recursive: true })
process.env["OPENCODE_TEST_HOME"] = testHome
const testHome = path.join(dir, "home");
await fs.mkdir(testHome, { recursive: true });
process.env["OPENCODE_TEST_HOME"] = testHome;

// Set test managed config directory to isolate tests from system managed settings
const testManagedConfigDir = path.join(dir, "managed")
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir

process.env["XDG_DATA_HOME"] = path.join(dir, "share")
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache")
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config")
process.env["XDG_STATE_HOME"] = path.join(dir, "state")
process.env["OPENCODE_MODELS_PATH"] = path.join(import.meta.dir, "tool", "fixtures", "models-api.json")
const testManagedConfigDir = path.join(dir, "managed");
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir;

// Write the cache version file to prevent global/index.ts from clearing the cache
const cacheDir = path.join(dir, "cache", "opencode")
await fs.mkdir(cacheDir, { recursive: true })
await fs.writeFile(path.join(cacheDir, "version"), "14")
const cacheDir = path.join(dir, "cache", "opencode");
await fs.mkdir(cacheDir, { recursive: true });
await fs.writeFile(path.join(cacheDir, "version"), "14");

// Clear provider env vars to ensure clean test state
delete process.env["ANTHROPIC_API_KEY"]
delete process.env["OPENAI_API_KEY"]
delete process.env["GOOGLE_API_KEY"]
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"]
delete process.env["AZURE_OPENAI_API_KEY"]
delete process.env["AWS_ACCESS_KEY_ID"]
delete process.env["AWS_PROFILE"]
delete process.env["AWS_REGION"]
delete process.env["AWS_BEARER_TOKEN_BEDROCK"]
delete process.env["OPENROUTER_API_KEY"]
delete process.env["GROQ_API_KEY"]
delete process.env["MISTRAL_API_KEY"]
delete process.env["PERPLEXITY_API_KEY"]
delete process.env["TOGETHER_API_KEY"]
delete process.env["XAI_API_KEY"]
delete process.env["DEEPSEEK_API_KEY"]
delete process.env["FIREWORKS_API_KEY"]
delete process.env["CEREBRAS_API_KEY"]
delete process.env["SAMBANOVA_API_KEY"]
delete process.env["ANTHROPIC_API_KEY"];
delete process.env["OPENAI_API_KEY"];
delete process.env["GOOGLE_API_KEY"];
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"];
delete process.env["AZURE_OPENAI_API_KEY"];
delete process.env["AWS_ACCESS_KEY_ID"];
delete process.env["AWS_PROFILE"];
delete process.env["AWS_REGION"];
delete process.env["AWS_BEARER_TOKEN_BEDROCK"];
delete process.env["OPENROUTER_API_KEY"];
delete process.env["GROQ_API_KEY"];
delete process.env["MISTRAL_API_KEY"];
delete process.env["PERPLEXITY_API_KEY"];
delete process.env["TOGETHER_API_KEY"];
delete process.env["XAI_API_KEY"];
delete process.env["DEEPSEEK_API_KEY"];
delete process.env["FIREWORKS_API_KEY"];
delete process.env["CEREBRAS_API_KEY"];
delete process.env["SAMBANOVA_API_KEY"];

// Now safe to import from src/
const { Log } = await import("../src/util/log")
const { Log } = await import("../src/util/log");

Log.init({
  print: false,
  dev: true,
  level: "DEBUG",
})
});
@@ -1,7 +1,6 @@
import { describe, expect, test } from "bun:test"
import { Project } from "../../src/project/project"
import { Log } from "../../src/util/log"
import { Storage } from "../../src/storage/storage"
import { $ } from "bun"
import path from "path"
import { tmpdir } from "../fixture/fixture"

@@ -55,37 +54,50 @@ describe("Project.fromDirectory with worktrees", () => {
  test("should set worktree to root when called from a worktree", async () => {
    await using tmp = await tmpdir({ git: true })

    const worktreePath = path.join(tmp.path, "..", "worktree-test")
    await $`git worktree add ${worktreePath} -b test-branch`.cwd(tmp.path).quiet()
    const worktreePath = path.join(tmp.path, "..", path.basename(tmp.path) + "-worktree")
    try {
      await $`git worktree add ${worktreePath} -b test-branch-${Date.now()}`.cwd(tmp.path).quiet()

    const { project, sandbox } = await Project.fromDirectory(worktreePath)
      const { project, sandbox } = await Project.fromDirectory(worktreePath)

    expect(project.worktree).toBe(tmp.path)
    expect(sandbox).toBe(worktreePath)
    expect(project.sandboxes).toContain(worktreePath)
    expect(project.sandboxes).not.toContain(tmp.path)

    await $`git worktree remove ${worktreePath}`.cwd(tmp.path).quiet()
      expect(project.worktree).toBe(tmp.path)
      expect(sandbox).toBe(worktreePath)
      expect(project.sandboxes).toContain(worktreePath)
      expect(project.sandboxes).not.toContain(tmp.path)
    } finally {
      await $`git worktree remove ${worktreePath}`
        .cwd(tmp.path)
        .quiet()
        .catch(() => {})
    }
  })

  test("should accumulate multiple worktrees in sandboxes", async () => {
    await using tmp = await tmpdir({ git: true })

    const worktree1 = path.join(tmp.path, "..", "worktree-1")
    const worktree2 = path.join(tmp.path, "..", "worktree-2")
    await $`git worktree add ${worktree1} -b branch-1`.cwd(tmp.path).quiet()
    await $`git worktree add ${worktree2} -b branch-2`.cwd(tmp.path).quiet()
    const worktree1 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt1")
    const worktree2 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt2")
    try {
      await $`git worktree add ${worktree1} -b branch-${Date.now()}`.cwd(tmp.path).quiet()
      await $`git worktree add ${worktree2} -b branch-${Date.now() + 1}`.cwd(tmp.path).quiet()

    await Project.fromDirectory(worktree1)
    const { project } = await Project.fromDirectory(worktree2)
      await Project.fromDirectory(worktree1)
      const { project } = await Project.fromDirectory(worktree2)

    expect(project.worktree).toBe(tmp.path)
    expect(project.sandboxes).toContain(worktree1)
    expect(project.sandboxes).toContain(worktree2)
    expect(project.sandboxes).not.toContain(tmp.path)

    await $`git worktree remove ${worktree1}`.cwd(tmp.path).quiet()
    await $`git worktree remove ${worktree2}`.cwd(tmp.path).quiet()
      expect(project.worktree).toBe(tmp.path)
      expect(project.sandboxes).toContain(worktree1)
      expect(project.sandboxes).toContain(worktree2)
      expect(project.sandboxes).not.toContain(tmp.path)
    } finally {
      await $`git worktree remove ${worktree1}`
        .cwd(tmp.path)
        .quiet()
        .catch(() => {})
      await $`git worktree remove ${worktree2}`
        .cwd(tmp.path)
        .quiet()
        .catch(() => {})
    }
  })
})

@@ -99,11 +111,12 @@ describe("Project.discover", () => {

    await Project.discover(project)

    const updated = await Storage.read<Project.Info>(["project", project.id])
    expect(updated.icon).toBeDefined()
    expect(updated.icon?.url).toStartWith("data:")
    expect(updated.icon?.url).toContain("base64")
    expect(updated.icon?.color).toBeUndefined()
    const updated = Project.get(project.id)
    expect(updated).toBeDefined()
    expect(updated!.icon).toBeDefined()
    expect(updated!.icon?.url).toStartWith("data:")
    expect(updated!.icon?.url).toContain("base64")
    expect(updated!.icon?.color).toBeUndefined()
  })

  test("should not discover non-image files", async () => {

@@ -114,7 +127,8 @@ describe("Project.discover", () => {

    await Project.discover(project)

    const updated = await Storage.read<Project.Info>(["project", project.id])
    expect(updated.icon).toBeUndefined()
    const updated = Project.get(project.id)
    expect(updated).toBeDefined()
    expect(updated!.icon).toBeUndefined()
  })
})
packages/opencode/test/storage/json-migration.test.ts (new file, 644 lines)
@@ -0,0 +1,644 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
import path from "path"
import fs from "fs/promises"
import { readFileSync, readdirSync } from "fs"
import { JsonMigration } from "../../src/storage/json-migration"
import { Global } from "../../src/global"
import { ProjectTable } from "../../src/project/project.sql"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../../src/session/session.sql"
import { SessionShareTable } from "../../src/share/share.sql"

// Test fixtures
const fixtures = {
  project: {
    id: "proj_test123abc",
    name: "Test Project",
    worktree: "/test/path",
    vcs: "git" as const,
    sandboxes: [],
  },
  session: {
    id: "ses_test456def",
    projectID: "proj_test123abc",
    slug: "test-session",
    directory: "/test/path",
    title: "Test Session",
    version: "1.0.0",
    time: { created: 1700000000000, updated: 1700000001000 },
  },
  message: {
    id: "msg_test789ghi",
    sessionID: "ses_test456def",
    role: "user" as const,
    agent: "default",
    model: { providerID: "openai", modelID: "gpt-4" },
    time: { created: 1700000000000 },
  },
  part: {
    id: "prt_testabc123",
    messageID: "msg_test789ghi",
    sessionID: "ses_test456def",
    type: "text" as const,
    text: "Hello, world!",
  },
}

// Helper to create test storage directory structure
async function setupStorageDir() {
  const storageDir = path.join(Global.Path.data, "storage")
  await fs.rm(storageDir, { recursive: true, force: true })
  await fs.mkdir(path.join(storageDir, "project"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "session", "proj_test123abc"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "message", "ses_test456def"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "part", "msg_test789ghi"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "session_diff"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "todo"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "permission"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "session_share"), { recursive: true })
  // Create legacy marker to indicate JSON storage exists
  await Bun.write(path.join(storageDir, "migration"), "1")
  return storageDir
}

async function writeProject(storageDir: string, project: Record<string, unknown>) {
  await Bun.write(path.join(storageDir, "project", `${project.id}.json`), JSON.stringify(project))
}

async function writeSession(storageDir: string, projectID: string, session: Record<string, unknown>) {
  await Bun.write(path.join(storageDir, "session", projectID, `${session.id}.json`), JSON.stringify(session))
}

// Helper to create in-memory test database with schema
function createTestDb() {
  const sqlite = new Database(":memory:")
  sqlite.exec("PRAGMA foreign_keys = ON")

  // Apply schema migrations using drizzle migrate
  const dir = path.join(import.meta.dirname, "../../migration")
  const entries = readdirSync(dir, { withFileTypes: true })
  const migrations = entries
    .filter((entry) => entry.isDirectory())
    .map((entry) => ({
      sql: readFileSync(path.join(dir, entry.name, "migration.sql"), "utf-8"),
      timestamp: Number(entry.name.split("_")[0]),
    }))
    .sort((a, b) => a.timestamp - b.timestamp)
  migrate(drizzle({ client: sqlite }), migrations)

  return sqlite
}

describe("JSON to SQLite migration", () => {
  let storageDir: string
  let sqlite: Database

  beforeEach(async () => {
    storageDir = await setupStorageDir()
    sqlite = createTestDb()
  })

  afterEach(async () => {
    sqlite.close()
    await fs.rm(storageDir, { recursive: true, force: true })
  })

  test("migrates project", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/test/path",
      vcs: "git",
      name: "Test Project",
      time: { created: 1700000000000, updated: 1700000001000 },
      sandboxes: ["/test/sandbox"],
    })

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.projects).toBe(1)

    const db = drizzle({ client: sqlite })
    const projects = db.select().from(ProjectTable).all()
    expect(projects.length).toBe(1)
    expect(projects[0].id).toBe("proj_test123abc")
    expect(projects[0].worktree).toBe("/test/path")
    expect(projects[0].name).toBe("Test Project")
    expect(projects[0].sandboxes).toEqual(["/test/sandbox"])
  })

  test("migrates session with individual columns", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/test/path",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })

    await writeSession(storageDir, "proj_test123abc", {
      id: "ses_test456def",
      projectID: "proj_test123abc",
      slug: "test-session",
      directory: "/test/dir",
      title: "Test Session Title",
      version: "1.0.0",
      time: { created: 1700000000000, updated: 1700000001000 },
      summary: { additions: 10, deletions: 5, files: 3 },
      share: { url: "https://example.com/share" },
    })

    await JsonMigration.run(sqlite)

    const db = drizzle({ client: sqlite })
    const sessions = db.select().from(SessionTable).all()
    expect(sessions.length).toBe(1)
    expect(sessions[0].id).toBe("ses_test456def")
    expect(sessions[0].project_id).toBe("proj_test123abc")
    expect(sessions[0].slug).toBe("test-session")
    expect(sessions[0].title).toBe("Test Session Title")
    expect(sessions[0].summary_additions).toBe(10)
    expect(sessions[0].summary_deletions).toBe(5)
    expect(sessions[0].share_url).toBe("https://example.com/share")
  })

  test("migrates messages and parts", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
    await Bun.write(
      path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
      JSON.stringify({ ...fixtures.message }),
    )
    await Bun.write(
      path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
      JSON.stringify({ ...fixtures.part }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.messages).toBe(1)
    expect(stats?.parts).toBe(1)

    const db = drizzle({ client: sqlite })
    const messages = db.select().from(MessageTable).all()
    expect(messages.length).toBe(1)
    expect(messages[0].id).toBe("msg_test789ghi")

    const parts = db.select().from(PartTable).all()
    expect(parts.length).toBe(1)
    expect(parts[0].id).toBe("prt_testabc123")
  })

  test("migrates legacy parts without ids in body", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
    await Bun.write(
      path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
      JSON.stringify({
        role: "user",
        agent: "default",
        model: { providerID: "openai", modelID: "gpt-4" },
        time: { created: 1700000000000 },
      }),
    )
    await Bun.write(
      path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
      JSON.stringify({
        type: "text",
        text: "Hello, world!",
      }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.messages).toBe(1)
    expect(stats?.parts).toBe(1)

    const db = drizzle({ client: sqlite })
    const messages = db.select().from(MessageTable).all()
    expect(messages.length).toBe(1)
    expect(messages[0].id).toBe("msg_test789ghi")
    expect(messages[0].session_id).toBe("ses_test456def")
    expect(messages[0].data).not.toHaveProperty("id")
    expect(messages[0].data).not.toHaveProperty("sessionID")

    const parts = db.select().from(PartTable).all()
    expect(parts.length).toBe(1)
    expect(parts[0].id).toBe("prt_testabc123")
    expect(parts[0].message_id).toBe("msg_test789ghi")
    expect(parts[0].session_id).toBe("ses_test456def")
    expect(parts[0].data).not.toHaveProperty("id")
    expect(parts[0].data).not.toHaveProperty("messageID")
    expect(parts[0].data).not.toHaveProperty("sessionID")
  })

  test("skips orphaned sessions (no parent project)", async () => {
    await Bun.write(
      path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
      JSON.stringify({
        id: "ses_orphan",
        projectID: "proj_nonexistent",
        slug: "orphan",
        directory: "/",
        title: "Orphan",
        version: "1.0.0",
        time: { created: Date.now(), updated: Date.now() },
      }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.sessions).toBe(0)
  })

  test("is idempotent (running twice doesn't duplicate)", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })

    await JsonMigration.run(sqlite)
    await JsonMigration.run(sqlite)

    const db = drizzle({ client: sqlite })
    const projects = db.select().from(ProjectTable).all()
    expect(projects.length).toBe(1) // Still only 1 due to onConflictDoNothing
  })

  test("migrates todos", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    // Create todo file (named by sessionID, contains array of todos)
    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([
        {
          id: "todo_1",
          content: "First todo",
          status: "pending",
          priority: "high",
        },
        {
          id: "todo_2",
          content: "Second todo",
          status: "completed",
          priority: "medium",
        },
      ]),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.todos).toBe(2)

    const db = drizzle({ client: sqlite })
    const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
    expect(todos.length).toBe(2)
    expect(todos[0].content).toBe("First todo")
    expect(todos[0].status).toBe("pending")
    expect(todos[0].priority).toBe("high")
    expect(todos[0].position).toBe(0)
    expect(todos[1].content).toBe("Second todo")
    expect(todos[1].position).toBe(1)
  })

  test("todos are ordered by position", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([
        { content: "Third", status: "pending", priority: "low" },
        { content: "First", status: "pending", priority: "high" },
        { content: "Second", status: "in_progress", priority: "medium" },
      ]),
    )

    await JsonMigration.run(sqlite)

    const db = drizzle({ client: sqlite })
    const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()

    expect(todos.length).toBe(3)
    expect(todos[0].content).toBe("Third")
    expect(todos[0].position).toBe(0)
    expect(todos[1].content).toBe("First")
    expect(todos[1].position).toBe(1)
    expect(todos[2].content).toBe("Second")
    expect(todos[2].position).toBe(2)
  })

  test("migrates permissions", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })

    // Create permission file (named by projectID, contains array of rules)
    const permissionData = [
      { permission: "file.read", pattern: "/test/file1.ts", action: "allow" as const },
      { permission: "file.write", pattern: "/test/file2.ts", action: "ask" as const },
      { permission: "command.run", pattern: "npm install", action: "deny" as const },
    ]
    await Bun.write(path.join(storageDir, "permission", "proj_test123abc.json"), JSON.stringify(permissionData))

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.permissions).toBe(1)

    const db = drizzle({ client: sqlite })
    const permissions = db.select().from(PermissionTable).all()
    expect(permissions.length).toBe(1)
    expect(permissions[0].project_id).toBe("proj_test123abc")
    expect(permissions[0].data).toEqual(permissionData)
  })

  test("migrates session shares", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    // Create session share file (named by sessionID)
    await Bun.write(
      path.join(storageDir, "session_share", "ses_test456def.json"),
      JSON.stringify({
        id: "share_123",
        secret: "supersecretkey",
        url: "https://share.example.com/ses_test456def",
      }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.shares).toBe(1)

    const db = drizzle({ client: sqlite })
    const shares = db.select().from(SessionShareTable).all()
    expect(shares.length).toBe(1)
    expect(shares[0].session_id).toBe("ses_test456def")
    expect(shares[0].id).toBe("share_123")
    expect(shares[0].secret).toBe("supersecretkey")
    expect(shares[0].url).toBe("https://share.example.com/ses_test456def")
  })

  test("returns empty stats when storage directory does not exist", async () => {
    await fs.rm(storageDir, { recursive: true, force: true })

    const stats = await JsonMigration.run(sqlite)

    expect(stats.projects).toBe(0)
    expect(stats.sessions).toBe(0)
    expect(stats.messages).toBe(0)
    expect(stats.parts).toBe(0)
    expect(stats.todos).toBe(0)
    expect(stats.permissions).toBe(0)
    expect(stats.shares).toBe(0)
    expect(stats.errors).toEqual([])
  })

  test("continues when a JSON file is unreadable and records an error", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await Bun.write(path.join(storageDir, "project", "broken.json"), "{ invalid json")

    const stats = await JsonMigration.run(sqlite)

    expect(stats.projects).toBe(1)
    expect(stats.errors.some((x) => x.includes("failed to read") && x.includes("broken.json"))).toBe(true)

    const db = drizzle({ client: sqlite })
    const projects = db.select().from(ProjectTable).all()
    expect(projects.length).toBe(1)
    expect(projects[0].id).toBe("proj_test123abc")
  })

  test("skips invalid todo entries while preserving source positions", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([
        { content: "keep-0", status: "pending", priority: "high" },
        { content: "drop-1", priority: "low" },
        { content: "keep-2", status: "completed", priority: "medium" },
      ]),
    )

    const stats = await JsonMigration.run(sqlite)
    expect(stats.todos).toBe(2)

    const db = drizzle({ client: sqlite })
    const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
    expect(todos.length).toBe(2)
    expect(todos[0].content).toBe("keep-0")
    expect(todos[0].position).toBe(0)
    expect(todos[1].content).toBe("keep-2")
    expect(todos[1].position).toBe(2)
  })

  test("skips orphaned todos, permissions, and shares", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([{ content: "valid", status: "pending", priority: "high" }]),
    )
    await Bun.write(
      path.join(storageDir, "todo", "ses_missing.json"),
      JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
    )

    await Bun.write(
      path.join(storageDir, "permission", "proj_test123abc.json"),
      JSON.stringify([{ permission: "file.read" }]),
    )
    await Bun.write(
      path.join(storageDir, "permission", "proj_missing.json"),
      JSON.stringify([{ permission: "file.write" }]),
    )

    await Bun.write(
      path.join(storageDir, "session_share", "ses_test456def.json"),
      JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
    )
    await Bun.write(
      path.join(storageDir, "session_share", "ses_missing.json"),
      JSON.stringify({ id: "share_missing", secret: "secret", url: "https://missing.example.com" }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats.todos).toBe(1)
    expect(stats.permissions).toBe(1)
    expect(stats.shares).toBe(1)

    const db = drizzle({ client: sqlite })
    expect(db.select().from(TodoTable).all().length).toBe(1)
    expect(db.select().from(PermissionTable).all().length).toBe(1)
    expect(db.select().from(SessionShareTable).all().length).toBe(1)
  })

  test("handles mixed corruption and partial validity in one migration run", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/ok",
      time: { created: 1700000000000, updated: 1700000001000 },
      sandboxes: [],
    })
    await Bun.write(
      path.join(storageDir, "project", "proj_missing_id.json"),
      JSON.stringify({ worktree: "/bad", sandboxes: [] }),
    )
    await Bun.write(path.join(storageDir, "project", "proj_broken.json"), "{ nope")

    await writeSession(storageDir, "proj_test123abc", {
      id: "ses_test456def",
      projectID: "proj_test123abc",
      slug: "ok",
      directory: "/ok",
      title: "Ok",
      version: "1",
      time: { created: 1700000000000, updated: 1700000001000 },
    })
    await Bun.write(
      path.join(storageDir, "session", "proj_test123abc", "ses_missing_project.json"),
      JSON.stringify({
        id: "ses_missing_project",
        slug: "bad",
        directory: "/bad",
        title: "Bad",
        version: "1",
      }),
    )
    await Bun.write(
      path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
      JSON.stringify({
        id: "ses_orphan",
        projectID: "proj_missing",
        slug: "orphan",
        directory: "/bad",
        title: "Orphan",
        version: "1",
      }),
    )

    await Bun.write(
      path.join(storageDir, "message", "ses_test456def", "msg_ok.json"),
      JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
    )
    await Bun.write(path.join(storageDir, "message", "ses_test456def", "msg_broken.json"), "{ nope")
    await Bun.write(
      path.join(storageDir, "message", "ses_missing", "msg_orphan.json"),
      JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
    )

    await Bun.write(
      path.join(storageDir, "part", "msg_ok", "part_ok.json"),
      JSON.stringify({ type: "text", text: "ok" }),
    )
    await Bun.write(
      path.join(storageDir, "part", "msg_missing", "part_missing_message.json"),
      JSON.stringify({ type: "text", text: "bad" }),
    )
    await Bun.write(path.join(storageDir, "part", "msg_ok", "part_broken.json"), "{ nope")

    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([
        { content: "ok", status: "pending", priority: "high" },
        { content: "skip", status: "pending" },
      ]),
    )
    await Bun.write(
      path.join(storageDir, "todo", "ses_missing.json"),
      JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
    )
    await Bun.write(path.join(storageDir, "todo", "ses_broken.json"), "{ nope")

    await Bun.write(
      path.join(storageDir, "permission", "proj_test123abc.json"),
      JSON.stringify([{ permission: "file.read" }]),
    )
    await Bun.write(
      path.join(storageDir, "permission", "proj_missing.json"),
      JSON.stringify([{ permission: "file.write" }]),
    )
    await Bun.write(path.join(storageDir, "permission", "proj_broken.json"), "{ nope")

    await Bun.write(
      path.join(storageDir, "session_share", "ses_test456def.json"),
      JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
    )
    await Bun.write(
      path.join(storageDir, "session_share", "ses_missing.json"),
      JSON.stringify({ id: "share_orphan", secret: "secret", url: "https://missing.example.com" }),
    )
    await Bun.write(path.join(storageDir, "session_share", "ses_broken.json"), "{ nope")

    const stats = await JsonMigration.run(sqlite)

    expect(stats.projects).toBe(1)
    expect(stats.sessions).toBe(1)
    expect(stats.messages).toBe(1)
    expect(stats.parts).toBe(1)
    expect(stats.todos).toBe(1)
    expect(stats.permissions).toBe(1)
    expect(stats.shares).toBe(1)
    expect(stats.errors.length).toBeGreaterThanOrEqual(6)

    const db = drizzle({ client: sqlite })
    expect(db.select().from(ProjectTable).all().length).toBe(1)
    expect(db.select().from(SessionTable).all().length).toBe(1)
    expect(db.select().from(MessageTable).all().length).toBe(1)
    expect(db.select().from(PartTable).all().length).toBe(1)
    expect(db.select().from(TodoTable).all().length).toBe(1)
    expect(db.select().from(PermissionTable).all().length).toBe(1)
    expect(db.select().from(SessionShareTable).all().length).toBe(1)
  })
})
packages/sdk/js/openapi.json (new file, 0 lines)
@@ -496,7 +496,17 @@ export type EventMessagePartUpdated = {
  type: "message.part.updated"
  properties: {
    part: Part
    delta?: string
  }
}

export type EventMessagePartDelta = {
  type: "message.part.delta"
  properties: {
    sessionID: string
    messageID: string
    partID: string
    field: string
    delta: string
  }
}
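Consumers discriminate the new event on its `type` tag and append the delta to whatever buffer tracks that part; a minimal sketch (the `buffers` map is illustrative, not part of the SDK):

// Hypothetical consumer: accumulate streaming text per part.
const buffers = new Map<string, string>()

function onEvent(event: Event) {
  if (event.type !== "message.part.delta") return
  const { partID, delta } = event.properties
  buffers.set(partID, (buffers.get(partID) ?? "") + delta)
}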
@@ -666,10 +676,6 @@ export type Todo = {
   * Priority level of the task: high, medium, low
   */
  priority: string
  /**
   * Unique identifier for the todo item
   */
  id: string
}

export type EventTodoUpdated = {

@@ -918,6 +924,7 @@ export type Event =
  | EventMessageUpdated
  | EventMessageRemoved
  | EventMessagePartUpdated
  | EventMessagePartDelta
  | EventMessagePartRemoved
  | EventPermissionAsked
  | EventPermissionReplied
@@ -7249,12 +7249,40 @@
          "properties": {
            "part": {
              "$ref": "#/components/schemas/Part"
            }
          },
          "required": ["part"]
        }
      },
      "required": ["type", "properties"]
    },
    "Event.message.part.delta": {
      "type": "object",
      "properties": {
        "type": {
          "type": "string",
          "const": "message.part.delta"
        },
        "properties": {
          "type": "object",
          "properties": {
            "sessionID": {
              "type": "string"
            },
            "messageID": {
              "type": "string"
            },
            "partID": {
              "type": "string"
            },
            "field": {
              "type": "string"
            },
            "delta": {
              "type": "string"
            }
          },
          "required": ["part"]
          "required": ["sessionID", "messageID", "partID", "field", "delta"]
        }
      },
      "required": ["type", "properties"]

@@ -7668,13 +7696,9 @@
        "priority": {
          "description": "Priority level of the task: high, medium, low",
          "type": "string"
        },
        "id": {
          "description": "Unique identifier for the todo item",
          "type": "string"
        }
      },
      "required": ["content", "status", "priority", "id"]
      "required": ["content", "status", "priority"]
    },
    "Event.todo.updated": {
      "type": "object",

@@ -8342,6 +8366,9 @@
      {
        "$ref": "#/components/schemas/Event.message.part.updated"
      },
      {
        "$ref": "#/components/schemas/Event.message.part.delta"
      },
      {
        "$ref": "#/components/schemas/Event.message.part.removed"
      },