Compare commits

...

113 Commits

Author SHA1 Message Date
opencode-agent[bot]
9b03beaa4b Apply PR #13052: experiment: use ffi to get around bun raw input/ctrl+c issues 2026-02-11 19:28:46 +00:00
opencode-agent[bot]
522ec55d2c Apply PR #13036: upgrade opentui to 0.1.79 2026-02-11 19:28:45 +00:00
opencode-agent[bot]
9b42265bc1 Apply PR #12755: feat(session): add handoff functionality and agent updates 2026-02-11 19:28:45 +00:00
opencode-agent[bot]
c26759347d Apply PR #12022: feat: update tui model dialog to utilize model family to reduce noise in list 2026-02-11 19:28:44 +00:00
opencode-agent[bot]
1a3089129e Apply PR #11811: feat: make plan mode the default 2026-02-11 19:28:44 +00:00
opencode-agent[bot]
d2cc357d35 Apply PR #10597: sqlite again 2026-02-11 19:28:43 +00:00
Aiden Cline
6b4d617df0 feat: adjust read tool so that it can handle dirs too (#13090) 2026-02-11 13:23:00 -06:00
Dax Raad
e3471526f4 add square logo variants to brand page 2026-02-11 13:47:54 -05:00
Adam
6b30e0b752 chore: update docs sync workflow 2026-02-11 11:47:32 -06:00
Dax
6413e91947 Merge branch 'dev' into feature/session-handoff 2026-02-11 12:45:50 -05:00
Dax Raad
9106d315e4 Merge dev into feature/session-handoff
Resolved conflicts:
- Translation files: kept dev's proper localized versions
- prompt/index.tsx: preserved handoff functionality with dev's placeholderText() pattern
- home.tsx: kept dev's simpler layout
2026-02-11 12:45:20 -05:00
Dax Raad
6097c04a50 Merge remote-tracking branch 'origin' into sqlite2 2026-02-11 12:29:54 -05:00
Dax Raad
667953db85 core: fix json migration to preserve project commands
ensure custom commands configured by users are maintained when migrating from json storage to sqlite
2026-02-11 12:29:35 -05:00
Dax Raad
b2e86aaf24 core: allow users to define custom project commands like 'npm run dev' in project settings 2026-02-11 12:18:48 -05:00
Adam
fbabce1125 fix(app): translations 2026-02-11 11:03:35 -06:00
opencode-agent[bot]
8f56ed5b85 chore: generate 2026-02-11 17:00:20 +00:00
Filip
81b5a6a08b fix(app):workspace reset (#13170)
Co-authored-by: opencode-agent[bot] <opencode-agent[bot]@users.noreply.github.com>
2026-02-11 10:59:09 -06:00
Dax Raad
032931c749 Merge branch 'dev' into sqlite2 2026-02-11 11:45:35 -05:00
opencode-agent[bot]
94cb6390aa chore: generate 2026-02-11 15:37:06 +00:00
Sebastian Herrlinger
f9b55a314f 0.1.79 2026-02-11 10:17:58 +01:00
Sebastian Herrlinger
c095629bb1 upgrade opentui to 0.1.78 2026-02-11 10:16:12 +01:00
LukeParkerDev
a90e8de050 add missing return 2026-02-11 13:24:17 +10:00
LukeParkerDev
56dfbbbc93 hook better instead of 100ms loop lol 2026-02-11 12:57:01 +10:00
LukeParkerDev
943bf316b6 beta conflict workaround 2026-02-11 12:13:49 +10:00
LukeParkerDev
1f0346725a it works but its dumb 2026-02-11 11:37:17 +10:00
Aiden Cline
eabf770053 Merge branch 'dev' into utilize-family-in-dialog 2026-02-10 14:43:15 -06:00
Dax Raad
4666daa581 fix 2026-02-10 13:53:18 -05:00
Dax Raad
caf1316116 Merge branch 'dev' into sqlite2 2026-02-10 13:52:35 -05:00
Dax Raad
1de66812bf Merge branch 'dev' into sqlite2 2026-02-09 17:53:05 -05:00
Dax Raad
173c16581d Merge branch 'dev' into feature/session-handoff 2026-02-09 10:53:24 -05:00
Dax
94d0c9940a Merge branch 'dev' into sqlite2 2026-02-09 10:04:55 -05:00
Dax Raad
76bcd22802 docs: add architecture specs for session page improvements and parallel workstreams 2026-02-08 18:44:58 -05:00
Dax Raad
e06fcfdc43 app: update i18n translations for multiple languages 2026-02-08 18:44:53 -05:00
Dax Raad
246430cb8f tui: position prompt at bottom when resuming from handoff with initial prompt 2026-02-08 18:44:45 -05:00
Dax Raad
f689fc7f75 tui: convert handoff from text command to dedicated mode with slash command 2026-02-08 18:44:35 -05:00
Dax Raad
9b2fd57e6e tui: remove hardcoded handoff from autocomplete 2026-02-08 18:44:23 -05:00
Dax Raad
0d365fa613 tui: add handoff mode to prompt history types 2026-02-08 18:44:21 -05:00
Dax Raad
601e631624 tui: fix route navigation state reconciliation 2026-02-08 18:44:18 -05:00
Dax Raad
2fef02f487 tui: fix disabled commands from appearing in slash autocomplete 2026-02-08 18:44:12 -05:00
Dax Raad
4cc9104942 tui: add handoff text command handling in prompt 2026-02-08 18:44:09 -05:00
Dax Raad
0eaa6b5fc8 tui: add handoff autocomplete suggestion with text command 2026-02-08 18:44:07 -05:00
Dax Raad
e563cff034 core: add handoff API endpoint for session extraction 2026-02-08 18:44:04 -05:00
Dax Raad
3b2550106b sdk: regenerate client types for handoff endpoint 2026-02-08 18:44:01 -05:00
Dax Raad
3ec6bff038 core: add handoff session extraction logic with status tracking 2026-02-08 18:43:55 -05:00
Dax Raad
aab2a6df3b core: add tool-only output mode to LLM for handoff extraction 2026-02-08 18:43:53 -05:00
Dax Raad
84171018f2 core: add handoff prompt template for extracting session context 2026-02-08 18:43:51 -05:00
Dax Raad
280d7e6f91 core: add handoff agent for session context extraction 2026-02-08 18:43:49 -05:00
Dax Raad
5952891b1e core: filter session list to show only sessions relevant to current directory location
When running from a subdirectory, the session list now shows only sessions

that belong to the current directory or its subdirectories, instead of

showing all sessions from the entire project.
2026-02-07 01:18:57 -05:00
Dax Raad
d7c8a3f50d Merge remote-tracking branch 'origin/dev' into sqlite2 2026-02-07 01:11:15 -05:00
Dax Raad
ce353819e8 Merge branch 'dev' into sqlite2 2026-02-05 23:50:20 -05:00
Dax Raad
2dae94e5a3 core: show visual progress bar during database migration so users can see real-time status of projects, sessions, and messages being converted 2026-02-05 23:21:37 -05:00
Dax Raad
c6adc19e41 Merge branch 'dev' into sqlite2 2026-02-05 17:54:46 -05:00
Dax Raad
ce56166510 core: suppress error output when auto-installing dependencies to prevent confusing error messages from appearing in the UI 2026-02-05 12:47:05 -05:00
Dax Raad
5911e4c06a Merge branch 'dev' into sqlite2 2026-02-05 12:39:31 -05:00
Dax Raad
42fb840f22 Merge branch 'dev' into sqlite2 2026-02-05 11:52:58 -05:00
Dax Raad
4dcfdf6572 Merge branch 'dev' into sqlite2 2026-02-04 22:44:49 -05:00
Dax Raad
25f3d6d5a9 Merge branch 'dev' into sqlite2 2026-02-04 22:37:08 -05:00
Dax Raad
e19a9e9614 core: fix migration of legacy messages and parts missing IDs in JSON body
Users upgrading from older versions where message and part IDs were only stored in filenames (not the JSON body) can now have their session data properly migrated to SQLite. This ensures no data loss when transitioning to the new storage format.
2026-02-04 22:35:26 -05:00
Aiden Cline
bb4d978684 feat: update tui model dialog to utilize model family to reduce noise in list 2026-02-03 15:48:40 -06:00
Dax Raad
fcc903489b Merge branch 'dev' into sqlite2 2026-02-03 15:51:09 -05:00
Dax
949e69a9bf Merge branch 'dev' into sqlite2 2026-02-02 23:36:00 -05:00
Dax Raad
afec40e8da feat: make plan mode the default, remove experimental flag
- Remove OPENCODE_EXPERIMENTAL_PLAN_MODE flag from flag.ts
- Update prompt.ts to always use plan mode logic
- Update registry.ts to always include plan tools in CLI
- Remove flag documentation from cli.mdx
2026-02-02 10:40:40 -05:00
Dax
8c30f551e2 Merge branch 'dev' into sqlite2 2026-02-01 23:58:01 -05:00
opencode-agent[bot]
cb721497c1 chore: update nix node_modules hashes 2026-02-02 01:40:44 +00:00
Dax Raad
4ec6293054 fix: type errors in console-core and session
- Fix ExtractTablesWithRelations type compatibility with drizzle-orm beta
- Migrate Session.list() and Session.children() from Storage to SQLite
2026-02-01 20:31:02 -05:00
Dax Raad
b7a323355c fix: ExtractTablesWithRelations type parameter 2026-02-01 20:27:29 -05:00
Dax Raad
d4f053042c sync 2026-02-01 20:26:58 -05:00
Dax Raad
5f552534c7 Merge remote-tracking branch 'origin/sqlite2' into sqlite2 2026-02-01 20:26:24 -05:00
Dax Raad
ad5b790bb3 docs: simplify commit command by removing unnecessary instructions 2026-01-31 20:31:33 -05:00
Dax Raad
ed87341c4f core: fix storage directory existence check during json migration
Use fs.existsSync() instead of Bun.file().exists() since we're checking
for a directory, not a file.
2026-01-31 20:30:36 -05:00
opencode-agent[bot]
794ecab028 chore: update nix node_modules hashes 2026-02-01 01:20:05 +00:00
Dax Raad
eeb235724b Merge dev into sqlite2 2026-01-31 20:08:17 -05:00
Dax Raad
61084e7f6f sync 2026-01-31 16:32:37 -05:00
Dax Raad
200aef2eb3 sync 2026-01-31 16:28:07 -05:00
Dax Raad
f6e375a555 Merge origin/sqlite2 2026-01-31 16:10:11 -05:00
Dax Raad
db908deee5 Merge branch 'dev' into sqlite2 2026-01-31 16:08:47 -05:00
opencode-agent[bot]
7b72cc3a48 chore: update nix node_modules hashes 2026-01-30 16:21:25 +00:00
Dax Raad
b8cbfd48ec format 2026-01-30 11:17:33 -05:00
Dax Raad
498cbb2c26 core: split message part updates into delta events for smoother streaming
Streaming text and reasoning content now uses incremental delta events instead of sending full message parts on each update. This reduces bandwidth and improves real-time response smoothness in the TUI.
2026-01-30 11:16:30 -05:00
Dax Raad
d6fbd255b6 Merge branch 'dev' into sqlite2 2026-01-30 11:01:31 -05:00
Dax Raad
2de1c82bf7 Merge branch 'dev' into sqlite2 2026-01-30 10:03:47 -05:00
Dax Raad
34ebb3d051 Merge branch 'dev' into sqlite2 2026-01-29 16:07:20 -05:00
Dax Raad
9c3e3c1ab5 core: load migrations from timestamp-named directories instead of journal 2026-01-29 16:02:19 -05:00
Github Action
3ea499f04e chore: update nix node_modules hashes 2026-01-29 20:15:38 +00:00
Dax Raad
ab13c1d1c4 Merge branch 'dev' into sqlite2 2026-01-29 15:11:57 -05:00
Dax Raad
53b610c331 sync 2026-01-29 13:38:47 -05:00
Dax Raad
e3519356f2 sync 2026-01-29 13:32:20 -05:00
Dax Raad
2619acc0ff Merge branch 'dev' into sqlite2 2026-01-29 13:23:48 -05:00
Dax Raad
1bc45dc266 ignore: keep Chinese release notes label from blocking updates 2026-01-28 21:52:41 -05:00
Dax Raad
2e8feb1c78 core: significantly speed up data migration by optimizing SQLite settings and batch processing
Reduces migration time from minutes to seconds by enabling WAL mode, increasing batch size to 1000, and pre-scanning files upfront. Users upgrading to the SQLite backend will now see much faster startup times when their existing data is migrated.
2026-01-28 21:51:01 -05:00
Github Action
00e60899cc chore: update nix node_modules hashes 2026-01-28 23:49:18 +00:00
Dax Raad
30a918e9d4 Merge branch 'dev' into sqlite2 2026-01-28 18:45:12 -05:00
Dax Raad
ac16068140 Merge branch 'dev' into sqlite2 2026-01-27 17:45:05 -05:00
Dax Raad
19a41ab297 sync 2026-01-27 17:43:20 -05:00
Dax Raad
cd174d8cba sync 2026-01-27 16:57:51 -05:00
Dax Raad
246e901e42 Merge dev into sqlite2 2026-01-27 16:44:06 -05:00
Dax Raad
0ccef1b31f sync 2026-01-27 16:41:24 -05:00
Dax Raad
7706f5b6a8 core: switch commit command to kimi-k2.5 and improve worktree test reliability 2026-01-27 16:24:21 -05:00
Dax Raad
63e38555c9 sync 2026-01-27 15:33:44 -05:00
Dax Raad
f40685ab13 core: fix Drizzle ORM client initialization and type definitions 2026-01-27 12:38:38 -05:00
Dax Raad
a48a5a3462 core: migrate from custom JSON storage to standard Drizzle migrations to improve database reliability and performance
This replaces the previous manual JSON file system with standard Drizzle migrations, enabling:
- Proper database schema migrations with timestamp-based versioning
- Batched migration for faster migration of large datasets
- Better data integrity with proper table schemas instead of JSON blobs
- Easier database upgrades and rollback capabilities

Migration changes:
- Todo table now uses individual columns with composite PK instead of JSON blob
- Share table removes unused download share data
- Session diff table moved from database table to file storage
- All migrations now use proper Drizzle format with per-folder layout

Users will see a one-time migration on next run that migrates existing JSON data to the new SQLite database.
2026-01-27 12:36:05 -05:00
Dax Raad
5e1639de2b core: improve conversation loading performance by batching database queries
Reduces memory usage and speeds up conversation loading by using pagination
and inArray queries instead of loading all messages at once
2026-01-26 12:33:18 -05:00
Dax Raad
2b05833c32 core: ensure events publish reliably after database operations complete 2026-01-26 12:00:44 -05:00
Dax Raad
acdcf7fa88 core: remove dependency on remeda to simplify dependencies 2026-01-26 11:11:59 -05:00
Dax Raad
bf0754caeb sync 2026-01-26 10:35:53 -05:00
Dax Raad
4d50a32979 Merge dev into sqlite2 2026-01-26 08:53:01 -05:00
Dax Raad
57edb0ddc5 sync 2026-01-26 08:44:19 -05:00
Dax Raad
a614b78c6d tui: upgrade database migration system to drizzle migrator
Replaces custom migration system with drizzle-orm's built-in migrator, bundling migrations at build-time instead of runtime generation. This reduces bundle complexity and provides better integration with drizzle's migration tracking.
2026-01-25 22:27:04 -05:00
Github Action
b9f5a34247 chore: update nix node_modules hashes 2026-01-26 02:32:52 +00:00
Dax Raad
81b47a44e2 Merge branch 'dev' into sqlite2 2026-01-25 21:28:38 -05:00
Dax Raad
0c1c07467e Merge branch 'dev' into sqlite2 2026-01-25 20:18:36 -05:00
Dax Raad
105688bf90 sync 2026-01-25 20:16:56 -05:00
Dax Raad
1e7b4768b1 sync 2026-01-24 11:50:25 -05:00
133 changed files with 9317 additions and 1096 deletions

View File

@@ -64,10 +64,13 @@ jobs:
Requirements:
1. Update all relevant locale docs under packages/web/src/content/docs/<locale>/ so they reflect these English page changes.
2. Preserve frontmatter keys, internal links, code blocks, and existing locale-specific metadata unless the English change requires an update.
3. Keep locale docs structure aligned with their corresponding English pages.
4. Do not modify English source docs in packages/web/src/content/docs/*.mdx.
5. If no locale updates are needed, make no changes.
2. You MUST use the Task tool for translation work and launch subagents with subagent_type `translator` (defined in .opencode/agent/translator.md).
3. Do not translate directly in the primary agent. Use translator subagent output as the source for locale text updates.
4. Run translator subagent Task calls in parallel whenever file/locale translation work is independent.
5. Preserve frontmatter keys, internal links, code blocks, and existing locale-specific metadata unless the English change requires an update.
6. Keep locale docs structure aligned with their corresponding English pages.
7. Do not modify English source docs in packages/web/src/content/docs/*.mdx.
8. If no locale updates are needed, make no changes.
- name: Commit and push locale docs updates
if: steps.changes.outputs.has_changes == 'true'

View File

@@ -16,15 +16,12 @@ wip:
For anything in the packages/web use the docs: prefix.
For anything in the packages/app use the ignore: prefix.
prefer to explain WHY something was done from an end user perspective instead of
WHAT was done.
do not do generic messages like "improved agent experience" be very specific
about what user facing changes were made
if there are changes do a git pull --rebase
if there are conflicts DO NOT FIX THEM. notify me and I will fix them
## GIT DIFF

View File

@@ -32,6 +32,9 @@ description: Use this when you are working on file operations like reading, writ
- Decode tool stderr with `Bun.readableStreamToText`.
- For large writes, use `Bun.write(Bun.file(path), text)`.
NOTE: Bun.file(...).exists() will return `false` if the value is a directory.
Use Filesystem.exists(...) instead if path can be file or directory
## Quick checklist
- Use Bun APIs first.

View File

@@ -110,3 +110,4 @@ const table = sqliteTable("session", {
- Avoid mocks as much as possible
- Test actual implementation, do not duplicate logic into tests
- Tests cannot run from repo root (guard: `do-not-run-tests-from-root`); run from package dirs like `packages/opencode`.

489
bun.lock

File diff suppressed because it is too large Load Diff

View File

@@ -40,6 +40,8 @@
"@tailwindcss/vite": "4.1.11",
"diff": "8.0.2",
"dompurify": "3.3.1",
"drizzle-kit": "1.0.0-beta.12-a5629fb",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"ai": "5.0.124",
"hono": "4.10.7",
"hono-openapi": "1.1.2",

View File

@@ -231,6 +231,24 @@ export function applyDirectoryEvent(input: {
}
break
}
case "message.part.delta": {
const props = event.properties as { messageID: string; partID: string; field: string; delta: string }
const parts = input.store.part[props.messageID]
if (!parts) break
const result = Binary.search(parts, props.partID, (p) => p.id)
if (!result.found) break
input.setStore(
"part",
props.messageID,
produce((draft) => {
const part = draft[result.index]
const field = props.field as keyof typeof part
const existing = part[field] as string | undefined
;(part[field] as string) = (existing ?? "") + props.delta
}),
)
break
}
case "vcs.branch.updated": {
const props = event.properties as { branch: string }
const next = { branch: props.branch }

View File

@@ -4,6 +4,7 @@ import { createSimpleContext } from "@opencode-ai/ui/context"
import { useGlobalSync } from "./global-sync"
import { useGlobalSDK } from "./global-sdk"
import { useServer } from "./server"
import { usePlatform } from "./platform"
import { Project } from "@opencode-ai/sdk/v2"
import { Persist, persisted, removePersisted } from "@/utils/persist"
import { same } from "@/utils/same"
@@ -90,6 +91,7 @@ export const { use: useLayout, provider: LayoutProvider } = createSimpleContext(
const globalSdk = useGlobalSDK()
const globalSync = useGlobalSync()
const server = useServer()
const platform = usePlatform()
const isRecord = (value: unknown): value is Record<string, unknown> =>
typeof value === "object" && value !== null && !Array.isArray(value)
@@ -200,10 +202,10 @@ export const { use: useLayout, provider: LayoutProvider } = createSimpleContext(
for (const entry of SESSION_STATE_KEYS) {
const target = session ? Persist.session(dir, session, entry.key) : Persist.workspace(dir, entry.key)
void removePersisted(target)
void removePersisted(target, platform)
const legacyKey = `${dir}/${entry.legacy}${session ? "/" + session : ""}.${entry.version}`
void removePersisted({ key: legacyKey })
void removePersisted({ key: legacyKey }, platform)
}
}
}

View File

@@ -3,6 +3,7 @@ import { createSimpleContext } from "@opencode-ai/ui/context"
import { batch, createEffect, createMemo, createRoot, onCleanup } from "solid-js"
import { useParams } from "@solidjs/router"
import { useSDK } from "./sdk"
import type { Platform } from "./platform"
import { Persist, persisted, removePersisted } from "@/utils/persist"
export type LocalPTY = {
@@ -37,14 +38,14 @@ type TerminalCacheEntry = {
const caches = new Set<Map<string, TerminalCacheEntry>>()
export function clearWorkspaceTerminals(dir: string, sessionIDs?: string[]) {
export function clearWorkspaceTerminals(dir: string, sessionIDs?: string[], platform?: Platform) {
const key = getWorkspaceTerminalCacheKey(dir)
for (const cache of caches) {
const entry = cache.get(key)
entry?.value.clear()
}
removePersisted(Persist.workspace(dir, "terminal"))
removePersisted(Persist.workspace(dir, "terminal"), platform)
const legacy = new Set(getLegacyTerminalStorageKeys(dir))
for (const id of sessionIDs ?? []) {
@@ -53,7 +54,7 @@ export function clearWorkspaceTerminals(dir: string, sessionIDs?: string[]) {
}
}
for (const key of legacy) {
removePersisted({ key })
removePersisted({ key }, platform)
}
}

View File

@@ -110,25 +110,30 @@ export const dict = {
"provider.connect.status.inProgress": "جارٍ التفويض...",
"provider.connect.status.waiting": "في انتظار التفويض...",
"provider.connect.status.failed": "فشل التفويض: {{error}}",
"provider.connect.apiKey.description": "أدخل مفتاح واجهة برمجة تطبيقات {{provider}} الخاص بك لتوصيل حسابك واستخدام نماذج {{provider}} في OpenCode.",
"provider.connect.apiKey.description":
"أدخل مفتاح واجهة برمجة تطبيقات {{provider}} الخاص بك لتوصيل حسابك واستخدام نماذج {{provider}} في OpenCode.",
"provider.connect.apiKey.label": "مفتاح واجهة برمجة تطبيقات {{provider}}",
"provider.connect.apiKey.placeholder": "مفتاح API",
"provider.connect.apiKey.required": "مفتاح API مطلوب",
"provider.connect.opencodeZen.line1": "يمنحك OpenCode Zen الوصول إلى مجموعة مختارة من النماذج الموثوقة والمحسنة لوكلاء البرمجة.",
"provider.connect.opencodeZen.line2": "باستخدام مفتاح API واحد، ستحصل على إمكانية الوصول إلى نماذج مثل Claude و GPT و Gemini و GLM والمزيد.",
"provider.connect.opencodeZen.line1":
"يمنحك OpenCode Zen الوصول إلى مجموعة مختارة من النماذج الموثوقة والمحسنة لوكلاء البرمجة.",
"provider.connect.opencodeZen.line2":
"باستخدام مفتاح API واحد، ستحصل على إمكانية الوصول إلى نماذج مثل Claude و GPT و Gemini و GLM والمزيد.",
"provider.connect.opencodeZen.visit.prefix": "قم بزيارة ",
"provider.connect.opencodeZen.visit.link": "opencode.ai/zen",
"provider.connect.opencodeZen.visit.suffix": " للحصول على مفتاح API الخاص بك.",
"provider.connect.oauth.code.visit.prefix": "قم بزيارة ",
"provider.connect.oauth.code.visit.link": "هذا الرابط",
"provider.connect.oauth.code.visit.suffix": " للحصول على رمز التفويض الخاص بك لتوصيل حسابك واستخدام نماذج {{provider}} في OpenCode.",
"provider.connect.oauth.code.visit.suffix":
" للحصول على رمز التفويض الخاص بك لتوصيل حسابك واستخدام نماذج {{provider}} في OpenCode.",
"provider.connect.oauth.code.label": "رمز تفويض {{method}}",
"provider.connect.oauth.code.placeholder": "رمز التفويض",
"provider.connect.oauth.code.required": "رمز التفويض مطلوب",
"provider.connect.oauth.code.invalid": "رمز التفويض غير صالح",
"provider.connect.oauth.auto.visit.prefix": "قم بزيارة ",
"provider.connect.oauth.auto.visit.link": "هذا الرابط",
"provider.connect.oauth.auto.visit.suffix": " وأدخل الرمز أدناه لتوصيل حسابك واستخدام نماذج {{provider}} في OpenCode.",
"provider.connect.oauth.auto.visit.suffix":
" وأدخل الرمز أدناه لتوصيل حسابك واستخدام نماذج {{provider}} في OpenCode.",
"provider.connect.oauth.auto.confirmationCode": "رمز التأكيد",
"provider.connect.toast.connected.title": "تم توصيل {{provider}}",
"provider.connect.toast.connected.description": "نماذج {{provider}} متاحة الآن للاستخدام.",
@@ -200,7 +205,7 @@ export const dict = {
"common.default": "افتراضي",
"common.attachment": "مرفق",
"prompt.placeholder.shell": "أدخل أمر shell...",
"prompt.placeholder.normal": "اسأل أي شيء... \"{{example}}\"",
"prompt.placeholder.normal": 'اسأل أي شيء... "{{example}}"',
"prompt.placeholder.summarizeComments": "لخّص التعليقات…",
"prompt.placeholder.summarizeComment": "لخّص التعليق…",
"prompt.mode.shell": "Shell",
@@ -278,7 +283,8 @@ export const dict = {
"dialog.server.add.checking": "جارٍ التحقق...",
"dialog.server.add.button": "إضافة خادم",
"dialog.server.default.title": "الخادم الافتراضي",
"dialog.server.default.description": "الاتصال بهذا الخادم عند بدء تشغيل التطبيق بدلاً من بدء خادم محلي. يتطلب إعادة التشغيل.",
"dialog.server.default.description":
"الاتصال بهذا الخادم عند بدء تشغيل التطبيق بدلاً من بدء خادم محلي. يتطلب إعادة التشغيل.",
"dialog.server.default.none": "لم يتم تحديد خادم",
"dialog.server.default.set": "تعيين الخادم الحالي كافتراضي",
"dialog.server.default.clear": "مسح",
@@ -301,7 +307,7 @@ export const dict = {
"dialog.project.edit.worktree.startup.description": "يتم تشغيله بعد إنشاء مساحة عمل جديدة (شجرة عمل).",
"dialog.project.edit.worktree.startup.placeholder": "مثال: bun install",
"context.breakdown.title": "تفصيل السياق",
"context.breakdown.note": "تفصيل تقريبي لرموز الإدخال. يشمل \"أخرى\" تعريفات الأدوات والنفقات العامة.",
"context.breakdown.note": 'تفصيل تقريبي لرموز الإدخال. يشمل "أخرى" تعريفات الأدوات والنفقات العامة.',
"context.breakdown.system": "النظام",
"context.breakdown.user": "المستخدم",
"context.breakdown.assistant": "المساعد",
@@ -388,7 +394,8 @@ export const dict = {
"error.page.report.prefix": "يرجى الإبلاغ عن هذا الخطأ لفريق OpenCode",
"error.page.report.discord": "على Discord",
"error.page.version": "الإصدار: {{version}}",
"error.dev.rootNotFound": "لم يتم العثور على العنصر الجذري. هل نسيت إضافته إلى index.html؟ أو ربما تمت كتابة سمة id بشكل خاطئ؟",
"error.dev.rootNotFound":
"لم يتم العثور على العنصر الجذري. هل نسيت إضافته إلى index.html؟ أو ربما تمت كتابة سمة id بشكل خاطئ؟",
"error.globalSync.connectFailed": "تعذر الاتصال بالخادم. هل هناك خادم يعمل في `{{url}}`؟",
"directory.error.invalidUrl": "دليل غير صالح في عنوان URL.",
"error.chain.unknown": "خطأ غير معروف",
@@ -400,12 +407,13 @@ export const dict = {
"error.chain.didYouMean": "هل كنت تعني: {{suggestions}}",
"error.chain.modelNotFound": "النموذج غير موجود: {{provider}}/{{model}}",
"error.chain.checkConfig": "تحقق من أسماء الموفر/النموذج في التكوين (opencode.json)",
"error.chain.mcpFailed": "فشل خادم MCP \"{{name}}\". لاحظ أن OpenCode لا يدعم مصادقة MCP بعد.",
"error.chain.mcpFailed": 'فشل خادم MCP "{{name}}". لاحظ أن OpenCode لا يدعم مصادقة MCP بعد.',
"error.chain.providerAuthFailed": "فشلت مصادقة الموفر ({{provider}}): {{message}}",
"error.chain.providerInitFailed": "فشل تهيئة الموفر \"{{provider}}\". تحقق من بيانات الاعتماد والتكوين.",
"error.chain.providerInitFailed": 'فشل تهيئة الموفر "{{provider}}". تحقق من بيانات الاعتماد والتكوين.',
"error.chain.configJsonInvalid": "ملف التكوين في {{path}} ليس JSON(C) صالحًا",
"error.chain.configJsonInvalidWithMessage": "ملف التكوين في {{path}} ليس JSON(C) صالحًا: {{message}}",
"error.chain.configDirectoryTypo": "الدليل \"{{dir}}\" في {{path}} غير صالح. أعد تسمية الدليل إلى \"{{suggestion}}\" أو قم بإزالته. هذا خطأ مطبعي شائع.",
"error.chain.configDirectoryTypo":
'الدليل "{{dir}}" في {{path}} غير صالح. أعد تسمية الدليل إلى "{{suggestion}}" أو قم بإزالته. هذا خطأ مطبعي شائع.',
"error.chain.configFrontmatterError": "فشل تحليل frontmatter في {{path}}:\n{{message}}",
"error.chain.configInvalid": "ملف التكوين في {{path}} غير صالح",
"error.chain.configInvalidWithMessage": "ملف التكوين في {{path}} غير صالح: {{message}}",
@@ -524,9 +532,10 @@ export const dict = {
"settings.general.row.font.description": "تخصيص الخط الأحادي المستخدم في كتل التعليمات البرمجية",
"settings.general.row.wayland.title": "استخدام Wayland الأصلي",
"settings.general.row.wayland.description": "تعطيل التراجع إلى X11 على Wayland. يتطلب إعادة التشغيل.",
"settings.general.row.wayland.tooltip": "على Linux مع شاشات بمعدلات تحديث مختلطة، يمكن أن يكون Wayland الأصلي أكثر استقرارًا.",
"settings.general.row.wayland.tooltip":
"على Linux مع شاشات بمعدلات تحديث مختلطة، يمكن أن يكون Wayland الأصلي أكثر استقرارًا.",
"settings.general.row.releaseNotes.title": "ملاحظات الإصدار",
"settings.general.row.releaseNotes.description": "عرض نوافذ \"ما الجديد\" المنبثقة بعد التحديثات",
"settings.general.row.releaseNotes.description": 'عرض نوافذ "ما الجديد" المنبثقة بعد التحديثات',
"settings.updates.row.startup.title": "التحقق من التحديثات عند بدء التشغيل",
"settings.updates.row.startup.description": "التحقق تلقائيًا من التحديثات عند تشغيل OpenCode",
"settings.updates.row.check.title": "التحقق من التحديثات",
@@ -647,7 +656,8 @@ export const dict = {
"settings.permissions.tool.read.title": "قراءة",
"settings.permissions.tool.read.description": "قراءة ملف (يطابق مسار الملف)",
"settings.permissions.tool.edit.title": "تحرير",
"settings.permissions.tool.edit.description": "تعديل الملفات، بما في ذلك التحرير والكتابة والتصحيحات والتحرير المتعدد",
"settings.permissions.tool.edit.description":
"تعديل الملفات، بما في ذلك التحرير والكتابة والتصحيحات والتحرير المتعدد",
"settings.permissions.tool.glob.title": "Glob",
"settings.permissions.tool.glob.description": "مطابقة الملفات باستخدام أنماط glob",
"settings.permissions.tool.grep.title": "Grep",
@@ -678,7 +688,7 @@ export const dict = {
"settings.permissions.tool.doom_loop.description": "اكتشاف استدعاءات الأدوات المتكررة بمدخلات متطابقة",
"session.delete.failed.title": "فشل حذف الجلسة",
"session.delete.title": "حذف الجلسة",
"session.delete.confirm": "حذف الجلسة \"{{name}}\"؟",
"session.delete.confirm": 'حذف الجلسة "{{name}}"؟',
"session.delete.button": "حذف الجلسة",
"workspace.new": "مساحة عمل جديدة",
"workspace.type.local": "محلي",
@@ -696,14 +706,13 @@ export const dict = {
"workspace.status.clean": "لم يتم اكتشاف تغييرات غير مدمجة.",
"workspace.status.dirty": "تم اكتشاف تغييرات غير مدمجة في مساحة العمل هذه.",
"workspace.delete.title": "حذف مساحة العمل",
"workspace.delete.confirm": "حذف مساحة العمل \"{{name}}\"؟",
"workspace.delete.confirm": 'حذف مساحة العمل "{{name}}"؟',
"workspace.delete.button": "حذف مساحة العمل",
"workspace.reset.title": "إعادة تعيين مساحة العمل",
"workspace.reset.confirm": "إعادة تعيين مساحة العمل \"{{name}}\"؟",
"workspace.reset.confirm": 'إعادة تعيين مساحة العمل "{{name}}"؟',
"workspace.reset.button": "إعادة تعيين مساحة العمل",
"workspace.reset.archived.none": "لن تتم أرشفة أي جلسات نشطة.",
"workspace.reset.archived.one": "ستتم أرشفة جلسة واحدة.",
"workspace.reset.archived.many": "ستتم أرشفة {{count}} جلسات.",
"workspace.reset.note": "سيؤدي هذا إلى إعادة تعيين مساحة العمل لتتطابق مع الفرع الافتراضي.",
}

View File

@@ -110,25 +110,30 @@ export const dict = {
"provider.connect.status.inProgress": "Autorização em andamento...",
"provider.connect.status.waiting": "Aguardando autorização...",
"provider.connect.status.failed": "Autorização falhou: {{error}}",
"provider.connect.apiKey.description": "Digite sua chave de API do {{provider}} para conectar sua conta e usar modelos do {{provider}} no OpenCode.",
"provider.connect.apiKey.description":
"Digite sua chave de API do {{provider}} para conectar sua conta e usar modelos do {{provider}} no OpenCode.",
"provider.connect.apiKey.label": "Chave de API do {{provider}}",
"provider.connect.apiKey.placeholder": "Chave de API",
"provider.connect.apiKey.required": "A chave de API é obrigatória",
"provider.connect.opencodeZen.line1": "OpenCode Zen oferece acesso a um conjunto selecionado de modelos confiáveis otimizados para agentes de código.",
"provider.connect.opencodeZen.line2": "Com uma única chave de API você terá acesso a modelos como Claude, GPT, Gemini, GLM e mais.",
"provider.connect.opencodeZen.line1":
"OpenCode Zen oferece acesso a um conjunto selecionado de modelos confiáveis otimizados para agentes de código.",
"provider.connect.opencodeZen.line2":
"Com uma única chave de API você terá acesso a modelos como Claude, GPT, Gemini, GLM e mais.",
"provider.connect.opencodeZen.visit.prefix": "Visite ",
"provider.connect.opencodeZen.visit.link": "opencode.ai/zen",
"provider.connect.opencodeZen.visit.suffix": " para obter sua chave de API.",
"provider.connect.oauth.code.visit.prefix": "Visite ",
"provider.connect.oauth.code.visit.link": "este link",
"provider.connect.oauth.code.visit.suffix": " para obter seu código de autorização e conectar sua conta para usar modelos do {{provider}} no OpenCode.",
"provider.connect.oauth.code.visit.suffix":
" para obter seu código de autorização e conectar sua conta para usar modelos do {{provider}} no OpenCode.",
"provider.connect.oauth.code.label": "Código de autorização {{method}}",
"provider.connect.oauth.code.placeholder": "Código de autorização",
"provider.connect.oauth.code.required": "O código de autorização é obrigatório",
"provider.connect.oauth.code.invalid": "Código de autorização inválido",
"provider.connect.oauth.auto.visit.prefix": "Visite ",
"provider.connect.oauth.auto.visit.link": "este link",
"provider.connect.oauth.auto.visit.suffix": " e digite o código abaixo para conectar sua conta e usar modelos do {{provider}} no OpenCode.",
"provider.connect.oauth.auto.visit.suffix":
" e digite o código abaixo para conectar sua conta e usar modelos do {{provider}} no OpenCode.",
"provider.connect.oauth.auto.confirmationCode": "Código de confirmação",
"provider.connect.toast.connected.title": "{{provider}} conectado",
"provider.connect.toast.connected.description": "Modelos do {{provider}} agora estão disponíveis para uso.",
@@ -200,7 +205,7 @@ export const dict = {
"common.default": "Padrão",
"common.attachment": "anexo",
"prompt.placeholder.shell": "Digite comando do shell...",
"prompt.placeholder.normal": "Pergunte qualquer coisa... \"{{example}}\"",
"prompt.placeholder.normal": 'Pergunte qualquer coisa... "{{example}}"',
"prompt.placeholder.summarizeComments": "Resumir comentários…",
"prompt.placeholder.summarizeComment": "Resumir comentário…",
"prompt.mode.shell": "Shell",
@@ -278,7 +283,8 @@ export const dict = {
"dialog.server.add.checking": "Verificando...",
"dialog.server.add.button": "Adicionar",
"dialog.server.default.title": "Servidor padrão",
"dialog.server.default.description": "Conectar a este servidor na inicialização do aplicativo ao invés de iniciar um servidor local. Requer reinicialização.",
"dialog.server.default.description":
"Conectar a este servidor na inicialização do aplicativo ao invés de iniciar um servidor local. Requer reinicialização.",
"dialog.server.default.none": "Nenhum servidor selecionado",
"dialog.server.default.set": "Definir servidor atual como padrão",
"dialog.server.default.clear": "Limpar",
@@ -301,7 +307,8 @@ export const dict = {
"dialog.project.edit.worktree.startup.description": "Executa após criar um novo espaço de trabalho (worktree).",
"dialog.project.edit.worktree.startup.placeholder": "ex: bun install",
"context.breakdown.title": "Detalhamento do Contexto",
"context.breakdown.note": "Detalhamento aproximado dos tokens de entrada. \"Outros\" inclui definições de ferramentas e overhead.",
"context.breakdown.note":
'Detalhamento aproximado dos tokens de entrada. "Outros" inclui definições de ferramentas e overhead.',
"context.breakdown.system": "Sistema",
"context.breakdown.user": "Usuário",
"context.breakdown.assistant": "Assistente",
@@ -388,7 +395,8 @@ export const dict = {
"error.page.report.prefix": "Por favor, reporte este erro para a equipe do OpenCode",
"error.page.report.discord": "no Discord",
"error.page.version": "Versão: {{version}}",
"error.dev.rootNotFound": "Elemento raiz não encontrado. Você esqueceu de adicioná-lo ao seu index.html? Ou talvez o atributo id foi escrito incorretamente?",
"error.dev.rootNotFound":
"Elemento raiz não encontrado. Você esqueceu de adicioná-lo ao seu index.html? Ou talvez o atributo id foi escrito incorretamente?",
"error.globalSync.connectFailed": "Não foi possível conectar ao servidor. Há um servidor executando em `{{url}}`?",
"directory.error.invalidUrl": "Diretório inválido na URL.",
"error.chain.unknown": "Erro desconhecido",
@@ -400,12 +408,15 @@ export const dict = {
"error.chain.didYouMean": "Você quis dizer: {{suggestions}}",
"error.chain.modelNotFound": "Modelo não encontrado: {{provider}}/{{model}}",
"error.chain.checkConfig": "Verifique os nomes de provedor/modelo na sua configuração (opencode.json)",
"error.chain.mcpFailed": "Servidor MCP \"{{name}}\" falhou. Nota: OpenCode ainda não suporta autenticação MCP.",
"error.chain.mcpFailed": 'Servidor MCP "{{name}}" falhou. Nota: OpenCode ainda não suporta autenticação MCP.',
"error.chain.providerAuthFailed": "Autenticação do provedor falhou ({{provider}}): {{message}}",
"error.chain.providerInitFailed": "Falha ao inicializar provedor \"{{provider}}\". Verifique credenciais e configuração.",
"error.chain.providerInitFailed":
'Falha ao inicializar provedor "{{provider}}". Verifique credenciais e configuração.',
"error.chain.configJsonInvalid": "Arquivo de configuração em {{path}} não é um JSON(C) válido",
"error.chain.configJsonInvalidWithMessage": "Arquivo de configuração em {{path}} não é um JSON(C) válido: {{message}}",
"error.chain.configDirectoryTypo": "Diretório \"{{dir}}\" em {{path}} não é válido. Renomeie o diretório para \"{{suggestion}}\" ou remova-o. Este é um erro de digitação comum.",
"error.chain.configJsonInvalidWithMessage":
"Arquivo de configuração em {{path}} não é um JSON(C) válido: {{message}}",
"error.chain.configDirectoryTypo":
'Diretório "{{dir}}" em {{path}} não é válido. Renomeie o diretório para "{{suggestion}}" ou remova-o. Este é um erro de digitação comum.',
"error.chain.configFrontmatterError": "Falha ao analisar frontmatter em {{path}}:\n{{message}}",
"error.chain.configInvalid": "Arquivo de configuração em {{path}} é inválido",
"error.chain.configInvalidWithMessage": "Arquivo de configuração em {{path}} é inválido: {{message}}",
@@ -458,8 +469,10 @@ export const dict = {
"status.popover.tab.plugins": "Plugins",
"status.popover.action.manageServers": "Gerenciar servidores",
"session.share.popover.title": "Publicar na web",
"session.share.popover.description.shared": "Esta sessão é pública na web. Está acessível para qualquer pessoa com o link.",
"session.share.popover.description.unshared": "Compartilhar sessão publicamente na web. Estará acessível para qualquer pessoa com o link.",
"session.share.popover.description.shared":
"Esta sessão é pública na web. Está acessível para qualquer pessoa com o link.",
"session.share.popover.description.unshared":
"Compartilhar sessão publicamente na web. Estará acessível para qualquer pessoa com o link.",
"session.share.action.share": "Compartilhar",
"session.share.action.publish": "Publicar",
"session.share.action.publishing": "Publicando...",
@@ -476,7 +489,8 @@ export const dict = {
"terminal.title.numbered": "Terminal {{number}}",
"terminal.close": "Fechar terminal",
"terminal.connectionLost.title": "Conexão Perdida",
"terminal.connectionLost.description": "A conexão do terminal foi interrompida. Isso pode acontecer quando o servidor reinicia.",
"terminal.connectionLost.description":
"A conexão do terminal foi interrompida. Isso pode acontecer quando o servidor reinicia.",
"common.closeTab": "Fechar aba",
"common.dismiss": "Descartar",
"common.requestFailed": "Requisição falhou",
@@ -524,9 +538,10 @@ export const dict = {
"settings.general.row.font.description": "Personalize a fonte monoespaçada usada em blocos de código",
"settings.general.row.wayland.title": "Usar Wayland nativo",
"settings.general.row.wayland.description": "Desabilitar fallback X11 no Wayland. Requer reinicialização.",
"settings.general.row.wayland.tooltip": "No Linux com monitores de taxas de atualização mistas, Wayland nativo pode ser mais estável.",
"settings.general.row.wayland.tooltip":
"No Linux com monitores de taxas de atualização mistas, Wayland nativo pode ser mais estável.",
"settings.general.row.releaseNotes.title": "Notas da versão",
"settings.general.row.releaseNotes.description": "Mostrar pop-ups de \"Novidades\" após atualizações",
"settings.general.row.releaseNotes.description": 'Mostrar pop-ups de "Novidades" após atualizações',
"settings.updates.row.startup.title": "Verificar atualizações ao iniciar",
"settings.updates.row.startup.description": "Verificar atualizações automaticamente quando o OpenCode iniciar",
"settings.updates.row.check.title": "Verificar atualizações",
@@ -593,9 +608,11 @@ export const dict = {
"sound.option.yup05": "Sim 05",
"sound.option.yup06": "Sim 06",
"settings.general.notifications.agent.title": "Agente",
"settings.general.notifications.agent.description": "Mostrar notificação do sistema quando o agente estiver completo ou precisar de atenção",
"settings.general.notifications.agent.description":
"Mostrar notificação do sistema quando o agente estiver completo ou precisar de atenção",
"settings.general.notifications.permissions.title": "Permissões",
"settings.general.notifications.permissions.description": "Mostrar notificação do sistema quando uma permissão for necessária",
"settings.general.notifications.permissions.description":
"Mostrar notificação do sistema quando uma permissão for necessária",
"settings.general.notifications.errors.title": "Erros",
"settings.general.notifications.errors.description": "Mostrar notificação do sistema quando ocorrer um erro",
"settings.general.sounds.agent.title": "Agente",
@@ -647,7 +664,8 @@ export const dict = {
"settings.permissions.tool.read.title": "Ler",
"settings.permissions.tool.read.description": "Ler um arquivo (corresponde ao caminho do arquivo)",
"settings.permissions.tool.edit.title": "Editar",
"settings.permissions.tool.edit.description": "Modificar arquivos, incluindo edições, escritas, patches e multi-edições",
"settings.permissions.tool.edit.description":
"Modificar arquivos, incluindo edições, escritas, patches e multi-edições",
"settings.permissions.tool.glob.title": "Glob",
"settings.permissions.tool.glob.description": "Corresponder arquivos usando padrões glob",
"settings.permissions.tool.grep.title": "Grep",
@@ -678,7 +696,7 @@ export const dict = {
"settings.permissions.tool.doom_loop.description": "Detectar chamadas de ferramentas repetidas com entrada idêntica",
"session.delete.failed.title": "Falha ao excluir sessão",
"session.delete.title": "Excluir sessão",
"session.delete.confirm": "Excluir sessão \"{{name}}\"?",
"session.delete.confirm": 'Excluir sessão "{{name}}"?',
"session.delete.button": "Excluir sessão",
"workspace.new": "Novo espaço de trabalho",
"workspace.type.local": "local",
@@ -696,14 +714,13 @@ export const dict = {
"workspace.status.clean": "Nenhuma alteração não mesclada detectada.",
"workspace.status.dirty": "Alterações não mescladas detectadas neste espaço de trabalho.",
"workspace.delete.title": "Excluir espaço de trabalho",
"workspace.delete.confirm": "Excluir espaço de trabalho \"{{name}}\"?",
"workspace.delete.confirm": 'Excluir espaço de trabalho "{{name}}"?',
"workspace.delete.button": "Excluir espaço de trabalho",
"workspace.reset.title": "Redefinir espaço de trabalho",
"workspace.reset.confirm": "Redefinir espaço de trabalho \"{{name}}\"?",
"workspace.reset.confirm": 'Redefinir espaço de trabalho "{{name}}"?',
"workspace.reset.button": "Redefinir espaço de trabalho",
"workspace.reset.archived.none": "Nenhuma sessão ativa será arquivada.",
"workspace.reset.archived.one": "1 sessão será arquivada.",
"workspace.reset.archived.many": "{{count}} sessões serão arquivadas.",
"workspace.reset.note": "Isso redefinirá o espaço de trabalho para corresponder ao branch padrão.",
}

View File

@@ -114,25 +114,30 @@ export const dict = {
"provider.connect.status.inProgress": "Autorisierung läuft...",
"provider.connect.status.waiting": "Warten auf Autorisierung...",
"provider.connect.status.failed": "Autorisierung fehlgeschlagen: {{error}}",
"provider.connect.apiKey.description": "Geben Sie Ihren {{provider}} API-Schlüssel ein, um Ihr Konto zu verbinden und {{provider}} Modelle in OpenCode zu nutzen.",
"provider.connect.apiKey.description":
"Geben Sie Ihren {{provider}} API-Schlüssel ein, um Ihr Konto zu verbinden und {{provider}} Modelle in OpenCode zu nutzen.",
"provider.connect.apiKey.label": "{{provider}} API-Schlüssel",
"provider.connect.apiKey.placeholder": "API-Schlüssel",
"provider.connect.apiKey.required": "API-Schlüssel ist erforderlich",
"provider.connect.opencodeZen.line1": "OpenCode Zen bietet Ihnen Zugriff auf eine kuratierte Auswahl zuverlässiger, optimierter Modelle für Coding-Agenten.",
"provider.connect.opencodeZen.line2": "Mit einem einzigen API-Schlüssel erhalten Sie Zugriff auf Modelle wie Claude, GPT, Gemini, GLM und mehr.",
"provider.connect.opencodeZen.line1":
"OpenCode Zen bietet Ihnen Zugriff auf eine kuratierte Auswahl zuverlässiger, optimierter Modelle für Coding-Agenten.",
"provider.connect.opencodeZen.line2":
"Mit einem einzigen API-Schlüssel erhalten Sie Zugriff auf Modelle wie Claude, GPT, Gemini, GLM und mehr.",
"provider.connect.opencodeZen.visit.prefix": "Besuchen Sie ",
"provider.connect.opencodeZen.visit.link": "opencode.ai/zen",
"provider.connect.opencodeZen.visit.suffix": ", um Ihren API-Schlüssel zu erhalten.",
"provider.connect.oauth.code.visit.prefix": "Besuchen Sie ",
"provider.connect.oauth.code.visit.link": "diesen Link",
"provider.connect.oauth.code.visit.suffix": ", um Ihren Autorisierungscode zu erhalten, Ihr Konto zu verbinden und {{provider}} Modelle in OpenCode zu nutzen.",
"provider.connect.oauth.code.visit.suffix":
", um Ihren Autorisierungscode zu erhalten, Ihr Konto zu verbinden und {{provider}} Modelle in OpenCode zu nutzen.",
"provider.connect.oauth.code.label": "{{method}} Autorisierungscode",
"provider.connect.oauth.code.placeholder": "Autorisierungscode",
"provider.connect.oauth.code.required": "Autorisierungscode ist erforderlich",
"provider.connect.oauth.code.invalid": "Ungültiger Autorisierungscode",
"provider.connect.oauth.auto.visit.prefix": "Besuchen Sie ",
"provider.connect.oauth.auto.visit.link": "diesen Link",
"provider.connect.oauth.auto.visit.suffix": " und geben Sie den untenstehenden Code ein, um Ihr Konto zu verbinden und {{provider}} Modelle in OpenCode zu nutzen.",
"provider.connect.oauth.auto.visit.suffix":
" und geben Sie den untenstehenden Code ein, um Ihr Konto zu verbinden und {{provider}} Modelle in OpenCode zu nutzen.",
"provider.connect.oauth.auto.confirmationCode": "Bestätigungscode",
"provider.connect.toast.connected.title": "{{provider}} verbunden",
"provider.connect.toast.connected.description": "{{provider}} Modelle sind jetzt verfügbar.",
@@ -149,7 +154,8 @@ export const dict = {
"provider.custom.field.baseURL.placeholder": "https://api.myprovider.com/v1",
"provider.custom.field.apiKey.label": "API-Schlüssel",
"provider.custom.field.apiKey.placeholder": "API-Schlüssel",
"provider.custom.field.apiKey.description": "Optional. Leer lassen, wenn Sie die Authentifizierung über Header verwalten.",
"provider.custom.field.apiKey.description":
"Optional. Leer lassen, wenn Sie die Authentifizierung über Header verwalten.",
"provider.custom.models.label": "Modelle",
"provider.custom.models.id.label": "ID",
"provider.custom.models.id.placeholder": "model-id",
@@ -204,7 +210,7 @@ export const dict = {
"common.default": "Standard",
"common.attachment": "Anhang",
"prompt.placeholder.shell": "Shell-Befehl eingeben...",
"prompt.placeholder.normal": "Fragen Sie alles... \"{{example}}\"",
"prompt.placeholder.normal": 'Fragen Sie alles... "{{example}}"',
"prompt.placeholder.summarizeComments": "Kommentare zusammenfassen…",
"prompt.placeholder.summarizeComment": "Kommentar zusammenfassen…",
"prompt.mode.shell": "Shell",
@@ -252,7 +258,8 @@ export const dict = {
"prompt.toast.pasteUnsupported.title": "Nicht unterstütztes Einfügen",
"prompt.toast.pasteUnsupported.description": "Hier können nur Bilder oder PDFs eingefügt werden.",
"prompt.toast.modelAgentRequired.title": "Wählen Sie einen Agenten und ein Modell",
"prompt.toast.modelAgentRequired.description": "Wählen Sie einen Agenten und ein Modell, bevor Sie eine Eingabe senden.",
"prompt.toast.modelAgentRequired.description":
"Wählen Sie einen Agenten und ein Modell, bevor Sie eine Eingabe senden.",
"prompt.toast.worktreeCreateFailed.title": "Worktree konnte nicht erstellt werden",
"prompt.toast.sessionCreateFailed.title": "Sitzung konnte nicht erstellt werden",
"prompt.toast.shellSendFailed.title": "Shell-Befehl konnte nicht gesendet werden",
@@ -282,7 +289,8 @@ export const dict = {
"dialog.server.add.checking": "Prüfen...",
"dialog.server.add.button": "Server hinzufügen",
"dialog.server.default.title": "Standardserver",
"dialog.server.default.description": "Beim App-Start mit diesem Server verbinden, anstatt einen lokalen Server zu starten. Erfordert Neustart.",
"dialog.server.default.description":
"Beim App-Start mit diesem Server verbinden, anstatt einen lokalen Server zu starten. Erfordert Neustart.",
"dialog.server.default.none": "Kein Server ausgewählt",
"dialog.server.default.set": "Aktuellen Server als Standard setzen",
"dialog.server.default.clear": "Löschen",
@@ -302,10 +310,12 @@ export const dict = {
"dialog.project.edit.color": "Farbe",
"dialog.project.edit.color.select": "{{color}}-Farbe auswählen",
"dialog.project.edit.worktree.startup": "Startup-Skript für Arbeitsbereich",
"dialog.project.edit.worktree.startup.description": "Wird nach dem Erstellen eines neuen Arbeitsbereichs (Worktree) ausgeführt.",
"dialog.project.edit.worktree.startup.description":
"Wird nach dem Erstellen eines neuen Arbeitsbereichs (Worktree) ausgeführt.",
"dialog.project.edit.worktree.startup.placeholder": "z. B. bun install",
"context.breakdown.title": "Kontext-Aufschlüsselung",
"context.breakdown.note": "Ungefähre Aufschlüsselung der Eingabe-Token. \"Andere\" beinhaltet Werkzeugdefinitionen und Overhead.",
"context.breakdown.note":
'Ungefähre Aufschlüsselung der Eingabe-Token. "Andere" beinhaltet Werkzeugdefinitionen und Overhead.',
"context.breakdown.system": "System",
"context.breakdown.user": "Benutzer",
"context.breakdown.assistant": "Assistent",
@@ -392,7 +402,8 @@ export const dict = {
"error.page.report.prefix": "Bitte melden Sie diesen Fehler dem OpenCode-Team",
"error.page.report.discord": "auf Discord",
"error.page.version": "Version: {{version}}",
"error.dev.rootNotFound": "Wurzelelement nicht gefunden. Haben Sie vergessen, es in Ihre index.html aufzunehmen? Oder wurde das id-Attribut falsch geschrieben?",
"error.dev.rootNotFound":
"Wurzelelement nicht gefunden. Haben Sie vergessen, es in Ihre index.html aufzunehmen? Oder wurde das id-Attribut falsch geschrieben?",
"error.globalSync.connectFailed": "Verbindung zum Server fehlgeschlagen. Läuft ein Server unter `{{url}}`?",
"directory.error.invalidUrl": "Ungültiges Verzeichnis in der URL.",
"error.chain.unknown": "Unbekannter Fehler",
@@ -404,12 +415,16 @@ export const dict = {
"error.chain.didYouMean": "Meinten Sie: {{suggestions}}",
"error.chain.modelNotFound": "Modell nicht gefunden: {{provider}}/{{model}}",
"error.chain.checkConfig": "Überprüfen Sie Ihre Konfiguration (opencode.json) auf Anbieter-/Modellnamen",
"error.chain.mcpFailed": "MCP-Server \"{{name}}\" fehlgeschlagen. Hinweis: OpenCode unterstützt noch keine MCP-Authentifizierung.",
"error.chain.mcpFailed":
'MCP-Server "{{name}}" fehlgeschlagen. Hinweis: OpenCode unterstützt noch keine MCP-Authentifizierung.',
"error.chain.providerAuthFailed": "Anbieter-Authentifizierung fehlgeschlagen ({{provider}}): {{message}}",
"error.chain.providerInitFailed": "Anbieter \"{{provider}}\" konnte nicht initialisiert werden. Überprüfen Sie Anmeldeinformationen und Konfiguration.",
"error.chain.providerInitFailed":
'Anbieter "{{provider}}" konnte nicht initialisiert werden. Überprüfen Sie Anmeldeinformationen und Konfiguration.',
"error.chain.configJsonInvalid": "Konfigurationsdatei unter {{path}} ist kein gültiges JSON(C)",
"error.chain.configJsonInvalidWithMessage": "Konfigurationsdatei unter {{path}} ist kein gültiges JSON(C): {{message}}",
"error.chain.configDirectoryTypo": "Verzeichnis \"{{dir}}\" in {{path}} ist ungültig. Benennen Sie das Verzeichnis in \"{{suggestion}}\" um oder entfernen Sie es. Dies ist ein häufiger Tippfehler.",
"error.chain.configJsonInvalidWithMessage":
"Konfigurationsdatei unter {{path}} ist kein gültiges JSON(C): {{message}}",
"error.chain.configDirectoryTypo":
'Verzeichnis "{{dir}}" in {{path}} ist ungültig. Benennen Sie das Verzeichnis in "{{suggestion}}" um oder entfernen Sie es. Dies ist ein häufiger Tippfehler.',
"error.chain.configFrontmatterError": "Frontmatter in {{path}} konnte nicht geparst werden:\n{{message}}",
"error.chain.configInvalid": "Konfigurationsdatei unter {{path}} ist ungültig",
"error.chain.configInvalidWithMessage": "Konfigurationsdatei unter {{path}} ist ungültig: {{message}}",
@@ -462,8 +477,10 @@ export const dict = {
"status.popover.tab.plugins": "Plugins",
"status.popover.action.manageServers": "Server verwalten",
"session.share.popover.title": "Im Web veröffentlichen",
"session.share.popover.description.shared": "Diese Sitzung ist öffentlich im Web. Sie ist für jeden mit dem Link zugänglich.",
"session.share.popover.description.unshared": "Sitzung öffentlich im Web teilen. Sie wird für jeden mit dem Link zugänglich sein.",
"session.share.popover.description.shared":
"Diese Sitzung ist öffentlich im Web. Sie ist für jeden mit dem Link zugänglich.",
"session.share.popover.description.unshared":
"Sitzung öffentlich im Web teilen. Sie wird für jeden mit dem Link zugänglich sein.",
"session.share.action.share": "Teilen",
"session.share.action.publish": "Veröffentlichen",
"session.share.action.publishing": "Veröffentliche...",
@@ -480,7 +497,8 @@ export const dict = {
"terminal.title.numbered": "Terminal {{number}}",
"terminal.close": "Terminal schließen",
"terminal.connectionLost.title": "Verbindung verloren",
"terminal.connectionLost.description": "Die Terminalverbindung wurde unterbrochen. Das kann passieren, wenn der Server neu startet.",
"terminal.connectionLost.description":
"Die Terminalverbindung wurde unterbrochen. Das kann passieren, wenn der Server neu startet.",
"common.closeTab": "Tab schließen",
"common.dismiss": "Verwerfen",
"common.requestFailed": "Anfrage fehlgeschlagen",
@@ -502,7 +520,8 @@ export const dict = {
"sidebar.workspaces.disable": "Arbeitsbereiche deaktivieren",
"sidebar.gettingStarted.title": "Erste Schritte",
"sidebar.gettingStarted.line1": "OpenCode enthält kostenlose Modelle, damit Sie sofort loslegen können.",
"sidebar.gettingStarted.line2": "Verbinden Sie einen beliebigen Anbieter, um Modelle wie Claude, GPT, Gemini usw. zu nutzen.",
"sidebar.gettingStarted.line2":
"Verbinden Sie einen beliebigen Anbieter, um Modelle wie Claude, GPT, Gemini usw. zu nutzen.",
"sidebar.project.recentSessions": "Letzte Sitzungen",
"sidebar.project.viewAllSessions": "Alle Sitzungen anzeigen",
"app.name.desktop": "OpenCode Desktop",
@@ -528,9 +547,10 @@ export const dict = {
"settings.general.row.font.description": "Die in Codeblöcken verwendete Monospace-Schriftart anpassen",
"settings.general.row.wayland.title": "Natives Wayland verwenden",
"settings.general.row.wayland.description": "X11-Fallback unter Wayland deaktivieren. Erfordert Neustart.",
"settings.general.row.wayland.tooltip": "Unter Linux mit Monitoren unterschiedlicher Bildwiederholraten kann natives Wayland stabiler sein.",
"settings.general.row.wayland.tooltip":
"Unter Linux mit Monitoren unterschiedlicher Bildwiederholraten kann natives Wayland stabiler sein.",
"settings.general.row.releaseNotes.title": "Versionshinweise",
"settings.general.row.releaseNotes.description": "\"Neuigkeiten\"-Pop-ups nach Updates anzeigen",
"settings.general.row.releaseNotes.description": '"Neuigkeiten"-Pop-ups nach Updates anzeigen',
"settings.updates.row.startup.title": "Beim Start nach Updates suchen",
"settings.updates.row.startup.description": "Beim Start von OpenCode automatisch nach Updates suchen",
"settings.updates.row.check.title": "Nach Updates suchen",
@@ -597,9 +617,11 @@ export const dict = {
"sound.option.yup05": "Ja 05",
"sound.option.yup06": "Ja 06",
"settings.general.notifications.agent.title": "Agent",
"settings.general.notifications.agent.description": "Systembenachrichtigung anzeigen, wenn der Agent fertig ist oder Aufmerksamkeit benötigt",
"settings.general.notifications.agent.description":
"Systembenachrichtigung anzeigen, wenn der Agent fertig ist oder Aufmerksamkeit benötigt",
"settings.general.notifications.permissions.title": "Berechtigungen",
"settings.general.notifications.permissions.description": "Systembenachrichtigung anzeigen, wenn eine Berechtigung erforderlich ist",
"settings.general.notifications.permissions.description":
"Systembenachrichtigung anzeigen, wenn eine Berechtigung erforderlich ist",
"settings.general.notifications.errors.title": "Fehler",
"settings.general.notifications.errors.description": "Systembenachrichtigung anzeigen, wenn ein Fehler auftritt",
"settings.general.sounds.agent.title": "Agent",
@@ -651,7 +673,8 @@ export const dict = {
"settings.permissions.tool.read.title": "Lesen",
"settings.permissions.tool.read.description": "Lesen einer Datei (stimmt mit dem Dateipfad überein)",
"settings.permissions.tool.edit.title": "Bearbeiten",
"settings.permissions.tool.edit.description": "Dateien ändern, einschließlich Bearbeitungen, Schreibvorgängen, Patches und Mehrfachbearbeitungen",
"settings.permissions.tool.edit.description":
"Dateien ändern, einschließlich Bearbeitungen, Schreibvorgängen, Patches und Mehrfachbearbeitungen",
"settings.permissions.tool.glob.title": "Glob",
"settings.permissions.tool.glob.description": "Dateien mithilfe von Glob-Mustern abgleichen",
"settings.permissions.tool.grep.title": "Grep",
@@ -682,7 +705,7 @@ export const dict = {
"settings.permissions.tool.doom_loop.description": "Wiederholte Tool-Aufrufe mit identischer Eingabe erkennen",
"session.delete.failed.title": "Sitzung konnte nicht gelöscht werden",
"session.delete.title": "Sitzung löschen",
"session.delete.confirm": "Sitzung \"{{name}}\" löschen?",
"session.delete.confirm": 'Sitzung "{{name}}" löschen?',
"session.delete.button": "Sitzung löschen",
"workspace.new": "Neuer Arbeitsbereich",
"workspace.type.local": "lokal",
@@ -700,13 +723,13 @@ export const dict = {
"workspace.status.clean": "Keine nicht zusammengeführten Änderungen erkannt.",
"workspace.status.dirty": "Nicht zusammengeführte Änderungen in diesem Arbeitsbereich erkannt.",
"workspace.delete.title": "Arbeitsbereich löschen",
"workspace.delete.confirm": "Arbeitsbereich \"{{name}}\" löschen?",
"workspace.delete.confirm": 'Arbeitsbereich "{{name}}" löschen?',
"workspace.delete.button": "Arbeitsbereich löschen",
"workspace.reset.title": "Arbeitsbereich zurücksetzen",
"workspace.reset.confirm": "Arbeitsbereich \"{{name}}\" zurücksetzen?",
"workspace.reset.confirm": 'Arbeitsbereich "{{name}}" zurücksetzen?',
"workspace.reset.button": "Arbeitsbereich zurücksetzen",
"workspace.reset.archived.none": "Keine aktiven Sitzungen werden archiviert.",
"workspace.reset.archived.one": "1 Sitzung wird archiviert.",
"workspace.reset.archived.many": "{{count}} Sitzungen werden archiviert.",
"workspace.reset.note": "Dadurch wird der Arbeitsbereich auf den Standard-Branch zurückgesetzt."
"workspace.reset.note": "Dadurch wird der Arbeitsbereich auf den Standard-Branch zurückgesetzt.",
} satisfies Partial<Record<Keys, string>>

View File

@@ -110,25 +110,30 @@ export const dict = {
"provider.connect.status.inProgress": "Autoryzacja w toku...",
"provider.connect.status.waiting": "Oczekiwanie na autoryzację...",
"provider.connect.status.failed": "Autoryzacja nie powiodła się: {{error}}",
"provider.connect.apiKey.description": "Wprowadź swój klucz API {{provider}}, aby połączyć konto i używać modeli {{provider}} w OpenCode.",
"provider.connect.apiKey.description":
"Wprowadź swój klucz API {{provider}}, aby połączyć konto i używać modeli {{provider}} w OpenCode.",
"provider.connect.apiKey.label": "Klucz API {{provider}}",
"provider.connect.apiKey.placeholder": "Klucz API",
"provider.connect.apiKey.required": "Klucz API jest wymagany",
"provider.connect.opencodeZen.line1": "OpenCode Zen daje dostęp do wybranego zestawu niezawodnych, zoptymalizowanych modeli dla agentów kodujących.",
"provider.connect.opencodeZen.line2": "Z jednym kluczem API uzyskasz dostęp do modeli takich jak Claude, GPT, Gemini, GLM i więcej.",
"provider.connect.opencodeZen.line1":
"OpenCode Zen daje dostęp do wybranego zestawu niezawodnych, zoptymalizowanych modeli dla agentów kodujących.",
"provider.connect.opencodeZen.line2":
"Z jednym kluczem API uzyskasz dostęp do modeli takich jak Claude, GPT, Gemini, GLM i więcej.",
"provider.connect.opencodeZen.visit.prefix": "Odwiedź ",
"provider.connect.opencodeZen.visit.link": "opencode.ai/zen",
"provider.connect.opencodeZen.visit.suffix": ", aby odebrać swój klucz API.",
"provider.connect.oauth.code.visit.prefix": "Odwiedź ",
"provider.connect.oauth.code.visit.link": "ten link",
"provider.connect.oauth.code.visit.suffix": ", aby odebrać kod autoryzacyjny, połączyć konto i używać modeli {{provider}} w OpenCode.",
"provider.connect.oauth.code.visit.suffix":
", aby odebrać kod autoryzacyjny, połączyć konto i używać modeli {{provider}} w OpenCode.",
"provider.connect.oauth.code.label": "Kod autoryzacyjny {{method}}",
"provider.connect.oauth.code.placeholder": "Kod autoryzacyjny",
"provider.connect.oauth.code.required": "Kod autoryzacyjny jest wymagany",
"provider.connect.oauth.code.invalid": "Nieprawidłowy kod autoryzacyjny",
"provider.connect.oauth.auto.visit.prefix": "Odwiedź ",
"provider.connect.oauth.auto.visit.link": "ten link",
"provider.connect.oauth.auto.visit.suffix": " i wprowadź poniższy kod, aby połączyć konto i używać modeli {{provider}} w OpenCode.",
"provider.connect.oauth.auto.visit.suffix":
" i wprowadź poniższy kod, aby połączyć konto i używać modeli {{provider}} w OpenCode.",
"provider.connect.oauth.auto.confirmationCode": "Kod potwierdzający",
"provider.connect.toast.connected.title": "Połączono {{provider}}",
"provider.connect.toast.connected.description": "Modele {{provider}} są teraz dostępne do użycia.",
@@ -145,7 +150,8 @@ export const dict = {
"provider.custom.field.baseURL.placeholder": "https://api.mojdostawca.com/v1",
"provider.custom.field.apiKey.label": "Klucz API",
"provider.custom.field.apiKey.placeholder": "Klucz API",
"provider.custom.field.apiKey.description": "Opcjonalne. Pozostaw puste, jeśli zarządzasz autoryzacją przez nagłówki.",
"provider.custom.field.apiKey.description":
"Opcjonalne. Pozostaw puste, jeśli zarządzasz autoryzacją przez nagłówki.",
"provider.custom.models.label": "Modele",
"provider.custom.models.id.label": "ID",
"provider.custom.models.id.placeholder": "model-id",
@@ -278,7 +284,8 @@ export const dict = {
"dialog.server.add.checking": "Sprawdzanie...",
"dialog.server.add.button": "Dodaj serwer",
"dialog.server.default.title": "Domyślny serwer",
"dialog.server.default.description": "Połącz z tym serwerem przy uruchomieniu aplikacji zamiast uruchamiać lokalny serwer. Wymaga restartu.",
"dialog.server.default.description":
"Połącz z tym serwerem przy uruchomieniu aplikacji zamiast uruchamiać lokalny serwer. Wymaga restartu.",
"dialog.server.default.none": "Nie wybrano serwera",
"dialog.server.default.set": "Ustaw bieżący serwer jako domyślny",
"dialog.server.default.clear": "Wyczyść",
@@ -388,7 +395,8 @@ export const dict = {
"error.page.report.prefix": "Proszę zgłosić ten błąd do zespołu OpenCode",
"error.page.report.discord": "na Discordzie",
"error.page.version": "Wersja: {{version}}",
"error.dev.rootNotFound": "Nie znaleziono elementu głównego. Czy zapomniałeś dodać go do swojego index.html? A może atrybut id został błędnie wpisany?",
"error.dev.rootNotFound":
"Nie znaleziono elementu głównego. Czy zapomniałeś dodać go do swojego index.html? A może atrybut id został błędnie wpisany?",
"error.globalSync.connectFailed": "Nie można połączyć się z serwerem. Czy serwer działa pod adresem `{{url}}`?",
"directory.error.invalidUrl": "Nieprawidłowy katalog w URL.",
"error.chain.unknown": "Nieznany błąd",
@@ -402,10 +410,12 @@ export const dict = {
"error.chain.checkConfig": "Sprawdź swoją konfigurację (opencode.json) nazwy dostawców/modeli",
"error.chain.mcpFailed": 'MCP server "{{name}}" failed. Note, OpenCode does not support MCP authentication yet.',
"error.chain.providerAuthFailed": "Uwierzytelnianie dostawcy nie powiodło się ({{provider}}): {{message}}",
"error.chain.providerInitFailed": 'Nie udało się zainicjować dostawcy "{{provider}}". Sprawdź poświadczenia i konfigurację.',
"error.chain.providerInitFailed":
'Nie udało się zainicjować dostawcy "{{provider}}". Sprawdź poświadczenia i konfigurację.',
"error.chain.configJsonInvalid": "Plik konfiguracyjny w {{path}} nie jest poprawnym JSON(C)",
"error.chain.configJsonInvalidWithMessage": "Plik konfiguracyjny w {{path}} nie jest poprawnym JSON(C): {{message}}",
"error.chain.configDirectoryTypo": 'Katalog "{{dir}}" w {{path}} jest nieprawidłowy. Zmień nazwę katalogu na "{{suggestion}}" lub usuń go. To częsta literówka.',
"error.chain.configDirectoryTypo":
'Katalog "{{dir}}" w {{path}} jest nieprawidłowy. Zmień nazwę katalogu na "{{suggestion}}" lub usuń go. To częsta literówka.',
"error.chain.configFrontmatterError": "Nie udało się przetworzyć frontmatter w {{path}}:\n{{message}}",
"error.chain.configInvalid": "Plik konfiguracyjny w {{path}} jest nieprawidłowy",
"error.chain.configInvalidWithMessage": "Plik konfiguracyjny w {{path}} jest nieprawidłowy: {{message}}",
@@ -458,8 +468,10 @@ export const dict = {
"status.popover.tab.plugins": "Wtyczki",
"status.popover.action.manageServers": "Zarządzaj serwerami",
"session.share.popover.title": "Opublikuj w sieci",
"session.share.popover.description.shared": "Ta sesja jest publiczna w sieci. Jest dostępna dla każdego, kto posiada link.",
"session.share.popover.description.unshared": "Udostępnij sesję publicznie w sieci. Będzie dostępna dla każdego, kto posiada link.",
"session.share.popover.description.shared":
"Ta sesja jest publiczna w sieci. Jest dostępna dla każdego, kto posiada link.",
"session.share.popover.description.unshared":
"Udostępnij sesję publicznie w sieci. Będzie dostępna dla każdego, kto posiada link.",
"session.share.action.share": "Udostępnij",
"session.share.action.publish": "Opublikuj",
"session.share.action.publishing": "Publikowanie...",
@@ -476,7 +488,8 @@ export const dict = {
"terminal.title.numbered": "Terminal {{number}}",
"terminal.close": "Zamknij terminal",
"terminal.connectionLost.title": "Utracono połączenie",
"terminal.connectionLost.description": "Połączenie z terminalem zostało przerwane. Może się to zdarzyć przy restarcie serwera.",
"terminal.connectionLost.description":
"Połączenie z terminalem zostało przerwane. Może się to zdarzyć przy restarcie serwera.",
"common.closeTab": "Zamknij kartę",
"common.dismiss": "Odrzuć",
"common.requestFailed": "Żądanie nie powiodło się",
@@ -524,7 +537,8 @@ export const dict = {
"settings.general.row.font.description": "Dostosuj czcionkę mono używaną w blokach kodu",
"settings.general.row.wayland.title": "Użyj natywnego Wayland",
"settings.general.row.wayland.description": "Wyłącz fallback X11 na Wayland. Wymaga restartu.",
"settings.general.row.wayland.tooltip": "Na Linuxie z monitorami o różnym odświeżaniu, natywny Wayland może być bardziej stabilny.",
"settings.general.row.wayland.tooltip":
"Na Linuxie z monitorami o różnym odświeżaniu, natywny Wayland może być bardziej stabilny.",
"settings.general.row.releaseNotes.title": "Informacje o wydaniu",
"settings.general.row.releaseNotes.description": 'Pokazuj wyskakujące okna "Co nowego" po aktualizacjach',
"settings.updates.row.startup.title": "Sprawdzaj aktualizacje przy uruchomieniu",
@@ -593,9 +607,11 @@ export const dict = {
"sound.option.yup05": "Yup 05",
"sound.option.yup06": "Yup 06",
"settings.general.notifications.agent.title": "Agent",
"settings.general.notifications.agent.description": "Pokaż powiadomienie systemowe, gdy agent zakończy pracę lub wymaga uwagi",
"settings.general.notifications.agent.description":
"Pokaż powiadomienie systemowe, gdy agent zakończy pracę lub wymaga uwagi",
"settings.general.notifications.permissions.title": "Uprawnienia",
"settings.general.notifications.permissions.description": "Pokaż powiadomienie systemowe, gdy wymagane jest uprawnienie",
"settings.general.notifications.permissions.description":
"Pokaż powiadomienie systemowe, gdy wymagane jest uprawnienie",
"settings.general.notifications.errors.title": "Błędy",
"settings.general.notifications.errors.description": "Pokaż powiadomienie systemowe, gdy wystąpi błąd",
"settings.general.sounds.agent.title": "Agent",

View File

@@ -147,7 +147,8 @@ export const dict = {
"provider.connect.status.inProgress": "正在授权...",
"provider.connect.status.waiting": "等待授权...",
"provider.connect.status.failed": "授权失败:{{error}}",
"provider.connect.apiKey.description": "输入你的 {{provider}} API 密钥以连接帐户,并在 OpenCode 中使用 {{provider}} 模型。",
"provider.connect.apiKey.description":
"输入你的 {{provider}} API 密钥以连接帐户,并在 OpenCode 中使用 {{provider}} 模型。",
"provider.connect.apiKey.label": "{{provider}} API 密钥",
"provider.connect.apiKey.placeholder": "API 密钥",
"provider.connect.apiKey.required": "API 密钥为必填项",
@@ -242,7 +243,7 @@ export const dict = {
"common.attachment": "附件",
"prompt.placeholder.shell": "输入 shell 命令...",
"prompt.placeholder.normal": "随便问点什么... \"{{example}}\"",
"prompt.placeholder.normal": '随便问点什么... "{{example}}"',
"prompt.placeholder.summarizeComments": "总结评论…",
"prompt.placeholder.summarizeComment": "总结该评论…",
"prompt.mode.shell": "Shell",
@@ -456,12 +457,13 @@ export const dict = {
"error.chain.didYouMean": "你是不是想输入:{{suggestions}}",
"error.chain.modelNotFound": "未找到模型:{{provider}}/{{model}}",
"error.chain.checkConfig": "请检查你的配置 (opencode.json) 中的 provider/model 名称",
"error.chain.mcpFailed": "MCP 服务器 \"{{name}}\" 启动失败。注意: OpenCode 暂不支持 MCP 认证。",
"error.chain.mcpFailed": 'MCP 服务器 "{{name}}" 启动失败。注意: OpenCode 暂不支持 MCP 认证。',
"error.chain.providerAuthFailed": "提供商认证失败({{provider}}{{message}}",
"error.chain.providerInitFailed": "无法初始化提供商 \"{{provider}}\"。请检查凭据和配置。",
"error.chain.providerInitFailed": '无法初始化提供商 "{{provider}}"。请检查凭据和配置。',
"error.chain.configJsonInvalid": "配置文件 {{path}} 不是有效的 JSON(C)",
"error.chain.configJsonInvalidWithMessage": "配置文件 {{path}} 不是有效的 JSON(C){{message}}",
"error.chain.configDirectoryTypo": "{{path}} 中的目录 \"{{dir}}\" 无效。请将目录重命名为 \"{{suggestion}}\" 或移除它。这是一个常见拼写错误。",
"error.chain.configDirectoryTypo":
'{{path}} 中的目录 "{{dir}}" 无效。请将目录重命名为 "{{suggestion}}" 或移除它。这是一个常见拼写错误。',
"error.chain.configFrontmatterError": "无法解析 {{path}} 中的 frontmatter\n{{message}}",
"error.chain.configInvalid": "配置文件 {{path}} 无效",
"error.chain.configInvalidWithMessage": "配置文件 {{path}} 无效:{{message}}",
@@ -761,7 +763,7 @@ export const dict = {
"session.delete.failed.title": "删除会话失败",
"session.delete.title": "删除会话",
"session.delete.confirm": "删除会话 \"{{name}}\"",
"session.delete.confirm": '删除会话 "{{name}}"',
"session.delete.button": "删除会话",
"workspace.new": "新建工作区",
@@ -780,10 +782,10 @@ export const dict = {
"workspace.status.clean": "未检测到未合并的更改。",
"workspace.status.dirty": "检测到未合并的更改。",
"workspace.delete.title": "删除工作区",
"workspace.delete.confirm": "删除工作区 \"{{name}}\"",
"workspace.delete.confirm": '删除工作区 "{{name}}"',
"workspace.delete.button": "删除工作区",
"workspace.reset.title": "重置工作区",
"workspace.reset.confirm": "重置工作区 \"{{name}}\"",
"workspace.reset.confirm": '重置工作区 "{{name}}"',
"workspace.reset.button": "重置工作区",
"workspace.reset.archived.none": "不会归档任何活跃会话。",
"workspace.reset.archived.one": "将归档 1 个会话。",

View File

@@ -1203,6 +1203,16 @@ export default function Layout(props: ParentProps) {
if (!result) return
globalSync.set(
"project",
produce((draft) => {
const project = draft.find((item) => item.worktree === root)
if (!project) return
project.sandboxes = (project.sandboxes ?? []).filter((sandbox) => sandbox !== directory)
}),
)
setStore("workspaceOrder", root, (order) => (order ?? []).filter((workspace) => workspace !== directory))
layout.projects.close(directory)
layout.projects.open(root)
@@ -1230,6 +1240,7 @@ export default function Layout(props: ParentProps) {
clearWorkspaceTerminals(
directory,
sessions.map((s) => s.id),
platform,
)
await globalSDK.client.instance.dispose({ directory }).catch(() => undefined)

View File

@@ -1,4 +1,4 @@
import { usePlatform } from "@/context/platform"
import { Platform, usePlatform } from "@/context/platform"
import { makePersisted, type AsyncStorage, type SyncStorage } from "@solid-primitives/storage"
import { checksum } from "@opencode-ai/util/encode"
import { createResource, type Accessor } from "solid-js"
@@ -318,9 +318,8 @@ export const Persist = {
},
}
export function removePersisted(target: { storage?: string; key: string }) {
const platform = usePlatform()
const isDesktop = platform.platform === "desktop" && !!platform.storage
export function removePersisted(target: { storage?: string; key: string }, platform?: Platform) {
const isDesktop = platform?.platform === "desktop" && !!platform.storage
if (isDesktop) {
return platform.storage?.(target.storage)?.removeItem(target.key)

Binary file not shown.

After

Width:  |  Height:  |  Size: 697 B

View File

@@ -0,0 +1,18 @@
<svg width="300" height="300" viewBox="0 0 300 300" fill="none" xmlns="http://www.w3.org/2000/svg">
<g transform="translate(30, 0)">
<g clip-path="url(#clip0_1401_86283)">
<mask id="mask0_1401_86283" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="0" y="0" width="240" height="300">
<path d="M240 0H0V300H240V0Z" fill="white"/>
</mask>
<g mask="url(#mask0_1401_86283)">
<path d="M180 240H60V120H180V240Z" fill="#4B4646"/>
<path d="M180 60H60V240H180V60ZM240 300H0V0H240V300Z" fill="#F1ECEC"/>
</g>
</g>
</g>
<defs>
<clipPath id="clip0_1401_86283">
<rect width="240" height="300" fill="white"/>
</clipPath>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 631 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 697 B

View File

@@ -0,0 +1,18 @@
<svg width="300" height="300" viewBox="0 0 300 300" fill="none" xmlns="http://www.w3.org/2000/svg">
<g transform="translate(30, 0)">
<g clip-path="url(#clip0_1401_86274)">
<mask id="mask0_1401_86274" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="0" y="0" width="240" height="300">
<path d="M240 0H0V300H240V0Z" fill="white"/>
</mask>
<g mask="url(#mask0_1401_86274)">
<path d="M180 240H60V120H180V240Z" fill="#CFCECD"/>
<path d="M180 60H60V240H180V60ZM240 300H0V0H240V300Z" fill="#211E1E"/>
</g>
</g>
</g>
<defs>
<clipPath id="clip0_1401_86274">
<rect width="240" height="300" fill="white"/>
</clipPath>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 631 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@@ -7,18 +7,24 @@ import { useI18n } from "~/context/i18n"
import { LocaleLinks } from "~/component/locale-links"
import previewLogoLight from "../../asset/brand/preview-opencode-logo-light.png"
import previewLogoDark from "../../asset/brand/preview-opencode-logo-dark.png"
import previewLogoLightSquare from "../../asset/brand/preview-opencode-logo-light-square.png"
import previewLogoDarkSquare from "../../asset/brand/preview-opencode-logo-dark-square.png"
import previewWordmarkLight from "../../asset/brand/preview-opencode-wordmark-light.png"
import previewWordmarkDark from "../../asset/brand/preview-opencode-wordmark-dark.png"
import previewWordmarkSimpleLight from "../../asset/brand/preview-opencode-wordmark-simple-light.png"
import previewWordmarkSimpleDark from "../../asset/brand/preview-opencode-wordmark-simple-dark.png"
import logoLightPng from "../../asset/brand/opencode-logo-light.png"
import logoDarkPng from "../../asset/brand/opencode-logo-dark.png"
import logoLightSquarePng from "../../asset/brand/opencode-logo-light-square.png"
import logoDarkSquarePng from "../../asset/brand/opencode-logo-dark-square.png"
import wordmarkLightPng from "../../asset/brand/opencode-wordmark-light.png"
import wordmarkDarkPng from "../../asset/brand/opencode-wordmark-dark.png"
import wordmarkSimpleLightPng from "../../asset/brand/opencode-wordmark-simple-light.png"
import wordmarkSimpleDarkPng from "../../asset/brand/opencode-wordmark-simple-dark.png"
import logoLightSvg from "../../asset/brand/opencode-logo-light.svg"
import logoDarkSvg from "../../asset/brand/opencode-logo-dark.svg"
import logoLightSquareSvg from "../../asset/brand/opencode-logo-light-square.svg"
import logoDarkSquareSvg from "../../asset/brand/opencode-logo-dark-square.svg"
import wordmarkLightSvg from "../../asset/brand/opencode-wordmark-light.svg"
import wordmarkDarkSvg from "../../asset/brand/opencode-wordmark-dark.svg"
import wordmarkSimpleLightSvg from "../../asset/brand/opencode-wordmark-simple-light.svg"
@@ -135,6 +141,60 @@ export default function Brand() {
</button>
</div>
</div>
<div>
<img src={previewLogoLightSquare} alt="OpenCode brand guidelines" />
<div data-component="actions">
<button onClick={() => downloadFile(logoLightSquarePng, "opencode-logo-light-square.png")}>
PNG
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M13.9583 10.6247L10 14.583L6.04167 10.6247M10 2.08301V13.958M16.25 17.9163H3.75"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="square"
/>
</svg>
</button>
<button onClick={() => downloadFile(logoLightSquareSvg, "opencode-logo-light-square.svg")}>
SVG
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M13.9583 10.6247L10 14.583L6.04167 10.6247M10 2.08301V13.958M16.25 17.9163H3.75"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="square"
/>
</svg>
</button>
</div>
</div>
<div>
<img src={previewLogoDarkSquare} alt="OpenCode brand guidelines" />
<div data-component="actions">
<button onClick={() => downloadFile(logoDarkSquarePng, "opencode-logo-dark-square.png")}>
PNG
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M13.9583 10.6247L10 14.583L6.04167 10.6247M10 2.08301V13.958M16.25 17.9163H3.75"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="square"
/>
</svg>
</button>
<button onClick={() => downloadFile(logoDarkSquareSvg, "opencode-logo-dark-square.svg")}>
SVG
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M13.9583 10.6247L10 14.583L6.04167 10.6247M10 2.08301V13.958M16.25 17.9163H3.75"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="square"
/>
</svg>
</button>
</div>
</div>
<div>
<img src={previewWordmarkLight} alt="OpenCode brand guidelines" />
<div data-component="actions">

View File

@@ -12,7 +12,7 @@
"@opencode-ai/console-resource": "workspace:*",
"@planetscale/database": "1.19.0",
"aws4fetch": "1.0.20",
"drizzle-orm": "0.41.0",
"drizzle-orm": "catalog:",
"postgres": "3.4.7",
"stripe": "18.0.0",
"ulid": "catalog:",
@@ -44,7 +44,7 @@
"@tsconfig/node22": "22.0.2",
"@types/bun": "1.3.0",
"@types/node": "catalog:",
"drizzle-kit": "0.30.5",
"drizzle-kit": "catalog:",
"mysql2": "3.14.4",
"typescript": "catalog:",
"@typescript/native-preview": "catalog:"

View File

@@ -4,7 +4,6 @@ export * from "drizzle-orm"
import { Client } from "@planetscale/database"
import { MySqlTransaction, type MySqlTransactionConfig } from "drizzle-orm/mysql-core"
import type { ExtractTablesWithRelations } from "drizzle-orm"
import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from "drizzle-orm/planetscale-serverless"
import { Context } from "../context"
import { memo } from "../util/memo"
@@ -14,7 +13,7 @@ export namespace Database {
PlanetscaleQueryResultHKT,
PlanetScalePreparedQueryHKT,
Record<string, never>,
ExtractTablesWithRelations<Record<string, never>>
any
>
const client = memo(() => {
@@ -23,7 +22,7 @@ export namespace Database {
username: Resource.Database.username,
password: Resource.Database.password,
})
const db = drizzle(result, {})
const db = drizzle({ client: result })
return db
})

View File

@@ -1,27 +1,10 @@
# opencode agent guidelines
# opencode database guide
## Build/Test Commands
## Database
- **Install**: `bun install`
- **Run**: `bun run --conditions=browser ./src/index.ts`
- **Typecheck**: `bun run typecheck` (npm run typecheck)
- **Test**: `bun test` (runs all tests)
- **Single test**: `bun test test/tool/tool.test.ts` (specific test file)
## Code Style
- **Runtime**: Bun with TypeScript ESM modules
- **Imports**: Use relative imports for local modules, named imports preferred
- **Types**: Zod schemas for validation, TypeScript interfaces for structure
- **Naming**: camelCase for variables/functions, PascalCase for classes/namespaces
- **Error handling**: Use Result patterns, avoid throwing exceptions in tools
- **File structure**: Namespace-based organization (e.g., `Tool.define()`, `Session.create()`)
## Architecture
- **Tools**: Implement `Tool.Info` interface with `execute()` method
- **Context**: Pass `sessionID` in tool context, use `App.provide()` for DI
- **Validation**: All inputs validated with Zod schemas
- **Logging**: Use `Log.create({ service: "name" })` pattern
- **Storage**: Use `Storage` namespace for persistence
- **API Client**: The TypeScript TUI (built with SolidJS + OpenTUI) communicates with the OpenCode server using `@opencode-ai/sdk`. When adding/modifying server endpoints in `packages/opencode/src/server/server.ts`, run `./script/generate.ts` to regenerate the SDK and related files.
- **Schema**: Drizzle schema lives in `src/**/*.sql.ts`.
- **Naming**: tables and columns use snake*case; join columns are `<entity>_id`; indexes are `<table>*<column>\_idx`.
- **Migrations**: generated by Drizzle Kit using `drizzle.config.ts` (schema: `./src/**/*.sql.ts`, output: `./migration`).
- **Command**: `bun run db generate --name <slug>`.
- **Output**: creates `migration/<timestamp>_<slug>/migration.sql` and `snapshot.json`.
- **Tests**: migration tests should read the per-folder layout (no `_journal.json`).

View File

@@ -0,0 +1,10 @@
import { defineConfig } from "drizzle-kit"
export default defineConfig({
dialect: "sqlite",
schema: "./src/**/*.sql.ts",
out: "./migration",
dbCredentials: {
url: "/home/thdxr/.local/share/opencode/opencode.db",
},
})

View File

@@ -0,0 +1,90 @@
CREATE TABLE `project` (
`id` text PRIMARY KEY,
`worktree` text NOT NULL,
`vcs` text,
`name` text,
`icon_url` text,
`icon_color` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_initialized` integer,
`sandboxes` text NOT NULL
);
--> statement-breakpoint
CREATE TABLE `message` (
`id` text PRIMARY KEY,
`session_id` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_message_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `part` (
`id` text PRIMARY KEY,
`message_id` text NOT NULL,
`session_id` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_part_message_id_message_id_fk` FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `permission` (
`project_id` text PRIMARY KEY,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_permission_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session` (
`id` text PRIMARY KEY,
`project_id` text NOT NULL,
`parent_id` text,
`slug` text NOT NULL,
`directory` text NOT NULL,
`title` text NOT NULL,
`version` text NOT NULL,
`share_url` text,
`summary_additions` integer,
`summary_deletions` integer,
`summary_files` integer,
`summary_diffs` text,
`revert` text,
`permission` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_compacting` integer,
`time_archived` integer,
CONSTRAINT `fk_session_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `todo` (
`session_id` text NOT NULL,
`content` text NOT NULL,
`status` text NOT NULL,
`priority` text NOT NULL,
`position` integer NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
CONSTRAINT `todo_pk` PRIMARY KEY(`session_id`, `position`),
CONSTRAINT `fk_todo_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session_share` (
`session_id` text PRIMARY KEY,
`id` text NOT NULL,
`secret` text NOT NULL,
`url` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
CONSTRAINT `fk_session_share_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-breakpoint
CREATE INDEX `part_message_idx` ON `part` (`message_id`);--> statement-breakpoint
CREATE INDEX `part_session_idx` ON `part` (`session_id`);--> statement-breakpoint
CREATE INDEX `session_project_idx` ON `session` (`project_id`);--> statement-breakpoint
CREATE INDEX `session_parent_idx` ON `session` (`parent_id`);--> statement-breakpoint
CREATE INDEX `todo_session_idx` ON `todo` (`session_id`);

View File

@@ -0,0 +1,796 @@
{
"version": "7",
"dialect": "sqlite",
"id": "068758ed-a97a-46f6-8a59-6c639ae7c20c",
"prevIds": ["00000000-0000-0000-0000-000000000000"],
"ddl": [
{
"name": "project",
"entityType": "tables"
},
{
"name": "message",
"entityType": "tables"
},
{
"name": "part",
"entityType": "tables"
},
{
"name": "permission",
"entityType": "tables"
},
{
"name": "session",
"entityType": "tables"
},
{
"name": "todo",
"entityType": "tables"
},
{
"name": "session_share",
"entityType": "tables"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "worktree",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "vcs",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "name",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_url",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_color",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_initialized",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "sandboxes",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "message_id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "parent_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "slug",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "directory",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "title",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "version",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "share_url",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_additions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_deletions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_files",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_diffs",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "revert",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "permission",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_compacting",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_archived",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "content",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "status",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "priority",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "position",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "secret",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "url",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session_share"
},
{
"columns": ["session_id"],
"tableTo": "session",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_message_session_id_session_id_fk",
"entityType": "fks",
"table": "message"
},
{
"columns": ["message_id"],
"tableTo": "message",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_part_message_id_message_id_fk",
"entityType": "fks",
"table": "part"
},
{
"columns": ["project_id"],
"tableTo": "project",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_permission_project_id_project_id_fk",
"entityType": "fks",
"table": "permission"
},
{
"columns": ["project_id"],
"tableTo": "project",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_project_id_project_id_fk",
"entityType": "fks",
"table": "session"
},
{
"columns": ["session_id"],
"tableTo": "session",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_todo_session_id_session_id_fk",
"entityType": "fks",
"table": "todo"
},
{
"columns": ["session_id"],
"tableTo": "session",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_share_session_id_session_id_fk",
"entityType": "fks",
"table": "session_share"
},
{
"columns": ["session_id", "position"],
"nameExplicit": false,
"name": "todo_pk",
"entityType": "pks",
"table": "todo"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "project_pk",
"table": "project",
"entityType": "pks"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "message_pk",
"table": "message",
"entityType": "pks"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "part_pk",
"table": "part",
"entityType": "pks"
},
{
"columns": ["project_id"],
"nameExplicit": false,
"name": "permission_pk",
"table": "permission",
"entityType": "pks"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "session_pk",
"table": "session",
"entityType": "pks"
},
{
"columns": ["session_id"],
"nameExplicit": false,
"name": "session_share_pk",
"table": "session_share",
"entityType": "pks"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "message_session_idx",
"entityType": "indexes",
"table": "message"
},
{
"columns": [
{
"value": "message_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_message_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_session_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "project_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_project_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "parent_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_parent_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "todo_session_idx",
"entityType": "indexes",
"table": "todo"
}
],
"renames": []
}

View File

@@ -0,0 +1 @@
-- Add nullable `commands` column to `project` — presumably stores user-defined
-- project commands (per the accompanying "custom project commands" change);
-- nullable so existing rows need no backfill.
ALTER TABLE `project` ADD `commands` text;

View File

@@ -0,0 +1,847 @@
{
"version": "7",
"dialect": "sqlite",
"id": "8bc2d11d-97fa-4ba8-8bfa-6c5956c49aeb",
"prevIds": [
"068758ed-a97a-46f6-8a59-6c639ae7c20c"
],
"ddl": [
{
"name": "project",
"entityType": "tables"
},
{
"name": "message",
"entityType": "tables"
},
{
"name": "part",
"entityType": "tables"
},
{
"name": "permission",
"entityType": "tables"
},
{
"name": "session",
"entityType": "tables"
},
{
"name": "todo",
"entityType": "tables"
},
{
"name": "session_share",
"entityType": "tables"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "worktree",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "vcs",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "name",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_url",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_color",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_initialized",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "sandboxes",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "commands",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "message_id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "parent_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "slug",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "directory",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "title",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "version",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "share_url",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_additions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_deletions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_files",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_diffs",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "revert",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "permission",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_compacting",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_archived",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "content",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "status",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "priority",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "position",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "secret",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "url",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session_share"
},
{
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_message_session_id_session_id_fk",
"entityType": "fks",
"table": "message"
},
{
"columns": [
"message_id"
],
"tableTo": "message",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_part_message_id_message_id_fk",
"entityType": "fks",
"table": "part"
},
{
"columns": [
"project_id"
],
"tableTo": "project",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_permission_project_id_project_id_fk",
"entityType": "fks",
"table": "permission"
},
{
"columns": [
"project_id"
],
"tableTo": "project",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_project_id_project_id_fk",
"entityType": "fks",
"table": "session"
},
{
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_todo_session_id_session_id_fk",
"entityType": "fks",
"table": "todo"
},
{
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_share_session_id_session_id_fk",
"entityType": "fks",
"table": "session_share"
},
{
"columns": [
"session_id",
"position"
],
"nameExplicit": false,
"name": "todo_pk",
"entityType": "pks",
"table": "todo"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "project_pk",
"table": "project",
"entityType": "pks"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "message_pk",
"table": "message",
"entityType": "pks"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "part_pk",
"table": "part",
"entityType": "pks"
},
{
"columns": [
"project_id"
],
"nameExplicit": false,
"name": "permission_pk",
"table": "permission",
"entityType": "pks"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "session_pk",
"table": "session",
"entityType": "pks"
},
{
"columns": [
"session_id"
],
"nameExplicit": false,
"name": "session_share_pk",
"table": "session_share",
"entityType": "pks"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "message_session_idx",
"entityType": "indexes",
"table": "message"
},
{
"columns": [
{
"value": "message_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_message_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_session_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "project_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_project_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "parent_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_parent_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "todo_session_idx",
"entityType": "indexes",
"table": "todo"
}
],
"renames": []
}

View File

@@ -15,7 +15,8 @@
"lint": "echo 'Running lint checks...' && bun test --coverage",
"format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts",
"docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;",
"deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'"
"deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'",
"db": "bun drizzle-kit"
},
"bin": {
"opencode": "./bin/opencode"
@@ -42,6 +43,8 @@
"@types/turndown": "5.0.5",
"@types/yargs": "17.0.33",
"@typescript/native-preview": "catalog:",
"drizzle-kit": "1.0.0-beta.12-a5629fb",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"typescript": "catalog:",
"vscode-languageserver-types": "3.17.5",
"why-is-node-running": "3.2.2",
@@ -84,8 +87,8 @@
"@opencode-ai/sdk": "workspace:*",
"@opencode-ai/util": "workspace:*",
"@openrouter/ai-sdk-provider": "1.5.4",
"@opentui/core": "0.1.77",
"@opentui/solid": "0.1.77",
"@opentui/core": "0.1.79",
"@opentui/solid": "0.1.79",
"@parcel/watcher": "2.5.1",
"@pierre/diffs": "catalog:",
"@solid-primitives/event-bus": "1.1.2",
@@ -100,6 +103,7 @@
"clipboardy": "4.0.0",
"decimal.js": "10.5.0",
"diff": "catalog:",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"fuzzysort": "3.1.0",
"gray-matter": "4.0.3",
"hono": "catalog:",
@@ -122,5 +126,8 @@
"yargs": "18.0.0",
"zod": "catalog:",
"zod-to-json-schema": "3.24.5"
},
"overrides": {
"drizzle-orm": "1.0.0-beta.12-a5629fb"
}
}

View File

@@ -25,6 +25,32 @@ await Bun.write(
)
console.log("Generated models-snapshot.ts")
// Load migrations from migration directories
// Collect migration folder names that carry a YYYYMMDDHHMMSS timestamp prefix.
const migrationDirs = (await fs.promises.readdir(path.join(dir, "migration"), { withFileTypes: true }))
.filter((entry) => entry.isDirectory() && /^\d{4}\d{2}\d{2}\d{2}\d{2}\d{2}/.test(entry.name))
.map((entry) => entry.name)
// Fixed-width timestamp prefixes make lexicographic sort chronological.
.sort()
// Read each folder's migration.sql and derive a UTC timestamp from the
// folder-name prefix so migrations can be ordered/applied at runtime.
const migrations = await Promise.all(
migrationDirs.map(async (name) => {
const file = path.join(dir, "migration", name, "migration.sql")
const sql = await Bun.file(file).text()
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(name)
// Defensive fallback to 0: the filter above already guarantees a match.
const timestamp = match
? Date.UTC(
Number(match[1]),
// Date.UTC months are 0-based.
Number(match[2]) - 1,
Number(match[3]),
Number(match[4]),
Number(match[5]),
Number(match[6]),
)
: 0
return { sql, timestamp }
}),
)
console.log(`Loaded ${migrations.length} migrations`)
const singleFlag = process.argv.includes("--single")
const baselineFlag = process.argv.includes("--baseline")
const skipInstall = process.argv.includes("--skip-install")
@@ -156,6 +182,7 @@ for (const item of targets) {
entrypoints: ["./src/index.ts", parserWorker, workerPath],
define: {
OPENCODE_VERSION: `'${Script.version}'`,
OPENCODE_MIGRATIONS: JSON.stringify(migrations),
OTUI_TREE_SITTER_WORKER_PATH: bunfsRoot + workerRelativePath,
OPENCODE_WORKER_PATH: workerPath,
OPENCODE_CHANNEL: `'${Script.channel}'`,

View File

@@ -0,0 +1,16 @@
#!/usr/bin/env bun
// Guard script: fails (exit 1) when the Drizzle schema has drifted from the
// generated migrations, so CI catches uncommitted schema changes.
import { $ } from "bun"

// `drizzle-kit check` exits non-zero when schema and migrations disagree.
const check = await $`bun drizzle-kit check`.quiet().nothrow()
if (check.exitCode === 0) {
  console.log("Migrations are up to date")
} else {
  console.error("Schema has changes not captured in migrations!")
  console.error("Run: bun drizzle-kit generate")
  console.error("")
  console.error(check.stderr.toString())
  process.exit(1)
}

View File

@@ -435,46 +435,68 @@ export namespace ACP {
return
}
}
return
}
if (part.type === "text") {
const delta = props.delta
if (delta && part.ignored !== true) {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_message_chunk",
content: {
type: "text",
text: delta,
},
case "message.part.delta": {
const props = event.properties
const session = this.sessionManager.tryGet(props.sessionID)
if (!session) return
const sessionId = session.id
const message = await this.sdk.session
.message(
{
sessionID: props.sessionID,
messageID: props.messageID,
directory: session.cwd,
},
{ throwOnError: true },
)
.then((x) => x.data)
.catch((error) => {
log.error("unexpected error when fetching message", { error })
return undefined
})
if (!message || message.info.role !== "assistant") return
const part = message.parts.find((p) => p.id === props.partID)
if (!part) return
if (part.type === "text" && props.field === "text" && part.ignored !== true) {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_message_chunk",
content: {
type: "text",
text: props.delta,
},
})
.catch((error) => {
log.error("failed to send text to ACP", { error })
})
}
},
})
.catch((error) => {
log.error("failed to send text delta to ACP", { error })
})
return
}
if (part.type === "reasoning") {
const delta = props.delta
if (delta) {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_thought_chunk",
content: {
type: "text",
text: delta,
},
if (part.type === "reasoning" && props.field === "text") {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_thought_chunk",
content: {
type: "text",
text: props.delta,
},
})
.catch((error) => {
log.error("failed to send reasoning to ACP", { error })
})
}
},
})
.catch((error) => {
log.error("failed to send reasoning delta to ACP", { error })
})
}
return
}

View File

@@ -184,6 +184,18 @@ export namespace Agent {
),
prompt: PROMPT_TITLE,
},
handoff: {
name: "handoff",
mode: "primary",
options: {},
native: true,
hidden: true,
temperature: 0.5,
permission: PermissionNext.fromConfig({
"*": "allow",
}),
prompt: "none",
},
summary: {
name: "summary",
mode: "primary",

View File

@@ -3,7 +3,8 @@ import type { Session as SDKSession, Message, Part } from "@opencode-ai/sdk/v2"
import { Session } from "../../session"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage"
import { Database } from "../../storage/db"
import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
import { Instance } from "../../project/instance"
import { ShareNext } from "../../share/share-next"
import { EOL } from "os"
@@ -130,13 +131,35 @@ export const ImportCommand = cmd({
return
}
await Storage.write(["session", Instance.project.id, exportData.info.id], exportData.info)
Database.use((db) => db.insert(SessionTable).values(Session.toRow(exportData.info)).onConflictDoNothing().run())
for (const msg of exportData.messages) {
await Storage.write(["message", exportData.info.id, msg.info.id], msg.info)
Database.use((db) =>
db
.insert(MessageTable)
.values({
id: msg.info.id,
session_id: exportData.info.id,
time_created: msg.info.time?.created ?? Date.now(),
data: msg.info,
})
.onConflictDoNothing()
.run(),
)
for (const part of msg.parts) {
await Storage.write(["part", msg.info.id, part.id], part)
Database.use((db) =>
db
.insert(PartTable)
.values({
id: part.id,
message_id: msg.info.id,
session_id: exportData.info.id,
data: part,
})
.onConflictDoNothing()
.run(),
)
}
}

View File

@@ -2,7 +2,8 @@ import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { Session } from "../../session"
import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage"
import { Database } from "../../storage/db"
import { SessionTable } from "../../session/session.sql"
import { Project } from "../../project/project"
import { Instance } from "../../project/instance"
@@ -87,25 +88,8 @@ async function getCurrentProject(): Promise<Project.Info> {
}
async function getAllSessions(): Promise<Session.Info[]> {
const sessions: Session.Info[] = []
const projectKeys = await Storage.list(["project"])
const projects = await Promise.all(projectKeys.map((key) => Storage.read<Project.Info>(key)))
for (const project of projects) {
if (!project) continue
const sessionKeys = await Storage.list(["session", project.id])
const projectSessions = await Promise.all(sessionKeys.map((key) => Storage.read<Session.Info>(key)))
for (const session of projectSessions) {
if (session) {
sessions.push(session)
}
}
}
return sessions
const rows = Database.use((db) => db.select().from(SessionTable).all())
return rows.map((row) => Session.fromRow(row))
}
export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {

View File

@@ -3,6 +3,7 @@ import { Clipboard } from "@tui/util/clipboard"
import { TextAttributes } from "@opentui/core"
import { RouteProvider, useRoute } from "@tui/context/route"
import { Switch, Match, createEffect, untrack, ErrorBoundary, createSignal, onMount, batch, Show, on } from "solid-js"
import { win32DisableProcessedInput, win32IgnoreCtrlC, win32InstallCtrlCGuard } from "./win32"
import { Installation } from "@/installation"
import { Flag } from "@/flag/flag"
import { DialogProvider, useDialog } from "@tui/ui/dialog"
@@ -110,8 +111,18 @@ export function tui(input: {
}) {
// promise to prevent immediate exit
return new Promise<void>(async (resolve) => {
const unguard = win32InstallCtrlCGuard()
win32DisableProcessedInput()
win32IgnoreCtrlC()
const mode = await getTerminalBackgroundColor()
// Re-clear after getTerminalBackgroundColor() — setRawMode(false) restores
// the original console mode which re-enables ENABLE_PROCESSED_INPUT.
win32DisableProcessedInput()
const onExit = async () => {
unguard?.()
await input.onExit?.()
resolve()
}

View File

@@ -83,6 +83,7 @@ function init() {
},
slashes() {
return visibleOptions().flatMap((option) => {
if (option.disabled) return []
const slash = option.slash
if (!slash) return []
return {

View File

@@ -7,6 +7,27 @@ import { useDialog } from "@tui/ui/dialog"
import { createDialogProviderOptions, DialogProvider } from "./dialog-provider"
import { useKeybind } from "../context/keybind"
import * as fuzzysort from "fuzzysort"
import type { Provider } from "@opencode-ai/sdk/v2"
/**
 * Collapse a model list down to one entry per model family, keeping the entry
 * with the newest release_date; ties are broken by the greater model id.
 * Models without a family are keyed (and therefore kept) by their own id.
 */
function pickLatest(models: [string, Provider["models"][string]][]) {
  const byFamily: Record<string, [string, Provider["models"][string]]> = {}
  for (const entry of models) {
    const [id, info] = entry
    const key = info.family ?? id
    const current = byFamily[key]
    if (!current) {
      byFamily[key] = entry
    } else if (info.release_date !== current[1].release_date) {
      // Different release dates: newer one wins.
      if (info.release_date > current[1].release_date) byFamily[key] = entry
    } else if (id > current[0]) {
      // Same release date: deterministic tie-break on model id.
      byFamily[key] = entry
    }
  }
  return Object.values(byFamily)
}
export function useConnected() {
const sync = useSync()
@@ -21,6 +42,7 @@ export function DialogModel(props: { providerID?: string }) {
const dialog = useDialog()
const keybind = useKeybind()
const [query, setQuery] = createSignal("")
const [all, setAll] = createSignal(false)
const connected = useConnected()
const providers = createDialogProviderOptions()
@@ -72,8 +94,8 @@ export function DialogModel(props: { providerID?: string }) {
(provider) => provider.id !== "opencode",
(provider) => provider.name,
),
flatMap((provider) =>
pipe(
flatMap((provider) => {
const items = pipe(
provider.models,
entries(),
filter(([_, info]) => info.status !== "deprecated"),
@@ -104,8 +126,9 @@ export function DialogModel(props: { providerID?: string }) {
(x) => x.footer !== "Free",
(x) => x.title,
),
),
),
)
return items
}),
)
const popularProviders = !connected()
@@ -154,6 +177,13 @@ export function DialogModel(props: { providerID?: string }) {
local.model.toggleFavorite(option.value as { providerID: string; modelID: string })
},
},
{
keybind: keybind.all.model_show_all_toggle?.[0],
title: all() ? "Show latest only" : "Show all models",
onTrigger: () => {
setAll((value) => !value)
},
},
]}
onFilter={setQuery}
flat={true}

View File

@@ -9,7 +9,7 @@ import type { AgentPart, FilePart, TextPart } from "@opencode-ai/sdk/v2"
export type PromptInfo = {
input: string
mode?: "normal" | "shell"
mode?: "normal" | "shell" | "handoff"
parts: (
| Omit<FilePart, "id" | "messageID" | "sessionID">
| Omit<AgentPart, "id" | "messageID" | "sessionID">

View File

@@ -120,7 +120,7 @@ export function Prompt(props: PromptProps) {
const [store, setStore] = createStore<{
prompt: PromptInfo
mode: "normal" | "shell"
mode: "normal" | "shell" | "handoff"
extmarkToPartIndex: Map<number, number>
interrupt: number
placeholder: number
@@ -349,6 +349,20 @@ export function Prompt(props: PromptProps) {
))
},
},
{
title: "Handoff",
value: "prompt.handoff",
disabled: props.sessionID === undefined,
category: "Prompt",
slash: {
name: "handoff",
},
onSelect: () => {
input.clear()
setStore("mode", "handoff")
setStore("prompt", { input: "", parts: [] })
},
},
]
})
@@ -526,17 +540,45 @@ export function Prompt(props: PromptProps) {
async function submit() {
if (props.disabled) return
if (autocomplete?.visible) return
const selectedModel = local.model.current()
if (!selectedModel) {
promptModelWarning()
return
}
if (store.mode === "handoff") {
const result = await sdk.client.session.handoff({
sessionID: props.sessionID!,
goal: store.prompt.input,
model: {
providerID: selectedModel.providerID,
modelID: selectedModel.modelID,
},
})
if (result.data) {
route.navigate({
type: "home",
initialPrompt: {
input: result.data.text,
parts:
result.data.files.map((file) => ({
type: "file",
url: file,
filename: file,
mime: "text/plain",
})) ?? [],
},
})
}
return
}
if (!store.prompt.input) return
const trimmed = store.prompt.input.trim()
if (trimmed === "exit" || trimmed === "quit" || trimmed === ":q") {
exit()
return
}
const selectedModel = local.model.current()
if (!selectedModel) {
promptModelWarning()
return
}
const sessionID = props.sessionID
? props.sessionID
: await (async () => {
@@ -737,6 +779,7 @@ export function Prompt(props: PromptProps) {
const highlight = createMemo(() => {
if (keybind.leader) return theme.border
if (store.mode === "shell") return theme.primary
if (store.mode === "handoff") return theme.warning
return local.agent.color(local.agent.current().name)
})
@@ -748,6 +791,7 @@ export function Prompt(props: PromptProps) {
})
const placeholderText = createMemo(() => {
if (store.mode === "handoff") return "Goal for the new session"
if (props.sessionID) return undefined
if (store.mode === "shell") {
const example = SHELL_PLACEHOLDERS[store.placeholder % SHELL_PLACEHOLDERS.length]
@@ -875,7 +919,7 @@ export function Prompt(props: PromptProps) {
e.preventDefault()
return
}
if (store.mode === "shell") {
if (store.mode === "shell" || store.mode === "handoff") {
if ((e.name === "backspace" && input.visualCursor.offset === 0) || e.name === "escape") {
setStore("mode", "normal")
e.preventDefault()
@@ -996,7 +1040,11 @@ export function Prompt(props: PromptProps) {
/>
<box flexDirection="row" flexShrink={0} paddingTop={1} gap={1}>
<text fg={highlight()}>
{store.mode === "shell" ? "Shell" : Locale.titlecase(local.agent.current().name)}{" "}
<Switch>
<Match when={store.mode === "normal"}>{Locale.titlecase(local.agent.current().name)}</Match>
<Match when={store.mode === "shell"}>Shell</Match>
<Match when={store.mode === "handoff"}>Handoff</Match>
</Switch>
</text>
<Show when={store.mode === "normal"}>
<box flexDirection="row" gap={1}>
@@ -1143,6 +1191,11 @@ export function Prompt(props: PromptProps) {
esc <span style={{ fg: theme.textMuted }}>exit shell mode</span>
</text>
</Match>
<Match when={store.mode === "handoff"}>
<text fg={theme.text}>
esc <span style={{ fg: theme.textMuted }}>exit handoff mode</span>
</text>
</Match>
</Switch>
</box>
</Show>

View File

@@ -1,4 +1,4 @@
import { createStore } from "solid-js/store"
import { createStore, reconcile } from "solid-js/store"
import { createSimpleContext } from "./helper"
import type { PromptInfo } from "../component/prompt/history"
@@ -32,7 +32,7 @@ export const { use: useRoute, provider: RouteProvider } = createSimpleContext({
},
navigate(route: Route) {
console.log("navigate", route)
setStore(route)
setStore(reconcile(route))
},
}
},

View File

@@ -299,6 +299,24 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
break
}
case "message.part.delta": {
const parts = store.part[event.properties.messageID]
if (!parts) break
const result = Binary.search(parts, event.properties.partID, (p) => p.id)
if (!result.found) break
setStore(
"part",
event.properties.messageID,
produce((draft) => {
const part = draft[result.index]
const field = event.properties.field as keyof typeof part
const existing = part[field] as string | undefined
;(part[field] as string) = (existing ?? "") + event.properties.delta
}),
)
break
}
case "message.part.removed": {
const parts = store.part[event.properties.messageID]
const result = Binary.search(parts, event.properties.partID, (p) => p.id)

View File

@@ -2032,8 +2032,8 @@ function ApplyPatch(props: ToolProps<typeof ApplyPatchTool>) {
</For>
</Match>
<Match when={true}>
<InlineTool icon="%" pending="Preparing apply_patch..." complete={false} part={props.part}>
apply_patch
<InlineTool icon="%" pending="Preparing patch..." complete={false} part={props.part}>
Patch
</InlineTool>
</Match>
</Switch>

View File

@@ -9,6 +9,7 @@ import { Log } from "@/util/log"
import { withNetworkOptions, resolveNetworkOptions } from "@/cli/network"
import type { Event } from "@opencode-ai/sdk/v2"
import type { EventSource } from "./context/sdk"
import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
declare global {
const OPENCODE_WORKER_PATH: string
@@ -77,6 +78,14 @@ export const TuiThreadCommand = cmd({
describe: "agent to use",
}),
handler: async (args) => {
// Keep ENABLE_PROCESSED_INPUT cleared even if other code flips it.
// (Important when running under `bun run` wrappers on Windows.)
win32InstallCtrlCGuard()
// Must be the very first thing — disables CTRL_C_EVENT before any Worker
// spawn or async work so the OS cannot kill the process group.
win32DisableProcessedInput()
if (args.fork && !args.continue && !args.session) {
UI.error("--fork requires --continue or --session")
process.exit(1)

View File

@@ -0,0 +1,118 @@
import { dlopen, ptr } from "bun:ffi"
// Win32 console constants (see GetStdHandle / GetConsoleMode docs).
const STD_INPUT_HANDLE = -10
const ENABLE_PROCESSED_INPUT = 0x0001

// Lazily binds the kernel32 entry points we need via Bun FFI.
const kernel = () =>
  dlopen("kernel32.dll", {
    GetStdHandle: { args: ["i32"], returns: "ptr" },
    GetConsoleMode: { args: ["ptr", "ptr"], returns: "i32" },
    SetConsoleMode: { args: ["ptr", "u32"], returns: "i32" },
    SetConsoleCtrlHandler: { args: ["ptr", "i32"], returns: "i32" },
  })

// Cached kernel32 bindings; populated on first successful load().
let k32: ReturnType<typeof kernel> | undefined

// Returns true when kernel32 bindings are available (Windows only);
// swallows dlopen failures and reports false instead.
function load() {
  if (process.platform !== "win32") return false
  if (k32) return true
  try {
    k32 = kernel()
  } catch {
    return false
  }
  return true
}
/**
 * Clear ENABLE_PROCESSED_INPUT on the console stdin handle.
 *
 * No-op off Windows, when stdin is not a TTY, or when kernel32 cannot be
 * loaded. Also a no-op when the flag is already clear.
 */
export function win32DisableProcessedInput() {
  if (process.platform !== "win32" || !process.stdin.isTTY || !load()) return
  const stdin = k32!.symbols.GetStdHandle(STD_INPUT_HANDLE)
  const out = new Uint32Array(1)
  // Bail if the console mode cannot be read (e.g. redirected handle).
  if (k32!.symbols.GetConsoleMode(stdin, ptr(out)) === 0) return
  const current = out[0]!
  if (current & ENABLE_PROCESSED_INPUT) {
    k32!.symbols.SetConsoleMode(stdin, current & ~ENABLE_PROCESSED_INPUT)
  }
}
/**
 * Tell Windows to ignore CTRL_C_EVENT for this process.
 *
 * Passing a NULL handler with TRUE to SetConsoleCtrlHandler makes the
 * process ignore Ctrl+C signals at the OS level. Belt-and-suspenders
 * alongside clearing ENABLE_PROCESSED_INPUT.
 */
export function win32IgnoreCtrlC() {
  if (process.platform !== "win32" || !process.stdin.isTTY) return
  if (!load()) return
  k32!.symbols.SetConsoleCtrlHandler(null, 1)
}
// Uninstaller for the currently-installed guard, if any (singleton).
let unhook: (() => void) | undefined

/**
 * Keep ENABLE_PROCESSED_INPUT disabled.
 *
 * On Windows, Ctrl+C becomes a CTRL_C_EVENT (instead of stdin input) when
 * ENABLE_PROCESSED_INPUT is set. Various runtimes can re-apply console modes
 * (sometimes on a later tick), and the flag is console-global, not per-process.
 *
 * We combine:
 * - A `setRawMode(...)` hook to re-clear after known raw-mode toggles.
 * - A low-frequency poll as a backstop for native/external mode changes.
 *
 * @returns an uninstall function on Windows TTYs (the same one if already
 *          installed), or undefined when the guard is not applicable.
 */
export function win32InstallCtrlCGuard() {
  if (process.platform !== "win32") return
  if (!process.stdin.isTTY) return
  if (!load()) return
  // Idempotent: reuse the existing guard rather than stacking hooks/timers.
  if (unhook) return unhook
  const stdin = process.stdin as any
  const original = stdin.setRawMode
  const handle = k32!.symbols.GetStdHandle(STD_INPUT_HANDLE)
  const buf = new Uint32Array(1)
  // Re-clear ENABLE_PROCESSED_INPUT if something turned it back on.
  const enforce = () => {
    if (k32!.symbols.GetConsoleMode(handle, ptr(buf)) === 0) return
    const mode = buf[0]!
    if ((mode & ENABLE_PROCESSED_INPUT) === 0) return
    k32!.symbols.SetConsoleMode(handle, mode & ~ENABLE_PROCESSED_INPUT)
  }
  // Some runtimes can re-apply console modes on the next tick; enforce twice.
  const later = () => {
    enforce()
    setImmediate(enforce)
  }
  if (typeof original === "function") {
    stdin.setRawMode = (mode: boolean) => {
      const result = original.call(stdin, mode)
      later()
      return result
    }
  }
  // Ensure it's cleared immediately too (covers any earlier mode changes).
  later()
  const interval = setInterval(enforce, 100)
  // Fix: don't let the backstop poll keep the event loop alive — without
  // unref the process would linger after the TUI finishes.
  interval.unref?.()
  unhook = () => {
    clearInterval(interval)
    if (typeof original === "function") {
      stdin.setRawMode = original
    }
    unhook = undefined
  }
  return unhook
}

View File

@@ -778,6 +778,7 @@ export namespace Config {
stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"),
model_provider_list: z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"),
model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"),
model_show_all_toggle: z.string().optional().default("ctrl+o").describe("Toggle showing all models"),
session_share: z.string().optional().default("none").describe("Share current session"),
session_unshare: z.string().optional().default("none").describe("Unshare current session"),
session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"),

View File

@@ -46,7 +46,7 @@ export namespace Flag {
export const OPENCODE_EXPERIMENTAL_LSP_TY = truthy("OPENCODE_EXPERIMENTAL_LSP_TY")
export const OPENCODE_EXPERIMENTAL_LSP_TOOL = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_LSP_TOOL")
export const OPENCODE_DISABLE_FILETIME_CHECK = truthy("OPENCODE_DISABLE_FILETIME_CHECK")
export const OPENCODE_EXPERIMENTAL_PLAN_MODE = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE")
export const OPENCODE_EXPERIMENTAL_MARKDOWN = truthy("OPENCODE_EXPERIMENTAL_MARKDOWN")
export const OPENCODE_MODELS_URL = process.env["OPENCODE_MODELS_URL"]
export const OPENCODE_MODELS_PATH = process.env["OPENCODE_MODELS_PATH"]

View File

@@ -23,9 +23,14 @@ import { AttachCommand } from "./cli/cmd/tui/attach"
import { TuiThreadCommand } from "./cli/cmd/tui/thread"
import { AcpCommand } from "./cli/cmd/acp"
import { EOL } from "os"
import { win32DisableProcessedInput, win32IgnoreCtrlC } from "./cli/cmd/tui/win32"
import { WebCommand } from "./cli/cmd/web"
import { PrCommand } from "./cli/cmd/pr"
import { SessionCommand } from "./cli/cmd/session"
import path from "path"
import { Global } from "./global"
import { JsonMigration } from "./storage/json-migration"
import { Database } from "./storage/db"
process.on("unhandledRejection", (e) => {
Log.Default.error("rejection", {
@@ -39,6 +44,14 @@ process.on("uncaughtException", (e) => {
})
})
// Disable Windows CTRL_C_EVENT as early as possible. When running under
// `bun run` (e.g. `bun dev`), the parent bun process shares this console
// and would be killed by the OS before any JS signal handler fires.
win32DisableProcessedInput()
// Belt-and-suspenders: even if something re-enables ENABLE_PROCESSED_INPUT
// later (opentui raw mode, libuv, etc.), ignore the generated event.
win32IgnoreCtrlC()
const cli = yargs(hideBin(process.argv))
.parserConfiguration({ "populate--": true })
.scriptName("opencode")
@@ -74,6 +87,37 @@ const cli = yargs(hideBin(process.argv))
version: Installation.VERSION,
args: process.argv.slice(2),
})
const marker = path.join(Global.Path.data, "opencode.db")
if (!(await Bun.file(marker).exists())) {
console.log("Performing one time database migration, may take a few minutes...")
const tty = process.stdout.isTTY
const width = 36
const orange = "\x1b[38;5;214m"
const muted = "\x1b[0;2m"
const reset = "\x1b[0m"
let last = -1
if (tty) process.stdout.write("\x1b[?25l")
try {
await JsonMigration.run(Database.Client().$client, {
progress: (event) => {
if (!tty) return
const percent = Math.floor((event.current / event.total) * 100)
if (percent === last && event.current !== event.total) return
last = percent
const fill = Math.round((percent / 100) * width)
const bar = `${"■".repeat(fill)}${"・".repeat(width - fill)}`
process.stdout.write(
`\r${orange}${bar} ${percent.toString().padStart(3)}%${reset} ${muted}${event.label.padEnd(12)} ${event.current}/${event.total}${reset}`,
)
if (event.current === event.total) process.stdout.write("\n")
},
})
} finally {
if (tty) process.stdout.write("\x1b[?25h")
}
console.log("Database migration complete.")
}
})
.usage("\n" + UI.logo())
.completion("completion", "generate shell completion script")

View File

@@ -3,7 +3,8 @@ import { BusEvent } from "@/bus/bus-event"
import { Config } from "@/config/config"
import { Identifier } from "@/id/id"
import { Instance } from "@/project/instance"
import { Storage } from "@/storage/storage"
import { Database, eq } from "@/storage/db"
import { PermissionTable } from "@/session/session.sql"
import { fn } from "@/util/fn"
import { Log } from "@/util/log"
import { Wildcard } from "@/util/wildcard"
@@ -105,9 +106,12 @@ export namespace PermissionNext {
),
}
const state = Instance.state(async () => {
const state = Instance.state(() => {
const projectID = Instance.project.id
const stored = await Storage.read<Ruleset>(["permission", projectID]).catch(() => [] as Ruleset)
const row = Database.use((db) =>
db.select().from(PermissionTable).where(eq(PermissionTable.project_id, projectID)).get(),
)
const stored = row?.data ?? ([] as Ruleset)
const pending: Record<
string,
@@ -222,7 +226,8 @@ export namespace PermissionNext {
// TODO: we don't save the permission ruleset to disk yet until there's
// UI to manage it
// await Storage.write(["permission", Instance.project.id], s.approved)
// db().insert(PermissionTable).values({ projectID: Instance.project.id, data: s.approved })
// .onConflictDoUpdate({ target: PermissionTable.projectID, set: { data: s.approved } }).run()
return
}
},
@@ -275,6 +280,7 @@ export namespace PermissionNext {
}
export async function list() {
return state().then((x) => Object.values(x.pending).map((x) => x.info))
const s = await state()
return Object.values(s.pending).map((x) => x.info)
}
}

View File

@@ -1,5 +1,4 @@
import { Plugin } from "../plugin"
import { Share } from "../share/share"
import { Format } from "../format"
import { LSP } from "../lsp"
import { FileWatcher } from "../file/watcher"
@@ -17,7 +16,6 @@ import { Truncate } from "../tool/truncation"
export async function InstanceBootstrap() {
Log.Default.info("bootstrapping", { directory: Instance.directory })
await Plugin.init()
Share.init()
ShareNext.init()
Format.init()
await LSP.init()

View File

@@ -0,0 +1,15 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"
import { Timestamps } from "@/storage/schema.sql"
// Drizzle schema for the `project` table: one row per project known to opencode.
export const ProjectTable = sqliteTable("project", {
  // Stable project id — derived from the repo's root commit hash, or
  // "global" when no VCS id can be determined (see Project.fromDirectory).
  id: text().primaryKey(),
  // Primary worktree directory for the project.
  worktree: text().notNull(),
  // Version-control kind (e.g. "git"); null when unknown.
  vcs: text(),
  // Optional display name.
  name: text(),
  // Optional icon source URL and accent color (flattened from Info.icon).
  icon_url: text(),
  icon_color: text(),
  // time_created / time_updated columns from the shared schema helper.
  ...Timestamps,
  // When the project was first initialized, if ever.
  time_initialized: integer(),
  // Additional sandbox directories, stored as a JSON string array.
  sandboxes: text({ mode: "json" }).notNull().$type<string[]>(),
  // User-defined project commands (e.g. a `start` command), JSON-encoded.
  commands: text({ mode: "json" }).$type<{ start?: string }>(),
})

View File

@@ -1,18 +1,17 @@
import z from "zod"
import fs from "fs/promises"
import { Filesystem } from "../util/filesystem"
import path from "path"
import { $ } from "bun"
import { Storage } from "../storage/storage"
import { Database, eq } from "../storage/db"
import { ProjectTable } from "./project.sql"
import { SessionTable } from "../session/session.sql"
import { Log } from "../util/log"
import { Flag } from "@/flag/flag"
import { Session } from "../session"
import { work } from "../util/queue"
import { fn } from "@opencode-ai/util/fn"
import { BusEvent } from "@/bus/bus-event"
import { iife } from "@/util/iife"
import { GlobalBus } from "@/bus/global"
import { existsSync } from "fs"
export namespace Project {
const log = Log.create({ service: "project" })
@@ -50,66 +49,86 @@ export namespace Project {
Updated: BusEvent.define("project.updated", Info),
}
type Row = typeof ProjectTable.$inferSelect
export function fromRow(row: Row): Info {
const icon =
row.icon_url || row.icon_color
? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined }
: undefined
return {
id: row.id,
worktree: row.worktree,
vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined,
name: row.name ?? undefined,
icon,
time: {
created: row.time_created,
updated: row.time_updated,
initialized: row.time_initialized ?? undefined,
},
sandboxes: row.sandboxes,
commands: row.commands ?? undefined,
}
}
export async function fromDirectory(directory: string) {
log.info("fromDirectory", { directory })
const { id, sandbox, worktree, vcs } = await iife(async () => {
const data = await iife(async () => {
const matches = Filesystem.up({ targets: [".git"], start: directory })
const git = await matches.next().then((x) => x.value)
await matches.return()
if (git) {
let sandbox = path.dirname(git)
const sandbox = path.dirname(git)
const bin = Bun.which("git")
const gitBinary = Bun.which("git")
// cached id calculation
let id = await Bun.file(path.join(git, "opencode"))
const cached = await Bun.file(path.join(git, "opencode"))
.text()
.then((x) => x.trim())
.catch(() => undefined)
if (!gitBinary) {
if (!bin) {
return {
id: id ?? "global",
id: cached ?? "global",
worktree: sandbox,
sandbox: sandbox,
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
}
}
// generate id from root commit
if (!id) {
const roots = await $`git rev-list --max-parents=0 --all`
.quiet()
.nothrow()
.cwd(sandbox)
.text()
.then((x) =>
x
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted(),
)
.catch(() => undefined)
if (!roots) {
return {
id: "global",
worktree: sandbox,
sandbox: sandbox,
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
}
}
id = roots[0]
if (id) {
void Bun.file(path.join(git, "opencode"))
.write(id)
const roots = cached
? undefined
: await $`git rev-list --max-parents=0 --all`
.quiet()
.nothrow()
.cwd(sandbox)
.text()
.then((x) =>
x
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted(),
)
.catch(() => undefined)
if (!cached && !roots) {
return {
id: "global",
worktree: sandbox,
sandbox: sandbox,
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
}
}
const id = cached ?? roots?.[0]
if (!cached && id) {
void Bun.file(path.join(git, "opencode"))
.write(id)
.catch(() => undefined)
}
if (!id) {
return {
id: "global",
@@ -136,33 +155,31 @@ export namespace Project {
}
}
sandbox = top
const worktree = await $`git rev-parse --git-common-dir`
const tree = await $`git rev-parse --git-common-dir`
.quiet()
.nothrow()
.cwd(sandbox)
.cwd(top)
.text()
.then((x) => {
const dirname = path.dirname(x.trim())
if (dirname === ".") return sandbox
if (dirname === ".") return top
return dirname
})
.catch(() => undefined)
if (!worktree) {
if (!tree) {
return {
id,
sandbox,
worktree: sandbox,
sandbox: top,
worktree: top,
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
}
}
return {
id,
sandbox,
worktree,
sandbox: top,
worktree: tree,
vcs: "git",
}
}
@@ -175,47 +192,80 @@ export namespace Project {
}
})
let existing = await Storage.read<Info>(["project", id]).catch(() => undefined)
if (!existing) {
existing = {
id,
worktree,
vcs: vcs as Info["vcs"],
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, data.id)).get())
const existing = await iife(async () => {
if (row) return fromRow(row)
const fresh: Info = {
id: data.id,
worktree: data.worktree,
vcs: data.vcs as Info["vcs"],
sandboxes: [],
time: {
created: Date.now(),
updated: Date.now(),
},
}
if (id !== "global") {
await migrateFromGlobal(id, worktree)
if (data.id !== "global") {
await migrateFromGlobal(data.id, data.worktree)
}
}
// migrate old projects before sandboxes
if (!existing.sandboxes) existing.sandboxes = []
return fresh
})
if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing)
const result: Info = {
...existing,
worktree,
vcs: vcs as Info["vcs"],
worktree: data.worktree,
vcs: data.vcs as Info["vcs"],
time: {
...existing.time,
updated: Date.now(),
},
}
if (sandbox !== result.worktree && !result.sandboxes.includes(sandbox)) result.sandboxes.push(sandbox)
result.sandboxes = result.sandboxes.filter((x) => existsSync(x))
await Storage.write<Info>(["project", id], result)
if (data.sandbox !== result.worktree && !result.sandboxes.includes(data.sandbox))
result.sandboxes.push(data.sandbox)
const sandboxes: string[] = []
for (const x of result.sandboxes) {
const stat = await Bun.file(x)
.stat()
.catch(() => undefined)
if (stat) sandboxes.push(x)
}
result.sandboxes = sandboxes
const insert = {
id: result.id,
worktree: result.worktree,
vcs: result.vcs ?? null,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_created: result.time.created,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
commands: result.commands,
}
const updateSet = {
worktree: result.worktree,
vcs: result.vcs ?? null,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
commands: result.commands,
}
Database.use((db) =>
db.insert(ProjectTable).values(insert).onConflictDoUpdate({ target: ProjectTable.id, set: updateSet }).run(),
)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
},
})
return { project: result, sandbox }
return { project: result, sandbox: data.sandbox }
}
export async function discover(input: Info) {
@@ -248,43 +298,54 @@ export namespace Project {
return
}
async function migrateFromGlobal(newProjectID: string, worktree: string) {
const globalProject = await Storage.read<Info>(["project", "global"]).catch(() => undefined)
if (!globalProject) return
async function migrateFromGlobal(id: string, worktree: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get())
if (!row) return
const globalSessions = await Storage.list(["session", "global"]).catch(() => [])
if (globalSessions.length === 0) return
const sessions = Database.use((db) =>
db.select().from(SessionTable).where(eq(SessionTable.project_id, "global")).all(),
)
if (sessions.length === 0) return
log.info("migrating sessions from global", { newProjectID, worktree, count: globalSessions.length })
log.info("migrating sessions from global", { newProjectID: id, worktree, count: sessions.length })
await work(10, globalSessions, async (key) => {
const sessionID = key[key.length - 1]
const session = await Storage.read<Session.Info>(key).catch(() => undefined)
if (!session) return
if (session.directory && session.directory !== worktree) return
await work(10, sessions, async (row) => {
// Skip sessions that belong to a different directory
if (row.directory && row.directory !== worktree) return
session.projectID = newProjectID
log.info("migrating session", { sessionID, from: "global", to: newProjectID })
await Storage.write(["session", newProjectID, sessionID], session)
await Storage.remove(key)
log.info("migrating session", { sessionID: row.id, from: "global", to: id })
Database.use((db) => db.update(SessionTable).set({ project_id: id }).where(eq(SessionTable.id, row.id)).run())
}).catch((error) => {
log.error("failed to migrate sessions from global to project", { error, projectId: newProjectID })
log.error("failed to migrate sessions from global to project", { error, projectId: id })
})
}
export async function setInitialized(projectID: string) {
await Storage.update<Info>(["project", projectID], (draft) => {
draft.time.initialized = Date.now()
})
export function setInitialized(id: string) {
Database.use((db) =>
db
.update(ProjectTable)
.set({
time_initialized: Date.now(),
})
.where(eq(ProjectTable.id, id))
.run(),
)
}
export async function list() {
const keys = await Storage.list(["project"])
const projects = await Promise.all(keys.map((x) => Storage.read<Info>(x)))
return projects.map((project) => ({
...project,
sandboxes: project.sandboxes?.filter((x) => existsSync(x)),
}))
export function list() {
return Database.use((db) =>
db
.select()
.from(ProjectTable)
.all()
.map((row) => fromRow(row)),
)
}
export function get(id: string): Info | undefined {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return undefined
return fromRow(row)
}
export const update = fn(
@@ -295,77 +356,90 @@ export namespace Project {
commands: Info.shape.commands.optional(),
}),
async (input) => {
const result = await Storage.update<Info>(["project", input.projectID], (draft) => {
if (input.name !== undefined) draft.name = input.name
if (input.icon !== undefined) {
draft.icon = {
...draft.icon,
}
if (input.icon.url !== undefined) draft.icon.url = input.icon.url
if (input.icon.override !== undefined) draft.icon.override = input.icon.override || undefined
if (input.icon.color !== undefined) draft.icon.color = input.icon.color
}
if (input.commands?.start !== undefined) {
const start = input.commands.start || undefined
draft.commands = {
...(draft.commands ?? {}),
}
draft.commands.start = start
if (!draft.commands.start) draft.commands = undefined
}
draft.time.updated = Date.now()
})
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({
name: input.name,
icon_url: input.icon?.url,
icon_color: input.icon?.color,
commands: input.commands,
time_updated: Date.now(),
})
.where(eq(ProjectTable.id, input.projectID))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${input.projectID}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
},
)
export async function sandboxes(projectID: string) {
const project = await Storage.read<Info>(["project", projectID]).catch(() => undefined)
if (!project?.sandboxes) return []
export async function sandboxes(id: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return []
const data = fromRow(row)
const valid: string[] = []
for (const dir of project.sandboxes) {
const stat = await fs.stat(dir).catch(() => undefined)
for (const dir of data.sandboxes) {
const stat = await Bun.file(dir)
.stat()
.catch(() => undefined)
if (stat?.isDirectory()) valid.push(dir)
}
return valid
}
export async function addSandbox(projectID: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => {
const sandboxes = draft.sandboxes ?? []
if (!sandboxes.includes(directory)) sandboxes.push(directory)
draft.sandboxes = sandboxes
draft.time.updated = Date.now()
})
export async function addSandbox(id: string, directory: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sandboxes = [...row.sandboxes]
if (!sandboxes.includes(directory)) sandboxes.push(directory)
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
}
export async function removeSandbox(projectID: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => {
const sandboxes = draft.sandboxes ?? []
draft.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory)
draft.time.updated = Date.now()
})
export async function removeSandbox(id: string, directory: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sandboxes = row.sandboxes.filter((s) => s !== directory)
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
}
}

View File

@@ -1,6 +1,6 @@
import { resolver } from "hono-openapi"
import z from "zod"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
export const ERRORS = {
400: {
@@ -25,7 +25,7 @@ export const ERRORS = {
description: "Not found",
content: {
"application/json": {
schema: resolver(Storage.NotFoundError.Schema),
schema: resolver(NotFoundError.Schema),
},
},
},

View File

@@ -3,7 +3,7 @@ import { describeRoute, validator, resolver } from "hono-openapi"
import { upgradeWebSocket } from "hono/bun"
import z from "zod"
import { Pty } from "@/pty"
import { Storage } from "../../storage/storage"
import { NotFoundError } from "../../storage/db"
import { errors } from "../error"
import { lazy } from "../../util/lazy"
@@ -76,7 +76,7 @@ export const PtyRoutes = lazy(() =>
async (c) => {
const info = Pty.get(c.req.valid("param").ptyID)
if (!info) {
throw new Storage.NotFoundError({ message: "Session not found" })
throw new NotFoundError({ message: "Session not found" })
}
return c.json(info)
},

View File

@@ -7,6 +7,7 @@ import { MessageV2 } from "../../session/message-v2"
import { SessionPrompt } from "../../session/prompt"
import { SessionCompaction } from "../../session/compaction"
import { SessionRevert } from "../../session/revert"
import { SessionHandoff } from "../../session/handoff"
import { SessionStatus } from "@/session/status"
import { SessionSummary } from "@/session/summary"
import { Todo } from "../../session/todo"
@@ -276,18 +277,15 @@ export const SessionRoutes = lazy(() =>
const sessionID = c.req.valid("param").sessionID
const updates = c.req.valid("json")
const updatedSession = await Session.update(
sessionID,
(session) => {
if (updates.title !== undefined) {
session.title = updates.title
}
if (updates.time?.archived !== undefined) session.time.archived = updates.time.archived
},
{ touch: false },
)
let session = await Session.get(sessionID)
if (updates.title !== undefined) {
session = await Session.setTitle({ sessionID, title: updates.title })
}
if (updates.time?.archived !== undefined) {
session = await Session.setArchived({ sessionID, time: updates.time.archived })
}
return c.json(updatedSession)
return c.json(session)
},
)
.post(
@@ -935,5 +933,41 @@ export const SessionRoutes = lazy(() =>
})
return c.json(true)
},
)
.post(
"/:sessionID/handoff",
describeRoute({
summary: "Handoff session",
description: "Extract context and relevant files for another agent to continue the conversation.",
operationId: "session.handoff",
responses: {
200: {
description: "Handoff data extracted",
content: {
"application/json": {
schema: resolver(z.object({ text: z.string(), files: z.string().array() })),
},
},
},
...errors(400, 404),
},
}),
validator(
"param",
z.object({
sessionID: z.string().meta({ description: "Session ID" }),
}),
),
validator("json", SessionHandoff.handoff.schema.omit({ sessionID: true })),
async (c) => {
const params = c.req.valid("param")
const body = c.req.valid("json")
const result = await SessionHandoff.handoff({
sessionID: params.sessionID,
model: body.model,
goal: body.goal,
})
return c.json(result)
},
),
)

View File

@@ -31,7 +31,7 @@ import { ExperimentalRoutes } from "./routes/experimental"
import { ProviderRoutes } from "./routes/provider"
import { lazy } from "../util/lazy"
import { InstanceBootstrap } from "../project/bootstrap"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
import type { ContentfulStatusCode } from "hono/utils/http-status"
import { websocket } from "hono/bun"
import { HTTPException } from "hono/http-exception"
@@ -65,7 +65,7 @@ export namespace Server {
})
if (err instanceof NamedError) {
let status: ContentfulStatusCode
if (err instanceof Storage.NotFoundError) status = 404
if (err instanceof NotFoundError) status = 404
else if (err instanceof Provider.ModelNotFoundError) status = 400
else if (err.name.startsWith("Worktree")) status = 400
else status = 500

View File

@@ -0,0 +1,105 @@
import { fn } from "@/util/fn"
import z from "zod"
import { MessageV2 } from "./message-v2"
import { LLM } from "./llm"
import { Agent } from "@/agent/agent"
import { Provider } from "@/provider/provider"
import { iife } from "@/util/iife"
import { Identifier } from "@/id/id"
import PROMPT_HANDOFF from "./prompt/handoff.txt"
import { type Tool } from "ai"
import { SessionStatus } from "./status"
import { defer } from "@/util/defer"
export namespace SessionHandoff {
  /**
   * Output-only tool: the model is forced to call it and the structured
   * tool input (`text` + `files`) is the handoff payload we return.
   */
  const HandoffTool: Tool = {
    description:
      "A tool to extract relevant information from the thread and select relevant files for another agent to continue the conversation. Use this tool to identify the most important context and files needed.",
    inputSchema: z.object({
      text: z.string().describe(PROMPT_HANDOFF),
      files: z
        .string()
        .array()
        .describe(
          [
            "An array of file or directory paths (workspace-relative) that are relevant to accomplishing the goal.",
            "",
            'IMPORTANT: Return as a JSON array of strings, e.g., ["packages/core/src/session/message-v2.ts", "packages/core/src/session/prompt/handoff.txt"]',
            "",
            "Rules:",
            "- Maximum 10 files. Only include the most critical files needed for the task.",
            "- You can include directories if multiple files from that directory are needed",
            "- Prioritize by importance and relevance. PUT THE MOST IMPORTANT FILES FIRST.",
            '- Return workspace-relative paths (e.g., "packages/core/src/session/message-v2.ts")',
            "- Do not use absolute paths or invent files",
          ].join("\n"),
        ),
    }),
    async execute(_args, _ctx) {
      // No-op: we never act on the call, we only read its structured input.
      return {}
    },
  }

  /**
   * Extract handoff context from a session for another agent.
   *
   * Marks the session busy for the duration, replays the compaction-filtered
   * message history through a (preferably small) model, and forces a single
   * HandoffTool call whose input ({ text, files }) is returned.
   *
   * @throws Error when the model does not produce a handoff tool call.
   */
  export const handoff = fn(
    z.object({
      sessionID: z.string(),
      model: z.object({ providerID: z.string(), modelID: z.string() }),
      goal: z.string().optional(),
    }),
    async (input) => {
      SessionStatus.set(input.sessionID, { type: "busy" })
      using _ = defer(() => SessionStatus.set(input.sessionID, { type: "idle" }))
      const messages = await MessageV2.filterCompacted(MessageV2.stream(input.sessionID))
      const agent = await Agent.get("handoff")
      // Model preference: agent-pinned model, else the provider's small
      // model, else whatever model the caller is currently using.
      const model = await iife(async () => {
        if (agent.model) return Provider.getModel(agent.model.providerID, agent.model.modelID)
        const small = await Provider.getSmallModel(input.model.providerID)
        if (small) return small
        return Provider.getModel(input.model.providerID, input.model.modelID)
      })
      // Fix: the text part must carry its parent message's id — previously a
      // fresh id was generated for part.messageID, orphaning the part from
      // the synthetic user message.
      const messageID = Identifier.ascending("user")
      const user = {
        info: {
          model: {
            providerID: model.providerID,
            modelID: model.id,
          },
          agent: agent.name,
          sessionID: input.sessionID,
          id: messageID,
          role: "user",
          time: {
            created: Date.now(),
          },
        } satisfies MessageV2.User,
        parts: [
          {
            type: "text",
            text: PROMPT_HANDOFF + "\n\nMy request:\n" + (input.goal ?? "general summarization"),
            id: Identifier.ascending("part"),
            sessionID: input.sessionID,
            messageID,
          },
        ] satisfies MessageV2.TextPart[],
      } satisfies MessageV2.WithParts
      const abort = new AbortController()
      const stream = await LLM.stream({
        agent,
        messages: MessageV2.toModelMessages([...messages, user], model),
        sessionID: input.sessionID,
        abort: abort.signal,
        model,
        system: [],
        small: true,
        user: user.info,
        output: "tool",
        tools: {
          handoff: HandoffTool,
        },
      })
      const [result] = await stream.toolCalls
      if (!result) throw new Error("Handoff tool did not return a result")
      return result.input
    },
  )
}

View File

@@ -10,7 +10,9 @@ import { Flag } from "../flag/flag"
import { Identifier } from "../id/id"
import { Installation } from "../installation"
import { Storage } from "../storage/storage"
import { Database, NotFoundError, eq, and, or, like } from "../storage/db"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import { Storage } from "@/storage/storage"
import { Log } from "../util/log"
import { MessageV2 } from "./message-v2"
import { Instance } from "../project/instance"
@@ -41,6 +43,64 @@ export namespace Session {
).test(title)
}
type SessionRow = typeof SessionTable.$inferSelect
export function fromRow(row: SessionRow): Info {
const summary =
row.summary_additions !== null || row.summary_deletions !== null || row.summary_files !== null
? {
additions: row.summary_additions ?? 0,
deletions: row.summary_deletions ?? 0,
files: row.summary_files ?? 0,
diffs: row.summary_diffs ?? undefined,
}
: undefined
const share = row.share_url ? { url: row.share_url } : undefined
const revert = row.revert ?? undefined
return {
id: row.id,
slug: row.slug,
projectID: row.project_id,
directory: row.directory,
parentID: row.parent_id ?? undefined,
title: row.title,
version: row.version,
summary,
share,
revert,
permission: row.permission ?? undefined,
time: {
created: row.time_created,
updated: row.time_updated,
compacting: row.time_compacting ?? undefined,
archived: row.time_archived ?? undefined,
},
}
}
export function toRow(info: Info) {
return {
id: info.id,
project_id: info.projectID,
parent_id: info.parentID,
slug: info.slug,
directory: info.directory,
title: info.title,
version: info.version,
share_url: info.share?.url,
summary_additions: info.summary?.additions,
summary_deletions: info.summary?.deletions,
summary_files: info.summary?.files,
summary_diffs: info.summary?.diffs,
revert: info.revert ?? null,
permission: info.permission,
time_created: info.time.created,
time_updated: info.time.updated,
time_compacting: info.time.compacting,
time_archived: info.time.archived,
}
}
function getForkedTitle(title: string): string {
const match = title.match(/^(.+) \(fork #(\d+)\)$/)
if (match) {
@@ -94,16 +154,6 @@ export namespace Session {
})
export type Info = z.output<typeof Info>
export const ShareInfo = z
.object({
secret: z.string(),
url: z.string(),
})
.meta({
ref: "SessionShare",
})
export type ShareInfo = z.output<typeof ShareInfo>
export const Event = {
Created: BusEvent.define(
"session.created",
@@ -200,8 +250,17 @@ export namespace Session {
)
export const touch = fn(Identifier.schema("session"), async (sessionID) => {
await update(sessionID, (draft) => {
draft.time.updated = Date.now()
const now = Date.now()
Database.use((db) => {
const row = db
.update(SessionTable)
.set({ time_updated: now })
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
})
@@ -227,21 +286,19 @@ export namespace Session {
},
}
log.info("created", result)
await Storage.write(["session", Instance.project.id, result.id], result)
Bus.publish(Event.Created, {
info: result,
Database.use((db) => {
db.insert(SessionTable).values(toRow(result)).run()
Database.effect(() =>
Bus.publish(Event.Created, {
info: result,
}),
)
})
const cfg = await Config.get()
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
share(result.id)
.then((share) => {
update(result.id, (draft) => {
draft.share = share
})
})
.catch(() => {
// Silently ignore sharing errors during session creation
})
share(result.id).catch(() => {
// Silently ignore sharing errors during session creation
})
Bus.publish(Event.Updated, {
info: result,
})
@@ -256,12 +313,9 @@ export namespace Session {
}
export const get = fn(Identifier.schema("session"), async (id) => {
const read = await Storage.read<Info>(["session", Instance.project.id, id])
return read as Info
})
export const getShare = fn(Identifier.schema("session"), async (id) => {
return Storage.read<ShareInfo>(["share", id])
const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get())
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
return fromRow(row)
})
export const share = fn(Identifier.schema("session"), async (id) => {
@@ -271,15 +325,12 @@ export namespace Session {
}
const { ShareNext } = await import("@/share/share-next")
const share = await ShareNext.create(id)
await update(
id,
(draft) => {
draft.share = {
url: share.url,
}
},
{ touch: false },
)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: share.url }).where(eq(SessionTable.id, id)).returning().get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
return share
})
@@ -287,32 +338,155 @@ export namespace Session {
// Use ShareNext to remove the share (same as share function uses ShareNext to create)
const { ShareNext } = await import("@/share/share-next")
await ShareNext.remove(id)
await update(
id,
(draft) => {
draft.share = undefined
},
{ touch: false },
)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: null }).where(eq(SessionTable.id, id)).returning().get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
})
export async function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
const project = Instance.project
const result = await Storage.update<Info>(["session", project.id, id], (draft) => {
editor(draft)
if (options?.touch !== false) {
draft.time.updated = Date.now()
}
  /**
   * Rename a session.
   * Updates only the `title` column (does not bump time_updated), publishes
   * Event.Updated with the refreshed Info, and returns it.
   * Throws NotFoundError if no row matches the session id.
   */
  export const setTitle = fn(
    z.object({
      sessionID: Identifier.schema("session"),
      title: z.string(),
    }),
    async (input) => {
      return Database.use((db) => {
        const row = db
          .update(SessionTable)
          .set({ title: input.title })
          .where(eq(SessionTable.id, input.sessionID))
          .returning()
          .get()
        if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
        const info = fromRow(row)
        // NOTE(review): Database.effect appears to defer the publish until the
        // surrounding database work completes — confirm against Database impl.
        Database.effect(() => Bus.publish(Event.Updated, { info }))
        return info
      })
    },
  )
  /**
   * Set the archived timestamp on a session.
   * Publishes Event.Updated with the refreshed Info and returns it.
   * Throws NotFoundError if the session does not exist.
   * NOTE(review): when `time` is omitted, set({ time_archived: undefined })
   * presumably leaves the column unchanged rather than clearing it — confirm
   * how un-archiving is expected to work.
   */
  export const setArchived = fn(
    z.object({
      sessionID: Identifier.schema("session"),
      time: z.number().optional(),
    }),
    async (input) => {
      return Database.use((db) => {
        const row = db
          .update(SessionTable)
          .set({ time_archived: input.time })
          .where(eq(SessionTable.id, input.sessionID))
          .returning()
          .get()
        if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
        const info = fromRow(row)
        Database.effect(() => Bus.publish(Event.Updated, { info }))
        return info
      })
    },
  )
  /**
   * Overwrite the session's permission ruleset and bump time_updated.
   * Publishes Event.Updated with the refreshed Info and returns it.
   * Throws NotFoundError if the session does not exist.
   */
  export const setPermission = fn(
    z.object({
      sessionID: Identifier.schema("session"),
      permission: PermissionNext.Ruleset,
    }),
    async (input) => {
      return Database.use((db) => {
        const row = db
          .update(SessionTable)
          .set({ permission: input.permission, time_updated: Date.now() })
          .where(eq(SessionTable.id, input.sessionID))
          .returning()
          .get()
        if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
        const info = fromRow(row)
        Database.effect(() => Bus.publish(Event.Updated, { info }))
        return info
      })
    },
  )
  /**
   * Record a pending revert on a session along with the resulting diff
   * summary, bumping time_updated. `revert` is normalized to null so the
   * column is cleared when absent. Publishes Event.Updated and returns the
   * refreshed Info. Throws NotFoundError if the session does not exist.
   * NOTE(review): summary_diffs is not written here even though toRow maps
   * it — confirm this omission is intentional.
   */
  export const setRevert = fn(
    z.object({
      sessionID: Identifier.schema("session"),
      revert: Info.shape.revert,
      summary: Info.shape.summary,
    }),
    async (input) => {
      return Database.use((db) => {
        const row = db
          .update(SessionTable)
          .set({
            revert: input.revert ?? null,
            summary_additions: input.summary?.additions,
            summary_deletions: input.summary?.deletions,
            summary_files: input.summary?.files,
            time_updated: Date.now(),
          })
          .where(eq(SessionTable.id, input.sessionID))
          .returning()
          .get()
        if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
        const info = fromRow(row)
        Database.effect(() => Bus.publish(Event.Updated, { info }))
        return info
      })
    },
  )
export const clearRevert = fn(Identifier.schema("session"), async (sessionID) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
revert: null,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
Bus.publish(Event.Updated, {
info: result,
})
return result
}
})
  /**
   * Replace the session's diff summary (additions/deletions/file count) and
   * bump time_updated. Publishes Event.Updated with the refreshed Info and
   * returns it. Throws NotFoundError if the session does not exist.
   * NOTE(review): when `summary` is undefined the set() fields are undefined
   * and are presumably skipped rather than cleared — confirm.
   */
  export const setSummary = fn(
    z.object({
      sessionID: Identifier.schema("session"),
      summary: Info.shape.summary,
    }),
    async (input) => {
      return Database.use((db) => {
        const row = db
          .update(SessionTable)
          .set({
            summary_additions: input.summary?.additions,
            summary_deletions: input.summary?.deletions,
            summary_files: input.summary?.files,
            time_updated: Date.now(),
          })
          .where(eq(SessionTable.id, input.sessionID))
          .returning()
          .get()
        if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
        const info = fromRow(row)
        Database.effect(() => Bus.publish(Event.Updated, { info }))
        return info
      })
    },
  )
export const diff = fn(Identifier.schema("session"), async (sessionID) => {
const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
return diffs ?? []
try {
return await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
} catch {
return []
}
})
export const messages = fn(
@@ -331,25 +505,37 @@ export namespace Session {
},
)
export async function* list() {
export function* list() {
const project = Instance.project
for (const item of await Storage.list(["session", project.id])) {
const session = await Storage.read<Info>(item).catch(() => undefined)
if (!session) continue
yield session
const rel = path.relative(Instance.worktree, Instance.directory)
const suffix = path.sep + rel
const rows = Database.use((db) =>
db
.select()
.from(SessionTable)
.where(
and(
eq(SessionTable.project_id, project.id),
or(eq(SessionTable.directory, Instance.directory), like(SessionTable.directory, `%${suffix}`)),
),
)
.all(),
)
for (const row of rows) {
yield fromRow(row)
}
}
export const children = fn(Identifier.schema("session"), async (parentID) => {
const project = Instance.project
const result = [] as Session.Info[]
for (const item of await Storage.list(["session", project.id])) {
const session = await Storage.read<Info>(item).catch(() => undefined)
if (!session) continue
if (session.parentID !== parentID) continue
result.push(session)
}
return result
const rows = Database.use((db) =>
db
.select()
.from(SessionTable)
.where(and(eq(SessionTable.project_id, project.id), eq(SessionTable.parent_id, parentID)))
.all(),
)
return rows.map(fromRow)
})
export const remove = fn(Identifier.schema("session"), async (sessionID) => {
@@ -360,15 +546,14 @@ export namespace Session {
await remove(child.id)
}
await unshare(sessionID).catch(() => {})
for (const msg of await Storage.list(["message", sessionID])) {
for (const part of await Storage.list(["part", msg.at(-1)!])) {
await Storage.remove(part)
}
await Storage.remove(msg)
}
await Storage.remove(["session", project.id, sessionID])
Bus.publish(Event.Deleted, {
info: session,
// CASCADE delete handles messages and parts automatically
Database.use((db) => {
db.delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
Database.effect(() =>
Bus.publish(Event.Deleted, {
info: session,
}),
)
})
} catch (e) {
log.error(e)
@@ -376,9 +561,23 @@ export namespace Session {
})
export const updateMessage = fn(MessageV2.Info, async (msg) => {
await Storage.write(["message", msg.sessionID, msg.id], msg)
Bus.publish(MessageV2.Event.Updated, {
info: msg,
const time_created = msg.role === "user" ? msg.time.created : msg.time.created
const { id, sessionID, ...data } = msg
Database.use((db) => {
db.insert(MessageTable)
.values({
id,
session_id: sessionID,
time_created,
data,
})
.onConflictDoUpdate({ target: MessageTable.id, set: { data } })
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Updated, {
info: msg,
}),
)
})
return msg
})
@@ -389,10 +588,15 @@ export namespace Session {
messageID: Identifier.schema("message"),
}),
async (input) => {
await Storage.remove(["message", input.sessionID, input.messageID])
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
// CASCADE delete handles parts automatically
Database.use((db) => {
db.delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
}),
)
})
return input.messageID
},
@@ -405,39 +609,58 @@ export namespace Session {
partID: Identifier.schema("part"),
}),
async (input) => {
await Storage.remove(["part", input.messageID, input.partID])
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
Database.use((db) => {
db.delete(PartTable).where(eq(PartTable.id, input.partID)).run()
Database.effect(() =>
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
}),
)
})
return input.partID
},
)
const UpdatePartInput = z.union([
MessageV2.Part,
z.object({
part: MessageV2.TextPart,
delta: z.string(),
}),
z.object({
part: MessageV2.ReasoningPart,
delta: z.string(),
}),
])
const UpdatePartInput = MessageV2.Part
export const updatePart = fn(UpdatePartInput, async (input) => {
const part = "delta" in input ? input.part : input
const delta = "delta" in input ? input.delta : undefined
await Storage.write(["part", part.messageID, part.id], part)
Bus.publish(MessageV2.Event.PartUpdated, {
part,
delta,
  /**
   * Upsert a message part. Identifier fields (id/messageID/sessionID) are
   * split into dedicated columns; the remaining fields are stored in the JSON
   * `data` column. On conflict only `data` is refreshed, keeping the original
   * time_created. Publishes PartUpdated after the write and returns the part.
   */
  export const updatePart = fn(UpdatePartInput, async (part) => {
    const { id, messageID, sessionID, ...data } = part
    // Insertion timestamp for new rows; ignored on conflict.
    const time = Date.now()
    Database.use((db) => {
      db.insert(PartTable)
        .values({
          id,
          message_id: messageID,
          session_id: sessionID,
          time_created: time,
          data,
        })
        .onConflictDoUpdate({ target: PartTable.id, set: { data } })
        .run()
      Database.effect(() =>
        Bus.publish(MessageV2.Event.PartUpdated, {
          part,
        }),
      )
    })
    return part
  })
  /**
   * Broadcast an incremental streaming update for a part (e.g. text tokens).
   * Publish-only: nothing is written to the database here.
   * NOTE(review): presumably the accumulated part is flushed separately via
   * updatePart — confirm against the stream processor.
   */
  export const updatePartDelta = fn(
    z.object({
      sessionID: z.string(),
      messageID: z.string(),
      partID: z.string(),
      field: z.string(),
      delta: z.string(),
    }),
    async (input) => {
      Bus.publish(MessageV2.Event.PartDelta, input)
    },
  )
export const getUsage = fn(
z.object({
model: z.custom<Provider.Model>(),

View File

@@ -38,6 +38,7 @@ export namespace LLM {
small?: boolean
tools: Record<string, Tool>
retries?: number
output?: "tool"
}
export type StreamOutput = StreamTextResult<ToolSet, unknown>
@@ -207,6 +208,7 @@ export namespace LLM {
tools,
maxOutputTokens,
abortSignal: input.abort,
toolChoice: input.output === "tool" ? "required" : undefined,
headers: {
...(input.model.providerID.startsWith("opencode")
? {

View File

@@ -6,6 +6,10 @@ import { Identifier } from "../id/id"
import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot"
import { fn } from "@/util/fn"
import { Database, eq, desc, inArray } from "@/storage/db"
import { MessageTable, PartTable } from "./session.sql"
import { ProviderTransform } from "@/provider/transform"
import { STATUS_CODES } from "http"
import { Storage } from "@/storage/storage"
import { ProviderError } from "@/provider/error"
import { iife } from "@/util/iife"
@@ -423,7 +427,16 @@ export namespace MessageV2 {
"message.part.updated",
z.object({
part: Part,
delta: z.string().optional(),
}),
),
PartDelta: BusEvent.define(
"message.part.delta",
z.object({
sessionID: z.string(),
messageID: z.string(),
partID: z.string(),
field: z.string(),
delta: z.string(),
}),
),
PartRemoved: BusEvent.define(
@@ -668,23 +681,65 @@ export namespace MessageV2 {
}
export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
const list = await Array.fromAsync(await Storage.list(["message", sessionID]))
for (let i = list.length - 1; i >= 0; i--) {
yield await get({
sessionID,
messageID: list[i][2],
})
const size = 50
let offset = 0
while (true) {
const rows = Database.use((db) =>
db
.select()
.from(MessageTable)
.where(eq(MessageTable.session_id, sessionID))
.orderBy(desc(MessageTable.time_created))
.limit(size)
.offset(offset)
.all(),
)
if (rows.length === 0) break
const ids = rows.map((row) => row.id)
const partsByMessage = new Map<string, MessageV2.Part[]>()
if (ids.length > 0) {
const partRows = Database.use((db) =>
db
.select()
.from(PartTable)
.where(inArray(PartTable.message_id, ids))
.orderBy(PartTable.message_id, PartTable.id)
.all(),
)
for (const row of partRows) {
const part = {
...row.data,
id: row.id,
sessionID: row.session_id,
messageID: row.message_id,
} as MessageV2.Part
const list = partsByMessage.get(row.message_id)
if (list) list.push(part)
else partsByMessage.set(row.message_id, [part])
}
}
for (const row of rows) {
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
yield {
info,
parts: partsByMessage.get(row.id) ?? [],
}
}
offset += rows.length
if (rows.length < size) break
}
})
export const parts = fn(Identifier.schema("message"), async (messageID) => {
const result = [] as MessageV2.Part[]
for (const item of await Storage.list(["part", messageID])) {
const read = await Storage.read<MessageV2.Part>(item)
result.push(read)
}
result.sort((a, b) => (a.id > b.id ? 1 : -1))
return result
export const parts = fn(Identifier.schema("message"), async (message_id) => {
const rows = Database.use((db) =>
db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(),
)
return rows.map(
(row) => ({ ...row.data, id: row.id, sessionID: row.session_id, messageID: row.message_id }) as MessageV2.Part,
)
})
export const get = fn(
@@ -693,8 +748,11 @@ export namespace MessageV2 {
messageID: Identifier.schema("message"),
}),
async (input): Promise<WithParts> => {
const row = Database.use((db) => db.select().from(MessageTable).where(eq(MessageTable.id, input.messageID)).get())
if (!row) throw new Error(`Message not found: ${input.messageID}`)
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
return {
info: await Storage.read<MessageV2.Info>(["message", input.sessionID, input.messageID]),
info,
parts: await parts(input.messageID),
}
},

View File

@@ -63,17 +63,19 @@ export namespace SessionProcessor {
if (value.id in reasoningMap) {
continue
}
reasoningMap[value.id] = {
const reasoningPart = {
id: Identifier.ascending("part"),
messageID: input.assistantMessage.id,
sessionID: input.assistantMessage.sessionID,
type: "reasoning",
type: "reasoning" as const,
text: "",
time: {
start: Date.now(),
},
metadata: value.providerMetadata,
}
reasoningMap[value.id] = reasoningPart
await Session.updatePart(reasoningPart)
break
case "reasoning-delta":
@@ -81,7 +83,13 @@ export namespace SessionProcessor {
const part = reasoningMap[value.id]
part.text += value.text
if (value.providerMetadata) part.metadata = value.providerMetadata
if (part.text) await Session.updatePart({ part, delta: value.text })
await Session.updatePartDelta({
sessionID: part.sessionID,
messageID: part.messageID,
partID: part.id,
field: "text",
delta: value.text,
})
}
break
@@ -288,17 +296,20 @@ export namespace SessionProcessor {
},
metadata: value.providerMetadata,
}
await Session.updatePart(currentText)
break
case "text-delta":
if (currentText) {
currentText.text += value.text
if (value.providerMetadata) currentText.metadata = value.providerMetadata
if (currentText.text)
await Session.updatePart({
part: currentText,
delta: value.text,
})
await Session.updatePartDelta({
sessionID: currentText.sessionID,
messageID: currentText.messageID,
partID: currentText.id,
field: "text",
delta: value.text,
})
}
break

View File

@@ -164,9 +164,7 @@ export namespace SessionPrompt {
}
if (permissions.length > 0) {
session.permission = permissions
await Session.update(session.id, (draft) => {
draft.permission = permissions
})
await Session.setPermission({ sessionID: session.id, permission: permissions })
}
if (input.noReply === true) {
@@ -1235,33 +1233,7 @@ export namespace SessionPrompt {
const userMessage = input.messages.findLast((msg) => msg.info.role === "user")
if (!userMessage) return input.messages
// Original logic when experimental plan mode is disabled
if (!Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE) {
if (input.agent.name === "plan") {
userMessage.parts.push({
id: Identifier.ascending("part"),
messageID: userMessage.info.id,
sessionID: userMessage.info.sessionID,
type: "text",
text: PROMPT_PLAN,
synthetic: true,
})
}
const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.agent === "plan")
if (wasPlan && input.agent.name === "build") {
userMessage.parts.push({
id: Identifier.ascending("part"),
messageID: userMessage.info.id,
sessionID: userMessage.info.sessionID,
type: "text",
text: BUILD_SWITCH,
synthetic: true,
})
}
return input.messages
}
// New plan mode logic when flag is enabled
// Plan mode logic
const assistantMessage = input.messages.findLast((msg) => msg.info.role === "assistant")
// Switching from plan mode to build mode
@@ -1853,21 +1825,16 @@ NOTE: At any point in time through this workflow you should feel free to ask the
],
})
const text = await result.text.catch((err) => log.error("failed to generate title", { error: err }))
if (text)
return Session.update(
input.session.id,
(draft) => {
const cleaned = text
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
.split("\n")
.map((line) => line.trim())
.find((line) => line.length > 0)
if (!cleaned) return
if (text) {
const cleaned = text
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
.split("\n")
.map((line) => line.trim())
.find((line) => line.length > 0)
if (!cleaned) return
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
draft.title = title
},
{ touch: false },
)
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
return Session.setTitle({ sessionID: input.session.id, title })
}
}
}

View File

@@ -0,0 +1,17 @@
Extract relevant context from the conversation above for continuing this work. Write from my perspective (first person: "I did...", "I told you...").
Consider what would be useful to know based on my request below. Questions that might be relevant:
- What did I just do or implement?
- What instructions did I already give you which are still relevant (e.g. follow patterns in the codebase)?
- What files did I already tell you are important, or that I am working on (and should continue working on)?
- Did I provide a plan or spec that should be included?
- What did I already tell you that's important (certain libraries, patterns, constraints, preferences)?
- What important technical details did I discover (APIs, methods, patterns)?
- What caveats, limitations, or open questions did I find?
Extract what matters for the specific request below. Don't answer questions that aren't relevant. Pick an appropriate length based on the complexity of the request.
Focus on capabilities and behavior, not file-by-file changes. Avoid excessive implementation details (variable names, storage keys, constants) unless critical.
Format: Plain text with bullets. No markdown headers, no bold/italic, no code fences. Use workspace-relative paths for files.

View File

@@ -4,8 +4,9 @@ import { Snapshot } from "../snapshot"
import { MessageV2 } from "./message-v2"
import { Session } from "."
import { Log } from "../util/log"
import { splitWhen } from "remeda"
import { Storage } from "../storage/storage"
import { Database, eq } from "../storage/db"
import { MessageTable, PartTable } from "./session.sql"
import { Storage } from "@/storage/storage"
import { Bus } from "../bus"
import { SessionPrompt } from "./prompt"
import { SessionSummary } from "./summary"
@@ -65,13 +66,14 @@ export namespace SessionRevert {
sessionID: input.sessionID,
diff: diffs,
})
return Session.update(input.sessionID, (draft) => {
draft.revert = revert
draft.summary = {
return Session.setRevert({
sessionID: input.sessionID,
revert,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
}
},
})
}
return session
@@ -83,39 +85,54 @@ export namespace SessionRevert {
const session = await Session.get(input.sessionID)
if (!session.revert) return session
if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot)
const next = await Session.update(input.sessionID, (draft) => {
draft.revert = undefined
})
return next
return Session.clearRevert(input.sessionID)
}
export async function cleanup(session: Session.Info) {
if (!session.revert) return
const sessionID = session.id
let msgs = await Session.messages({ sessionID })
const msgs = await Session.messages({ sessionID })
const messageID = session.revert.messageID
const [preserve, remove] = splitWhen(msgs, (x) => x.info.id === messageID)
msgs = preserve
const preserve = [] as MessageV2.WithParts[]
const remove = [] as MessageV2.WithParts[]
let target: MessageV2.WithParts | undefined
for (const msg of msgs) {
if (msg.info.id < messageID) {
preserve.push(msg)
continue
}
if (msg.info.id > messageID) {
remove.push(msg)
continue
}
if (session.revert.partID) {
preserve.push(msg)
target = msg
continue
}
remove.push(msg)
}
for (const msg of remove) {
await Storage.remove(["message", sessionID, msg.info.id])
Database.use((db) => db.delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run())
await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id })
}
const last = preserve.at(-1)
if (session.revert.partID && last) {
if (session.revert.partID && target) {
const partID = session.revert.partID
const [preserveParts, removeParts] = splitWhen(last.parts, (x) => x.id === partID)
last.parts = preserveParts
for (const part of removeParts) {
await Storage.remove(["part", last.info.id, part.id])
await Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: sessionID,
messageID: last.info.id,
partID: part.id,
})
const removeStart = target.parts.findIndex((part) => part.id === partID)
if (removeStart >= 0) {
const preserveParts = target.parts.slice(0, removeStart)
const removeParts = target.parts.slice(removeStart)
target.parts = preserveParts
for (const part of removeParts) {
Database.use((db) => db.delete(PartTable).where(eq(PartTable.id, part.id)).run())
await Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: sessionID,
messageID: target.info.id,
partID: part.id,
})
}
}
}
await Session.update(sessionID, (draft) => {
draft.revert = undefined
})
await Session.clearRevert(sessionID)
}
}

View File

@@ -0,0 +1,88 @@
import { sqliteTable, text, integer, index, primaryKey } from "drizzle-orm/sqlite-core"
import { ProjectTable } from "../project/project.sql"
import type { MessageV2 } from "./message-v2"
import type { Snapshot } from "@/snapshot"
import type { PermissionNext } from "@/permission/next"
import { Timestamps } from "@/storage/schema.sql"
// JSON payload shapes for the message/part tables: identifier fields are
// materialized as their own columns, so they are stripped from the stored data.
type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID">
type InfoData = Omit<MessageV2.Info, "id" | "sessionID">
// One row per chat session, owned by a project (cascade delete).
// Nested Info fields are flattened: summary_* columns hold the diff summary,
// share_url the share link; revert/permission/summary_diffs are JSON text.
export const SessionTable = sqliteTable(
  "session",
  {
    id: text().primaryKey(),
    project_id: text()
      .notNull()
      .references(() => ProjectTable.id, { onDelete: "cascade" }),
    // Parent session for forks/children; plain text, not a foreign key.
    parent_id: text(),
    slug: text().notNull(),
    directory: text().notNull(),
    title: text().notNull(),
    version: text().notNull(),
    // Set only while the session is shared.
    share_url: text(),
    summary_additions: integer(),
    summary_deletions: integer(),
    summary_files: integer(),
    summary_diffs: text({ mode: "json" }).$type<Snapshot.FileDiff[]>(),
    revert: text({ mode: "json" }).$type<{ messageID: string; partID?: string; snapshot?: string; diff?: string }>(),
    permission: text({ mode: "json" }).$type<PermissionNext.Ruleset>(),
    ...Timestamps,
    time_compacting: integer(),
    time_archived: integer(),
  },
  // Common lookups: sessions by project, child sessions by parent.
  (table) => [index("session_project_idx").on(table.project_id), index("session_parent_idx").on(table.parent_id)],
)
// One row per message. Only queryable fields (session, timestamps) are
// materialized as columns; the rest of MessageV2.Info lives in JSON `data`.
// Rows are removed automatically when the owning session is deleted.
export const MessageTable = sqliteTable(
  "message",
  {
    id: text().primaryKey(),
    session_id: text()
      .notNull()
      .references(() => SessionTable.id, { onDelete: "cascade" }),
    ...Timestamps,
    data: text({ mode: "json" }).notNull().$type<InfoData>(),
  },
  (table) => [index("message_session_idx").on(table.session_id)],
)
// One row per message part; payload in JSON `data`. Cascade-deleted with the
// owning message. session_id is denormalized (no FK) so parts can be queried
// per session without joining through the message table.
export const PartTable = sqliteTable(
  "part",
  {
    id: text().primaryKey(),
    message_id: text()
      .notNull()
      .references(() => MessageTable.id, { onDelete: "cascade" }),
    session_id: text().notNull(),
    ...Timestamps,
    data: text({ mode: "json" }).notNull().$type<PartData>(),
  },
  (table) => [index("part_message_idx").on(table.message_id), index("part_session_idx").on(table.session_id)],
)
// Ordered todo list per session. The composite primary key
// (session_id, position) both identifies a row and preserves list order;
// rows are cascade-deleted with the session.
export const TodoTable = sqliteTable(
  "todo",
  {
    session_id: text()
      .notNull()
      .references(() => SessionTable.id, { onDelete: "cascade" }),
    content: text().notNull(),
    status: text().notNull(),
    priority: text().notNull(),
    position: integer().notNull(),
    ...Timestamps,
  },
  (table) => [
    primaryKey({ columns: [table.session_id, table.position] }),
    index("todo_session_idx").on(table.session_id),
  ],
)
// Project-level permission ruleset, one row per project (keyed by project id,
// cascade-deleted with it). The full ruleset is stored as JSON.
export const PermissionTable = sqliteTable("permission", {
  project_id: text()
    .primaryKey()
    .references(() => ProjectTable.id, { onDelete: "cascade" }),
  ...Timestamps,
  data: text({ mode: "json" }).notNull().$type<PermissionNext.Ruleset>(),
})

View File

@@ -90,12 +90,13 @@ export namespace SessionSummary {
async function summarizeSession(input: { sessionID: string; messages: MessageV2.WithParts[] }) {
const diffs = await computeDiff({ messages: input.messages })
await Session.update(input.sessionID, (draft) => {
draft.summary = {
await Session.setSummary({
sessionID: input.sessionID,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
}
},
})
await Storage.write(["session_diff", input.sessionID], diffs)
Bus.publish(Session.Event.Diff, {

View File

@@ -1,7 +1,8 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import z from "zod"
import { Storage } from "../storage/storage"
import { Database, eq, asc } from "../storage/db"
import { TodoTable } from "./session.sql"
export namespace Todo {
export const Info = z
@@ -9,7 +10,6 @@ export namespace Todo {
content: z.string().describe("Brief description of the task"),
status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"),
priority: z.string().describe("Priority level of the task: high, medium, low"),
id: z.string().describe("Unique identifier for the todo item"),
})
.meta({ ref: "Todo" })
export type Info = z.infer<typeof Info>
@@ -24,14 +24,33 @@ export namespace Todo {
),
}
export async function update(input: { sessionID: string; todos: Info[] }) {
await Storage.write(["todo", input.sessionID], input.todos)
export function update(input: { sessionID: string; todos: Info[] }) {
Database.transaction((db) => {
db.delete(TodoTable).where(eq(TodoTable.session_id, input.sessionID)).run()
if (input.todos.length === 0) return
db.insert(TodoTable)
.values(
input.todos.map((todo, position) => ({
session_id: input.sessionID,
content: todo.content,
status: todo.status,
priority: todo.priority,
position,
})),
)
.run()
})
Bus.publish(Event.Updated, input)
}
export async function get(sessionID: string) {
return Storage.read<Info[]>(["todo", sessionID])
.then((x) => x || [])
.catch(() => [])
export function get(sessionID: string) {
const rows = Database.use((db) =>
db.select().from(TodoTable).where(eq(TodoTable.session_id, sessionID)).orderBy(asc(TodoTable.position)).all(),
)
return rows.map((row) => ({
content: row.content,
status: row.status,
priority: row.priority,
}))
}
}

View File

@@ -4,7 +4,8 @@ import { ulid } from "ulid"
import { Provider } from "@/provider/provider"
import { Session } from "@/session"
import { MessageV2 } from "@/session/message-v2"
import { Storage } from "@/storage/storage"
import { Database, eq } from "@/storage/db"
import { SessionShareTable } from "./share.sql"
import { Log } from "@/util/log"
import type * as SDK from "@opencode-ai/sdk/v2"
@@ -77,17 +78,26 @@ export namespace ShareNext {
})
.then((x) => x.json())
.then((x) => x as { id: string; url: string; secret: string })
await Storage.write(["session_share", sessionID], result)
Database.use((db) =>
db
.insert(SessionShareTable)
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
.onConflictDoUpdate({
target: SessionShareTable.session_id,
set: { id: result.id, secret: result.secret, url: result.url },
})
.run(),
)
fullSync(sessionID)
return result
}
function get(sessionID: string) {
return Storage.read<{
id: string
secret: string
url: string
}>(["session_share", sessionID])
const row = Database.use((db) =>
db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
)
if (!row) return
return { id: row.id, secret: row.secret, url: row.url }
}
type Data =
@@ -132,7 +142,7 @@ export namespace ShareNext {
const queued = queue.get(sessionID)
if (!queued) return
queue.delete(sessionID)
const share = await get(sessionID).catch(() => undefined)
const share = get(sessionID)
if (!share) return
await fetch(`${await url()}/api/share/${share.id}/sync`, {
@@ -152,7 +162,7 @@ export namespace ShareNext {
export async function remove(sessionID: string) {
if (disabled) return
log.info("removing share", { sessionID })
const share = await get(sessionID)
const share = get(sessionID)
if (!share) return
await fetch(`${await url()}/api/share/${share.id}`, {
method: "DELETE",
@@ -163,7 +173,7 @@ export namespace ShareNext {
secret: share.secret,
}),
})
await Storage.remove(["session_share", sessionID])
Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
}
async function fullSync(sessionID: string) {

View File

@@ -0,0 +1,13 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { SessionTable } from "../session/session.sql"
import { Timestamps } from "@/storage/schema.sql"
// Share metadata for a session: remote share id, secret used to authorize
// sync/delete calls, and the public URL. Keyed by session id and removed
// automatically when the session is deleted.
export const SessionShareTable = sqliteTable("session_share", {
  session_id: text()
    .primaryKey()
    .references(() => SessionTable.id, { onDelete: "cascade" }),
  id: text().notNull(),
  secret: text().notNull(),
  url: text().notNull(),
  ...Timestamps,
})

View File

@@ -1,92 +0,0 @@
import { Bus } from "../bus"
import { Installation } from "../installation"
import { Session } from "../session"
import { MessageV2 } from "../session/message-v2"
import { Log } from "../util/log"
// Pushes session/message/part updates to the share API so shared sessions
// stay in sync. Disabled entirely via OPENCODE_DISABLE_SHARE.
export namespace Share {
  const log = Log.create({ service: "share" })
  // Serializes uploads: every sync chains onto this promise.
  let queue: Promise<void> = Promise.resolve()
  // Latest content per key; bursts coalesce so only the newest value is sent.
  const pending = new Map<string, any>()
  // Queue `content` for upload under `key` ("session/<sub>/<sessionID>/...").
  // No-ops when sharing is disabled, the key is not session data, the key is
  // the share record itself, or the session has no share (no secret).
  export async function sync(key: string, content: any) {
    if (disabled) return
    const [root, ...splits] = key.split("/")
    if (root !== "session") return
    const [sub, sessionID] = splits
    if (sub === "share") return
    // Swallow lookup errors: an unshared session simply skips the sync.
    const share = await Session.getShare(sessionID).catch(() => {})
    if (!share) return
    const { secret } = share
    pending.set(key, content)
    queue = queue
      .then(async () => {
        // Re-read at send time: a later sync for the same key may have
        // replaced (or already consumed) the pending value.
        const content = pending.get(key)
        if (content === undefined) return
        pending.delete(key)
        return fetch(`${URL}/share_sync`, {
          method: "POST",
          body: JSON.stringify({
            sessionID: sessionID,
            secret,
            key: key,
            content,
          }),
        })
      })
      .then((x) => {
        if (x) {
          log.info("synced", {
            key: key,
            status: x.status,
          })
        }
      })
  }
  // Subscribe to bus events so session/message/part updates are mirrored
  // to the share backend.
  export function init() {
    Bus.subscribe(Session.Event.Updated, async (evt) => {
      await sync("session/info/" + evt.properties.info.id, evt.properties.info)
    })
    Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
      await sync("session/message/" + evt.properties.info.sessionID + "/" + evt.properties.info.id, evt.properties.info)
    })
    Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
      await sync(
        "session/part/" +
          evt.properties.part.sessionID +
          "/" +
          evt.properties.part.messageID +
          "/" +
          evt.properties.part.id,
        evt.properties.part,
      )
    })
  }
  // API base: env override first, otherwise dev API for preview/local builds.
  export const URL =
    process.env["OPENCODE_API"] ??
    (Installation.isPreview() || Installation.isLocal() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")
  const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
  // Create a share for a session; returns empty url/secret when disabled.
  export async function create(sessionID: string) {
    if (disabled) return { url: "", secret: "" }
    return fetch(`${URL}/share_create`, {
      method: "POST",
      body: JSON.stringify({ sessionID: sessionID }),
    })
      .then((x) => x.json())
      .then((x) => x as { url: string; secret: string })
  }
  // Delete a share; the secret authorizes the removal.
  export async function remove(sessionID: string, secret: string) {
    if (disabled) return {}
    return fetch(`${URL}/share_delete`, {
      method: "POST",
      body: JSON.stringify({ sessionID, secret }),
    }).then((x) => x.json())
  }
}

4
packages/opencode/src/sql.d.ts vendored Normal file
View File

@@ -0,0 +1,4 @@
// Lets TypeScript accept imports of raw .sql files; the bundler inlines
// their contents as a plain string default export.
declare module "*.sql" {
  const content: string
  export default content
}

View File

@@ -0,0 +1,140 @@
import { Database as BunDatabase } from "bun:sqlite"
import { drizzle, type SQLiteBunDatabase } from "drizzle-orm/bun-sqlite"
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
import { type SQLiteTransaction } from "drizzle-orm/sqlite-core"
export * from "drizzle-orm"
import { Context } from "../util/context"
import { lazy } from "../util/lazy"
import { Global } from "../global"
import { Log } from "../util/log"
import { NamedError } from "@opencode-ai/util/error"
import z from "zod"
import path from "path"
import { readFileSync, readdirSync } from "fs"
import fs from "fs/promises"
import { Instance } from "@/project/instance"
declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined
// Raised when a database lookup finds no matching row.
export const NotFoundError = NamedError.create(
  "NotFoundError",
  z.object({
    message: z.string(),
  }),
)

const log = Log.create({ service: "db" })
export namespace Database {
  export type Transaction = SQLiteTransaction<"sync", void, Record<string, never>, Record<string, never>>

  type Client = SQLiteBunDatabase
  type Journal = { sql: string; timestamp: number }[]

  // Parse the leading YYYYMMDDHHMMSS timestamp from a migration directory
  // name into epoch millis; returns 0 (sorts first) when the name has no
  // timestamp prefix.
  function time(tag: string) {
    const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(tag)
    if (!match) return 0
    return Date.UTC(
      Number(match[1]),
      Number(match[2]) - 1,
      Number(match[3]),
      Number(match[4]),
      Number(match[5]),
      Number(match[6]),
    )
  }

  // Dev-mode migration discovery: read each subdirectory's migration.sql
  // under `dir` and return them sorted by the timestamp encoded in the
  // directory name. Bundled builds inline migrations via OPENCODE_MIGRATIONS
  // instead.
  function migrations(dir: string): Journal {
    const dirs = readdirSync(dir, { withFileTypes: true })
      .filter((entry) => entry.isDirectory())
      .map((entry) => entry.name)
    const sql = dirs
      .map((name) => {
        const file = path.join(dir, name, "migration.sql")
        // Skip missing or empty migration files.
        if (!Bun.file(file).size) return
        return {
          sql: readFileSync(file, "utf-8"),
          timestamp: time(name),
        }
      })
      .filter(Boolean) as Journal
    return sql.sort((a, b) => a.timestamp - b.timestamp)
  }

  // Lazily-opened singleton drizzle client over the global opencode.db.
  // Opens the file, applies pragmas, and runs any pending schema migrations
  // on first use.
  export const Client = lazy(() => {
    log.info("opening database", { path: path.join(Global.Path.data, "opencode.db") })
    const sqlite = new BunDatabase(path.join(Global.Path.data, "opencode.db"), { create: true })
    sqlite.run("PRAGMA journal_mode = WAL")
    sqlite.run("PRAGMA synchronous = NORMAL")
    sqlite.run("PRAGMA busy_timeout = 5000")
    sqlite.run("PRAGMA cache_size = -64000")
    sqlite.run("PRAGMA foreign_keys = ON")
    const db = drizzle({ client: sqlite })
    // Apply schema migrations
    const entries =
      typeof OPENCODE_MIGRATIONS !== "undefined"
        ? OPENCODE_MIGRATIONS
        : migrations(path.join(import.meta.dirname, "../../migration"))
    if (entries.length > 0) {
      log.info("applying migrations", {
        count: entries.length,
        mode: typeof OPENCODE_MIGRATIONS !== "undefined" ? "bundled" : "dev",
      })
      migrate(db, entries)
    }
    return db
  })

  export type TxOrDb = Transaction | Client

  // Ambient database context: the active transaction (or root client) plus
  // effects deferred until the outermost scope completes.
  const ctx = Context.create<{
    tx: TxOrDb
    effects: (() => void | Promise<void>)[]
  }>("database")

  /**
   * Run `callback` against the ambient transaction when one is active,
   * otherwise against the root client inside a fresh context.
   * When this call establishes the context, deferred effects are flushed
   * after the callback returns (fire-and-forget — async effects are
   * intentionally not awaited since `use` is synchronous).
   */
  export function use<T>(callback: (trx: TxOrDb) => T): T {
    try {
      return callback(ctx.use().tx)
    } catch (err) {
      // Context.NotFound means no ambient scope exists yet; provide one.
      if (err instanceof Context.NotFound) {
        const effects: (() => void | Promise<void>)[] = []
        const result = ctx.provide({ effects, tx: Client() }, () => callback(Client()))
        for (const effect of effects) effect()
        return result
      }
      throw err
    }
  }

  /**
   * Defer `fn` until the enclosing database scope completes (e.g. after the
   * transaction commits); runs immediately when no scope is active.
   */
  export function effect(fn: () => any | Promise<any>) {
    try {
      ctx.use().effects.push(fn)
    } catch {
      fn()
    }
  }

  /**
   * Run `callback` inside a transaction. Reuses the ambient transaction when
   * one is already active (no nesting); otherwise opens a new transaction on
   * the root client, and flushes deferred effects after it completes.
   */
  export function transaction<T>(callback: (tx: TxOrDb) => T): T {
    try {
      return callback(ctx.use().tx)
    } catch (err) {
      if (err instanceof Context.NotFound) {
        const effects: (() => void | Promise<void>)[] = []
        const result = Client().transaction((tx) => {
          return ctx.provide({ tx, effects }, () => callback(tx))
        })
        for (const effect of effects) effect()
        return result
      }
      throw err
    }
  }
}

View File

@@ -0,0 +1,437 @@
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { Global } from "../global"
import { Log } from "../util/log"
import { ProjectTable } from "../project/project.sql"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql"
import { SessionShareTable } from "../share/share.sql"
import path from "path"
import { existsSync } from "fs"
export namespace JsonMigration {
  const log = Log.create({ service: "json-migration" })

  // Progress event emitted via Options.progress as batches are migrated.
  export type Progress = {
    current: number
    total: number
    label: string
  }

  type Options = {
    progress?: (event: Progress) => void
  }

  /**
   * One-shot migration of the legacy JSON file storage under
   * $DATA/storage into the given SQLite database.
   *
   * Migrates in foreign-key dependency order (projects → sessions →
   * messages → parts → todos/permissions/shares), skipping orphans whose
   * parent rows are missing. Inserts use onConflictDoNothing so re-running
   * is safe. Returns per-table counts plus any per-file errors collected
   * along the way.
   *
   * NOTE(review): pragmas (synchronous=OFF etc.) are tuned for bulk load and
   * not restored afterwards, and a throw between BEGIN and COMMIT would leave
   * the transaction open — callers are expected to treat this connection as
   * migration-only until it is reopened; confirm against the call site.
   */
  export async function run(sqlite: Database, options?: Options) {
    const storageDir = path.join(Global.Path.data, "storage")
    if (!existsSync(storageDir)) {
      log.info("storage directory does not exist, skipping migration")
      return {
        projects: 0,
        sessions: 0,
        messages: 0,
        parts: 0,
        todos: 0,
        permissions: 0,
        shares: 0,
        errors: [] as string[],
      }
    }
    log.info("starting json to sqlite migration", { storageDir })
    const start = performance.now()
    const db = drizzle({ client: sqlite })
    // Optimize SQLite for bulk inserts
    sqlite.exec("PRAGMA journal_mode = WAL")
    sqlite.exec("PRAGMA synchronous = OFF")
    sqlite.exec("PRAGMA cache_size = 10000")
    sqlite.exec("PRAGMA temp_store = MEMORY")
    const stats = {
      projects: 0,
      sessions: 0,
      messages: 0,
      parts: 0,
      todos: 0,
      permissions: 0,
      shares: 0,
      errors: [] as string[],
    }
    // Rows skipped because their parent project/session no longer exists.
    const orphans = {
      sessions: 0,
      todos: 0,
      permissions: 0,
      shares: 0,
    }
    const errs = stats.errors
    const batchSize = 1000
    const now = Date.now()

    // Collect all files matching a glob pattern relative to storageDir.
    async function list(pattern: string) {
      const items: string[] = []
      const scan = new Bun.Glob(pattern)
      for await (const file of scan.scan({ cwd: storageDir, absolute: true })) {
        items.push(file)
      }
      return items
    }

    // Parse files[start..end) as JSON in parallel. Failed reads leave their
    // slot undefined and append to errs. (`start` here shadows the outer
    // performance timer on purpose — it is a slice index.)
    async function read(files: string[], start: number, end: number) {
      const count = end - start
      const tasks = new Array(count)
      for (let i = 0; i < count; i++) {
        tasks[i] = Bun.file(files[start + i]).json()
      }
      const results = await Promise.allSettled(tasks)
      const items = new Array(count)
      for (let i = 0; i < results.length; i++) {
        const result = results[i]
        if (result.status === "fulfilled") {
          items[i] = result.value
          continue
        }
        errs.push(`failed to read ${files[start + i]}: ${result.reason}`)
      }
      return items
    }

    // Bulk-insert one batch; returns rows attempted (0 on failure, with the
    // error recorded). onConflictDoNothing makes re-runs idempotent.
    function insert(values: any[], table: any, label: string) {
      if (values.length === 0) return 0
      try {
        db.insert(table).values(values).onConflictDoNothing().run()
        return values.length
      } catch (e) {
        errs.push(`failed to migrate ${label} batch: ${e}`)
        return 0
      }
    }

    // Pre-scan all files upfront to avoid repeated glob operations
    log.info("scanning files...")
    const [projectFiles, sessionFiles, messageFiles, partFiles, todoFiles, permFiles, shareFiles] = await Promise.all([
      list("project/*.json"),
      list("session/*/*.json"),
      list("message/*/*.json"),
      list("part/*/*.json"),
      list("todo/*.json"),
      list("permission/*.json"),
      list("session_share/*.json"),
    ])
    log.info("file scan complete", {
      projects: projectFiles.length,
      sessions: sessionFiles.length,
      messages: messageFiles.length,
      parts: partFiles.length,
      todos: todoFiles.length,
      permissions: permFiles.length,
      shares: shareFiles.length,
    })
    // Guard against division-by-zero style progress on an empty store.
    const total = Math.max(
      1,
      projectFiles.length +
        sessionFiles.length +
        messageFiles.length +
        partFiles.length +
        todoFiles.length +
        permFiles.length +
        shareFiles.length,
    )
    const progress = options?.progress
    let current = 0
    const step = (label: string, count: number) => {
      current = Math.min(total, current + count)
      progress?.({ current, total, label })
    }
    progress?.({ current, total, label: "starting" })
    sqlite.exec("BEGIN TRANSACTION")

    // Migrate projects first (no FK deps)
    const projectIds = new Set<string>()
    const projectValues = [] as any[]
    for (let i = 0; i < projectFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, projectFiles.length)
      const batch = await read(projectFiles, i, end)
      projectValues.length = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        if (!data?.id) {
          errs.push(`project missing id: ${projectFiles[i + j]}`)
          continue
        }
        projectIds.add(data.id)
        projectValues.push({
          id: data.id,
          worktree: data.worktree ?? "/",
          vcs: data.vcs,
          name: data.name ?? undefined,
          icon_url: data.icon?.url,
          icon_color: data.icon?.color,
          time_created: data.time?.created ?? now,
          time_updated: data.time?.updated ?? now,
          time_initialized: data.time?.initialized,
          sandboxes: data.sandboxes ?? [],
          commands: data.commands,
        })
      }
      stats.projects += insert(projectValues, ProjectTable, "project")
      step("projects", end - i)
    }
    log.info("migrated projects", { count: stats.projects, duration: Math.round(performance.now() - start) })

    // Migrate sessions (depends on projects)
    const sessionIds = new Set<string>()
    const sessionValues = [] as any[]
    for (let i = 0; i < sessionFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, sessionFiles.length)
      const batch = await read(sessionFiles, i, end)
      sessionValues.length = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        if (!data?.id || !data?.projectID) {
          errs.push(`session missing id or projectID: ${sessionFiles[i + j]}`)
          continue
        }
        if (!projectIds.has(data.projectID)) {
          orphans.sessions++
          continue
        }
        sessionIds.add(data.id)
        sessionValues.push({
          id: data.id,
          project_id: data.projectID,
          parent_id: data.parentID ?? null,
          slug: data.slug ?? "",
          directory: data.directory ?? "",
          title: data.title ?? "",
          version: data.version ?? "",
          share_url: data.share?.url ?? null,
          summary_additions: data.summary?.additions ?? null,
          summary_deletions: data.summary?.deletions ?? null,
          summary_files: data.summary?.files ?? null,
          summary_diffs: data.summary?.diffs ?? null,
          revert: data.revert ?? null,
          permission: data.permission ?? null,
          time_created: data.time?.created ?? now,
          time_updated: data.time?.updated ?? now,
          time_compacting: data.time?.compacting ?? null,
          time_archived: data.time?.archived ?? null,
        })
      }
      stats.sessions += insert(sessionValues, SessionTable, "session")
      step("sessions", end - i)
    }
    log.info("migrated sessions", { count: stats.sessions })
    if (orphans.sessions > 0) {
      log.warn("skipped orphaned sessions", { count: orphans.sessions })
    }

    // Migrate messages using pre-scanned file map
    // (message files live at message/<sessionID>/<messageID>.json)
    const allMessageFiles = [] as string[]
    const allMessageSessions = [] as string[]
    const messageSessions = new Map<string, string>()
    for (const file of messageFiles) {
      const sessionID = path.basename(path.dirname(file))
      if (!sessionIds.has(sessionID)) continue
      allMessageFiles.push(file)
      allMessageSessions.push(sessionID)
    }
    for (let i = 0; i < allMessageFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, allMessageFiles.length)
      const batch = await read(allMessageFiles, i, end)
      const values = new Array(batch.length)
      let count = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const file = allMessageFiles[i + j]
        const id = data.id ?? path.basename(file, ".json")
        if (!id) {
          errs.push(`message missing id: ${file}`)
          continue
        }
        const sessionID = allMessageSessions[i + j]
        messageSessions.set(id, sessionID)
        // Strip normalized columns from the payload; mutating the parsed
        // object is fine since each batch entry is discarded after insert.
        const rest = data
        delete rest.id
        delete rest.sessionID
        values[count++] = {
          id,
          session_id: sessionID,
          time_created: data.time?.created ?? now,
          time_updated: data.time?.updated ?? now,
          data: rest,
        }
      }
      values.length = count
      stats.messages += insert(values, MessageTable, "message")
      step("messages", end - i)
    }
    log.info("migrated messages", { count: stats.messages })

    // Migrate parts using pre-scanned file map
    for (let i = 0; i < partFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, partFiles.length)
      const batch = await read(partFiles, i, end)
      const values = new Array(batch.length)
      let count = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const file = partFiles[i + j]
        const id = data.id ?? path.basename(file, ".json")
        const messageID = data.messageID ?? path.basename(path.dirname(file))
        if (!id || !messageID) {
          // Only id/messageID are validated here; a missing session is
          // reported separately below.
          errs.push(`part missing id/messageID: ${file}`)
          continue
        }
        const sessionID = messageSessions.get(messageID)
        if (!sessionID) {
          errs.push(`part missing message session: ${file}`)
          continue
        }
        if (!sessionIds.has(sessionID)) continue
        const rest = data
        delete rest.id
        delete rest.messageID
        delete rest.sessionID
        values[count++] = {
          id,
          message_id: messageID,
          session_id: sessionID,
          time_created: data.time?.created ?? now,
          time_updated: data.time?.updated ?? now,
          data: rest,
        }
      }
      values.length = count
      stats.parts += insert(values, PartTable, "part")
      step("parts", end - i)
    }
    log.info("migrated parts", { count: stats.parts })

    // Migrate todos
    // (todo files are keyed by session: todo/<sessionID>.json holds an array)
    const todoSessions = todoFiles.map((file) => path.basename(file, ".json"))
    for (let i = 0; i < todoFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, todoFiles.length)
      const batch = await read(todoFiles, i, end)
      const values = [] as any[]
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const sessionID = todoSessions[i + j]
        if (!sessionIds.has(sessionID)) {
          orphans.todos++
          continue
        }
        if (!Array.isArray(data)) {
          errs.push(`todo not an array: ${todoFiles[i + j]}`)
          continue
        }
        for (let position = 0; position < data.length; position++) {
          const todo = data[position]
          if (!todo?.content || !todo?.status || !todo?.priority) continue
          values.push({
            session_id: sessionID,
            content: todo.content,
            status: todo.status,
            priority: todo.priority,
            position,
            time_created: now,
            time_updated: now,
          })
        }
      }
      stats.todos += insert(values, TodoTable, "todo")
      step("todos", end - i)
    }
    log.info("migrated todos", { count: stats.todos })
    if (orphans.todos > 0) {
      log.warn("skipped orphaned todos", { count: orphans.todos })
    }

    // Migrate permissions
    // (permission files are keyed by project: permission/<projectID>.json)
    const permProjects = permFiles.map((file) => path.basename(file, ".json"))
    const permValues = [] as any[]
    for (let i = 0; i < permFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, permFiles.length)
      const batch = await read(permFiles, i, end)
      permValues.length = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const projectID = permProjects[i + j]
        if (!projectIds.has(projectID)) {
          orphans.permissions++
          continue
        }
        permValues.push({ project_id: projectID, data })
      }
      stats.permissions += insert(permValues, PermissionTable, "permission")
      step("permissions", end - i)
    }
    log.info("migrated permissions", { count: stats.permissions })
    if (orphans.permissions > 0) {
      log.warn("skipped orphaned permissions", { count: orphans.permissions })
    }

    // Migrate session shares
    const shareSessions = shareFiles.map((file) => path.basename(file, ".json"))
    const shareValues = [] as any[]
    for (let i = 0; i < shareFiles.length; i += batchSize) {
      const end = Math.min(i + batchSize, shareFiles.length)
      const batch = await read(shareFiles, i, end)
      shareValues.length = 0
      for (let j = 0; j < batch.length; j++) {
        const data = batch[j]
        if (!data) continue
        const sessionID = shareSessions[i + j]
        if (!sessionIds.has(sessionID)) {
          orphans.shares++
          continue
        }
        if (!data?.id || !data?.secret || !data?.url) {
          errs.push(`session_share missing id/secret/url: ${shareFiles[i + j]}`)
          continue
        }
        shareValues.push({ session_id: sessionID, id: data.id, secret: data.secret, url: data.url })
      }
      stats.shares += insert(shareValues, SessionShareTable, "session_share")
      step("shares", end - i)
    }
    log.info("migrated session shares", { count: stats.shares })
    if (orphans.shares > 0) {
      log.warn("skipped orphaned session shares", { count: orphans.shares })
    }

    sqlite.exec("COMMIT")
    log.info("json migration complete", {
      projects: stats.projects,
      sessions: stats.sessions,
      messages: stats.messages,
      parts: stats.parts,
      todos: stats.todos,
      permissions: stats.permissions,
      shares: stats.shares,
      errorCount: stats.errors.length,
      duration: Math.round(performance.now() - start),
    })
    if (stats.errors.length > 0) {
      log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
    }
    progress?.({ current: total, total, label: "complete" })
    return stats
  }
}

View File

@@ -0,0 +1,10 @@
import { integer } from "drizzle-orm/sqlite-core"
// Shared created/updated timestamp columns (epoch milliseconds) mixed into
// each table definition via object spread.
export const Timestamps = {
  time_created: integer()
    .notNull()
    .$default(() => Date.now()),
  // NOTE(review): no $default here — relies on drizzle also invoking
  // $onUpdate on insert when no default is set; confirm against the
  // drizzle-orm version in use.
  time_updated: integer()
    .notNull()
    .$onUpdate(() => Date.now()),
}

View File

@@ -2,9 +2,9 @@ Performs exact string replacements in files.
Usage:
- You must use your `Read` tool at least once in the conversation before editing. This tool will error if you attempt an edit without reading the file.
- When editing text from Read tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: spaces + line number + tab. Everything after that tab is the actual file content to match. Never include any part of the line number prefix in the oldString or newString.
- When editing text from Read tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: line number + colon + space (e.g., `1: `). Everything after that space is the actual file content to match. Never include any part of the line number prefix in the oldString or newString.
- ALWAYS prefer editing existing files in the codebase. NEVER write new files unless explicitly required.
- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
- The edit will FAIL if `oldString` is not found in the file with an error "oldString not found in content".
- The edit will FAIL if `oldString` is found multiple times in the file with an error "oldString found multiple times and requires more code context to uniquely identify the intended match". Either provide a larger string with more surrounding context to make it unique or use `replaceAll` to change every instance of `oldString`.
- The edit will FAIL if `oldString` is found multiple times in the file with an error "Found multiple matches for oldString. Provide more surrounding lines in oldString to identify the correct match." Either provide a larger string with more surrounding context to make it unique or use `replaceAll` to change every instance of `oldString`.
- Use `replaceAll` for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.

View File

@@ -17,9 +17,9 @@ const MAX_BYTES = 50 * 1024
export const ReadTool = Tool.define("read", {
description: DESCRIPTION,
parameters: z.object({
filePath: z.string().describe("The path to the file to read"),
offset: z.coerce.number().describe("The line number to start reading from (0-based)").optional(),
limit: z.coerce.number().describe("The number of lines to read (defaults to 2000)").optional(),
filePath: z.string().describe("The absolute path to the file or directory to read"),
offset: z.coerce.number().describe("The 0-based line offset to start reading from").optional(),
limit: z.coerce.number().describe("The maximum number of lines to read (defaults to 2000)").optional(),
}),
async execute(params, ctx) {
let filepath = params.filePath
@@ -28,8 +28,12 @@ export const ReadTool = Tool.define("read", {
}
const title = path.relative(Instance.worktree, filepath)
const file = Bun.file(filepath)
const stat = await file.stat().catch(() => undefined)
await assertExternalDirectory(ctx, filepath, {
bypass: Boolean(ctx.extra?.["bypassCwdCheck"]),
kind: stat?.isDirectory() ? "directory" : "file",
})
await ctx.ask({
@@ -39,8 +43,7 @@ export const ReadTool = Tool.define("read", {
metadata: {},
})
const file = Bun.file(filepath)
if (!(await file.exists())) {
if (!stat) {
const dir = path.dirname(filepath)
const base = path.basename(filepath)
@@ -60,6 +63,47 @@ export const ReadTool = Tool.define("read", {
throw new Error(`File not found: ${filepath}`)
}
if (stat.isDirectory()) {
const dirents = await fs.promises.readdir(filepath, { withFileTypes: true })
const entries = await Promise.all(
dirents.map(async (dirent) => {
if (dirent.isDirectory()) return dirent.name + "/"
if (dirent.isSymbolicLink()) {
const target = await fs.promises.stat(path.join(filepath, dirent.name)).catch(() => undefined)
if (target?.isDirectory()) return dirent.name + "/"
}
return dirent.name
}),
)
entries.sort((a, b) => a.localeCompare(b))
const limit = params.limit ?? DEFAULT_READ_LIMIT
const offset = params.offset || 0
const sliced = entries.slice(offset, offset + limit)
const truncated = offset + sliced.length < entries.length
const output = [
`<path>${filepath}</path>`,
`<type>directory</type>`,
`<entries>`,
sliced.join("\n"),
truncated
? `\n(Showing ${sliced.length} of ${entries.length} entries. Use 'offset' parameter to read beyond entry ${offset + sliced.length})`
: `\n(${entries.length} entries)`,
`</entries>`,
].join("\n")
return {
title,
output,
metadata: {
preview: sliced.slice(0, 20).join("\n"),
truncated,
loaded: [] as string[],
},
}
}
const instructions = await InstructionPrompt.resolve(ctx.messages, filepath, ctx.messageID)
// Exclude SVG (XML-based) and vnd.fastbidsheet (.fbs extension, commonly FlatBuffers schema files)
@@ -75,7 +119,7 @@ export const ReadTool = Tool.define("read", {
metadata: {
preview: msg,
truncated: false,
...(instructions.length > 0 && { loaded: instructions.map((i) => i.filepath) }),
loaded: instructions.map((i) => i.filepath),
},
attachments: [
{
@@ -112,11 +156,11 @@ export const ReadTool = Tool.define("read", {
}
const content = raw.map((line, index) => {
return `${(index + offset + 1).toString().padStart(5, "0")}| ${line}`
return `${index + offset + 1}: ${line}`
})
const preview = raw.slice(0, 20).join("\n")
let output = "<file>\n"
let output = [`<path>${filepath}</path>`, `<type>file</type>`, "<content>"].join("\n")
output += content.join("\n")
const totalLines = lines.length
@@ -131,7 +175,7 @@ export const ReadTool = Tool.define("read", {
} else {
output += `\n\n(End of file - total ${totalLines} lines)`
}
output += "\n</file>"
output += "\n</content>"
// just warms the lsp client
LSP.touchFile(filepath, false)
@@ -147,7 +191,7 @@ export const ReadTool = Tool.define("read", {
metadata: {
preview,
truncated,
...(instructions.length > 0 && { loaded: instructions.map((i) => i.filepath) }),
loaded: instructions.map((i) => i.filepath),
},
}
},

View File

@@ -1,12 +1,13 @@
Reads a file from the local filesystem. You can access any file directly by using this tool.
Assume this tool is able to read all files on the machine. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned.
Read a file or directory from the local filesystem. If the path does not exist, an error is returned.
Usage:
- The filePath parameter must be an absolute path, not a relative path
- By default, it reads up to 2000 lines starting from the beginning of the file
- You can optionally specify a line offset and limit (especially handy for long files), but it's recommended to read the whole file by not providing these parameters
- Any lines longer than 2000 characters will be truncated
- Results are returned using cat -n format, with line numbers starting at 1
- You have the capability to call multiple tools in a single response. It is always better to speculatively read multiple files as a batch that are potentially useful.
- If you read a file that exists but has empty contents you will receive a system reminder warning in place of file contents.
- You can read image files using this tool.
- The filePath parameter should be an absolute path.
- By default, this tool returns up to 2000 lines from the start of the file.
- To read later sections, call this tool again with a larger offset.
- Use the grep tool to find specific content in large files or files with long lines.
- If you are unsure of the correct file path, use the glob tool to look up filenames by glob pattern.
- Contents are returned with each line prefixed by its line number as `<line>: <content>`. For example, if a file has contents "foo\n", you will receive "1: foo\n". For directories, entries are returned one per line (without line numbers) with a trailing `/` for subdirectories.
- Any line longer than 2000 characters is truncated.
- Call this tool in parallel when you know there are multiple files you want to read.
- Avoid tiny repeated slices (30 line chunks). If you need more context, read a larger window.
- This tool can read image files and PDFs and return them as file attachments.

View File

@@ -114,7 +114,7 @@ export namespace ToolRegistry {
ApplyPatchTool,
...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [LspTool] : []),
...(config.experimental?.batch_tool === true ? [BatchTool] : []),
...(Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli" ? [PlanExitTool, PlanEnterTool] : []),
...(Flag.OPENCODE_CLIENT === "cli" ? [PlanExitTool, PlanEnterTool] : []),
...custom,
]
}

View File

@@ -4,9 +4,14 @@ export function lazy<T>(fn: () => T) {
const result = (): T => {
if (loaded) return value as T
loaded = true
value = fn()
return value as T
try {
value = fn()
loaded = true
return value as T
} catch (e) {
// Don't mark as loaded if initialization failed
throw e
}
}
result.reset = () => {

View File

@@ -7,7 +7,8 @@ import { Global } from "../global"
import { Instance } from "../project/instance"
import { InstanceBootstrap } from "../project/bootstrap"
import { Project } from "../project/project"
import { Storage } from "../storage/storage"
import { Database, eq } from "../storage/db"
import { ProjectTable } from "../project/project.sql"
import { fn } from "../util/fn"
import { Log } from "../util/log"
import { BusEvent } from "@/bus/bus-event"
@@ -307,7 +308,8 @@ export namespace Worktree {
}
async function runStartScripts(directory: string, input: { projectID: string; extra?: string }) {
const project = await Storage.read<Project.Info>(["project", input.projectID]).catch(() => undefined)
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get())
const project = row ? Project.fromRow(row) : undefined
const startup = project?.commands?.start?.trim() ?? ""
const ok = await runStartScript(directory, startup, "project")
if (!ok) return false

View File

@@ -122,12 +122,20 @@ function createFakeAgent() {
messages: async () => {
return { data: [] }
},
message: async () => {
message: async (params?: any) => {
// Return a message with parts that can be looked up by partID
return {
data: {
info: {
role: "assistant",
},
parts: [
{
id: params?.messageID ? `${params.messageID}_part` : "part_1",
type: "text",
text: "",
},
],
},
}
},
@@ -193,7 +201,7 @@ function createFakeAgent() {
}
describe("acp.agent event subscription", () => {
test("routes message.part.updated by the event sessionID (no cross-session pollution)", async () => {
test("routes message.part.delta by the event sessionID (no cross-session pollution)", async () => {
await using tmp = await tmpdir()
await Instance.provide({
directory: tmp.path,
@@ -207,14 +215,12 @@ describe("acp.agent event subscription", () => {
controller.push({
directory: cwd,
payload: {
type: "message.part.updated",
type: "message.part.delta",
properties: {
part: {
sessionID: sessionB,
messageID: "msg_1",
type: "text",
synthetic: false,
},
sessionID: sessionB,
messageID: "msg_1",
partID: "msg_1_part",
field: "text",
delta: "hello",
},
},
@@ -230,7 +236,7 @@ describe("acp.agent event subscription", () => {
})
})
test("keeps concurrent sessions isolated when message.part.updated events are interleaved", async () => {
test("keeps concurrent sessions isolated when message.part.delta events are interleaved", async () => {
await using tmp = await tmpdir()
await Instance.provide({
directory: tmp.path,
@@ -248,14 +254,12 @@ describe("acp.agent event subscription", () => {
controller.push({
directory: cwd,
payload: {
type: "message.part.updated",
type: "message.part.delta",
properties: {
part: {
sessionID: sessionId,
messageID,
type: "text",
synthetic: false,
},
sessionID: sessionId,
messageID,
partID: `${messageID}_part`,
field: "text",
delta,
},
},
@@ -402,14 +406,12 @@ describe("acp.agent event subscription", () => {
controller.push({
directory: cwd,
payload: {
type: "message.part.updated",
type: "message.part.delta",
properties: {
part: {
sessionID: sessionB,
messageID: "msg_b",
type: "text",
synthetic: false,
},
sessionID: sessionB,
messageID: "msg_b",
partID: "msg_b_part",
field: "text",
delta: "session_b_message",
},
},

View File

@@ -2,7 +2,6 @@ import { test, expect } from "bun:test"
import os from "os"
import { PermissionNext } from "../../src/permission/next"
import { Instance } from "../../src/project/instance"
import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture"
// fromConfig tests

View File

@@ -1,63 +1,70 @@
// IMPORTANT: Set env vars BEFORE any imports from src/ directory
// xdg-basedir reads env vars at import time, so we must set these first
import os from "os"
import path from "path"
import fs from "fs/promises"
import fsSync from "fs"
import { afterAll } from "bun:test"
import os from "os";
import path from "path";
import fs from "fs/promises";
import fsSync from "fs";
import { afterAll } from "bun:test";
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid)
await fs.mkdir(dir, { recursive: true })
// Set XDG env vars FIRST, before any src/ imports
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid);
await fs.mkdir(dir, { recursive: true });
afterAll(() => {
fsSync.rmSync(dir, { recursive: true, force: true })
})
fsSync.rmSync(dir, { recursive: true, force: true });
});
process.env["XDG_DATA_HOME"] = path.join(dir, "share");
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache");
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config");
process.env["XDG_STATE_HOME"] = path.join(dir, "state");
process.env["OPENCODE_MODELS_PATH"] = path.join(
import.meta.dir,
"tool",
"fixtures",
"models-api.json",
);
// Set test home directory to isolate tests from user's actual home directory
// This prevents tests from picking up real user configs/skills from ~/.claude/skills
const testHome = path.join(dir, "home")
await fs.mkdir(testHome, { recursive: true })
process.env["OPENCODE_TEST_HOME"] = testHome
const testHome = path.join(dir, "home");
await fs.mkdir(testHome, { recursive: true });
process.env["OPENCODE_TEST_HOME"] = testHome;
// Set test managed config directory to isolate tests from system managed settings
const testManagedConfigDir = path.join(dir, "managed")
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir
process.env["XDG_DATA_HOME"] = path.join(dir, "share")
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache")
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config")
process.env["XDG_STATE_HOME"] = path.join(dir, "state")
process.env["OPENCODE_MODELS_PATH"] = path.join(import.meta.dir, "tool", "fixtures", "models-api.json")
const testManagedConfigDir = path.join(dir, "managed");
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir;
// Write the cache version file to prevent global/index.ts from clearing the cache
const cacheDir = path.join(dir, "cache", "opencode")
await fs.mkdir(cacheDir, { recursive: true })
await fs.writeFile(path.join(cacheDir, "version"), "14")
const cacheDir = path.join(dir, "cache", "opencode");
await fs.mkdir(cacheDir, { recursive: true });
await fs.writeFile(path.join(cacheDir, "version"), "14");
// Clear provider env vars to ensure clean test state
delete process.env["ANTHROPIC_API_KEY"]
delete process.env["OPENAI_API_KEY"]
delete process.env["GOOGLE_API_KEY"]
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"]
delete process.env["AZURE_OPENAI_API_KEY"]
delete process.env["AWS_ACCESS_KEY_ID"]
delete process.env["AWS_PROFILE"]
delete process.env["AWS_REGION"]
delete process.env["AWS_BEARER_TOKEN_BEDROCK"]
delete process.env["OPENROUTER_API_KEY"]
delete process.env["GROQ_API_KEY"]
delete process.env["MISTRAL_API_KEY"]
delete process.env["PERPLEXITY_API_KEY"]
delete process.env["TOGETHER_API_KEY"]
delete process.env["XAI_API_KEY"]
delete process.env["DEEPSEEK_API_KEY"]
delete process.env["FIREWORKS_API_KEY"]
delete process.env["CEREBRAS_API_KEY"]
delete process.env["SAMBANOVA_API_KEY"]
delete process.env["ANTHROPIC_API_KEY"];
delete process.env["OPENAI_API_KEY"];
delete process.env["GOOGLE_API_KEY"];
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"];
delete process.env["AZURE_OPENAI_API_KEY"];
delete process.env["AWS_ACCESS_KEY_ID"];
delete process.env["AWS_PROFILE"];
delete process.env["AWS_REGION"];
delete process.env["AWS_BEARER_TOKEN_BEDROCK"];
delete process.env["OPENROUTER_API_KEY"];
delete process.env["GROQ_API_KEY"];
delete process.env["MISTRAL_API_KEY"];
delete process.env["PERPLEXITY_API_KEY"];
delete process.env["TOGETHER_API_KEY"];
delete process.env["XAI_API_KEY"];
delete process.env["DEEPSEEK_API_KEY"];
delete process.env["FIREWORKS_API_KEY"];
delete process.env["CEREBRAS_API_KEY"];
delete process.env["SAMBANOVA_API_KEY"];
// Now safe to import from src/
const { Log } = await import("../src/util/log")
const { Log } = await import("../src/util/log");
Log.init({
print: false,
dev: true,
level: "DEBUG",
})
});

View File

@@ -1,7 +1,6 @@
import { describe, expect, test } from "bun:test"
import { Project } from "../../src/project/project"
import { Log } from "../../src/util/log"
import { Storage } from "../../src/storage/storage"
import { $ } from "bun"
import path from "path"
import { tmpdir } from "../fixture/fixture"
@@ -55,37 +54,50 @@ describe("Project.fromDirectory with worktrees", () => {
test("should set worktree to root when called from a worktree", async () => {
await using tmp = await tmpdir({ git: true })
const worktreePath = path.join(tmp.path, "..", "worktree-test")
await $`git worktree add ${worktreePath} -b test-branch`.cwd(tmp.path).quiet()
const worktreePath = path.join(tmp.path, "..", path.basename(tmp.path) + "-worktree")
try {
await $`git worktree add ${worktreePath} -b test-branch-${Date.now()}`.cwd(tmp.path).quiet()
const { project, sandbox } = await Project.fromDirectory(worktreePath)
const { project, sandbox } = await Project.fromDirectory(worktreePath)
expect(project.worktree).toBe(tmp.path)
expect(sandbox).toBe(worktreePath)
expect(project.sandboxes).toContain(worktreePath)
expect(project.sandboxes).not.toContain(tmp.path)
await $`git worktree remove ${worktreePath}`.cwd(tmp.path).quiet()
expect(project.worktree).toBe(tmp.path)
expect(sandbox).toBe(worktreePath)
expect(project.sandboxes).toContain(worktreePath)
expect(project.sandboxes).not.toContain(tmp.path)
} finally {
await $`git worktree remove ${worktreePath}`
.cwd(tmp.path)
.quiet()
.catch(() => {})
}
})
test("should accumulate multiple worktrees in sandboxes", async () => {
await using tmp = await tmpdir({ git: true })
const worktree1 = path.join(tmp.path, "..", "worktree-1")
const worktree2 = path.join(tmp.path, "..", "worktree-2")
await $`git worktree add ${worktree1} -b branch-1`.cwd(tmp.path).quiet()
await $`git worktree add ${worktree2} -b branch-2`.cwd(tmp.path).quiet()
const worktree1 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt1")
const worktree2 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt2")
try {
await $`git worktree add ${worktree1} -b branch-${Date.now()}`.cwd(tmp.path).quiet()
await $`git worktree add ${worktree2} -b branch-${Date.now() + 1}`.cwd(tmp.path).quiet()
await Project.fromDirectory(worktree1)
const { project } = await Project.fromDirectory(worktree2)
await Project.fromDirectory(worktree1)
const { project } = await Project.fromDirectory(worktree2)
expect(project.worktree).toBe(tmp.path)
expect(project.sandboxes).toContain(worktree1)
expect(project.sandboxes).toContain(worktree2)
expect(project.sandboxes).not.toContain(tmp.path)
await $`git worktree remove ${worktree1}`.cwd(tmp.path).quiet()
await $`git worktree remove ${worktree2}`.cwd(tmp.path).quiet()
expect(project.worktree).toBe(tmp.path)
expect(project.sandboxes).toContain(worktree1)
expect(project.sandboxes).toContain(worktree2)
expect(project.sandboxes).not.toContain(tmp.path)
} finally {
await $`git worktree remove ${worktree1}`
.cwd(tmp.path)
.quiet()
.catch(() => {})
await $`git worktree remove ${worktree2}`
.cwd(tmp.path)
.quiet()
.catch(() => {})
}
})
})
@@ -99,11 +111,12 @@ describe("Project.discover", () => {
await Project.discover(project)
const updated = await Storage.read<Project.Info>(["project", project.id])
expect(updated.icon).toBeDefined()
expect(updated.icon?.url).toStartWith("data:")
expect(updated.icon?.url).toContain("base64")
expect(updated.icon?.color).toBeUndefined()
const updated = Project.get(project.id)
expect(updated).toBeDefined()
expect(updated!.icon).toBeDefined()
expect(updated!.icon?.url).toStartWith("data:")
expect(updated!.icon?.url).toContain("base64")
expect(updated!.icon?.color).toBeUndefined()
})
test("should not discover non-image files", async () => {
@@ -114,7 +127,8 @@ describe("Project.discover", () => {
await Project.discover(project)
const updated = await Storage.read<Project.Info>(["project", project.id])
expect(updated.icon).toBeUndefined()
const updated = Project.get(project.id)
expect(updated).toBeDefined()
expect(updated!.icon).toBeUndefined()
})
})

View File

@@ -0,0 +1,687 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
import path from "path"
import fs from "fs/promises"
import { readFileSync, readdirSync } from "fs"
import { JsonMigration } from "../../src/storage/json-migration"
import { Global } from "../../src/global"
import { ProjectTable } from "../../src/project/project.sql"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../../src/session/session.sql"
import { SessionShareTable } from "../../src/share/share.sql"
// Test fixtures
// Test fixtures shared across the migration tests below.
// IDs follow the production prefixes (proj_/ses_/msg_/prt_) and reference each
// other to form a valid project -> session -> message -> part chain.
const fixtures = {
  project: {
    id: "proj_test123abc",
    name: "Test Project",
    worktree: "/test/path",
    vcs: "git" as const,
    sandboxes: [],
  },
  session: {
    id: "ses_test456def",
    // Links back to fixtures.project.id so the session is not orphaned.
    projectID: "proj_test123abc",
    slug: "test-session",
    directory: "/test/path",
    title: "Test Session",
    version: "1.0.0",
    time: { created: 1700000000000, updated: 1700000001000 },
  },
  message: {
    id: "msg_test789ghi",
    // Links back to fixtures.session.id.
    sessionID: "ses_test456def",
    role: "user" as const,
    agent: "default",
    model: { providerID: "openai", modelID: "gpt-4" },
    time: { created: 1700000000000 },
  },
  part: {
    id: "prt_testabc123",
    // Links back to fixtures.message.id and fixtures.session.id.
    messageID: "msg_test789ghi",
    sessionID: "ses_test456def",
    type: "text" as const,
    text: "Hello, world!",
  },
}
// Helper to create test storage directory structure
/**
 * Builds a fresh legacy JSON storage tree under the global data directory.
 * Removes any leftover tree from a previous run, recreates every subdirectory
 * the migration scans, and drops the legacy marker file so the migration
 * recognizes that JSON storage exists.
 * @returns absolute path of the storage root that was created
 */
async function setupStorageDir() {
  const storageDir = path.join(Global.Path.data, "storage")
  // Start from a clean slate so state from earlier runs cannot leak in.
  await fs.rm(storageDir, { recursive: true, force: true })
  const subdirs: string[][] = [
    ["project"],
    ["session", "proj_test123abc"],
    ["message", "ses_test456def"],
    ["part", "msg_test789ghi"],
    ["session_diff"],
    ["todo"],
    ["permission"],
    ["session_share"],
  ]
  for (const segments of subdirs) {
    await fs.mkdir(path.join(storageDir, ...segments), { recursive: true })
  }
  // Create legacy marker to indicate JSON storage exists
  await Bun.write(path.join(storageDir, "migration"), "1")
  return storageDir
}
/**
 * Serializes a project fixture into the legacy JSON layout:
 * `<storageDir>/project/<id>.json`.
 */
async function writeProject(storageDir: string, project: Record<string, unknown>) {
  const target = path.join(storageDir, "project", `${project.id}.json`)
  await Bun.write(target, JSON.stringify(project))
}
/**
 * Serializes a session fixture into the legacy JSON layout:
 * `<storageDir>/session/<projectID>/<id>.json`.
 */
async function writeSession(storageDir: string, projectID: string, session: Record<string, unknown>) {
  const target = path.join(storageDir, "session", projectID, `${session.id}.json`)
  await Bun.write(target, JSON.stringify(session))
}
// Helper to create in-memory test database with schema
/**
 * Creates an in-memory SQLite database with the full drizzle schema applied.
 * Migrations are discovered from the repo's `migration/` directory, whose
 * entries are named `<timestamp>_...`, and are applied in timestamp order.
 * @returns the open bun:sqlite Database handle (caller closes it)
 */
function createTestDb() {
  const sqlite = new Database(":memory:")
  sqlite.exec("PRAGMA foreign_keys = ON")
  const migrationRoot = path.join(import.meta.dirname, "../../migration")
  const migrations = readdirSync(migrationRoot, { withFileTypes: true })
    .filter((entry) => entry.isDirectory())
    .map((entry) => ({
      sql: readFileSync(path.join(migrationRoot, entry.name, "migration.sql"), "utf-8"),
      // Directory names start with a numeric timestamp; use it as sort key.
      timestamp: Number(entry.name.split("_")[0]),
    }))
    .sort((a, b) => a.timestamp - b.timestamp)
  migrate(drizzle({ client: sqlite }), migrations)
  return sqlite
}
describe("JSON to SQLite migration", () => {
let storageDir: string
let sqlite: Database
beforeEach(async () => {
storageDir = await setupStorageDir()
sqlite = createTestDb()
})
afterEach(async () => {
sqlite.close()
await fs.rm(storageDir, { recursive: true, force: true })
})
test("migrates project", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/test/path",
vcs: "git",
name: "Test Project",
time: { created: 1700000000000, updated: 1700000001000 },
sandboxes: ["/test/sandbox"],
})
const stats = await JsonMigration.run(sqlite)
expect(stats?.projects).toBe(1)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
expect(projects[0].id).toBe("proj_test123abc")
expect(projects[0].worktree).toBe("/test/path")
expect(projects[0].name).toBe("Test Project")
expect(projects[0].sandboxes).toEqual(["/test/sandbox"])
})
test("migrates project with commands", async () => {
await writeProject(storageDir, {
id: "proj_with_commands",
worktree: "/test/path",
vcs: "git",
name: "Project With Commands",
time: { created: 1700000000000, updated: 1700000001000 },
sandboxes: ["/test/sandbox"],
commands: { start: "npm run dev" },
})
const stats = await JsonMigration.run(sqlite)
expect(stats?.projects).toBe(1)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
expect(projects[0].id).toBe("proj_with_commands")
expect(projects[0].commands).toEqual({ start: "npm run dev" })
})
test("migrates project without commands field", async () => {
await writeProject(storageDir, {
id: "proj_no_commands",
worktree: "/test/path",
vcs: "git",
name: "Project Without Commands",
time: { created: 1700000000000, updated: 1700000001000 },
sandboxes: [],
})
const stats = await JsonMigration.run(sqlite)
expect(stats?.projects).toBe(1)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
expect(projects[0].id).toBe("proj_no_commands")
expect(projects[0].commands).toBeNull()
})
test("migrates session with individual columns", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/test/path",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", {
id: "ses_test456def",
projectID: "proj_test123abc",
slug: "test-session",
directory: "/test/dir",
title: "Test Session Title",
version: "1.0.0",
time: { created: 1700000000000, updated: 1700000001000 },
summary: { additions: 10, deletions: 5, files: 3 },
share: { url: "https://example.com/share" },
})
await JsonMigration.run(sqlite)
const db = drizzle({ client: sqlite })
const sessions = db.select().from(SessionTable).all()
expect(sessions.length).toBe(1)
expect(sessions[0].id).toBe("ses_test456def")
expect(sessions[0].project_id).toBe("proj_test123abc")
expect(sessions[0].slug).toBe("test-session")
expect(sessions[0].title).toBe("Test Session Title")
expect(sessions[0].summary_additions).toBe(10)
expect(sessions[0].summary_deletions).toBe(5)
expect(sessions[0].share_url).toBe("https://example.com/share")
})
test("migrates messages and parts", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
JSON.stringify({ ...fixtures.message }),
)
await Bun.write(
path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
JSON.stringify({ ...fixtures.part }),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.messages).toBe(1)
expect(stats?.parts).toBe(1)
const db = drizzle({ client: sqlite })
const messages = db.select().from(MessageTable).all()
expect(messages.length).toBe(1)
expect(messages[0].id).toBe("msg_test789ghi")
const parts = db.select().from(PartTable).all()
expect(parts.length).toBe(1)
expect(parts[0].id).toBe("prt_testabc123")
})
test("migrates legacy parts without ids in body", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
JSON.stringify({
role: "user",
agent: "default",
model: { providerID: "openai", modelID: "gpt-4" },
time: { created: 1700000000000 },
}),
)
await Bun.write(
path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
JSON.stringify({
type: "text",
text: "Hello, world!",
}),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.messages).toBe(1)
expect(stats?.parts).toBe(1)
const db = drizzle({ client: sqlite })
const messages = db.select().from(MessageTable).all()
expect(messages.length).toBe(1)
expect(messages[0].id).toBe("msg_test789ghi")
expect(messages[0].session_id).toBe("ses_test456def")
expect(messages[0].data).not.toHaveProperty("id")
expect(messages[0].data).not.toHaveProperty("sessionID")
const parts = db.select().from(PartTable).all()
expect(parts.length).toBe(1)
expect(parts[0].id).toBe("prt_testabc123")
expect(parts[0].message_id).toBe("msg_test789ghi")
expect(parts[0].session_id).toBe("ses_test456def")
expect(parts[0].data).not.toHaveProperty("id")
expect(parts[0].data).not.toHaveProperty("messageID")
expect(parts[0].data).not.toHaveProperty("sessionID")
})
test("skips orphaned sessions (no parent project)", async () => {
await Bun.write(
path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
JSON.stringify({
id: "ses_orphan",
projectID: "proj_nonexistent",
slug: "orphan",
directory: "/",
title: "Orphan",
version: "1.0.0",
time: { created: Date.now(), updated: Date.now() },
}),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.sessions).toBe(0)
})
test("is idempotent (running twice doesn't duplicate)", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await JsonMigration.run(sqlite)
await JsonMigration.run(sqlite)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1) // Still only 1 due to onConflictDoNothing
})
test("migrates todos", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
// Create todo file (named by sessionID, contains array of todos)
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([
{
id: "todo_1",
content: "First todo",
status: "pending",
priority: "high",
},
{
id: "todo_2",
content: "Second todo",
status: "completed",
priority: "medium",
},
]),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.todos).toBe(2)
const db = drizzle({ client: sqlite })
const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
expect(todos.length).toBe(2)
expect(todos[0].content).toBe("First todo")
expect(todos[0].status).toBe("pending")
expect(todos[0].priority).toBe("high")
expect(todos[0].position).toBe(0)
expect(todos[1].content).toBe("Second todo")
expect(todos[1].position).toBe(1)
})
test("todos are ordered by position", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([
{ content: "Third", status: "pending", priority: "low" },
{ content: "First", status: "pending", priority: "high" },
{ content: "Second", status: "in_progress", priority: "medium" },
]),
)
await JsonMigration.run(sqlite)
const db = drizzle({ client: sqlite })
const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
expect(todos.length).toBe(3)
expect(todos[0].content).toBe("Third")
expect(todos[0].position).toBe(0)
expect(todos[1].content).toBe("First")
expect(todos[1].position).toBe(1)
expect(todos[2].content).toBe("Second")
expect(todos[2].position).toBe(2)
})
test("migrates permissions", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
// Create permission file (named by projectID, contains array of rules)
const permissionData = [
{ permission: "file.read", pattern: "/test/file1.ts", action: "allow" as const },
{ permission: "file.write", pattern: "/test/file2.ts", action: "ask" as const },
{ permission: "command.run", pattern: "npm install", action: "deny" as const },
]
await Bun.write(path.join(storageDir, "permission", "proj_test123abc.json"), JSON.stringify(permissionData))
const stats = await JsonMigration.run(sqlite)
expect(stats?.permissions).toBe(1)
const db = drizzle({ client: sqlite })
const permissions = db.select().from(PermissionTable).all()
expect(permissions.length).toBe(1)
expect(permissions[0].project_id).toBe("proj_test123abc")
expect(permissions[0].data).toEqual(permissionData)
})
test("migrates session shares", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
// Create session share file (named by sessionID)
await Bun.write(
path.join(storageDir, "session_share", "ses_test456def.json"),
JSON.stringify({
id: "share_123",
secret: "supersecretkey",
url: "https://share.example.com/ses_test456def",
}),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.shares).toBe(1)
const db = drizzle({ client: sqlite })
const shares = db.select().from(SessionShareTable).all()
expect(shares.length).toBe(1)
expect(shares[0].session_id).toBe("ses_test456def")
expect(shares[0].id).toBe("share_123")
expect(shares[0].secret).toBe("supersecretkey")
expect(shares[0].url).toBe("https://share.example.com/ses_test456def")
})
test("returns empty stats when storage directory does not exist", async () => {
await fs.rm(storageDir, { recursive: true, force: true })
const stats = await JsonMigration.run(sqlite)
expect(stats.projects).toBe(0)
expect(stats.sessions).toBe(0)
expect(stats.messages).toBe(0)
expect(stats.parts).toBe(0)
expect(stats.todos).toBe(0)
expect(stats.permissions).toBe(0)
expect(stats.shares).toBe(0)
expect(stats.errors).toEqual([])
})
test("continues when a JSON file is unreadable and records an error", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await Bun.write(path.join(storageDir, "project", "broken.json"), "{ invalid json")
const stats = await JsonMigration.run(sqlite)
expect(stats.projects).toBe(1)
expect(stats.errors.some((x) => x.includes("failed to read") && x.includes("broken.json"))).toBe(true)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
expect(projects[0].id).toBe("proj_test123abc")
})
test("skips invalid todo entries while preserving source positions", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([
{ content: "keep-0", status: "pending", priority: "high" },
{ content: "drop-1", priority: "low" },
{ content: "keep-2", status: "completed", priority: "medium" },
]),
)
const stats = await JsonMigration.run(sqlite)
expect(stats.todos).toBe(2)
const db = drizzle({ client: sqlite })
const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
expect(todos.length).toBe(2)
expect(todos[0].content).toBe("keep-0")
expect(todos[0].position).toBe(0)
expect(todos[1].content).toBe("keep-2")
expect(todos[1].position).toBe(2)
})
test("skips orphaned todos, permissions, and shares", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([{ content: "valid", status: "pending", priority: "high" }]),
)
await Bun.write(
path.join(storageDir, "todo", "ses_missing.json"),
JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
)
await Bun.write(
path.join(storageDir, "permission", "proj_test123abc.json"),
JSON.stringify([{ permission: "file.read" }]),
)
await Bun.write(
path.join(storageDir, "permission", "proj_missing.json"),
JSON.stringify([{ permission: "file.write" }]),
)
await Bun.write(
path.join(storageDir, "session_share", "ses_test456def.json"),
JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
)
await Bun.write(
path.join(storageDir, "session_share", "ses_missing.json"),
JSON.stringify({ id: "share_missing", secret: "secret", url: "https://missing.example.com" }),
)
const stats = await JsonMigration.run(sqlite)
expect(stats.todos).toBe(1)
expect(stats.permissions).toBe(1)
expect(stats.shares).toBe(1)
const db = drizzle({ client: sqlite })
expect(db.select().from(TodoTable).all().length).toBe(1)
expect(db.select().from(PermissionTable).all().length).toBe(1)
expect(db.select().from(SessionShareTable).all().length).toBe(1)
})
test("handles mixed corruption and partial validity in one migration run", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/ok",
time: { created: 1700000000000, updated: 1700000001000 },
sandboxes: [],
})
await Bun.write(
path.join(storageDir, "project", "proj_missing_id.json"),
JSON.stringify({ worktree: "/bad", sandboxes: [] }),
)
await Bun.write(path.join(storageDir, "project", "proj_broken.json"), "{ nope")
await writeSession(storageDir, "proj_test123abc", {
id: "ses_test456def",
projectID: "proj_test123abc",
slug: "ok",
directory: "/ok",
title: "Ok",
version: "1",
time: { created: 1700000000000, updated: 1700000001000 },
})
await Bun.write(
path.join(storageDir, "session", "proj_test123abc", "ses_missing_project.json"),
JSON.stringify({
id: "ses_missing_project",
slug: "bad",
directory: "/bad",
title: "Bad",
version: "1",
}),
)
await Bun.write(
path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
JSON.stringify({
id: "ses_orphan",
projectID: "proj_missing",
slug: "orphan",
directory: "/bad",
title: "Orphan",
version: "1",
}),
)
await Bun.write(
path.join(storageDir, "message", "ses_test456def", "msg_ok.json"),
JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
)
await Bun.write(path.join(storageDir, "message", "ses_test456def", "msg_broken.json"), "{ nope")
await Bun.write(
path.join(storageDir, "message", "ses_missing", "msg_orphan.json"),
JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
)
await Bun.write(
path.join(storageDir, "part", "msg_ok", "part_ok.json"),
JSON.stringify({ type: "text", text: "ok" }),
)
await Bun.write(
path.join(storageDir, "part", "msg_missing", "part_missing_message.json"),
JSON.stringify({ type: "text", text: "bad" }),
)
await Bun.write(path.join(storageDir, "part", "msg_ok", "part_broken.json"), "{ nope")
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([
{ content: "ok", status: "pending", priority: "high" },
{ content: "skip", status: "pending" },
]),
)
await Bun.write(
path.join(storageDir, "todo", "ses_missing.json"),
JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
)
await Bun.write(path.join(storageDir, "todo", "ses_broken.json"), "{ nope")
await Bun.write(
path.join(storageDir, "permission", "proj_test123abc.json"),
JSON.stringify([{ permission: "file.read" }]),
)
await Bun.write(
path.join(storageDir, "permission", "proj_missing.json"),
JSON.stringify([{ permission: "file.write" }]),
)
await Bun.write(path.join(storageDir, "permission", "proj_broken.json"), "{ nope")
await Bun.write(
path.join(storageDir, "session_share", "ses_test456def.json"),
JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
)
await Bun.write(
path.join(storageDir, "session_share", "ses_missing.json"),
JSON.stringify({ id: "share_orphan", secret: "secret", url: "https://missing.example.com" }),
)
await Bun.write(path.join(storageDir, "session_share", "ses_broken.json"), "{ nope")
const stats = await JsonMigration.run(sqlite)
expect(stats.projects).toBe(1)
expect(stats.sessions).toBe(1)
expect(stats.messages).toBe(1)
expect(stats.parts).toBe(1)
expect(stats.todos).toBe(1)
expect(stats.permissions).toBe(1)
expect(stats.shares).toBe(1)
expect(stats.errors.length).toBeGreaterThanOrEqual(6)
const db = drizzle({ client: sqlite })
expect(db.select().from(ProjectTable).all().length).toBe(1)
expect(db.select().from(SessionTable).all().length).toBe(1)
expect(db.select().from(MessageTable).all().length).toBe(1)
expect(db.select().from(PartTable).all().length).toBe(1)
expect(db.select().from(TodoTable).all().length).toBe(1)
expect(db.select().from(PermissionTable).all().length).toBe(1)
expect(db.select().from(SessionShareTable).all().length).toBe(1)
})
})

View File

@@ -78,6 +78,32 @@ describe("tool.read external_directory permission", () => {
})
})
test("asks for directory-scoped external_directory permission when reading external directory", async () => {
await using outerTmp = await tmpdir({
init: async (dir) => {
await Bun.write(path.join(dir, "external", "a.txt"), "a")
},
})
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const read = await ReadTool.init()
const requests: Array<Omit<PermissionNext.Request, "id" | "sessionID" | "tool">> = []
const testCtx = {
...ctx,
ask: async (req: Omit<PermissionNext.Request, "id" | "sessionID" | "tool">) => {
requests.push(req)
},
}
await read.execute({ filePath: path.join(outerTmp.path, "external") }, testCtx)
const extDirReq = requests.find((r) => r.permission === "external_directory")
expect(extDirReq).toBeDefined()
expect(extDirReq!.patterns).toContain(path.join(outerTmp.path, "external", "*"))
},
})
})
test("asks for external_directory permission when reading relative path outside project", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
@@ -249,6 +275,25 @@ describe("tool.read truncation", () => {
})
})
test("does not mark final directory page as truncated", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Promise.all(
Array.from({ length: 10 }, (_, i) => Bun.write(path.join(dir, "dir", `file-${i}.txt`), `line${i}`)),
)
},
})
await Instance.provide({
directory: tmp.path,
fn: async () => {
const read = await ReadTool.init()
const result = await read.execute({ filePath: path.join(tmp.path, "dir"), offset: 5, limit: 5 }, ctx)
expect(result.metadata.truncated).toBe(false)
expect(result.output).not.toContain("Showing 5 of 10 entries")
},
})
})
test("truncates long lines", async () => {
await using tmp = await tmpdir({
init: async (dir) => {

View File

View File

@@ -110,6 +110,8 @@ import type {
SessionForkResponses,
SessionGetErrors,
SessionGetResponses,
SessionHandoffErrors,
SessionHandoffResponses,
SessionInitErrors,
SessionInitResponses,
SessionListResponses,
@@ -1766,6 +1768,48 @@ export class Session extends HeyApiClient {
...params,
})
}
/**
* Handoff session
*
* Extract context and relevant files for another agent to continue the conversation.
*/
public handoff<ThrowOnError extends boolean = false>(
parameters: {
sessionID: string
directory?: string
model?: {
providerID: string
modelID: string
}
goal?: string
},
options?: Options<never, ThrowOnError>,
) {
const params = buildClientParams(
[parameters],
[
{
args: [
{ in: "path", key: "sessionID" },
{ in: "query", key: "directory" },
{ in: "body", key: "model" },
{ in: "body", key: "goal" },
],
},
],
)
return (options?.client ?? this.client).post<SessionHandoffResponses, SessionHandoffErrors, ThrowOnError>({
url: "/session/{sessionID}/handoff",
...options,
...params,
headers: {
"Content-Type": "application/json",
...options?.headers,
...params.headers,
},
})
}
}
export class Part extends HeyApiClient {

View File

@@ -498,7 +498,17 @@ export type EventMessagePartUpdated = {
type: "message.part.updated"
properties: {
part: Part
delta?: string
}
}
export type EventMessagePartDelta = {
type: "message.part.delta"
properties: {
sessionID: string
messageID: string
partID: string
field: string
delta: string
}
}
@@ -668,10 +678,6 @@ export type Todo = {
* Priority level of the task: high, medium, low
*/
priority: string
/**
* Unique identifier for the todo item
*/
id: string
}
export type EventTodoUpdated = {
@@ -920,6 +926,7 @@ export type Event =
| EventMessageUpdated
| EventMessageRemoved
| EventMessagePartUpdated
| EventMessagePartDelta
| EventMessagePartRemoved
| EventPermissionAsked
| EventPermissionReplied
@@ -1032,6 +1039,10 @@ export type KeybindsConfig = {
* Toggle model favorite status
*/
model_favorite_toggle?: string
/**
* Toggle showing all models
*/
model_show_all_toggle?: string
/**
* Share current session
*/
@@ -3829,6 +3840,51 @@ export type PermissionRespondResponses = {
export type PermissionRespondResponse = PermissionRespondResponses[keyof PermissionRespondResponses]
/**
 * Request shape for `POST /session/{sessionID}/handoff` ("Handoff session":
 * extract context and relevant files for another agent to continue the
 * conversation).
 */
export type SessionHandoffData = {
  body?: {
    // Model that should continue the conversation
    model: {
      providerID: string
      modelID: string
    }
    // Optional goal for the handoff — NOTE(review): server-side semantics not
    // visible here; confirm how it shapes the extracted context
    goal?: string
  }
  path: {
    /**
     * Session ID
     */
    sessionID: string
  }
  query?: {
    // Working directory override — TODO confirm expected format with server
    directory?: string
  }
  url: "/session/{sessionID}/handoff"
}
/**
 * Error payloads for `POST /session/{sessionID}/handoff`, keyed by HTTP status.
 */
export type SessionHandoffErrors = {
  /**
   * Bad request
   */
  400: BadRequestError
  /**
   * Not found
   */
  404: NotFoundError
}
// Union of all possible handoff error payloads.
export type SessionHandoffError = SessionHandoffErrors[keyof SessionHandoffErrors]
/**
 * Success payloads for `POST /session/{sessionID}/handoff`, keyed by HTTP status.
 */
export type SessionHandoffResponses = {
  /**
   * Handoff data extracted
   */
  200: {
    // Extracted conversation context for the next agent
    text: string
    // Files relevant to continuing the conversation — presumably paths;
    // confirm whether absolute or workspace-relative against the server
    files: Array<string>
  }
}
// Union of all successful handoff response bodies.
export type SessionHandoffResponse = SessionHandoffResponses[keyof SessionHandoffResponses]
export type PermissionReplyData = {
body?: {
reply: "once" | "always" | "reject"

View File

@@ -3297,6 +3297,108 @@
]
}
},
"/session/{sessionID}/handoff": {
"post": {
"operationId": "session.handoff",
"parameters": [
{
"in": "query",
"name": "directory",
"schema": {
"type": "string"
}
},
{
"in": "path",
"name": "sessionID",
"schema": {
"type": "string"
},
"required": true,
"description": "Session ID"
}
],
"summary": "Handoff session",
"description": "Extract context and relevant files for another agent to continue the conversation.",
"responses": {
"200": {
"description": "Handoff data extracted",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"text": {
"type": "string"
},
"files": {
"type": "array",
"items": {
"type": "string"
}
}
},
"required": ["text", "files"]
}
}
}
},
"400": {
"description": "Bad request",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/BadRequestError"
}
}
}
},
"404": {
"description": "Not found",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NotFoundError"
}
}
}
}
},
"requestBody": {
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"model": {
"type": "object",
"properties": {
"providerID": {
"type": "string"
},
"modelID": {
"type": "string"
}
},
"required": ["providerID", "modelID"]
},
"goal": {
"type": "string"
}
},
"required": ["model"]
}
}
}
},
"x-codeSamples": [
{
"lang": "js",
"source": "import { createOpencodeClient } from \"@opencode-ai/sdk\"\n\nconst client = createOpencodeClient()\nawait client.session.handoff({\n ...\n})"
}
]
}
},
"/permission/{requestID}/reply": {
"post": {
"operationId": "permission.reply",
@@ -7255,12 +7357,40 @@
"properties": {
"part": {
"$ref": "#/components/schemas/Part"
}
},
"required": ["part"]
}
},
"required": ["type", "properties"]
},
"Event.message.part.delta": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "message.part.delta"
},
"properties": {
"type": "object",
"properties": {
"sessionID": {
"type": "string"
},
"messageID": {
"type": "string"
},
"partID": {
"type": "string"
},
"field": {
"type": "string"
},
"delta": {
"type": "string"
}
},
"required": ["part"]
"required": ["sessionID", "messageID", "partID", "field", "delta"]
}
},
"required": ["type", "properties"]
@@ -7674,13 +7804,9 @@
"priority": {
"description": "Priority level of the task: high, medium, low",
"type": "string"
},
"id": {
"description": "Unique identifier for the todo item",
"type": "string"
}
},
"required": ["content", "status", "priority", "id"]
"required": ["content", "status", "priority"]
},
"Event.todo.updated": {
"type": "object",
@@ -8348,6 +8474,9 @@
{
"$ref": "#/components/schemas/Event.message.part.updated"
},
{
"$ref": "#/components/schemas/Event.message.part.delta"
},
{
"$ref": "#/components/schemas/Event.message.part.removed"
},

View File

@@ -1,20 +1,21 @@
export const dict = {
"ui.sessionReview.title": "تغييرات الجلسة",
"ui.sessionReview.title.lastTurn": "تغييرات آخر دور",
"ui.sessionReview.diffStyle.unified": "موجد",
"ui.sessionReview.diffStyle.unified": "موحد",
"ui.sessionReview.diffStyle.split": "منقسم",
"ui.sessionReview.expandAll": "توسيع الكل",
"ui.sessionReview.collapseAll": "طي الكل",
"ui.sessionReview.change.added": "مضاف",
"ui.sessionReview.change.removed": "محذوف",
"ui.sessionReview.change.modified": "معدل",
"ui.lineComment.label.prefix": "تعليق على ",
"ui.lineComment.label.suffix": "",
"ui.lineComment.editorLabel.prefix": "جارٍ التعليق على ",
"ui.lineComment.editorLabel.suffix": "",
"ui.lineComment.placeholder": "أضف تعليقًا",
"ui.lineComment.submit": "تعليق",
"ui.sessionTurn.steps.show": "إظهار الخطوات",
"ui.sessionTurn.steps.hide": "إخفاء الخطوات",
"ui.sessionTurn.summary.response": "استجابة",

View File

@@ -5,16 +5,17 @@ export const dict = {
"ui.sessionReview.diffStyle.split": "Dividido",
"ui.sessionReview.expandAll": "Expandir tudo",
"ui.sessionReview.collapseAll": "Recolher tudo",
"ui.sessionReview.change.added": "Adicionado",
"ui.sessionReview.change.removed": "Removido",
"ui.sessionReview.change.modified": "Modificado",
"ui.lineComment.label.prefix": "Comentar em ",
"ui.lineComment.label.suffix": "",
"ui.lineComment.editorLabel.prefix": "Comentando em ",
"ui.lineComment.editorLabel.suffix": "",
"ui.lineComment.placeholder": "Adicionar comentário",
"ui.lineComment.submit": "Comentar",
"ui.sessionTurn.steps.show": "Mostrar passos",
"ui.sessionTurn.steps.hide": "Ocultar passos",
"ui.sessionTurn.summary.response": "Resposta",
@@ -94,7 +95,7 @@ export const dict = {
"ui.patch.action.deleted": "Excluído",
"ui.patch.action.created": "Criado",
"ui.patch.action.moved": "Movido",
"ui.patch.action.patched": "Aplicado patch",
"ui.patch.action.patched": "Patch aplicado",
"ui.question.subtitle.answered": "{{count}} respondidas",
"ui.question.answer.none": "(sem resposta)",

View File

@@ -63,8 +63,8 @@ export const dict = {
"ui.tool.webfetch": "Webhentning",
"ui.tool.shell": "Shell",
"ui.tool.patch": "Patch",
"ui.tool.todos": "To-dos",
"ui.tool.todos.read": "Læs to-dos",
"ui.tool.todos": "Opgaver",
"ui.tool.todos.read": "Læs opgaver",
"ui.tool.questions": "Spørgsmål",
"ui.tool.agent": "{{type}} Agent",
@@ -94,7 +94,7 @@ export const dict = {
"ui.patch.action.deleted": "Slettet",
"ui.patch.action.created": "Oprettet",
"ui.patch.action.moved": "Flyttet",
"ui.patch.action.patched": "Patched",
"ui.patch.action.patched": "Patchet",
"ui.question.subtitle.answered": "{{count}} besvaret",
"ui.question.answer.none": "(intet svar)",

View File

@@ -5,16 +5,17 @@ export const dict = {
"ui.sessionReview.diffStyle.split": "Dividido",
"ui.sessionReview.expandAll": "Expandir todo",
"ui.sessionReview.collapseAll": "Colapsar todo",
"ui.sessionReview.change.added": "Añadido",
"ui.sessionReview.change.removed": "Eliminado",
"ui.sessionReview.change.modified": "Modificado",
"ui.lineComment.label.prefix": "Comentar en ",
"ui.lineComment.label.suffix": "",
"ui.lineComment.editorLabel.prefix": "Comentando en ",
"ui.lineComment.editorLabel.suffix": "",
"ui.lineComment.placeholder": "Añadir comentario",
"ui.lineComment.submit": "Comentar",
"ui.sessionTurn.steps.show": "Mostrar pasos",
"ui.sessionTurn.steps.hide": "Ocultar pasos",
"ui.sessionTurn.summary.response": "Respuesta",

View File

@@ -5,16 +5,17 @@ export const dict = {
"ui.sessionReview.diffStyle.split": "Divisé",
"ui.sessionReview.expandAll": "Tout développer",
"ui.sessionReview.collapseAll": "Tout réduire",
"ui.sessionReview.change.added": "Ajouté",
"ui.sessionReview.change.removed": "Supprimé",
"ui.sessionReview.change.modified": "Modifié",
"ui.lineComment.label.prefix": "Commenter sur ",
"ui.lineComment.label.suffix": "",
"ui.lineComment.editorLabel.prefix": "Commentaire sur ",
"ui.lineComment.editorLabel.suffix": "",
"ui.lineComment.placeholder": "Ajouter un commentaire",
"ui.lineComment.submit": "Commenter",
"ui.sessionTurn.steps.show": "Afficher les étapes",
"ui.sessionTurn.steps.hide": "Masquer les étapes",
"ui.sessionTurn.summary.response": "Réponse",

View File

@@ -5,16 +5,17 @@ export const dict = {
"ui.sessionReview.diffStyle.split": "분할 보기",
"ui.sessionReview.expandAll": "모두 펼치기",
"ui.sessionReview.collapseAll": "모두 접기",
"ui.sessionReview.change.added": "추가됨",
"ui.sessionReview.change.removed": "삭제됨",
"ui.sessionReview.change.modified": "수정됨",
"ui.lineComment.label.prefix": "",
"ui.lineComment.label.suffix": "에 댓글 달기",
"ui.lineComment.editorLabel.prefix": "",
"ui.lineComment.editorLabel.suffix": "에 댓글 작성 중",
"ui.lineComment.placeholder": "댓글 추가",
"ui.lineComment.submit": "댓글",
"ui.sessionTurn.steps.show": "단계 표시",
"ui.sessionTurn.steps.hide": "단계 숨기기",
"ui.sessionTurn.summary.response": "응답",

View File

@@ -8,16 +8,17 @@ export const dict: Record<Keys, string> = {
"ui.sessionReview.diffStyle.split": "Delt",
"ui.sessionReview.expandAll": "Utvid alle",
"ui.sessionReview.collapseAll": "Fold sammen alle",
"ui.sessionReview.change.added": "Lagt til",
"ui.sessionReview.change.removed": "Fjernet",
"ui.sessionReview.change.modified": "Endret",
"ui.lineComment.label.prefix": "Kommenter på ",
"ui.lineComment.label.suffix": "",
"ui.lineComment.editorLabel.prefix": "Kommenterer på ",
"ui.lineComment.editorLabel.suffix": "",
"ui.lineComment.placeholder": "Legg til kommentar",
"ui.lineComment.submit": "Kommenter",
"ui.sessionTurn.steps.show": "Vis trinn",
"ui.sessionTurn.steps.hide": "Skjul trinn",
"ui.sessionTurn.summary.response": "Svar",

Some files were not shown because too many files have changed in this diff Show More