Compare commits

...

121 Commits

Author SHA1 Message Date
opencode-agent[bot]
0f397244bb Apply PR #13052: experiment: use ffi to get around bun raw input/ctrl+c issues 2026-02-11 03:12:47 +00:00
opencode-agent[bot]
4133394da6 Apply PR #13036: upgrade opentui to 0.1.78 2026-02-11 03:12:47 +00:00
opencode-agent[bot]
cc374dc5e5 Apply PR #12755: feat(session): add handoff functionality and agent updates 2026-02-11 03:12:46 +00:00
opencode-agent[bot]
d9a58288fe Apply PR #12022: feat: update tui model dialog to utilize model family to reduce noise in list 2026-02-11 03:12:46 +00:00
opencode-agent[bot]
6b3d9e1386 Apply PR #11811: feat: make plan mode the default 2026-02-11 03:12:45 +00:00
opencode-agent[bot]
e9e43111aa Apply PR #10597: sqlite again 2026-02-11 03:12:45 +00:00
LukeParkerDev
56dfbbbc93 hook better instead of 100ms loop lol 2026-02-11 12:57:01 +10:00
LukeParkerDev
943bf316b6 beta conflict workaround 2026-02-11 12:13:49 +10:00
opencode-agent[bot]
c6ec2f47ef chore: generate 2026-02-11 01:56:08 +00:00
Aiden Cline
0fd6f365be fix(core): ensure compaction is more reliable, add reserve token buffer to ensure that input window has enough room to compact (#12924)
Co-authored-by: James Lal <james@littlebearlabs.io>
2026-02-10 19:55:22 -06:00
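A minimal sketch of the reserve-buffer idea this commit describes, with hypothetical numbers and names (contextWindow, reserveTokens); the real check in core is more involved:

```ts
// Hypothetical values, for illustration only.
const contextWindow = 200_000 // model input window, in tokens
const reserveTokens = 16_000  // head-room kept free so the compaction request itself fits

// Trigger compaction while the window still has room for the compaction prompt.
function shouldCompact(usedTokens: number): boolean {
  return usedTokens >= contextWindow - reserveTokens
}
```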
Aiden Cline
60bdb6e9ba tweak: /review prompt to look for behavior changes more explicitly (#13049) 2026-02-10 19:44:42 -06:00
LukeParkerDev
1f0346725a it works but its dumb 2026-02-11 11:37:17 +10:00
Dax
6e9cd576ea fix(tui): default session sidebar to auto (#13046) 2026-02-11 01:26:01 +00:00
OpeOginni
53ec15a56a fix(tui): improve amazon-bedrock check to include container credentials (#13037) 2026-02-10 18:51:49 -06:00
github-actions[bot]
a90b62267f Update VOUCHED list
https://github.com/anomalyco/opencode/issues/12841#issuecomment-3881500535
2026-02-11 00:37:36 +00:00
Sebastian Herrlinger
0171f70bd8 upgrade opentui to 0.1.78 2026-02-11 00:55:50 +01:00
Frank
24556331c8 wip: zen 2026-02-10 17:56:10 -05:00
Frank
39145b99e8 wip: zen 2026-02-10 17:44:00 -05:00
Frank
0afa6e03a8 wip: zen 2026-02-10 17:36:32 -05:00
Frank
7a3c775dc1 wip: zen 2026-02-10 17:24:03 -05:00
Frank
3ea58bb790 wip: zen 2026-02-10 17:15:01 -05:00
Adam
50c705cd2d fix(docs): locale translations 2026-02-10 22:12:32 +00:00
Frank
3894c217cc wip: zen 2026-02-10 22:12:32 +00:00
Adam
66c2bb8f37 chore: update website stats 2026-02-10 22:12:32 +00:00
opencode
1bbbd51d48 release: v1.1.56 2026-02-10 22:12:25 +00:00
Adam
50f3e74d05 fix(app): task tool rendering 2026-02-10 15:28:46 -06:00
Adam
21475a1dfd fix(docs): invalid markdown 2026-02-10 15:18:57 -06:00
Filip
dce4c05fa9 fix(desktop): open apps with executables on Windows (#13022) 2026-02-10 15:10:58 -06:00
Frank
8c56571ef9 zen: log error 2026-02-10 15:55:33 -05:00
Adam
92a77b72fb fix(app): don't close sidebar on session change (#13013) 2026-02-10 14:45:52 -06:00
Aiden Cline
eabf770053 Merge branch 'dev' into utilize-family-in-dialog 2026-02-10 14:43:15 -06:00
opencode-agent[bot]
4f6b929784 chore: generate 2026-02-10 20:22:31 +00:00
Adam
55119559b3 fix(app): don't scroll code search input 2026-02-10 20:22:31 +00:00
Adam
fd5531316f fix(docs): locale translations 2026-02-10 20:22:30 +00:00
Dax Raad
4666daa581 fix 2026-02-10 13:53:18 -05:00
Dax Raad
caf1316116 Merge branch 'dev' into sqlite2 2026-02-10 13:52:35 -05:00
Dax Raad
1de66812bf Merge branch 'dev' into sqlite2 2026-02-09 17:53:05 -05:00
Dax Raad
173c16581d Merge branch 'dev' into feature/session-handoff 2026-02-09 10:53:24 -05:00
Dax
94d0c9940a Merge branch 'dev' into sqlite2 2026-02-09 10:04:55 -05:00
Dax Raad
76bcd22802 docs: add architecture specs for session page improvements and parallel workstreams 2026-02-08 18:44:58 -05:00
Dax Raad
e06fcfdc43 app: update i18n translations for multiple languages 2026-02-08 18:44:53 -05:00
Dax Raad
246430cb8f tui: position prompt at bottom when resuming from handoff with initial prompt 2026-02-08 18:44:45 -05:00
Dax Raad
f689fc7f75 tui: convert handoff from text command to dedicated mode with slash command 2026-02-08 18:44:35 -05:00
Dax Raad
9b2fd57e6e tui: remove hardcoded handoff from autocomplete 2026-02-08 18:44:23 -05:00
Dax Raad
0d365fa613 tui: add handoff mode to prompt history types 2026-02-08 18:44:21 -05:00
Dax Raad
601e631624 tui: fix route navigation state reconciliation 2026-02-08 18:44:18 -05:00
Dax Raad
2fef02f487 tui: fix disabled commands from appearing in slash autocomplete 2026-02-08 18:44:12 -05:00
Dax Raad
4cc9104942 tui: add handoff text command handling in prompt 2026-02-08 18:44:09 -05:00
Dax Raad
0eaa6b5fc8 tui: add handoff autocomplete suggestion with text command 2026-02-08 18:44:07 -05:00
Dax Raad
e563cff034 core: add handoff API endpoint for session extraction 2026-02-08 18:44:04 -05:00
Dax Raad
3b2550106b sdk: regenerate client types for handoff endpoint 2026-02-08 18:44:01 -05:00
Dax Raad
3ec6bff038 core: add handoff session extraction logic with status tracking 2026-02-08 18:43:55 -05:00
Dax Raad
aab2a6df3b core: add tool-only output mode to LLM for handoff extraction 2026-02-08 18:43:53 -05:00
Dax Raad
84171018f2 core: add handoff prompt template for extracting session context 2026-02-08 18:43:51 -05:00
Dax Raad
280d7e6f91 core: add handoff agent for session context extraction 2026-02-08 18:43:49 -05:00
Dax Raad
5952891b1e core: filter session list to show only sessions relevant to current directory location
When running from a subdirectory, the session list now shows only sessions that belong to the current directory or its subdirectories, instead of showing all sessions from the entire project.
2026-02-07 01:18:57 -05:00
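A hedged sketch of the directory filter described above; the field name `directory` and the use of the process cwd are assumptions, not the actual schema:

```ts
import path from "node:path"

// Keep only sessions rooted at the current directory or one of its subdirectories.
function filterSessions<T extends { directory: string }>(sessions: T[], cwd: string): T[] {
  const root = path.resolve(cwd) + path.sep
  return sessions.filter((s) => (path.resolve(s.directory) + path.sep).startsWith(root))
}
```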
Dax Raad
d7c8a3f50d Merge remote-tracking branch 'origin/dev' into sqlite2 2026-02-07 01:11:15 -05:00
Dax Raad
ce353819e8 Merge branch 'dev' into sqlite2 2026-02-05 23:50:20 -05:00
Dax Raad
2dae94e5a3 core: show visual progress bar during database migration so users can see real-time status of projects, sessions, and messages being converted 2026-02-05 23:21:37 -05:00
Dax Raad
c6adc19e41 Merge branch 'dev' into sqlite2 2026-02-05 17:54:46 -05:00
Dax Raad
ce56166510 core: suppress error output when auto-installing dependencies to prevent confusing error messages from appearing in the UI 2026-02-05 12:47:05 -05:00
Dax Raad
5911e4c06a Merge branch 'dev' into sqlite2 2026-02-05 12:39:31 -05:00
Dax Raad
42fb840f22 Merge branch 'dev' into sqlite2 2026-02-05 11:52:58 -05:00
Dax Raad
4dcfdf6572 Merge branch 'dev' into sqlite2 2026-02-04 22:44:49 -05:00
Dax Raad
25f3d6d5a9 Merge branch 'dev' into sqlite2 2026-02-04 22:37:08 -05:00
Dax Raad
e19a9e9614 core: fix migration of legacy messages and parts missing IDs in JSON body
Users upgrading from older versions where message and part IDs were only stored in filenames (not the JSON body) can now have their session data properly migrated to SQLite. This ensures no data loss when transitioning to the new storage format.
2026-02-04 22:35:26 -05:00
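One way the backfill could look, sketched with Bun file APIs; the directory layout and field names here are assumptions, not the actual migration code:

```ts
import { readdir } from "node:fs/promises"
import path from "node:path"

// Legacy layout: a message's ID may exist only in its filename (e.g. msg_abc.json),
// not in the JSON body. Backfill the ID before inserting the row into SQLite.
async function loadLegacyMessages(dir: string) {
  const out: Array<{ id: string } & Record<string, unknown>> = []
  for (const file of await readdir(dir)) {
    if (!file.endsWith(".json")) continue
    const body = await Bun.file(path.join(dir, file)).json()
    out.push({ ...body, id: body.id ?? path.basename(file, ".json") })
  }
  return out
}
```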
Aiden Cline
bb4d978684 feat: update tui model dialog to utilize model family to reduce noise in list 2026-02-03 15:48:40 -06:00
Dax Raad
fcc903489b Merge branch 'dev' into sqlite2 2026-02-03 15:51:09 -05:00
Dax
949e69a9bf Merge branch 'dev' into sqlite2 2026-02-02 23:36:00 -05:00
Dax Raad
afec40e8da feat: make plan mode the default, remove experimental flag
- Remove OPENCODE_EXPERIMENTAL_PLAN_MODE flag from flag.ts
- Update prompt.ts to always use plan mode logic
- Update registry.ts to always include plan tools in CLI
- Remove flag documentation from cli.mdx
2026-02-02 10:40:40 -05:00
Dax
8c30f551e2 Merge branch 'dev' into sqlite2 2026-02-01 23:58:01 -05:00
opencode-agent[bot]
cb721497c1 chore: update nix node_modules hashes 2026-02-02 01:40:44 +00:00
Dax Raad
4ec6293054 fix: type errors in console-core and session
- Fix ExtractTablesWithRelations type compatibility with drizzle-orm beta
- Migrate Session.list() and Session.children() from Storage to SQLite
2026-02-01 20:31:02 -05:00
Dax Raad
b7a323355c fix: ExtractTablesWithRelations type parameter 2026-02-01 20:27:29 -05:00
Dax Raad
d4f053042c sync 2026-02-01 20:26:58 -05:00
Dax Raad
5f552534c7 Merge remote-tracking branch 'origin/sqlite2' into sqlite2 2026-02-01 20:26:24 -05:00
Dax Raad
ad5b790bb3 docs: simplify commit command by removing unnecessary instructions 2026-01-31 20:31:33 -05:00
Dax Raad
ed87341c4f core: fix storage directory existence check during json migration
Use fs.existsSync() instead of Bun.file().exists() since we're checking
for a directory, not a file.
2026-01-31 20:30:36 -05:00
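The distinction this commit relies on, in a minimal sketch: Bun.file() describes a regular file, so its exists() check is not suitable for directories, while fs.existsSync() covers both (the path is illustrative):

```ts
import { existsSync } from "node:fs"

const storageDir = "./storage" // illustrative path

// Reports false for a directory, since Bun.file() targets regular files.
const viaBunFile = await Bun.file(storageDir).exists()

// Works for files and directories alike.
const viaFs = existsSync(storageDir)

console.log({ viaBunFile, viaFs })
```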
opencode-agent[bot]
794ecab028 chore: update nix node_modules hashes 2026-02-01 01:20:05 +00:00
Dax Raad
eeb235724b Merge dev into sqlite2 2026-01-31 20:08:17 -05:00
Dax Raad
61084e7f6f sync 2026-01-31 16:32:37 -05:00
Dax Raad
200aef2eb3 sync 2026-01-31 16:28:07 -05:00
Dax Raad
f6e375a555 Merge origin/sqlite2 2026-01-31 16:10:11 -05:00
Dax Raad
db908deee5 Merge branch 'dev' into sqlite2 2026-01-31 16:08:47 -05:00
opencode-agent[bot]
7b72cc3a48 chore: update nix node_modules hashes 2026-01-30 16:21:25 +00:00
Dax Raad
b8cbfd48ec format 2026-01-30 11:17:33 -05:00
Dax Raad
498cbb2c26 core: split message part updates into delta events for smoother streaming
Streaming text and reasoning content now uses incremental delta events instead of sending full message parts on each update. This reduces bandwidth and improves real-time response smoothness in the TUI.
2026-01-30 11:16:30 -05:00
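A hedged sketch of the delta shape this implies, mirroring the message.part.delta handler that appears in the app diff further down this page; the exact event names in core may differ:

```ts
// Only the appended text travels over the event bus, not the whole part.
type PartDelta = {
  messageID: string
  partID: string
  field: "text" | "reasoning"
  delta: string
}

// Receiver side: append the delta to whatever is already stored for that field.
function applyDelta(part: Record<string, string>, ev: PartDelta): void {
  part[ev.field] = (part[ev.field] ?? "") + ev.delta
}
```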
Dax Raad
d6fbd255b6 Merge branch 'dev' into sqlite2 2026-01-30 11:01:31 -05:00
Dax Raad
2de1c82bf7 Merge branch 'dev' into sqlite2 2026-01-30 10:03:47 -05:00
Dax Raad
34ebb3d051 Merge branch 'dev' into sqlite2 2026-01-29 16:07:20 -05:00
Dax Raad
9c3e3c1ab5 core: load migrations from timestamp-named directories instead of journal 2026-01-29 16:02:19 -05:00
Github Action
3ea499f04e chore: update nix node_modules hashes 2026-01-29 20:15:38 +00:00
Dax Raad
ab13c1d1c4 Merge branch 'dev' into sqlite2 2026-01-29 15:11:57 -05:00
Dax Raad
53b610c331 sync 2026-01-29 13:38:47 -05:00
Dax Raad
e3519356f2 sync 2026-01-29 13:32:20 -05:00
Dax Raad
2619acc0ff Merge branch 'dev' into sqlite2 2026-01-29 13:23:48 -05:00
Dax Raad
1bc45dc266 ignore: keep Chinese release notes label from blocking updates 2026-01-28 21:52:41 -05:00
Dax Raad
2e8feb1c78 core: significantly speed up data migration by optimizing SQLite settings and batch processing
Reduces migration time from minutes to seconds by enabling WAL mode, increasing batch size to 1000, and pre-scanning files upfront. Users upgrading to the SQLite backend will now see much faster startup times when their existing data is migrated.
2026-01-28 21:51:01 -05:00
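A rough sketch of the two optimizations named above using bun:sqlite; the table shape, column names, and the synchronous pragma are assumptions:

```ts
import { Database } from "bun:sqlite"

const db = new Database("opencode.db")
db.exec("PRAGMA journal_mode = WAL;")   // far fewer fsyncs during bulk writes
db.exec("PRAGMA synchronous = NORMAL;") // common companion setting; an assumption here

// Pretend these rows came from the upfront pre-scan of the old JSON files.
const rows: Array<[string, string, string]> = []

const insert = db.prepare("INSERT INTO message (id, session_id, body) VALUES (?, ?, ?)")
const insertBatch = db.transaction((batch: Array<[string, string, string]>) => {
  for (const r of batch) insert.run(...r)
})

// 1000 rows per transaction instead of one transaction per row.
for (let i = 0; i < rows.length; i += 1000) insertBatch(rows.slice(i, i + 1000))
```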
Github Action
00e60899cc chore: update nix node_modules hashes 2026-01-28 23:49:18 +00:00
Dax Raad
30a918e9d4 Merge branch 'dev' into sqlite2 2026-01-28 18:45:12 -05:00
Dax Raad
ac16068140 Merge branch 'dev' into sqlite2 2026-01-27 17:45:05 -05:00
Dax Raad
19a41ab297 sync 2026-01-27 17:43:20 -05:00
Dax Raad
cd174d8cba sync 2026-01-27 16:57:51 -05:00
Dax Raad
246e901e42 Merge dev into sqlite2 2026-01-27 16:44:06 -05:00
Dax Raad
0ccef1b31f sync 2026-01-27 16:41:24 -05:00
Dax Raad
7706f5b6a8 core: switch commit command to kimi-k2.5 and improve worktree test reliability 2026-01-27 16:24:21 -05:00
Dax Raad
63e38555c9 sync 2026-01-27 15:33:44 -05:00
Dax Raad
f40685ab13 core: fix Drizzle ORM client initialization and type definitions 2026-01-27 12:38:38 -05:00
Dax Raad
a48a5a3462 core: migrate from custom JSON storage to standard Drizzle migrations to improve database reliability and performance
This replaces the previous manual JSON file system with standard Drizzle migrations, enabling:
- Proper database schema migrations with timestamp-based versioning
- Batched migration for faster migration of large datasets
- Better data integrity with proper table schemas instead of JSON blobs
- Easier database upgrades and rollback capabilities

Migration changes:
- Todo table now uses individual columns with composite PK instead of JSON blob
- Share table removes unused download share data
- Session diff table moved from database table to file storage
- All migrations now use proper Drizzle format with per-folder layout

Users will see a one-time migration on next run that migrates existing JSON data to the new SQLite database.
2026-01-27 12:36:05 -05:00
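A sketch of the kind of schema change described for the todo table (individual columns with a composite primary key instead of a JSON blob); the column names are assumptions, and the extra-config callback form varies across drizzle versions:

```ts
import { sqliteTable, text, integer, primaryKey } from "drizzle-orm/sqlite-core"

// Hypothetical shape: one row per todo item keyed by (session_id, id),
// replacing a single JSON blob per session.
export const todo = sqliteTable(
  "todo",
  {
    sessionID: text("session_id").notNull(),
    id: text("id").notNull(),
    content: text("content").notNull(),
    status: text("status").notNull(),
    position: integer("position").notNull(),
  },
  (t) => [primaryKey({ columns: [t.sessionID, t.id] })],
)
```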
Dax Raad
5e1639de2b core: improve conversation loading performance by batching database queries
Reduces memory usage and speeds up conversation loading by using pagination
and inArray queries instead of loading all messages at once
2026-01-26 12:33:18 -05:00
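Roughly what the batched loading looks like with drizzle's inArray; the table here is a placeholder, not the real schema:

```ts
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { asc, inArray } from "drizzle-orm"

// Placeholder table; the real part/message tables live in core.
const part = sqliteTable("part", {
  id: text("id").primaryKey(),
  messageID: text("message_id").notNull(),
  data: text("data").notNull(),
})

const db = drizzle(new Database("opencode.db"))

// Fetch parts in fixed-size pages of message IDs instead of one unbounded
// query (or one query per message) for the whole conversation.
async function loadParts(messageIDs: string[], pageSize = 100) {
  const out: (typeof part.$inferSelect)[] = []
  for (let i = 0; i < messageIDs.length; i += pageSize) {
    const page = messageIDs.slice(i, i + pageSize)
    out.push(...(await db.select().from(part).where(inArray(part.messageID, page)).orderBy(asc(part.id))))
  }
  return out
}
```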
Dax Raad
2b05833c32 core: ensure events publish reliably after database operations complete 2026-01-26 12:00:44 -05:00
Dax Raad
acdcf7fa88 core: remove dependency on remeda to simplify dependencies 2026-01-26 11:11:59 -05:00
Dax Raad
bf0754caeb sync 2026-01-26 10:35:53 -05:00
Dax Raad
4d50a32979 Merge dev into sqlite2 2026-01-26 08:53:01 -05:00
Dax Raad
57edb0ddc5 sync 2026-01-26 08:44:19 -05:00
Dax Raad
a614b78c6d tui: upgrade database migration system to drizzle migrator
Replaces custom migration system with drizzle-orm's built-in migrator, bundling migrations at build-time instead of runtime generation. This reduces bundle complexity and provides better integration with drizzle's migration tracking.
2026-01-25 22:27:04 -05:00
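The built-in migrator this switches to, in a minimal sketch; the database and migrations-folder paths are assumptions:

```ts
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { migrate } from "drizzle-orm/bun-sqlite/migrator"

const db = drizzle(new Database("opencode.db"))

// drizzle-orm records applied migrations in its own tracking table,
// so the custom journal/tracking code can be dropped.
migrate(db, { migrationsFolder: "./migrations" })
```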
Github Action
b9f5a34247 chore: update nix node_modules hashes 2026-01-26 02:32:52 +00:00
Dax Raad
81b47a44e2 Merge branch 'dev' into sqlite2 2026-01-25 21:28:38 -05:00
Dax Raad
0c1c07467e Merge branch 'dev' into sqlite2 2026-01-25 20:18:36 -05:00
Dax Raad
105688bf90 sync 2026-01-25 20:16:56 -05:00
Dax Raad
1e7b4768b1 sync 2026-01-24 11:50:25 -05:00
482 changed files with 14026 additions and 7056 deletions

1
.github/VOUCHED.td vendored
View File

@@ -15,4 +15,5 @@ kitlangton
kommander
r44vc0rp
rekram1-node
-spider-yamet clawdbot/llm psychosis, spam pinging the team
thdxr

View File

@@ -16,15 +16,12 @@ wip:
For anything in the packages/web use the docs: prefix.
For anything in the packages/app use the ignore: prefix.
prefer to explain WHY something was done from an end user perspective instead of
WHAT was done.
do not do generic messages like "improved agent experience" be very specific
about what user facing changes were made
if there are changes do a git pull --rebase
if there are conflicts DO NOT FIX THEM. notify me and I will fix them
## GIT DIFF

View File

@@ -110,3 +110,4 @@ const table = sqliteTable("session", {
- Avoid mocks as much as possible
- Test actual implementation, do not duplicate logic into tests
- Tests cannot run from repo root (guard: `do-not-run-tests-from-root`); run from package dirs like `packages/opencode`.

519
bun.lock

File diff suppressed because it is too large

View File

@@ -166,14 +166,10 @@ const bucketNew = new sst.cloudflare.Bucket("ZenDataNew")
const AWS_SES_ACCESS_KEY_ID = new sst.Secret("AWS_SES_ACCESS_KEY_ID")
const AWS_SES_SECRET_ACCESS_KEY = new sst.Secret("AWS_SES_SECRET_ACCESS_KEY")
let logProcessor
if ($app.stage === "production" || $app.stage === "frank") {
const HONEYCOMB_API_KEY = new sst.Secret("HONEYCOMB_API_KEY")
logProcessor = new sst.cloudflare.Worker("LogProcessor", {
handler: "packages/console/function/src/log-processor.ts",
link: [HONEYCOMB_API_KEY],
})
}
const logProcessor = new sst.cloudflare.Worker("LogProcessor", {
handler: "packages/console/function/src/log-processor.ts",
link: [new sst.Secret("HONEYCOMB_API_KEY")],
})
new sst.cloudflare.x.SolidStart("Console", {
domain,
@@ -211,7 +207,7 @@ new sst.cloudflare.x.SolidStart("Console", {
transform: {
worker: {
placement: { mode: "smart" },
tailConsumers: logProcessor ? [{ service: logProcessor.nodes.worker.scriptName }] : [],
tailConsumers: [{ service: logProcessor.nodes.worker.scriptName }],
},
},
},

View File

@@ -40,6 +40,8 @@
"@tailwindcss/vite": "4.1.11",
"diff": "8.0.2",
"dompurify": "3.3.1",
"drizzle-kit": "1.0.0-beta.12-a5629fb",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"ai": "5.0.124",
"hono": "4.10.7",
"hono-openapi": "1.1.2",

View File

@@ -0,0 +1,36 @@
import { test, expect } from "../fixtures"
import { closeSidebar, hoverSessionItem } from "../actions"
import { projectSwitchSelector, sessionItemSelector } from "../selectors"
test("collapsed sidebar popover stays open when archiving a session", async ({ page, slug, sdk, gotoSession }) => {
const stamp = Date.now()
const one = await sdk.session.create({ title: `e2e sidebar popover archive 1 ${stamp}` }).then((r) => r.data)
const two = await sdk.session.create({ title: `e2e sidebar popover archive 2 ${stamp}` }).then((r) => r.data)
if (!one?.id) throw new Error("Session create did not return an id")
if (!two?.id) throw new Error("Session create did not return an id")
try {
await gotoSession(one.id)
await closeSidebar(page)
const project = page.locator(projectSwitchSelector(slug)).first()
await expect(project).toBeVisible()
await project.hover()
await expect(page.locator(sessionItemSelector(one.id)).first()).toBeVisible()
await expect(page.locator(sessionItemSelector(two.id)).first()).toBeVisible()
const item = await hoverSessionItem(page, one.id)
await item
.getByRole("button", { name: /archive/i })
.first()
.click()
await expect(page.locator(sessionItemSelector(two.id)).first()).toBeVisible()
} finally {
await sdk.session.delete({ sessionID: one.id }).catch(() => undefined)
await sdk.session.delete({ sessionID: two.id }).catch(() => undefined)
}
})

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/app",
"version": "1.1.55",
"version": "1.1.56",
"description": "",
"type": "module",
"exports": {

View File

@@ -166,6 +166,7 @@ export function SessionHeader() {
})
const [prefs, setPrefs] = persisted(Persist.global("open.app"), createStore({ app: "finder" as OpenApp }))
const [menu, setMenu] = createStore({ open: false })
const canOpen = createMemo(() => platform.platform === "desktop" && !!platform.openPath && server.isLocal())
const current = createMemo(() => options().find((o) => o.id === prefs.app) ?? options()[0])
@@ -355,7 +356,12 @@ export function SessionHeader() {
<span class="text-12-regular text-text-strong">Open</span>
</Button>
<div class="self-stretch w-px bg-border-base/70" />
<DropdownMenu gutter={6} placement="bottom-end">
<DropdownMenu
gutter={6}
placement="bottom-end"
open={menu.open}
onOpenChange={(open) => setMenu("open", open)}
>
<DropdownMenu.Trigger
as={IconButton}
icon="chevron-down"
@@ -375,7 +381,13 @@ export function SessionHeader() {
}}
>
{options().map((o) => (
<DropdownMenu.RadioItem value={o.id} onSelect={() => openDir(o.id)}>
<DropdownMenu.RadioItem
value={o.id}
onSelect={() => {
setMenu("open", false)
openDir(o.id)
}}
>
<div class="flex size-5 shrink-0 items-center justify-center">
<AppIcon id={o.icon} class={size(o.icon)} />
</div>
@@ -388,7 +400,12 @@ export function SessionHeader() {
</DropdownMenu.RadioGroup>
</DropdownMenu.Group>
<DropdownMenu.Separator />
<DropdownMenu.Item onSelect={copyPath}>
<DropdownMenu.Item
onSelect={() => {
setMenu("open", false)
copyPath()
}}
>
<div class="flex size-5 shrink-0 items-center justify-center">
<Icon name="copy" size="small" class="text-icon-weak" />
</div>

View File

@@ -231,6 +231,24 @@ export function applyDirectoryEvent(input: {
}
break
}
case "message.part.delta": {
const props = event.properties as { messageID: string; partID: string; field: string; delta: string }
const parts = input.store.part[props.messageID]
if (!parts) break
const result = Binary.search(parts, props.partID, (p) => p.id)
if (!result.found) break
input.setStore(
"part",
props.messageID,
produce((draft) => {
const part = draft[result.index]
const field = props.field as keyof typeof part
const existing = part[field] as string | undefined
;(part[field] as string) = (existing ?? "") + props.delta
}),
)
break
}
case "vcs.branch.updated": {
const props = event.properties as { branch: string }
const next = { branch: props.branch }

View File

@@ -54,6 +54,13 @@ export default function Layout(props: ParentProps) {
navigate(`/${params.dir}/session/${sessionID}`)
}
const sessionHref = (sessionID: string) => {
if (params.dir) return `/${params.dir}/session/${sessionID}`
return `/session/${sessionID}`
}
const syncSession = (sessionID: string) => sync.session.sync(sessionID)
return (
<DataProvider
data={sync.data}
@@ -62,6 +69,8 @@ export default function Layout(props: ParentProps) {
onQuestionReply={replyToQuestion}
onQuestionReject={rejectQuestion}
onNavigateToSession={navigateToSession}
onSessionHref={sessionHref}
onSyncSession={syncSession}
>
<LocalProvider>{props.children}</LocalProvider>
</DataProvider>

View File

@@ -181,20 +181,6 @@ export default function Layout(props: ParentProps) {
aim.reset()
})
createEffect(
on(
() => ({ dir: params.dir, id: params.id }),
() => {
if (layout.sidebar.opened()) return
if (!state.hoverProject) return
aim.reset()
setState("hoverSession", undefined)
setState("hoverProject", undefined)
},
{ defer: true },
),
)
const autoselecting = createMemo(() => {
if (params.dir) return false
if (!state.autoselect) return false

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-app",
"version": "1.1.55",
"version": "1.1.56",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -9,8 +9,8 @@ export const config = {
github: {
repoUrl: "https://github.com/anomalyco/opencode",
starsFormatted: {
compact: "95K",
full: "95,000",
compact: "100K",
full: "100,000",
},
},
@@ -22,8 +22,8 @@ export const config = {
// Static stats (used on landing page)
stats: {
contributors: "650",
commits: "8,500",
contributors: "700",
commits: "9,000",
monthlyUsers: "2.5M",
},
} as const

View File

@@ -351,8 +351,8 @@ export const dict = {
"changelog.empty":
"\u0644\u0645 \u064a\u062a\u0645 \u0627\u0644\u0639\u062b\u0648\u0631 \u0639\u0644\u0649 \u0623\u064a \u0625\u062f\u062e\u0627\u0644\u0627\u062a \u0641\u064a \u0633\u062c\u0644 \u0627\u0644\u062a\u063a\u064a\u064a\u0631\u0627\u062a.",
"changelog.viewJson": "\u0639\u0631\u0636 JSON",
"workspace.nav.zen": "Zen",
"workspace.nav.apiKeys": "مفاتيح API",
"workspace.nav.zen": "زين",
"workspace.nav.apiKeys": "API المفاتيح",
"workspace.nav.members": "أعضاء",
"workspace.nav.billing": "الفواتير",
"workspace.nav.settings": "إعدادات",
@@ -365,14 +365,14 @@ export const dict = {
"workspace.newUser.feature.quality.title": "أعلى جودة",
"workspace.newUser.feature.quality.body":
"الوصول إلى النماذج التي تم تكوينها لتحقيق الأداء الأمثل - لا يوجد تخفيضات أو توجيه إلى موفري الخدمة الأرخص.",
"workspace.newUser.feature.lockin.title": "بدون احتجاز بمزوّد واحد",
"workspace.newUser.feature.lockin.title": "لا يوجد قفل",
"workspace.newUser.feature.lockin.body":
"استخدم Zen مع أي وكيل ترميز، واستمر في استخدام موفري الخدمات الآخرين مع opencode وقتما تشاء.",
"workspace.newUser.copyApiKey": "انسخ مفتاح API",
"workspace.newUser.copyKey": "نسخ المفتاح",
"workspace.newUser.copied": "منسوخ!",
"workspace.newUser.step.enableBilling": "تمكين الفوترة",
"workspace.newUser.step.login.before": "شغّل",
"workspace.newUser.step.login.before": "يجري",
"workspace.newUser.step.login.after": "وحدد opencode",
"workspace.newUser.step.pasteKey": "الصق مفتاح API الخاص بك",
"workspace.newUser.step.models.before": "ابدأ opencode ثم قم بالتشغيل",
@@ -390,7 +390,7 @@ export const dict = {
"workspace.providers.saving": "توفير...",
"workspace.providers.save": "يحفظ",
"workspace.providers.table.provider": "مزود",
"workspace.providers.table.apiKey": "مفتاح API",
"workspace.providers.table.apiKey": "API المفتاح",
"workspace.usage.title": "تاريخ الاستخدام",
"workspace.usage.subtitle": "استخدام وتكاليف API الأخيرة.",
"workspace.usage.empty": "قم بإجراء أول مكالمة API للبدء.",
@@ -398,25 +398,25 @@ export const dict = {
"workspace.usage.table.model": "نموذج",
"workspace.usage.table.input": "مدخل",
"workspace.usage.table.output": "الإخراج",
"workspace.usage.table.cost": "التكلفة",
"workspace.usage.table.cost": "يكلف",
"workspace.usage.breakdown.input": "مدخل",
"workspace.usage.breakdown.cacheRead": "قراءة ذاكرة التخزين المؤقت",
"workspace.usage.breakdown.cacheWrite": "كتابة ذاكرة التخزين المؤقت",
"workspace.usage.breakdown.output": "الإخراج",
"workspace.usage.breakdown.reasoning": "المنطق",
"workspace.usage.subscription": "الاشتراك (${{amount}})",
"workspace.cost.title": "التكلفة",
"workspace.cost.title": "يكلف",
"workspace.cost.subtitle": "تكاليف الاستخدام مقسمة حسب النموذج.",
"workspace.cost.allModels": "جميع النماذج",
"workspace.cost.allModels": "جميع الموديلات",
"workspace.cost.allKeys": "جميع المفاتيح",
"workspace.cost.deletedSuffix": "(محذوف)",
"workspace.cost.empty": "لا توجد بيانات استخدام متاحة للفترة المحددة.",
"workspace.cost.subscriptionShort": "اشتراك",
"workspace.keys.title": "مفاتيح API",
"workspace.cost.subscriptionShort": "الفرعية",
"workspace.keys.title": "API المفاتيح",
"workspace.keys.subtitle": "إدارة مفاتيح API الخاصة بك للوصول إلى خدمات opencode.",
"workspace.keys.create": "قم بإنشاء مفتاح API",
"workspace.keys.placeholder": "أدخل اسم المفتاح",
"workspace.keys.empty": "أنشئ مفتاح API لبوابة opencode",
"workspace.keys.empty": "قم بإنشاء مفتاح opencode للبوابة API",
"workspace.keys.table.name": "اسم",
"workspace.keys.table.key": "مفتاح",
"workspace.keys.table.createdBy": "تم الإنشاء بواسطة",
@@ -442,14 +442,14 @@ export const dict = {
"workspace.members.table.email": "بريد إلكتروني",
"workspace.members.table.role": "دور",
"workspace.members.table.monthLimit": "حد الشهر",
"workspace.members.role.admin": "مسؤول",
"workspace.members.role.admin": "مسؤل",
"workspace.members.role.adminDescription": "يمكن إدارة النماذج، والأعضاء، والفواتير",
"workspace.members.role.member": "عضو",
"workspace.members.role.memberDescription": "يمكنهم فقط إنشاء مفاتيح API لأنفسهم",
"workspace.settings.title": "إعدادات",
"workspace.settings.subtitle": "قم بتحديث اسم مساحة العمل الخاصة بك وتفضيلاتك.",
"workspace.settings.workspaceName": "اسم مساحة العمل",
"workspace.settings.defaultName": "الافتراضي",
"workspace.settings.defaultName": "تقصير",
"workspace.settings.updating": "جارٍ التحديث...",
"workspace.settings.save": "يحفظ",
"workspace.settings.edit": "يحرر",
@@ -461,37 +461,37 @@ export const dict = {
"workspace.billing.add": "أضف $",
"workspace.billing.enterAmount": "أدخل المبلغ",
"workspace.billing.loading": "تحميل...",
"workspace.billing.addAction": "إضافة",
"workspace.billing.addAction": "يضيف",
"workspace.billing.addBalance": "إضافة الرصيد",
"workspace.billing.linkedToStripe": "مرتبط بـ Stripe",
"workspace.billing.manage": "إدارة",
"workspace.billing.linkedToStripe": "مرتبطة بالشريط",
"workspace.billing.manage": "يدير",
"workspace.billing.enable": "تمكين الفوترة",
"workspace.monthlyLimit.title": "الحد الشهري",
"workspace.monthlyLimit.subtitle": "قم بتعيين حد الاستخدام الشهري لحسابك.",
"workspace.monthlyLimit.placeholder": "50",
"workspace.monthlyLimit.setting": ارٍ التعيين...",
"workspace.monthlyLimit.setting": لسة...",
"workspace.monthlyLimit.set": "تعيين",
"workspace.monthlyLimit.edit": "تحرير الحد",
"workspace.monthlyLimit.noLimit": "لم يتم تعيين حد الاستخدام.",
"workspace.monthlyLimit.currentUsage.beforeMonth": "الاستخدام الحالي ل",
"workspace.monthlyLimit.currentUsage.beforeAmount": "هو $",
"workspace.reload.title": "إعادة الشحن التلقائي",
"workspace.reload.disabled.before": "إعادة الشحن التلقائي",
"workspace.reload.disabled.state": "معطّل",
"workspace.reload.disabled.after": "فعّلها لإعادة شحن الرصيد تلقائيًا عندما يكون منخفضًا.",
"workspace.reload.enabled.before": "إعادة الشحن التلقائي",
"workspace.reload.title": "إعادة التحميل التلقائي",
"workspace.reload.disabled.before": "إعادة التحميل التلقائي هو",
"workspace.reload.disabled.state": "عاجز",
"workspace.reload.disabled.after": "تمكين إعادة التحميل تلقائيًا عندما يكون الرصيد منخفضًا.",
"workspace.reload.enabled.before": "إعادة التحميل التلقائي هو",
"workspace.reload.enabled.state": "ممكّن",
"workspace.reload.enabled.middle": "سنعيد شحن رصيدك بمبلغ",
"workspace.reload.enabled.middle": "سنقوم بإعادة التحميل",
"workspace.reload.processingFee": "رسوم المعالجة",
"workspace.reload.enabled.after": "عندما يصل الرصيد إلى",
"workspace.reload.enabled.after": "عندما يصل التوازن",
"workspace.reload.edit": "يحرر",
"workspace.reload.enable": "تفعيل",
"workspace.reload.enableAutoReload": فعيل إعادة الشحن التلقائي",
"workspace.reload.reloadAmount": "مبلغ إعادة الشحن $",
"workspace.reload.enable": "يُمكَِن",
"workspace.reload.enableAutoReload": مكين إعادة التحميل التلقائي",
"workspace.reload.reloadAmount": "إعادة تحميل $",
"workspace.reload.whenBalanceReaches": "عندما يصل الرصيد إلى $",
"workspace.reload.saving": "توفير...",
"workspace.reload.save": "يحفظ",
"workspace.reload.failedAt": "فشلت إعادة الشحن في",
"workspace.reload.failedAt": "فشلت عملية إعادة التحميل عند",
"workspace.reload.reason": "سبب:",
"workspace.reload.updatePaymentMethod": "يرجى تحديث طريقة الدفع الخاصة بك والمحاولة مرة أخرى.",
"workspace.reload.retrying": "جارٍ إعادة المحاولة...",
@@ -500,11 +500,11 @@ export const dict = {
"workspace.payments.subtitle": "معاملات الدفع الأخيرة.",
"workspace.payments.table.date": "تاريخ",
"workspace.payments.table.paymentId": "معرف الدفع",
"workspace.payments.table.amount": "المبلغ",
"workspace.payments.table.amount": "كمية",
"workspace.payments.table.receipt": "إيصال",
"workspace.payments.type.credit": "ائتمان",
"workspace.payments.type.subscription": "الاشتراك",
"workspace.payments.view": "عرض",
"workspace.payments.view": "منظر",
"workspace.black.loading": "تحميل...",
"workspace.black.time.day": "يوم",
"workspace.black.time.days": "أيام",
@@ -521,8 +521,8 @@ export const dict = {
"workspace.black.subscription.resetsIn": "إعادة تعيين في",
"workspace.black.subscription.useBalance": "استخدم رصيدك المتوفر بعد الوصول إلى حدود الاستخدام",
"workspace.black.waitlist.title": "قائمة الانتظار",
"workspace.black.waitlist.joined": "أنت على قائمة الانتظار لخطة OpenCode Black بقيمة ${{plan}} شهريًا.",
"workspace.black.waitlist.ready": "نحن مستعدون لتسجيلك في خطة OpenCode Black بقيمة ${{plan}} شهريًا.",
"workspace.black.waitlist.joined": "أنت على قائمة الانتظار للخطة السوداء {{plan}} دولار شهريًا OpenCode.",
"workspace.black.waitlist.ready": "نحن على استعداد لتسجيلك في خطة Black {{plan}} الشهرية OpenCode.",
"workspace.black.waitlist.leave": "ترك قائمة الانتظار",
"workspace.black.waitlist.leaving": "مغادرة...",
"workspace.black.waitlist.left": "غادر",

View File

@@ -294,18 +294,18 @@ export const dict = {
"workspace.home.billing.currentBalance": "Nuværende saldo",
"workspace.newUser.feature.tested.title": "Testede og verificerede modeller",
"workspace.newUser.feature.tested.body":
"Vi har benchmarket og testet modeller specifikt til kodningsagenter for at sikre den bedste ydeevne.",
"Vi har benchmarket og testet modeller specifikt til kodningsmidler for at sikre den bedste ydeevne.",
"workspace.newUser.feature.quality.title": "Højeste kvalitet",
"workspace.newUser.feature.quality.body":
"Få adgang til modeller konfigureret til optimal ydeevne - ingen nedgraderinger eller routing til billigere udbydere.",
"workspace.newUser.feature.lockin.title": "Ingen indlåsning",
"workspace.newUser.feature.lockin.body":
"Brug Zen med en hvilken som helst kodningsagent, og fortsæt med at bruge andre udbydere med opencode, når du vil.",
"workspace.newUser.copyApiKey": "Kopiér API-nøgle",
"workspace.newUser.copyApiKey": "Kopiér nøglen API",
"workspace.newUser.copyKey": "Kopier nøgle",
"workspace.newUser.copied": "Kopieret!",
"workspace.newUser.step.enableBilling": "Aktiver fakturering",
"workspace.newUser.step.login.before": "Kør",
"workspace.newUser.step.login.before": "Løbe",
"workspace.newUser.step.login.after": "og vælg opencode",
"workspace.newUser.step.pasteKey": "Indsæt din API nøgle",
"workspace.newUser.step.models.before": "Start opencode og kør",
@@ -316,12 +316,12 @@ export const dict = {
"workspace.models.table.enabled": "Aktiveret",
"workspace.providers.title": "Medbring din egen nøgle",
"workspace.providers.subtitle": "Konfigurer dine egne API nøgler fra AI-udbydere.",
"workspace.providers.placeholder": "Indtast {{provider}} API-nøgle ({{prefix}}...)",
"workspace.providers.placeholder": "Indtast nøglen {{provider}} API ({{prefix}}...)",
"workspace.providers.configure": "Konfigurer",
"workspace.providers.edit": "Rediger",
"workspace.providers.edit": "Redigere",
"workspace.providers.delete": "Slet",
"workspace.providers.saving": "Gemmer...",
"workspace.providers.save": "Gem",
"workspace.providers.save": "Spare",
"workspace.providers.table.provider": "Udbyder",
"workspace.providers.table.apiKey": "API Nøgle",
"workspace.usage.title": "Brugshistorik",
@@ -330,15 +330,15 @@ export const dict = {
"workspace.usage.table.date": "Dato",
"workspace.usage.table.model": "Model",
"workspace.usage.table.input": "Input",
"workspace.usage.table.output": "Output",
"workspace.usage.table.cost": "Omkostning",
"workspace.usage.table.output": "Produktion",
"workspace.usage.table.cost": "Koste",
"workspace.usage.breakdown.input": "Input",
"workspace.usage.breakdown.cacheRead": "Cache læst",
"workspace.usage.breakdown.cacheWrite": "Cache skriv",
"workspace.usage.breakdown.output": "Output",
"workspace.usage.breakdown.output": "Produktion",
"workspace.usage.breakdown.reasoning": "Ræsonnement",
"workspace.usage.subscription": "abonnement (${{amount}})",
"workspace.cost.title": "Omkostninger",
"workspace.cost.title": "Koste",
"workspace.cost.subtitle": "Brugsomkostninger opdelt efter model.",
"workspace.cost.allModels": "Alle modeller",
"workspace.cost.allKeys": "Alle nøgler",
@@ -354,7 +354,7 @@ export const dict = {
"workspace.keys.table.key": "Nøgle",
"workspace.keys.table.createdBy": "Skabt af",
"workspace.keys.table.lastUsed": "Sidst brugt",
"workspace.keys.copyApiKey": "Kopiér API-nøgle",
"workspace.keys.copyApiKey": "Kopiér nøglen API",
"workspace.keys.delete": "Slet",
"workspace.members.title": "Medlemmer",
"workspace.members.subtitle": "Administrer arbejdsområdemedlemmer og deres tilladelser.",
@@ -368,10 +368,10 @@ export const dict = {
"workspace.members.noLimit": "Ingen grænse",
"workspace.members.noLimitLowercase": "ingen grænse",
"workspace.members.invited": "inviteret",
"workspace.members.edit": "Rediger",
"workspace.members.edit": "Redigere",
"workspace.members.delete": "Slet",
"workspace.members.saving": "Gemmer...",
"workspace.members.save": "Gem",
"workspace.members.save": "Spare",
"workspace.members.table.email": "E-mail",
"workspace.members.table.role": "Rolle",
"workspace.members.table.monthLimit": "Månedsgrænse",
@@ -382,10 +382,10 @@ export const dict = {
"workspace.settings.title": "Indstillinger",
"workspace.settings.subtitle": "Opdater dit arbejdsområdes navn og præferencer.",
"workspace.settings.workspaceName": "Arbejdsområdets navn",
"workspace.settings.defaultName": "Standard",
"workspace.settings.defaultName": "Misligholdelse",
"workspace.settings.updating": "Opdaterer...",
"workspace.settings.save": "Gem",
"workspace.settings.edit": "Rediger",
"workspace.settings.save": "Spare",
"workspace.settings.edit": "Redigere",
"workspace.billing.title": "Fakturering",
"workspace.billing.subtitle.beforeLink": "Administrer betalingsmetoder.",
"workspace.billing.contactUs": "Kontakt os",
@@ -394,10 +394,10 @@ export const dict = {
"workspace.billing.add": "Tilføj $",
"workspace.billing.enterAmount": "Indtast beløb",
"workspace.billing.loading": "Indlæser...",
"workspace.billing.addAction": "Tilføj",
"workspace.billing.addAction": "Tilføje",
"workspace.billing.addBalance": "Tilføj balance",
"workspace.billing.linkedToStripe": "Forbundet til Stripe",
"workspace.billing.manage": "Administrer",
"workspace.billing.manage": "Styre",
"workspace.billing.enable": "Aktiver fakturering",
"workspace.monthlyLimit.title": "Månedlig grænse",
"workspace.monthlyLimit.subtitle": "Indstil en månedlig forbrugsgrænse for din konto.",
@@ -408,23 +408,23 @@ export const dict = {
"workspace.monthlyLimit.noLimit": "Ingen forbrugsgrænse angivet.",
"workspace.monthlyLimit.currentUsage.beforeMonth": "Nuværende brug for",
"workspace.monthlyLimit.currentUsage.beforeAmount": "er $",
"workspace.reload.title": "Automatisk genopfyldning",
"workspace.reload.disabled.before": "Automatisk genopfyldning er",
"workspace.reload.disabled.state": "deaktiveret",
"workspace.reload.disabled.after": "Aktiver for automatisk at genopfylde, når saldoen er lav.",
"workspace.reload.enabled.before": "Automatisk genopfyldning er",
"workspace.reload.title": "Automatisk genindlæsning",
"workspace.reload.disabled.before": "Automatisk genindlæsning er",
"workspace.reload.disabled.state": "handicappet",
"workspace.reload.disabled.after": "Aktiver for automatisk at genindlæse, når balancen er lav.",
"workspace.reload.enabled.before": "Automatisk genindlæsning er",
"workspace.reload.enabled.state": "aktiveret",
"workspace.reload.enabled.middle": "Vi genopfylder",
"workspace.reload.enabled.middle": "Vi genindlæser",
"workspace.reload.processingFee": "ekspeditionsgebyr",
"workspace.reload.enabled.after": "når balancen er nået",
"workspace.reload.edit": "Rediger",
"workspace.reload.edit": "Redigere",
"workspace.reload.enable": "Aktiver",
"workspace.reload.enableAutoReload": "Aktiver automatisk genopfyldning",
"workspace.reload.reloadAmount": "Genopfyld $",
"workspace.reload.enableAutoReload": "Aktiver automatisk genindlæsning",
"workspace.reload.reloadAmount": "Genindlæs $",
"workspace.reload.whenBalanceReaches": "Når saldoen når $",
"workspace.reload.saving": "Gemmer...",
"workspace.reload.save": "Gem",
"workspace.reload.failedAt": "Genopfyldning mislykkedes kl",
"workspace.reload.save": "Spare",
"workspace.reload.failedAt": "Genindlæsning mislykkedes kl",
"workspace.reload.reason": "Årsag:",
"workspace.reload.updatePaymentMethod": "Opdater din betalingsmetode, og prøv igen.",
"workspace.reload.retrying": "Prøver igen...",
@@ -434,10 +434,10 @@ export const dict = {
"workspace.payments.table.date": "Dato",
"workspace.payments.table.paymentId": "Betalings-id",
"workspace.payments.table.amount": "Beløb",
"workspace.payments.table.receipt": "Kvittering",
"workspace.payments.table.receipt": "Modtagelse",
"workspace.payments.type.credit": "kredit",
"workspace.payments.type.subscription": "abonnement",
"workspace.payments.view": "Vis",
"workspace.payments.view": "Udsigt",
"workspace.black.loading": "Indlæser...",
"workspace.black.time.day": "dag",
"workspace.black.time.days": "dage",
@@ -458,8 +458,8 @@ export const dict = {
"workspace.black.waitlist.ready": "Vi er klar til at tilmelde dig ${{plan}} per måned OpenCode Black plan.",
"workspace.black.waitlist.leave": "Forlad venteliste",
"workspace.black.waitlist.leaving": "Forlader...",
"workspace.black.waitlist.left": "Forladt",
"workspace.black.waitlist.enroll": "Tilmeld",
"workspace.black.waitlist.left": "Venstre",
"workspace.black.waitlist.enroll": "Indskrive",
"workspace.black.waitlist.enrolling": "Tilmelder...",
"workspace.black.waitlist.enrolled": "Tilmeldt",
"workspace.black.waitlist.enrollNote":

View File

@@ -306,27 +306,27 @@ export const dict = {
"workspace.newUser.feature.lockin.title": "Kein Lock-in",
"workspace.newUser.feature.lockin.body":
"Verwenden Sie Zen mit einem beliebigen Codierungsagenten und nutzen Sie weiterhin andere Anbieter mit opencode, wann immer Sie möchten.",
"workspace.newUser.copyApiKey": "API-Schlüssel kopieren",
"workspace.newUser.copyApiKey": "Kopieren Sie den Schlüssel API",
"workspace.newUser.copyKey": "Schlüssel kopieren",
"workspace.newUser.copied": "Kopiert!",
"workspace.newUser.step.enableBilling": "Abrechnung aktivieren",
"workspace.newUser.step.login.before": "Führe",
"workspace.newUser.step.login.before": "Laufen",
"workspace.newUser.step.login.after": "und wählen Sie opencode",
"workspace.newUser.step.pasteKey": "Fügen Sie Ihren API-Schlüssel ein",
"workspace.newUser.step.models.before": "Starte opencode und führe",
"workspace.newUser.step.models.before": "Starten Sie opencode und führen Sie es aus",
"workspace.newUser.step.models.after": "um ein Modell auszuwählen",
"workspace.models.title": "Modelle",
"workspace.models.subtitle.beforeLink":
"Verwalten Sie, auf welche Modelle Arbeitsbereichsmitglieder zugreifen können.",
"workspace.models.table.model": "Modell",
"workspace.models.table.enabled": "Aktiviert",
"workspace.models.table.enabled": "Ermöglicht",
"workspace.providers.title": "Bringen Sie Ihren eigenen Schlüssel mit",
"workspace.providers.subtitle": "Konfigurieren Sie Ihre eigenen API-Schlüssel von KI-Anbietern.",
"workspace.providers.placeholder": "Geben Sie den Schlüssel {{provider}} API ein ({{prefix}}...)",
"workspace.providers.configure": "Konfigurieren",
"workspace.providers.edit": "Bearbeiten",
"workspace.providers.delete": "Löschen",
"workspace.providers.saving": "Wird gespeichert...",
"workspace.providers.saving": "Sparen...",
"workspace.providers.save": "Speichern",
"workspace.providers.table.provider": "Anbieter",
"workspace.providers.table.apiKey": "API-Schlüssel",
@@ -335,14 +335,14 @@ export const dict = {
"workspace.usage.empty": "Machen Sie Ihren ersten API-Aufruf, um loszulegen.",
"workspace.usage.table.date": "Datum",
"workspace.usage.table.model": "Modell",
"workspace.usage.table.input": "Input",
"workspace.usage.table.output": "Output",
"workspace.usage.table.input": "Eingang",
"workspace.usage.table.output": "Ausgabe",
"workspace.usage.table.cost": "Kosten",
"workspace.usage.breakdown.input": "Input",
"workspace.usage.breakdown.input": "Eingang",
"workspace.usage.breakdown.cacheRead": "Cache-Lesen",
"workspace.usage.breakdown.cacheWrite": "Cache-Schreiben",
"workspace.usage.breakdown.output": "Output",
"workspace.usage.breakdown.reasoning": "Reasoning",
"workspace.usage.breakdown.output": "Ausgabe",
"workspace.usage.breakdown.reasoning": "Argumentation",
"workspace.usage.subscription": "Abonnement (${{amount}})",
"workspace.cost.title": "Kosten",
"workspace.cost.subtitle": "Nutzungskosten aufgeschlüsselt nach Modell.",
@@ -360,12 +360,12 @@ export const dict = {
"workspace.keys.table.key": "Schlüssel",
"workspace.keys.table.createdBy": "Erstellt von",
"workspace.keys.table.lastUsed": "Zuletzt verwendet",
"workspace.keys.copyApiKey": "API-Schlüssel kopieren",
"workspace.keys.copyApiKey": "Kopieren Sie den Schlüssel API",
"workspace.keys.delete": "Löschen",
"workspace.members.title": "Mitglieder",
"workspace.members.subtitle": "Verwalten Sie Arbeitsbereichsmitglieder und ihre Berechtigungen.",
"workspace.members.invite": "Mitglied einladen",
"workspace.members.inviting": "Wird eingeladen...",
"workspace.members.inviting": "Einladend...",
"workspace.members.beta.beforeLink": "Während der Betaversion sind Arbeitsbereiche für Teams kostenlos.",
"workspace.members.form.invitee": "Eingeladen",
"workspace.members.form.emailPlaceholder": "Geben Sie Ihre E-Mail-Adresse ein",
@@ -376,7 +376,7 @@ export const dict = {
"workspace.members.invited": "eingeladen",
"workspace.members.edit": "Bearbeiten",
"workspace.members.delete": "Löschen",
"workspace.members.saving": "Wird gespeichert...",
"workspace.members.saving": "Sparen...",
"workspace.members.save": "Speichern",
"workspace.members.table.email": "E-Mail",
"workspace.members.table.role": "Rolle",
@@ -408,30 +408,30 @@ export const dict = {
"workspace.monthlyLimit.title": "Monatliches Limit",
"workspace.monthlyLimit.subtitle": "Legen Sie ein monatliches Nutzungslimit für Ihr Konto fest.",
"workspace.monthlyLimit.placeholder": "50",
"workspace.monthlyLimit.setting": "Wird gesetzt...",
"workspace.monthlyLimit.set": "Festlegen",
"workspace.monthlyLimit.setting": "Einstellung...",
"workspace.monthlyLimit.set": "Satz",
"workspace.monthlyLimit.edit": "Limit bearbeiten",
"workspace.monthlyLimit.noLimit": "Kein Nutzungslimit festgelegt.",
"workspace.monthlyLimit.currentUsage.beforeMonth": "Aktuelle Nutzung für",
"workspace.monthlyLimit.currentUsage.beforeAmount": "ist $",
"workspace.reload.title": "Automatische Aufladung",
"workspace.reload.disabled.before": "Automatische Aufladung ist",
"workspace.reload.title": "Automatisches Neuladen",
"workspace.reload.disabled.before": "Automatisches Nachladen ist",
"workspace.reload.disabled.state": "deaktiviert",
"workspace.reload.disabled.after":
"Aktivieren Sie diese Option, damit bei niedrigem Kontostand automatisch aufgeladen wird.",
"workspace.reload.enabled.before": "Automatische Aufladung ist",
"workspace.reload.enabled.state": "aktiviert",
"workspace.reload.enabled.middle": "Wir laden auf",
"Aktivieren Sie diese Option, um das Guthaben automatisch neu zu laden, wenn das Guthaben niedrig ist.",
"workspace.reload.enabled.before": "Automatisches Nachladen ist",
"workspace.reload.enabled.state": "ermöglicht",
"workspace.reload.enabled.middle": "Wir laden nach",
"workspace.reload.processingFee": "Bearbeitungsgebühr",
"workspace.reload.enabled.after": "sobald der Kontostand",
"workspace.reload.enabled.after": "wenn das Gleichgewicht erreicht ist",
"workspace.reload.edit": "Bearbeiten",
"workspace.reload.enable": "Aktivieren",
"workspace.reload.enableAutoReload": "Automatische Aufladung aktivieren",
"workspace.reload.reloadAmount": "Aufladebetrag $",
"workspace.reload.whenBalanceReaches": "Wenn der Kontostand $ erreicht",
"workspace.reload.saving": "Wird gespeichert...",
"workspace.reload.enableAutoReload": "Aktivieren Sie das automatische Neuladen",
"workspace.reload.reloadAmount": "$ neu laden",
"workspace.reload.whenBalanceReaches": "Wenn der Saldo $ erreicht",
"workspace.reload.saving": "Sparen...",
"workspace.reload.save": "Speichern",
"workspace.reload.failedAt": "Aufladung fehlgeschlagen am",
"workspace.reload.failedAt": "Neuladen fehlgeschlagen bei",
"workspace.reload.reason": "Grund:",
"workspace.reload.updatePaymentMethod": "Bitte aktualisieren Sie Ihre Zahlungsmethode und versuchen Sie es erneut.",
"workspace.reload.retrying": "Erneuter Versuch...",
@@ -440,11 +440,11 @@ export const dict = {
"workspace.payments.subtitle": "Letzte Zahlungsvorgänge.",
"workspace.payments.table.date": "Datum",
"workspace.payments.table.paymentId": "Zahlungs-ID",
"workspace.payments.table.amount": "Betrag",
"workspace.payments.table.amount": "Menge",
"workspace.payments.table.receipt": "Quittung",
"workspace.payments.type.credit": "Kredit",
"workspace.payments.type.subscription": "Abonnement",
"workspace.payments.view": "Anzeigen",
"workspace.payments.view": "Sicht",
"workspace.black.loading": "Laden...",
"workspace.black.time.day": "Tag",
"workspace.black.time.days": "Tage",
@@ -454,21 +454,21 @@ export const dict = {
"workspace.black.time.minutes": "Minuten",
"workspace.black.time.fewSeconds": "ein paar Sekunden",
"workspace.black.subscription.title": "Abonnement",
"workspace.black.subscription.message": "Sie haben OpenCode Black für ${{plan}} pro Monat abonniert.",
"workspace.black.subscription.message": "Sie haben OpenCode Black für {{plan}} pro Monat abonniert.",
"workspace.black.subscription.manage": "Abonnement verwalten",
"workspace.black.subscription.rollingUsage": "5-stündige Nutzung",
"workspace.black.subscription.weeklyUsage": "Wöchentliche Nutzung",
"workspace.black.subscription.resetsIn": "Zurückgesetzt in",
"workspace.black.subscription.resetsIn": "Wird zurückgesetzt",
"workspace.black.subscription.useBalance":
"Nutzen Sie Ihr verfügbares Guthaben, nachdem Sie die Nutzungslimits erreicht haben",
"workspace.black.waitlist.title": "Warteliste",
"workspace.black.waitlist.joined":
"Sie stehen auf der Warteliste für den OpenCode Black Tarif für ${{plan}} pro Monat.",
"Sie stehen auf der Warteliste für den Black-Plan im Wert von ${{plan}} pro Monat OpenCode.",
"workspace.black.waitlist.ready":
"Wir können Sie jetzt in den OpenCode Black Tarif für ${{plan}} pro Monat aufnehmen.",
"Wir sind bereit, Sie für den Black-Plan im Wert von ${{plan}} pro Monat OpenCode anzumelden.",
"workspace.black.waitlist.leave": "Warteliste verlassen",
"workspace.black.waitlist.leaving": "Verlassen...",
"workspace.black.waitlist.left": "Verlassen",
"workspace.black.waitlist.left": "Links",
"workspace.black.waitlist.enroll": "Einschreiben",
"workspace.black.waitlist.enrolling": "Anmeldung...",
"workspace.black.waitlist.enrolled": "Eingeschrieben",

View File

@@ -284,8 +284,8 @@ export const dict = {
"changelog.hero.subtitle": "Nuovi aggiornamenti e miglioramenti per OpenCode",
"changelog.empty": "Nessuna voce di changelog trovata.",
"changelog.viewJson": "Visualizza JSON",
"workspace.nav.zen": "Zen",
"workspace.nav.apiKeys": "Chiavi API",
"workspace.nav.zen": "zen",
"workspace.nav.apiKeys": "API Chiavi",
"workspace.nav.members": "Membri",
"workspace.nav.billing": "Fatturazione",
"workspace.nav.settings": "Impostazioni",
@@ -299,14 +299,14 @@ export const dict = {
"workspace.newUser.feature.quality.title": "Massima qualità",
"workspace.newUser.feature.quality.body":
"Modelli di accesso configurati per prestazioni ottimali: senza downgrade o instradamento verso fornitori più economici.",
"workspace.newUser.feature.lockin.title": "Nessun lock-in",
"workspace.newUser.feature.lockin.title": "Nessun blocco",
"workspace.newUser.feature.lockin.body":
"Utilizza Zen con qualsiasi agente di codifica e continua a utilizzare altri provider con opencode ogni volta che vuoi.",
"workspace.newUser.copyApiKey": "Copia la chiave API",
"workspace.newUser.copyKey": "Copia chiave",
"workspace.newUser.copied": "Copiato!",
"workspace.newUser.step.enableBilling": "Abilita fatturazione",
"workspace.newUser.step.login.before": "Esegui",
"workspace.newUser.step.login.before": "Correre",
"workspace.newUser.step.login.after": "e seleziona opencode",
"workspace.newUser.step.pasteKey": "Incolla la tua chiave API",
"workspace.newUser.step.models.before": "Avvia opencode ed esegui",
@@ -315,16 +315,16 @@ export const dict = {
"workspace.models.subtitle.beforeLink": "Gestire i modelli a cui possono accedere i membri dell'area di lavoro.",
"workspace.models.table.model": "Modello",
"workspace.models.table.enabled": "Abilitato",
"workspace.providers.title": "Bring Your Own Key (BYOK)",
"workspace.providers.title": "Porta la tua chiave",
"workspace.providers.subtitle": "Configura le tue chiavi API dai fornitori di intelligenza artificiale.",
"workspace.providers.placeholder": "Inserisci la chiave {{provider}} API ({{prefix}}...)",
"workspace.providers.configure": "Configura",
"workspace.providers.edit": "Modificare",
"workspace.providers.delete": "Eliminare",
"workspace.providers.saving": "Salvataggio in corso...",
"workspace.providers.saving": "Risparmio...",
"workspace.providers.save": "Salva",
"workspace.providers.table.provider": "Fornitore",
"workspace.providers.table.apiKey": "Chiave API",
"workspace.providers.table.apiKey": "API Chiave",
"workspace.usage.title": "Cronologia dell'utilizzo",
"workspace.usage.subtitle": "Utilizzo e costi recenti di API.",
"workspace.usage.empty": "Effettua la tua prima chiamata API per iniziare.",
@@ -346,7 +346,7 @@ export const dict = {
"workspace.cost.deletedSuffix": "(eliminato)",
"workspace.cost.empty": "Nessun dato di utilizzo disponibile per il periodo selezionato.",
"workspace.cost.subscriptionShort": "sub",
"workspace.keys.title": "Chiavi API",
"workspace.keys.title": "API Chiavi",
"workspace.keys.subtitle": "Gestisci le tue chiavi API per accedere ai servizi opencode.",
"workspace.keys.create": "Crea chiave API",
"workspace.keys.placeholder": "Inserisci il nome della chiave",
@@ -360,7 +360,7 @@ export const dict = {
"workspace.members.title": "Membri",
"workspace.members.subtitle": "Gestire i membri dell'area di lavoro e le relative autorizzazioni.",
"workspace.members.invite": "Invita membro",
"workspace.members.inviting": "Invito in corso...",
"workspace.members.inviting": "Invitante...",
"workspace.members.beta.beforeLink": "Gli spazi di lavoro sono gratuiti per i team durante la beta.",
"workspace.members.form.invitee": "Invitato",
"workspace.members.form.emailPlaceholder": "Inserisci l'e-mail",
@@ -371,12 +371,12 @@ export const dict = {
"workspace.members.invited": "invitato",
"workspace.members.edit": "Modificare",
"workspace.members.delete": "Eliminare",
"workspace.members.saving": "Salvataggio in corso...",
"workspace.members.saving": "Risparmio...",
"workspace.members.save": "Salva",
"workspace.members.table.email": "E-mail",
"workspace.members.table.role": "Ruolo",
"workspace.members.table.monthLimit": "Limite mensile",
"workspace.members.role.admin": "Admin",
"workspace.members.role.admin": "Ammin",
"workspace.members.role.adminDescription": "Può gestire modelli, membri e fatturazione",
"workspace.members.role.member": "Membro",
"workspace.members.role.memberDescription": "Possono generare chiavi API solo per se stessi",
@@ -388,42 +388,42 @@ export const dict = {
"workspace.settings.save": "Salva",
"workspace.settings.edit": "Modificare",
"workspace.billing.title": "Fatturazione",
"workspace.billing.subtitle.beforeLink": "Gestisci i metodi di pagamento.",
"workspace.billing.subtitle.beforeLink": "Gestire i metodi di pagamento.",
"workspace.billing.contactUs": "Contattaci",
"workspace.billing.subtitle.afterLink": "se hai qualche domanda",
"workspace.billing.currentBalance": "Saldo attuale",
"workspace.billing.add": "Aggiungi $",
"workspace.billing.enterAmount": "Inserisci l'importo",
"workspace.billing.loading": "Caricamento...",
"workspace.billing.addAction": "Aggiungi",
"workspace.billing.addAction": "Aggiungere",
"workspace.billing.addBalance": "Aggiungi saldo",
"workspace.billing.linkedToStripe": "Collegato a Stripe",
"workspace.billing.manage": "Gestisci",
"workspace.billing.manage": "Maneggio",
"workspace.billing.enable": "Abilita fatturazione",
"workspace.monthlyLimit.title": "Limite mensile",
"workspace.monthlyLimit.subtitle": "Imposta un limite di utilizzo mensile per il tuo account.",
"workspace.monthlyLimit.placeholder": "50",
"workspace.monthlyLimit.setting": "Impostazione in corso...",
"workspace.monthlyLimit.setting": "Collocamento...",
"workspace.monthlyLimit.set": "Impostato",
"workspace.monthlyLimit.edit": "Modifica limite",
"workspace.monthlyLimit.noLimit": "Nessun limite di utilizzo impostato.",
"workspace.monthlyLimit.currentUsage.beforeMonth": "Utilizzo attuale per",
"workspace.monthlyLimit.currentUsage.beforeAmount": "è $",
"workspace.reload.title": "Ricarica automatica",
"workspace.reload.disabled.before": "La ricarica automatica è",
"workspace.reload.disabled.before": "La ricarica automatica lo è",
"workspace.reload.disabled.state": "disabilitato",
"workspace.reload.disabled.after": "Abilita la ricarica automatica quando il saldo è basso.",
"workspace.reload.enabled.before": "La ricarica automatica è",
"workspace.reload.enabled.before": "La ricarica automatica lo è",
"workspace.reload.enabled.state": "abilitato",
"workspace.reload.enabled.middle": "Ricaricheremo",
"workspace.reload.processingFee": "tassa di elaborazione",
"workspace.reload.enabled.after": "quando il saldo raggiunge",
"workspace.reload.enabled.after": "quando l'equilibrio raggiunge",
"workspace.reload.edit": "Modificare",
"workspace.reload.enable": "Abilitare",
"workspace.reload.enableAutoReload": "Abilita ricarica automatica",
"workspace.reload.reloadAmount": "Ricarica $",
"workspace.reload.whenBalanceReaches": "Quando il saldo raggiunge $",
"workspace.reload.saving": "Salvataggio in corso...",
"workspace.reload.saving": "Risparmio...",
"workspace.reload.save": "Salva",
"workspace.reload.failedAt": "Ricarica non riuscita a",
"workspace.reload.reason": "Motivo:",
@@ -434,11 +434,11 @@ export const dict = {
"workspace.payments.subtitle": "Transazioni di pagamento recenti.",
"workspace.payments.table.date": "Data",
"workspace.payments.table.paymentId": "ID pagamento",
"workspace.payments.table.amount": "Importo",
"workspace.payments.table.amount": "Quantità",
"workspace.payments.table.receipt": "Ricevuta",
"workspace.payments.type.credit": "credito",
"workspace.payments.type.subscription": "sottoscrizione",
"workspace.payments.view": "Visualizza",
"workspace.payments.view": "Visualizzazione",
"workspace.black.loading": "Caricamento...",
"workspace.black.time.day": "giorno",
"workspace.black.time.days": "giorni",
@@ -452,14 +452,14 @@ export const dict = {
"workspace.black.subscription.manage": "Gestisci abbonamento",
"workspace.black.subscription.rollingUsage": "Utilizzo di 5 ore",
"workspace.black.subscription.weeklyUsage": "Utilizzo settimanale",
"workspace.black.subscription.resetsIn": "Si reimposta tra",
"workspace.black.subscription.resetsIn": "Si reimposta",
"workspace.black.subscription.useBalance": "Utilizza il saldo disponibile dopo aver raggiunto i limiti di utilizzo",
"workspace.black.waitlist.title": "Lista d'attesa",
"workspace.black.waitlist.joined": "Sei in lista d'attesa per il piano OpenCode Black da ${{plan}} al mese.",
"workspace.black.waitlist.joined": "Sei in lista d'attesa per il piano nero ${{plan}} al mese OpenCode.",
"workspace.black.waitlist.ready": "Siamo pronti per iscriverti al piano OpenCode Black da ${{plan}} al mese.",
"workspace.black.waitlist.leave": "Lascia la lista d'attesa",
"workspace.black.waitlist.leaving": "Uscita in corso...",
"workspace.black.waitlist.left": "Uscito dalla lista d'attesa",
"workspace.black.waitlist.leaving": "In partenza...",
"workspace.black.waitlist.left": "Sinistra",
"workspace.black.waitlist.enroll": "Iscriversi",
"workspace.black.waitlist.enrolling": "Iscrizione...",
"workspace.black.waitlist.enrolled": "Iscritto",

View File

@@ -293,9 +293,9 @@ export const dict = {
"changelog.hero.subtitle": "OpenCode \u7684\u65b0\u66f4\u65b0\u4e0e\u6539\u8fdb",
"changelog.empty": "\u672a\u627e\u5230\u66f4\u65b0\u65e5\u5fd7\u6761\u76ee\u3002",
"changelog.viewJson": "\u67e5\u770b JSON",
"workspace.nav.zen": "Zen",
"workspace.nav.zen": "",
"workspace.nav.apiKeys": "API 键",
"workspace.nav.members": "员",
"workspace.nav.members": "员",
"workspace.nav.billing": "计费",
"workspace.nav.settings": "设置",
"workspace.home.banner.beforeLink": "编码代理的可靠优化模型。",
@@ -310,26 +310,26 @@ export const dict = {
"workspace.newUser.feature.lockin.body":
"将 Zen 与任何编码代理结合使用,并在需要时继续将其他提供程序与 opencode 结合使用。",
"workspace.newUser.copyApiKey": "复制 API 密钥",
"workspace.newUser.copyKey": "复制钥",
"workspace.newUser.copied": "复制!",
"workspace.newUser.copyKey": "复制钥",
"workspace.newUser.copied": "复制",
"workspace.newUser.step.enableBilling": "启用计费",
"workspace.newUser.step.login.before": "运行",
"workspace.newUser.step.login.before": "跑步",
"workspace.newUser.step.login.after": "并选择 opencode",
"workspace.newUser.step.pasteKey": "粘贴您的 API 密钥",
"workspace.newUser.step.models.before": "启动 opencode 并运行",
"workspace.newUser.step.models.after": "选择型",
"workspace.models.title": "型",
"workspace.newUser.step.models.after": "选择型",
"workspace.models.title": "型",
"workspace.models.subtitle.beforeLink": "管理工作区成员可以访问哪些模型。",
"workspace.models.table.model": "模型",
"workspace.models.table.enabled": "启用",
"workspace.providers.title": "自带密钥",
"workspace.providers.title": "带上你自己的钥匙",
"workspace.providers.subtitle": "从 AI 提供商处配置您自己的 API 密钥。",
"workspace.providers.placeholder": "输入 {{provider}} API 密钥({{prefix}}...",
"workspace.providers.configure": "配置",
"workspace.providers.edit": "编辑",
"workspace.providers.delete": "删除",
"workspace.providers.saving": "保存...",
"workspace.providers.save": "保存",
"workspace.providers.save": "节省",
"workspace.providers.table.provider": "提供者",
"workspace.providers.table.apiKey": "API 密钥",
"workspace.usage.title": "使用历史",
@@ -348,25 +348,25 @@ export const dict = {
"workspace.usage.subscription": "订阅 (${{amount}})",
"workspace.cost.title": "成本",
"workspace.cost.subtitle": "按型号细分的使用成本。",
"workspace.cost.allModels": "所有型",
"workspace.cost.allKeys": "所有密钥",
"workspace.cost.allModels": "所有型",
"workspace.cost.allKeys": "所有按键",
"workspace.cost.deletedSuffix": "(已删除)",
"workspace.cost.empty": "所选期间没有可用的使用数据。",
"workspace.cost.subscriptionShort": "",
"workspace.cost.subscriptionShort": "",
"workspace.keys.title": "API 键",
"workspace.keys.subtitle": "管理您的 API 密钥以访问 opencode 服务。",
"workspace.keys.create": "创建 API 密钥",
"workspace.keys.placeholder": "输入密钥名称",
"workspace.keys.placeholder": "输入按键名称",
"workspace.keys.empty": "创建 opencode 网关 API 密钥",
"workspace.keys.table.name": "名",
"workspace.keys.table.key": "钥",
"workspace.keys.table.name": "名",
"workspace.keys.table.key": "钥",
"workspace.keys.table.createdBy": "创建者",
"workspace.keys.table.lastUsed": "最后使用",
"workspace.keys.copyApiKey": "复制 API 密钥",
"workspace.keys.delete": "删除",
"workspace.members.title": "员",
"workspace.members.title": "员",
"workspace.members.subtitle": "管理工作区成员及其权限。",
"workspace.members.invite": "邀请员",
"workspace.members.invite": "邀请员",
"workspace.members.inviting": "邀请...",
"workspace.members.beta.beforeLink": "测试期间,工作空间对团队免费。",
"workspace.members.form.invitee": "受邀者",
@@ -379,11 +379,11 @@ export const dict = {
"workspace.members.edit": "编辑",
"workspace.members.delete": "删除",
"workspace.members.saving": "保存...",
"workspace.members.save": "保存",
"workspace.members.save": "节省",
"workspace.members.table.email": "电子邮件",
"workspace.members.table.role": "角色",
"workspace.members.table.monthLimit": "月限额",
"workspace.members.role.admin": "管理员",
"workspace.members.table.monthLimit": "月份限制",
"workspace.members.role.admin": "行政",
"workspace.members.role.adminDescription": "可以管理模型、成员和计费",
"workspace.members.role.member": "成员",
"workspace.members.role.memberDescription": "只能为自己生成 API 密钥",
@@ -392,7 +392,7 @@ export const dict = {
"workspace.settings.workspaceName": "工作区名称",
"workspace.settings.defaultName": "默认",
"workspace.settings.updating": "更新中...",
"workspace.settings.save": "保存",
"workspace.settings.save": "节省",
"workspace.settings.edit": "编辑",
"workspace.billing.title": "计费",
"workspace.billing.subtitle.beforeLink": "管理付款方式。",
@@ -404,35 +404,35 @@ export const dict = {
"workspace.billing.loading": "加载中...",
"workspace.billing.addAction": "添加",
"workspace.billing.addBalance": "添加余额",
"workspace.billing.linkedToStripe": "已绑定 Stripe",
"workspace.billing.linkedToStripe": "链接到条纹",
"workspace.billing.manage": "管理",
"workspace.billing.enable": "启用计费",
"workspace.monthlyLimit.title": "每月限额",
"workspace.monthlyLimit.subtitle": "为您的帐户设置每月使用限额。",
"workspace.monthlyLimit.placeholder": "50",
"workspace.monthlyLimit.setting": "设置中...",
"workspace.monthlyLimit.set": "设置",
"workspace.monthlyLimit.setting": "环境...",
"workspace.monthlyLimit.set": "",
"workspace.monthlyLimit.edit": "编辑限制",
"workspace.monthlyLimit.noLimit": "没有设置使用限制。",
"workspace.monthlyLimit.currentUsage.beforeMonth": "当前",
"workspace.monthlyLimit.currentUsage.beforeAmount": "的使用量为 $",
"workspace.reload.title": "自动充值",
"workspace.reload.disabled.before": "自动充值已",
"workspace.reload.disabled.state": "停用",
"workspace.reload.disabled.after": "启用后将在余额较低时自动充值。",
"workspace.reload.enabled.before": "自动充值已",
"workspace.monthlyLimit.currentUsage.beforeMonth": "当前使用情况为",
"workspace.monthlyLimit.currentUsage.beforeAmount": " $",
"workspace.reload.title": "自动重新加载",
"workspace.reload.disabled.before": "自动重新加载是",
"workspace.reload.disabled.state": "残疾人",
"workspace.reload.disabled.after": "启用余额不足时自动充值。",
"workspace.reload.enabled.before": "自动重新加载是",
"workspace.reload.enabled.state": "已启用",
"workspace.reload.enabled.middle": "我们将自动充值",
"workspace.reload.processingFee": "手续费",
"workspace.reload.enabled.middle": "我们将重新加载",
"workspace.reload.processingFee": "加工费",
"workspace.reload.enabled.after": "当余额达到",
"workspace.reload.edit": "编辑",
"workspace.reload.enable": "启用",
"workspace.reload.enableAutoReload": "启用自动充值",
"workspace.reload.reloadAmount": "充值 $",
"workspace.reload.enable": "使能够",
"workspace.reload.enableAutoReload": "启用自动重新加载",
"workspace.reload.reloadAmount": "重新加载 $",
"workspace.reload.whenBalanceReaches": "当余额达到 $",
"workspace.reload.saving": "保存...",
"workspace.reload.save": "保存",
"workspace.reload.failedAt": "充值失败于",
"workspace.reload.save": "节省",
"workspace.reload.failedAt": "重新加载失败于",
"workspace.reload.reason": "原因:",
"workspace.reload.updatePaymentMethod": "请更新您的付款方式并重试。",
"workspace.reload.retrying": "正在重试...",
@@ -441,11 +441,11 @@ export const dict = {
"workspace.payments.subtitle": "最近的付款交易。",
"workspace.payments.table.date": "日期",
"workspace.payments.table.paymentId": "付款ID",
"workspace.payments.table.amount": "金额",
"workspace.payments.table.amount": "数量",
"workspace.payments.table.receipt": "收据",
"workspace.payments.type.credit": "信用",
"workspace.payments.type.subscription": "订阅",
"workspace.payments.view": "看",
"workspace.payments.view": "看",
"workspace.black.loading": "加载中...",
"workspace.black.time.day": "天",
"workspace.black.time.days": "天",
@@ -455,20 +455,20 @@ export const dict = {
"workspace.black.time.minutes": "分钟",
"workspace.black.time.fewSeconds": "几秒钟",
"workspace.black.subscription.title": "订阅",
"workspace.black.subscription.message": "您已订阅 OpenCode Black费用为每月 ${{plan}}。",
"workspace.black.subscription.message": "您已订阅 OpenCode Black每月费用为 {{plan}} 美元。",
"workspace.black.subscription.manage": "管理订阅",
"workspace.black.subscription.rollingUsage": "5小时使用",
"workspace.black.subscription.weeklyUsage": "每周使用量",
"workspace.black.subscription.resetsIn": "重置于",
"workspace.black.subscription.useBalance": "达到使用限额后使用您的可用余额",
"workspace.black.waitlist.title": "候补名单",
"workspace.black.waitlist.joined": "您已加入每月 ${{plan}} OpenCode Black 方案候补名单。",
"workspace.black.waitlist.ready": "我们已准备好您加入每月 ${{plan}} 的 OpenCode Black 方案。",
"workspace.black.waitlist.joined": "您正在等待每月 ${{plan}} OpenCode 黑色计划。",
"workspace.black.waitlist.ready": "我们已准备好您加入每月 {{plan}} 美元的 OpenCode 黑色计划。",
"workspace.black.waitlist.leave": "离开候补名单",
"workspace.black.waitlist.leaving": "离开...",
"workspace.black.waitlist.left": "已退出",
"workspace.black.waitlist.enroll": "加入",
"workspace.black.waitlist.enrolling": "加入中...",
"workspace.black.waitlist.enrolled": "已加入",
"workspace.black.waitlist.left": "左边",
"workspace.black.waitlist.enroll": "注册",
"workspace.black.waitlist.enrolling": "正在报名...",
"workspace.black.waitlist.enrolled": "已注册",
"workspace.black.waitlist.enrollNote": "单击“注册”后,您的订阅将立即开始,并且将从您的卡中扣费。",
} satisfies Dict

View File

@@ -293,9 +293,9 @@ export const dict = {
"changelog.hero.subtitle": "OpenCode \u7684\u65b0\u66f4\u65b0\u8207\u6539\u5584",
"changelog.empty": "\u627e\u4e0d\u5230\u66f4\u65b0\u65e5\u8a8c\u9805\u76ee\u3002",
"changelog.viewJson": "\u6aa2\u8996 JSON",
"workspace.nav.zen": "Zen",
"workspace.nav.zen": "",
"workspace.nav.apiKeys": "API 鍵",
"workspace.nav.members": "員",
"workspace.nav.members": "員",
"workspace.nav.billing": "計費",
"workspace.nav.settings": "設定",
"workspace.home.banner.beforeLink": "編碼代理的可靠優化模型。",
@@ -310,26 +310,26 @@ export const dict = {
"workspace.newUser.feature.lockin.body":
"將 Zen 與任何編碼代理結合使用,並在需要時繼續將其他提供程序與 opencode 結合使用。",
"workspace.newUser.copyApiKey": "複製 API 密鑰",
"workspace.newUser.copyKey": "複製鑰",
"workspace.newUser.copied": "複製!",
"workspace.newUser.copyKey": "複製鑰",
"workspace.newUser.copied": "複製",
"workspace.newUser.step.enableBilling": "啟用計費",
"workspace.newUser.step.login.before": "執行",
"workspace.newUser.step.login.before": "跑步",
"workspace.newUser.step.login.after": "並選擇 opencode",
"workspace.newUser.step.pasteKey": "粘貼您的 API 密鑰",
"workspace.newUser.step.models.before": "啟動 opencode 並運行",
"workspace.newUser.step.models.after": "選擇型",
"workspace.models.title": "型",
"workspace.newUser.step.models.after": "選擇型",
"workspace.models.title": "型",
"workspace.models.subtitle.beforeLink": "管理工作區成員可以訪問哪些模型。",
"workspace.models.table.model": "模型",
"workspace.models.table.enabled": "啟用",
"workspace.providers.title": "自帶密鑰",
"workspace.providers.title": "帶上你自己的鑰匙",
"workspace.providers.subtitle": "從 AI 提供商處配置您自己的 API 密鑰。",
"workspace.providers.placeholder": "輸入 {{provider}} API 密鑰({{prefix}}...",
"workspace.providers.configure": "配置",
"workspace.providers.edit": "編輯",
"workspace.providers.delete": "刪除",
"workspace.providers.saving": "保存...",
"workspace.providers.save": "儲存",
"workspace.providers.save": "節省",
"workspace.providers.table.provider": "提供者",
"workspace.providers.table.apiKey": "API 密鑰",
"workspace.usage.title": "使用歷史",
@@ -348,25 +348,25 @@ export const dict = {
"workspace.usage.subscription": "訂閱 (${{amount}})",
"workspace.cost.title": "成本",
"workspace.cost.subtitle": "按型號細分的使用成本。",
"workspace.cost.allModels": "所有型",
"workspace.cost.allKeys": "所有密鑰",
"workspace.cost.allModels": "所有型",
"workspace.cost.allKeys": "所有按鍵",
"workspace.cost.deletedSuffix": "(已刪除)",
"workspace.cost.empty": "所選期間沒有可用的使用數據。",
"workspace.cost.subscriptionShort": "",
"workspace.cost.subscriptionShort": "",
"workspace.keys.title": "API 鍵",
"workspace.keys.subtitle": "管理您的 API 密鑰以訪問 opencode 服務。",
"workspace.keys.create": "創建 API 密鑰",
"workspace.keys.placeholder": "輸入密鑰名稱",
"workspace.keys.placeholder": "輸入按鍵名稱",
"workspace.keys.empty": "創建 opencode 網關 API 密鑰",
"workspace.keys.table.name": "名",
"workspace.keys.table.key": "鑰",
"workspace.keys.table.name": "名",
"workspace.keys.table.key": "鑰",
"workspace.keys.table.createdBy": "創建者",
"workspace.keys.table.lastUsed": "最後使用",
"workspace.keys.copyApiKey": "複製 API 密鑰",
"workspace.keys.delete": "刪除",
"workspace.members.title": "員",
"workspace.members.title": "員",
"workspace.members.subtitle": "管理工作區成員及其權限。",
"workspace.members.invite": "邀請員",
"workspace.members.invite": "邀請員",
"workspace.members.inviting": "邀請...",
"workspace.members.beta.beforeLink": "測試期間,工作空間對團隊免費。",
"workspace.members.form.invitee": "受邀者",
@@ -379,11 +379,11 @@ export const dict = {
"workspace.members.edit": "編輯",
"workspace.members.delete": "刪除",
"workspace.members.saving": "保存...",
"workspace.members.save": "儲存",
"workspace.members.save": "節省",
"workspace.members.table.email": "電子郵件",
"workspace.members.table.role": "角色",
"workspace.members.table.monthLimit": "月限額",
"workspace.members.role.admin": "管理員",
"workspace.members.table.monthLimit": "月份限制",
"workspace.members.role.admin": "行政",
"workspace.members.role.adminDescription": "可以管理模型、成員和計費",
"workspace.members.role.member": "成員",
"workspace.members.role.memberDescription": "只能為自己生成 API 密鑰",
@@ -392,7 +392,7 @@ export const dict = {
"workspace.settings.workspaceName": "工作區名稱",
"workspace.settings.defaultName": "預設",
"workspace.settings.updating": "更新中...",
"workspace.settings.save": "儲存",
"workspace.settings.save": "節省",
"workspace.settings.edit": "編輯",
"workspace.billing.title": "計費",
"workspace.billing.subtitle.beforeLink": "管理付款方式。",
@@ -404,35 +404,35 @@ export const dict = {
"workspace.billing.loading": "載入中...",
"workspace.billing.addAction": "添加",
"workspace.billing.addBalance": "添加餘額",
"workspace.billing.linkedToStripe": "已連結 Stripe",
"workspace.billing.linkedToStripe": "鏈接到條紋",
"workspace.billing.manage": "管理",
"workspace.billing.enable": "啟用計費",
"workspace.monthlyLimit.title": "每月限額",
"workspace.monthlyLimit.subtitle": "為您的帳戶設置每月使用限額。",
"workspace.monthlyLimit.placeholder": "50",
"workspace.monthlyLimit.setting": "設定中...",
"workspace.monthlyLimit.set": "設定",
"workspace.monthlyLimit.setting": "環境...",
"workspace.monthlyLimit.set": "",
"workspace.monthlyLimit.edit": "編輯限制",
"workspace.monthlyLimit.noLimit": "沒有設置使用限制。",
"workspace.monthlyLimit.currentUsage.beforeMonth": "當前",
"workspace.monthlyLimit.currentUsage.beforeAmount": "的使用量為 $",
"workspace.reload.title": "自動儲值",
"workspace.reload.disabled.before": "自動儲值已",
"workspace.reload.disabled.state": "停用",
"workspace.reload.disabled.after": "啟用後會在餘額偏低時自動值。",
"workspace.reload.enabled.before": "自動儲值已",
"workspace.monthlyLimit.currentUsage.beforeMonth": "當前使用情況為",
"workspace.monthlyLimit.currentUsage.beforeAmount": " $",
"workspace.reload.title": "自動重新加載",
"workspace.reload.disabled.before": "自動重新加載是",
"workspace.reload.disabled.state": "殘疾人",
"workspace.reload.disabled.after": "啟用餘額不足時自動值。",
"workspace.reload.enabled.before": "自動重新加載是",
"workspace.reload.enabled.state": "已啟用",
"workspace.reload.enabled.middle": "我們將自動儲值",
"workspace.reload.processingFee": "手續費",
"workspace.reload.enabled.middle": "我們將重新加載",
"workspace.reload.processingFee": "加工費",
"workspace.reload.enabled.after": "當餘額達到",
"workspace.reload.edit": "編輯",
"workspace.reload.enable": "啟用",
"workspace.reload.enableAutoReload": "啟用自動儲值",
"workspace.reload.reloadAmount": "儲值 $",
"workspace.reload.enable": "使能夠",
"workspace.reload.enableAutoReload": "啟用自動重新加載",
"workspace.reload.reloadAmount": "重新加載 $",
"workspace.reload.whenBalanceReaches": "當餘額達到 $",
"workspace.reload.saving": "保存...",
"workspace.reload.save": "儲存",
"workspace.reload.failedAt": "儲值失敗於",
"workspace.reload.save": "節省",
"workspace.reload.failedAt": "重新加載失敗於",
"workspace.reload.reason": "原因:",
"workspace.reload.updatePaymentMethod": "請更新您的付款方式並重試。",
"workspace.reload.retrying": "正在重試...",
@@ -441,11 +441,11 @@ export const dict = {
"workspace.payments.subtitle": "最近的付款交易。",
"workspace.payments.table.date": "日期",
"workspace.payments.table.paymentId": "付款ID",
"workspace.payments.table.amount": "金額",
"workspace.payments.table.amount": "數量",
"workspace.payments.table.receipt": "收據",
"workspace.payments.type.credit": "信用",
"workspace.payments.type.subscription": "訂閱",
"workspace.payments.view": "看",
"workspace.payments.view": "看",
"workspace.black.loading": "載入中...",
"workspace.black.time.day": "天",
"workspace.black.time.days": "天",
@@ -455,20 +455,20 @@ export const dict = {
"workspace.black.time.minutes": "分鐘",
"workspace.black.time.fewSeconds": "幾秒鐘",
"workspace.black.subscription.title": "訂閱",
"workspace.black.subscription.message": "您已訂閱 OpenCode Black費用為每月 ${{plan}}。",
"workspace.black.subscription.message": "您已訂閱 OpenCode Black每月費用為 {{plan}} 美元。",
"workspace.black.subscription.manage": "管理訂閱",
"workspace.black.subscription.rollingUsage": "5小時使用",
"workspace.black.subscription.weeklyUsage": "每週使用量",
"workspace.black.subscription.resetsIn": "重置於",
"workspace.black.subscription.useBalance": "達到使用限額後使用您的可用餘額",
"workspace.black.waitlist.title": "候補名單",
"workspace.black.waitlist.joined": "您已加入每月 ${{plan}} OpenCode Black 方案候補名單。",
"workspace.black.waitlist.ready": "我們已準備好您加入每月 ${{plan}} 的 OpenCode Black 方案。",
"workspace.black.waitlist.joined": "您正在等待每月 ${{plan}} OpenCode 黑色計劃。",
"workspace.black.waitlist.ready": "我們已準備好您加入每月 {{plan}} 美元的 OpenCode 黑色計劃。",
"workspace.black.waitlist.leave": "離開候補名單",
"workspace.black.waitlist.leaving": "離開...",
"workspace.black.waitlist.left": "已退出",
"workspace.black.waitlist.enroll": "加入",
"workspace.black.waitlist.enrolling": "加入中...",
"workspace.black.waitlist.enrolled": "已加入",
"workspace.black.waitlist.left": "左邊",
"workspace.black.waitlist.enroll": "註冊",
"workspace.black.waitlist.enrolling": "正在報名...",
"workspace.black.waitlist.enrolled": "已註冊",
"workspace.black.waitlist.enrollNote": "單擊“註冊”後,您的訂閱將立即開始,並且將從您的卡中扣費。",
} satisfies Dict

View File

@@ -3,11 +3,13 @@ export class CreditsError extends Error {}
export class MonthlyLimitError extends Error {}
export class UserLimitError extends Error {}
export class ModelError extends Error {}
export class FreeUsageLimitError extends Error {}
export class SubscriptionUsageLimitError extends Error {
class LimitError extends Error {
retryAfter?: number
constructor(message: string, retryAfter?: number) {
super(message)
this.retryAfter = retryAfter
}
}
export class FreeUsageLimitError extends LimitError {}
export class SubscriptionUsageLimitError extends LimitError {}
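The new base class lets both limit errors carry an optional retry-after hint. A minimal sketch of how a caller might surface it; the 429 status and the quota check are illustrative assumptions, only the header handling mirrors the route change later in this diff:

```ts
// Sketch only: FreeUsageLimitError / SubscriptionUsageLimitError come from the file above.
function checkQuota(used: number, limit: number, secondsUntilReset: number) {
  if (used >= limit) {
    // Second argument is the new retryAfter field on LimitError.
    throw new FreeUsageLimitError("Rate limit exceeded. Please try again later.", secondsUntilReset)
  }
}

function toResponse(error: unknown): Response {
  if (error instanceof FreeUsageLimitError || error instanceof SubscriptionUsageLimitError) {
    const headers = new Headers()
    if (error.retryAfter) headers.set("retry-after", String(error.retryAfter))
    // 429 is an assumption for this sketch; the real handler builds its own Response.
    return new Response(error.message, { status: 429, headers })
  }
  throw error
}
```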

View File

@@ -134,20 +134,26 @@ export async function handler(
body: reqBody,
})
// Try another provider => stop retrying if using fallback provider
if (
res.status !== 200 &&
// ie. openai 404 error: Item with id 'msg_0ead8b004a3b165d0069436a6b6834819896da85b63b196a3f' not found.
res.status !== 404 &&
// ie. cannot change codex model providers mid-session
modelInfo.stickyProvider !== "strict" &&
modelInfo.fallbackProvider &&
providerInfo.id !== modelInfo.fallbackProvider
) {
return retriableRequest({
excludeProviders: [...retry.excludeProviders, providerInfo.id],
retryCount: retry.retryCount + 1,
if (res.status !== 200) {
logger.metric({
"llm.error.code": res.status,
"llm.error.message": res.statusText,
})
// Try another provider => stop retrying if using fallback provider
if (
// ie. openai 404 error: Item with id 'msg_0ead8b004a3b165d0069436a6b6834819896da85b63b196a3f' not found.
res.status !== 404 &&
// ie. cannot change codex model providers mid-session
modelInfo.stickyProvider !== "strict" &&
modelInfo.fallbackProvider &&
providerInfo.id !== modelInfo.fallbackProvider
) {
return retriableRequest({
excludeProviders: [...retry.excludeProviders, providerInfo.id],
retryCount: retry.retryCount + 1,
})
}
}
return { providerInfo, reqBody, res, startTimestamp }
@@ -307,7 +313,7 @@ export async function handler(
if (error instanceof FreeUsageLimitError || error instanceof SubscriptionUsageLimitError) {
const headers = new Headers()
if (error instanceof SubscriptionUsageLimitError && error.retryAfter) {
if (error.retryAfter) {
headers.set("retry-after", String(error.retryAfter))
}
return new Response(

View File

@@ -28,17 +28,46 @@ export function createRateLimiter(limit: ZenData.RateLimit | undefined, rawIp: s
check: async () => {
const rows = await Database.use((tx) =>
tx
.select({ count: IpRateLimitTable.count })
.select({ interval: IpRateLimitTable.interval, count: IpRateLimitTable.count })
.from(IpRateLimitTable)
.where(and(eq(IpRateLimitTable.ip, ip), inArray(IpRateLimitTable.interval, intervals))),
)
const total = rows.reduce((sum, r) => sum + r.count, 0)
logger.debug(`rate limit total: ${total}`)
if (total >= limitValue) throw new FreeUsageLimitError(`Rate limit exceeded. Please try again later.`)
if (total >= limitValue)
throw new FreeUsageLimitError(
`Rate limit exceeded. Please try again later.`,
limit.period === "day" ? getRetryAfterDay(now) : getRetryAfterHour(rows, intervals, limitValue, now),
)
},
}
}
export function getRetryAfterDay(now: number) {
return Math.ceil((86_400_000 - (now % 86_400_000)) / 1000)
}
export function getRetryAfterHour(
rows: { interval: string; count: number }[],
intervals: string[],
limit: number,
now: number,
) {
const counts = new Map(rows.map((r) => [r.interval, r.count]))
// intervals are ordered newest to oldest: [current, -1h, -2h]
// simulate dropping oldest intervals one at a time
let running = intervals.reduce((sum, i) => sum + (counts.get(i) ?? 0), 0)
for (let i = intervals.length - 1; i >= 0; i--) {
running -= counts.get(intervals[i]) ?? 0
if (running < limit) {
// interval at index i rolls out of the window (intervals.length - i) hours from the current hour start
const hours = intervals.length - i
return Math.ceil((hours * 3_600_000 - (now % 3_600_000)) / 1000)
}
}
return Math.ceil((3_600_000 - (now % 3_600_000)) / 1000)
}
function buildYYYYMMDD(timestamp: number) {
return new Date(timestamp)
.toISOString()

View File

@@ -0,0 +1,92 @@
import { describe, expect, test } from "bun:test"
import { getRetryAfterDay, getRetryAfterHour } from "../src/routes/zen/util/rateLimiter"
describe("getRetryAfterDay", () => {
test("returns full day at midnight UTC", () => {
const midnight = Date.UTC(2026, 0, 15, 0, 0, 0, 0)
expect(getRetryAfterDay(midnight)).toBe(86_400)
})
test("returns remaining seconds until next UTC day", () => {
const noon = Date.UTC(2026, 0, 15, 12, 0, 0, 0)
expect(getRetryAfterDay(noon)).toBe(43_200)
})
test("rounds up to nearest second", () => {
const almost = Date.UTC(2026, 0, 15, 23, 59, 59, 500)
expect(getRetryAfterDay(almost)).toBe(1)
})
})
describe("getRetryAfterHour", () => {
// 14:30:00 UTC — 30 minutes into the current hour
const now = Date.UTC(2026, 0, 15, 14, 30, 0, 0)
const intervals = ["2026011514", "2026011513", "2026011512"]
test("waits 3 hours when all usage is in current hour", () => {
const rows = [{ interval: "2026011514", count: 10 }]
// only current hour has usage — it won't leave the window for 3 hours from hour start
// 3 * 3600 - 1800 = 9000s
expect(getRetryAfterHour(rows, intervals, 10, now)).toBe(9000)
})
test("waits 1 hour when dropping oldest interval is sufficient", () => {
const rows = [
{ interval: "2026011514", count: 2 },
{ interval: "2026011512", count: 10 },
]
// total=12, drop oldest (-2h, count=10) -> 2 < 10
// hours = 3 - 2 = 1 -> 1 * 3600 - 1800 = 1800s
expect(getRetryAfterHour(rows, intervals, 10, now)).toBe(1800)
})
test("waits 2 hours when usage spans oldest two intervals", () => {
const rows = [
{ interval: "2026011513", count: 8 },
{ interval: "2026011512", count: 5 },
]
// total=13, drop -2h (5) -> 8, 8 >= 8, drop -1h (8) -> 0 < 8
// hours = 3 - 1 = 2 -> 2 * 3600 - 1800 = 5400s
expect(getRetryAfterHour(rows, intervals, 8, now)).toBe(5400)
})
test("waits 1 hour when oldest interval alone pushes over limit", () => {
const rows = [
{ interval: "2026011514", count: 1 },
{ interval: "2026011513", count: 1 },
{ interval: "2026011512", count: 10 },
]
// total=12, drop -2h (10) -> 2 < 10
// hours = 3 - 2 = 1 -> 1800s
expect(getRetryAfterHour(rows, intervals, 10, now)).toBe(1800)
})
test("waits 2 hours when middle interval keeps total over limit", () => {
const rows = [
{ interval: "2026011514", count: 4 },
{ interval: "2026011513", count: 4 },
{ interval: "2026011512", count: 4 },
]
// total=12, drop -2h (4) -> 8, 8 >= 5, drop -1h (4) -> 4 < 5
// hours = 3 - 1 = 2 -> 5400s
expect(getRetryAfterHour(rows, intervals, 5, now)).toBe(5400)
})
test("rounds up to nearest second", () => {
const offset = Date.UTC(2026, 0, 15, 14, 30, 0, 500)
const rows = [
{ interval: "2026011514", count: 2 },
{ interval: "2026011512", count: 10 },
]
// hours=1 -> 3_600_000 - 1_800_500 = 1_799_500ms -> ceil(1799.5) = 1800
expect(getRetryAfterHour(rows, intervals, 10, offset)).toBe(1800)
})
test("fallback returns time until next hour when rows are empty", () => {
// edge case: rows empty but function called (shouldn't happen in practice)
// loop drops all zeros, running stays 0 which is < any positive limit on first iteration
const rows: { interval: string; count: number }[] = []
// drop -2h (0) -> 0 < 1 -> hours = 3 - 2 = 1 -> 1800s
expect(getRetryAfterHour(rows, intervals, 1, now)).toBe(1800)
})
})

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/console-core",
"version": "1.1.55",
"version": "1.1.56",
"private": true,
"type": "module",
"license": "MIT",
@@ -12,13 +12,14 @@
"@opencode-ai/console-resource": "workspace:*",
"@planetscale/database": "1.19.0",
"aws4fetch": "1.0.20",
"drizzle-orm": "0.41.0",
"drizzle-orm": "catalog:",
"postgres": "3.4.7",
"stripe": "18.0.0",
"ulid": "catalog:",
"zod": "catalog:"
},
"exports": {
"./*.js": "./src/*.ts",
"./*": "./src/*"
},
"scripts": {
@@ -43,7 +44,7 @@
"@tsconfig/node22": "22.0.2",
"@types/bun": "1.3.0",
"@types/node": "catalog:",
"drizzle-kit": "0.30.5",
"drizzle-kit": "catalog:",
"mysql2": "3.14.4",
"typescript": "catalog:",
"@typescript/native-preview": "catalog:"

View File

@@ -4,7 +4,6 @@ export * from "drizzle-orm"
import { Client } from "@planetscale/database"
import { MySqlTransaction, type MySqlTransactionConfig } from "drizzle-orm/mysql-core"
import type { ExtractTablesWithRelations } from "drizzle-orm"
import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from "drizzle-orm/planetscale-serverless"
import { Context } from "../context"
import { memo } from "../util/memo"
@@ -14,7 +13,7 @@ export namespace Database {
PlanetscaleQueryResultHKT,
PlanetScalePreparedQueryHKT,
Record<string, never>,
ExtractTablesWithRelations<Record<string, never>>
any
>
const client = memo(() => {
@@ -23,7 +22,7 @@ export namespace Database {
username: Resource.Database.username,
password: Resource.Database.password,
})
const db = drizzle(result, {})
const db = drizzle({ client: result })
return db
})

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-function",
"version": "1.1.55",
"version": "1.1.56",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View File

@@ -17,8 +17,7 @@ export default {
)
return
let metrics = {
event_type: "completions",
let data = {
"cf.continent": event.event.request.cf?.continent,
"cf.country": event.event.request.cf?.country,
"cf.city": event.event.request.cf?.city,
@@ -31,22 +30,28 @@ export default {
status: event.event.response?.status ?? 0,
ip: event.event.request.headers["x-real-ip"],
}
const time = new Date(event.eventTimestamp ?? Date.now()).toISOString()
const events = []
for (const log of event.logs) {
for (const message of log.message) {
if (!message.startsWith("_metric:")) continue
metrics = { ...metrics, ...JSON.parse(message.slice(8)) }
const json = JSON.parse(message.slice(8))
data = { ...data, ...json }
if ("llm.error.code" in json) {
events.push({ time, data: { ...data, event_type: "llm.error" } })
}
}
}
console.log(JSON.stringify(metrics, null, 2))
events.push({ time, data: { ...data, event_type: "completions" } })
console.log(JSON.stringify(data, null, 2))
const ret = await fetch("https://api.honeycomb.io/1/events/zen", {
const ret = await fetch("https://api.honeycomb.io/1/batch/zen", {
method: "POST",
headers: {
"Content-Type": "application/json",
"X-Honeycomb-Event-Time": (event.eventTimestamp ?? Date.now()).toString(),
"X-Honeycomb-Team": Resource.HONEYCOMB_API_KEY.value,
},
body: JSON.stringify(metrics),
body: JSON.stringify(events),
})
console.log(ret.status)
console.log(await ret.text())

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-mail",
"version": "1.1.55",
"version": "1.1.56",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",

View File

@@ -1,7 +1,7 @@
{
"name": "@opencode-ai/desktop",
"private": true,
"version": "1.1.55",
"version": "1.1.56",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -20,9 +20,9 @@ use std::{
env,
net::TcpListener,
path::PathBuf,
process::Command,
sync::{Arc, Mutex},
time::Duration,
process::Command,
};
use tauri::{AppHandle, Manager, RunEvent, State, ipc::Channel};
#[cfg(any(target_os = "linux", all(debug_assertions, windows)))]
@@ -152,12 +152,12 @@ fn check_app_exists(app_name: &str) -> bool {
{
check_windows_app(app_name)
}
#[cfg(target_os = "macos")]
{
check_macos_app(app_name)
}
#[cfg(target_os = "linux")]
{
check_linux_app(app_name)
@@ -165,11 +165,165 @@ fn check_app_exists(app_name: &str) -> bool {
}
#[cfg(target_os = "windows")]
fn check_windows_app(app_name: &str) -> bool {
fn check_windows_app(_app_name: &str) -> bool {
// Check if command exists in PATH, including .exe
return true;
}
#[cfg(target_os = "windows")]
fn resolve_windows_app_path(app_name: &str) -> Option<String> {
use std::path::{Path, PathBuf};
// Try to find the command using 'where'
let output = Command::new("where").arg(app_name).output().ok()?;
if !output.status.success() {
return None;
}
let paths = String::from_utf8_lossy(&output.stdout)
.lines()
.map(str::trim)
.filter(|line| !line.is_empty())
.map(PathBuf::from)
.collect::<Vec<_>>();
let has_ext = |path: &Path, ext: &str| {
path.extension()
.and_then(|v| v.to_str())
.map(|v| v.eq_ignore_ascii_case(ext))
.unwrap_or(false)
};
if let Some(path) = paths.iter().find(|path| has_ext(path, "exe")) {
return Some(path.to_string_lossy().to_string());
}
let resolve_cmd = |path: &Path| -> Option<String> {
let content = std::fs::read_to_string(path).ok()?;
for token in content.split('"') {
let lower = token.to_ascii_lowercase();
if !lower.contains(".exe") {
continue;
}
if let Some(index) = lower.find("%~dp0") {
let base = path.parent()?;
let suffix = &token[index + 5..];
let mut resolved = PathBuf::from(base);
for part in suffix.replace('/', "\\").split('\\') {
if part.is_empty() || part == "." {
continue;
}
if part == ".." {
let _ = resolved.pop();
continue;
}
resolved.push(part);
}
if resolved.exists() {
return Some(resolved.to_string_lossy().to_string());
}
}
let resolved = PathBuf::from(token);
if resolved.exists() {
return Some(resolved.to_string_lossy().to_string());
}
}
None
};
for path in &paths {
if has_ext(path, "cmd") || has_ext(path, "bat") {
if let Some(resolved) = resolve_cmd(path) {
return Some(resolved);
}
}
if path.extension().is_none() {
let cmd = path.with_extension("cmd");
if cmd.exists() {
if let Some(resolved) = resolve_cmd(&cmd) {
return Some(resolved);
}
}
let bat = path.with_extension("bat");
if bat.exists() {
if let Some(resolved) = resolve_cmd(&bat) {
return Some(resolved);
}
}
}
}
let key = app_name
.chars()
.filter(|v| v.is_ascii_alphanumeric())
.flat_map(|v| v.to_lowercase())
.collect::<String>();
if !key.is_empty() {
for path in &paths {
let dirs = [
path.parent(),
path.parent().and_then(|dir| dir.parent()),
path.parent()
.and_then(|dir| dir.parent())
.and_then(|dir| dir.parent()),
];
for dir in dirs.into_iter().flatten() {
if let Ok(entries) = std::fs::read_dir(dir) {
for entry in entries.flatten() {
let candidate = entry.path();
if !has_ext(&candidate, "exe") {
continue;
}
let Some(stem) = candidate.file_stem().and_then(|v| v.to_str()) else {
continue;
};
let name = stem
.chars()
.filter(|v| v.is_ascii_alphanumeric())
.flat_map(|v| v.to_lowercase())
.collect::<String>();
if name.contains(&key) || key.contains(&name) {
return Some(candidate.to_string_lossy().to_string());
}
}
}
}
}
}
paths.first().map(|path| path.to_string_lossy().to_string())
}
#[tauri::command]
#[specta::specta]
fn resolve_app_path(app_name: &str) -> Option<String> {
#[cfg(target_os = "windows")]
{
resolve_windows_app_path(app_name)
}
#[cfg(not(target_os = "windows"))]
{
// On macOS/Linux, just return the app_name as-is since
// the opener plugin handles them correctly
Some(app_name.to_string())
}
}
#[cfg(target_os = "macos")]
fn check_macos_app(app_name: &str) -> bool {
// Check common installation locations
@@ -181,13 +335,13 @@ fn check_macos_app(app_name: &str) -> bool {
if let Ok(home) = std::env::var("HOME") {
app_locations.push(format!("{}/Applications/{}.app", home, app_name));
}
for location in app_locations {
if std::path::Path::new(&location).exists() {
return true;
}
}
// Also check if command exists in PATH
Command::new("which")
.arg(app_name)
@@ -251,7 +405,8 @@ pub fn run() {
get_display_backend,
set_display_backend,
markdown::parse_markdown_command,
check_app_exists
check_app_exists,
resolve_app_path
])
.events(tauri_specta::collect_events![LoadingWindowComplete])
.error_handling(tauri_specta::ErrorHandlingMode::Throw);

View File

@@ -14,6 +14,7 @@ export const commands = {
setDisplayBackend: (backend: LinuxDisplayBackend) => __TAURI_INVOKE<null>("set_display_backend", { backend }),
parseMarkdownCommand: (markdown: string) => __TAURI_INVOKE<string>("parse_markdown_command", { markdown }),
checkAppExists: (appName: string) => __TAURI_INVOKE<boolean>("check_app_exists", { appName }),
resolveAppPath: (appName: string) => __TAURI_INVOKE<string | null>("resolve_app_path", { appName }),
};
/** Events */

View File

@@ -98,7 +98,12 @@ const createPlatform = (password: Accessor<string | null>): Platform => ({
void shellOpen(url).catch(() => undefined)
},
openPath(path: string, app?: string) {
async openPath(path: string, app?: string) {
const os = ostype()
if (os === "windows" && app) {
const resolvedApp = await commands.resolveAppPath(app)
return openerOpenPath(path, resolvedApp || app)
}
return openerOpenPath(path, app)
},

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/enterprise",
"version": "1.1.55",
"version": "1.1.56",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -1,7 +1,7 @@
id = "opencode"
name = "OpenCode"
description = "The open source coding agent."
version = "1.1.55"
version = "1.1.56"
schema_version = 1
authors = ["Anomaly"]
repository = "https://github.com/anomalyco/opencode"
@@ -11,26 +11,26 @@ name = "OpenCode"
icon = "./icons/opencode.svg"
[agent_servers.opencode.targets.darwin-aarch64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.55/opencode-darwin-arm64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.56/opencode-darwin-arm64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.darwin-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.55/opencode-darwin-x64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.56/opencode-darwin-x64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-aarch64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.55/opencode-linux-arm64.tar.gz"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.56/opencode-linux-arm64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.55/opencode-linux-x64.tar.gz"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.56/opencode-linux-x64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.windows-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.55/opencode-windows-x64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.56/opencode-windows-x64.zip"
cmd = "./opencode.exe"
args = ["acp"]

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/function",
"version": "1.1.55",
"version": "1.1.56",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View File

@@ -1,27 +1,10 @@
# opencode agent guidelines
# opencode database guide
## Build/Test Commands
## Database
- **Install**: `bun install`
- **Run**: `bun run --conditions=browser ./src/index.ts`
- **Typecheck**: `bun run typecheck` (npm run typecheck)
- **Test**: `bun test` (runs all tests)
- **Single test**: `bun test test/tool/tool.test.ts` (specific test file)
## Code Style
- **Runtime**: Bun with TypeScript ESM modules
- **Imports**: Use relative imports for local modules, named imports preferred
- **Types**: Zod schemas for validation, TypeScript interfaces for structure
- **Naming**: camelCase for variables/functions, PascalCase for classes/namespaces
- **Error handling**: Use Result patterns, avoid throwing exceptions in tools
- **File structure**: Namespace-based organization (e.g., `Tool.define()`, `Session.create()`)
## Architecture
- **Tools**: Implement `Tool.Info` interface with `execute()` method
- **Context**: Pass `sessionID` in tool context, use `App.provide()` for DI
- **Validation**: All inputs validated with Zod schemas
- **Logging**: Use `Log.create({ service: "name" })` pattern
- **Storage**: Use `Storage` namespace for persistence
- **API Client**: The TypeScript TUI (built with SolidJS + OpenTUI) communicates with the OpenCode server using `@opencode-ai/sdk`. When adding/modifying server endpoints in `packages/opencode/src/server/server.ts`, run `./script/generate.ts` to regenerate the SDK and related files.
- **Schema**: Drizzle schema lives in `src/**/*.sql.ts`.
- **Naming**: tables and columns use snake_case; join columns are `<entity>_id`; indexes are `<table>_<column>_idx`.
- **Migrations**: generated by Drizzle Kit using `drizzle.config.ts` (schema: `./src/**/*.sql.ts`, output: `./migration`).
- **Command**: `bun run db generate --name <slug>`.
- **Output**: creates `migration/<timestamp>_<slug>/migration.sql` and `snapshot.json`.
- **Tests**: migration tests should read the per-folder layout (no `_journal.json`).
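As an illustration of the conventions above, a minimal `*.sql.ts` schema sketch; the `note` table and its columns are hypothetical and not part of this change:

```ts
import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core"

// Hypothetical table following the guide: snake_case columns,
// join column named <entity>_id, index named <table>_<column>_idx.
export const NoteTable = sqliteTable(
  "note",
  {
    id: text("id").primaryKey(),
    sessionId: text("session_id").notNull(),
    content: text("content").notNull(),
    timeCreated: integer("time_created").notNull(),
    timeUpdated: integer("time_updated").notNull(),
  },
  (table) => [index("note_session_idx").on(table.sessionId)],
)
```

Running `bun run db generate --name add-note` against a schema like this would then emit `migration/<timestamp>_add-note/migration.sql` and `snapshot.json`, matching the per-folder layout described above.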

View File

@@ -0,0 +1,10 @@
import { defineConfig } from "drizzle-kit"
export default defineConfig({
dialect: "sqlite",
schema: "./src/**/*.sql.ts",
out: "./migration",
dbCredentials: {
url: "/home/thdxr/.local/share/opencode/opencode.db",
},
})

View File

@@ -0,0 +1,90 @@
CREATE TABLE `project` (
`id` text PRIMARY KEY,
`worktree` text NOT NULL,
`vcs` text,
`name` text,
`icon_url` text,
`icon_color` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_initialized` integer,
`sandboxes` text NOT NULL
);
--> statement-breakpoint
CREATE TABLE `message` (
`id` text PRIMARY KEY,
`session_id` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_message_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `part` (
`id` text PRIMARY KEY,
`message_id` text NOT NULL,
`session_id` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_part_message_id_message_id_fk` FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `permission` (
`project_id` text PRIMARY KEY,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_permission_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session` (
`id` text PRIMARY KEY,
`project_id` text NOT NULL,
`parent_id` text,
`slug` text NOT NULL,
`directory` text NOT NULL,
`title` text NOT NULL,
`version` text NOT NULL,
`share_url` text,
`summary_additions` integer,
`summary_deletions` integer,
`summary_files` integer,
`summary_diffs` text,
`revert` text,
`permission` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_compacting` integer,
`time_archived` integer,
CONSTRAINT `fk_session_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `todo` (
`session_id` text NOT NULL,
`content` text NOT NULL,
`status` text NOT NULL,
`priority` text NOT NULL,
`position` integer NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
CONSTRAINT `todo_pk` PRIMARY KEY(`session_id`, `position`),
CONSTRAINT `fk_todo_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session_share` (
`session_id` text PRIMARY KEY,
`id` text NOT NULL,
`secret` text NOT NULL,
`url` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
CONSTRAINT `fk_session_share_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-breakpoint
CREATE INDEX `part_message_idx` ON `part` (`message_id`);--> statement-breakpoint
CREATE INDEX `part_session_idx` ON `part` (`session_id`);--> statement-breakpoint
CREATE INDEX `session_project_idx` ON `session` (`project_id`);--> statement-breakpoint
CREATE INDEX `session_parent_idx` ON `session` (`parent_id`);--> statement-breakpoint
CREATE INDEX `todo_session_idx` ON `todo` (`session_id`);

View File

@@ -0,0 +1,796 @@
{
"version": "7",
"dialect": "sqlite",
"id": "068758ed-a97a-46f6-8a59-6c639ae7c20c",
"prevIds": ["00000000-0000-0000-0000-000000000000"],
"ddl": [
{
"name": "project",
"entityType": "tables"
},
{
"name": "message",
"entityType": "tables"
},
{
"name": "part",
"entityType": "tables"
},
{
"name": "permission",
"entityType": "tables"
},
{
"name": "session",
"entityType": "tables"
},
{
"name": "todo",
"entityType": "tables"
},
{
"name": "session_share",
"entityType": "tables"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "worktree",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "vcs",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "name",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_url",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_color",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_initialized",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "sandboxes",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "message_id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "parent_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "slug",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "directory",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "title",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "version",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "share_url",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_additions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_deletions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_files",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_diffs",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "revert",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "permission",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_compacting",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_archived",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "content",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "status",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "priority",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "position",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "secret",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "url",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session_share"
},
{
"columns": ["session_id"],
"tableTo": "session",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_message_session_id_session_id_fk",
"entityType": "fks",
"table": "message"
},
{
"columns": ["message_id"],
"tableTo": "message",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_part_message_id_message_id_fk",
"entityType": "fks",
"table": "part"
},
{
"columns": ["project_id"],
"tableTo": "project",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_permission_project_id_project_id_fk",
"entityType": "fks",
"table": "permission"
},
{
"columns": ["project_id"],
"tableTo": "project",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_project_id_project_id_fk",
"entityType": "fks",
"table": "session"
},
{
"columns": ["session_id"],
"tableTo": "session",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_todo_session_id_session_id_fk",
"entityType": "fks",
"table": "todo"
},
{
"columns": ["session_id"],
"tableTo": "session",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_share_session_id_session_id_fk",
"entityType": "fks",
"table": "session_share"
},
{
"columns": ["session_id", "position"],
"nameExplicit": false,
"name": "todo_pk",
"entityType": "pks",
"table": "todo"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "project_pk",
"table": "project",
"entityType": "pks"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "message_pk",
"table": "message",
"entityType": "pks"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "part_pk",
"table": "part",
"entityType": "pks"
},
{
"columns": ["project_id"],
"nameExplicit": false,
"name": "permission_pk",
"table": "permission",
"entityType": "pks"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "session_pk",
"table": "session",
"entityType": "pks"
},
{
"columns": ["session_id"],
"nameExplicit": false,
"name": "session_share_pk",
"table": "session_share",
"entityType": "pks"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "message_session_idx",
"entityType": "indexes",
"table": "message"
},
{
"columns": [
{
"value": "message_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_message_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_session_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "project_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_project_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "parent_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_parent_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "todo_session_idx",
"entityType": "indexes",
"table": "todo"
}
],
"renames": []
}

View File

@@ -1,6 +1,6 @@
{
"$schema": "https://json.schemastore.org/package.json",
"version": "1.1.55",
"version": "1.1.56",
"name": "opencode",
"type": "module",
"license": "MIT",
@@ -15,7 +15,8 @@
"lint": "echo 'Running lint checks...' && bun test --coverage",
"format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts",
"docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;",
"deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'"
"deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'",
"db": "bun drizzle-kit"
},
"bin": {
"opencode": "./bin/opencode"
@@ -42,6 +43,8 @@
"@types/turndown": "5.0.5",
"@types/yargs": "17.0.33",
"@typescript/native-preview": "catalog:",
"drizzle-kit": "1.0.0-beta.12-a5629fb",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"typescript": "catalog:",
"vscode-languageserver-types": "3.17.5",
"why-is-node-running": "3.2.2",
@@ -84,8 +87,8 @@
"@opencode-ai/sdk": "workspace:*",
"@opencode-ai/util": "workspace:*",
"@openrouter/ai-sdk-provider": "1.5.4",
"@opentui/core": "0.1.77",
"@opentui/solid": "0.1.77",
"@opentui/core": "0.1.78",
"@opentui/solid": "0.1.78",
"@parcel/watcher": "2.5.1",
"@pierre/diffs": "catalog:",
"@solid-primitives/event-bus": "1.1.2",
@@ -100,6 +103,7 @@
"clipboardy": "4.0.0",
"decimal.js": "10.5.0",
"diff": "catalog:",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"fuzzysort": "3.1.0",
"gray-matter": "4.0.3",
"hono": "catalog:",
@@ -122,5 +126,8 @@
"yargs": "18.0.0",
"zod": "catalog:",
"zod-to-json-schema": "3.24.5"
},
"overrides": {
"drizzle-orm": "1.0.0-beta.12-a5629fb"
}
}

View File

@@ -25,6 +25,32 @@ await Bun.write(
)
console.log("Generated models-snapshot.ts")
// Load migrations from migration directories
const migrationDirs = (await fs.promises.readdir(path.join(dir, "migration"), { withFileTypes: true }))
.filter((entry) => entry.isDirectory() && /^\d{4}\d{2}\d{2}\d{2}\d{2}\d{2}/.test(entry.name))
.map((entry) => entry.name)
.sort()
const migrations = await Promise.all(
migrationDirs.map(async (name) => {
const file = path.join(dir, "migration", name, "migration.sql")
const sql = await Bun.file(file).text()
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(name)
const timestamp = match
? Date.UTC(
Number(match[1]),
Number(match[2]) - 1,
Number(match[3]),
Number(match[4]),
Number(match[5]),
Number(match[6]),
)
: 0
return { sql, timestamp }
}),
)
console.log(`Loaded ${migrations.length} migrations`)
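For illustration, a sketch of the timestamp conversion the loop above performs, using a hypothetical directory name (not taken from this changeset):
const name = "20260211031500" // hypothetical migration directory: 2026-02-11 03:15:00 UTC
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(name)
const timestamp = match
  ? Date.UTC(Number(match[1]), Number(match[2]) - 1, Number(match[3]), Number(match[4]), Number(match[5]), Number(match[6]))
  : 0
// timestamp === 1770779700000 (Date.UTC months are zero-based, hence the -1); this value orders migrations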
const singleFlag = process.argv.includes("--single")
const baselineFlag = process.argv.includes("--baseline")
const skipInstall = process.argv.includes("--skip-install")
@@ -156,6 +182,7 @@ for (const item of targets) {
entrypoints: ["./src/index.ts", parserWorker, workerPath],
define: {
OPENCODE_VERSION: `'${Script.version}'`,
OPENCODE_MIGRATIONS: JSON.stringify(migrations),
OTUI_TREE_SITTER_WORKER_PATH: bunfsRoot + workerRelativePath,
OPENCODE_WORKER_PATH: workerPath,
OPENCODE_CHANNEL: `'${Script.channel}'`,

View File

@@ -0,0 +1,16 @@
#!/usr/bin/env bun
import { $ } from "bun"
// drizzle-kit check compares schema to migrations, exits non-zero if drift
const result = await $`bun drizzle-kit check`.quiet().nothrow()
if (result.exitCode !== 0) {
console.error("Schema has changes not captured in migrations!")
console.error("Run: bun drizzle-kit generate")
console.error("")
console.error(result.stderr.toString())
process.exit(1)
}
console.log("Migrations are up to date")

View File

@@ -435,46 +435,68 @@ export namespace ACP {
return
}
}
return
}
if (part.type === "text") {
const delta = props.delta
if (delta && part.ignored !== true) {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_message_chunk",
content: {
type: "text",
text: delta,
},
case "message.part.delta": {
const props = event.properties
const session = this.sessionManager.tryGet(props.sessionID)
if (!session) return
const sessionId = session.id
const message = await this.sdk.session
.message(
{
sessionID: props.sessionID,
messageID: props.messageID,
directory: session.cwd,
},
{ throwOnError: true },
)
.then((x) => x.data)
.catch((error) => {
log.error("unexpected error when fetching message", { error })
return undefined
})
if (!message || message.info.role !== "assistant") return
const part = message.parts.find((p) => p.id === props.partID)
if (!part) return
if (part.type === "text" && props.field === "text" && part.ignored !== true) {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_message_chunk",
content: {
type: "text",
text: props.delta,
},
})
.catch((error) => {
log.error("failed to send text to ACP", { error })
})
}
},
})
.catch((error) => {
log.error("failed to send text delta to ACP", { error })
})
return
}
if (part.type === "reasoning") {
const delta = props.delta
if (delta) {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_thought_chunk",
content: {
type: "text",
text: delta,
},
if (part.type === "reasoning" && props.field === "text") {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_thought_chunk",
content: {
type: "text",
text: props.delta,
},
})
.catch((error) => {
log.error("failed to send reasoning to ACP", { error })
})
}
},
})
.catch((error) => {
log.error("failed to send reasoning delta to ACP", { error })
})
}
return
}

View File

@@ -184,6 +184,18 @@ export namespace Agent {
),
prompt: PROMPT_TITLE,
},
handoff: {
name: "handoff",
mode: "primary",
options: {},
native: true,
hidden: true,
temperature: 0.5,
permission: PermissionNext.fromConfig({
"*": "allow",
}),
prompt: "none",
},
summary: {
name: "summary",
mode: "primary",

View File

@@ -1,6 +1,6 @@
You are a helpful AI assistant tasked with summarizing conversations.
When asked to summarize, provide a detailed but concise summary of the conversation.
Focus on information that would be helpful for continuing the conversation, including:
- What was done
- What is currently being worked on
@@ -10,3 +10,5 @@ Focus on information that would be helpful for continuing the conversation, incl
- Important technical decisions and why they were made
Your summary should be comprehensive enough to provide context but concise enough to be quickly understood.
Do not respond to any questions in the conversation, only output the summary.

View File

@@ -3,7 +3,8 @@ import type { Session as SDKSession, Message, Part } from "@opencode-ai/sdk/v2"
import { Session } from "../../session"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage"
import { Database } from "../../storage/db"
import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
import { Instance } from "../../project/instance"
import { ShareNext } from "../../share/share-next"
import { EOL } from "os"
@@ -130,13 +131,35 @@ export const ImportCommand = cmd({
return
}
await Storage.write(["session", Instance.project.id, exportData.info.id], exportData.info)
Database.use((db) => db.insert(SessionTable).values(Session.toRow(exportData.info)).onConflictDoNothing().run())
for (const msg of exportData.messages) {
await Storage.write(["message", exportData.info.id, msg.info.id], msg.info)
Database.use((db) =>
db
.insert(MessageTable)
.values({
id: msg.info.id,
session_id: exportData.info.id,
time_created: msg.info.time?.created ?? Date.now(),
data: msg.info,
})
.onConflictDoNothing()
.run(),
)
for (const part of msg.parts) {
await Storage.write(["part", msg.info.id, part.id], part)
Database.use((db) =>
db
.insert(PartTable)
.values({
id: part.id,
message_id: msg.info.id,
session_id: exportData.info.id,
data: part,
})
.onConflictDoNothing()
.run(),
)
}
}

View File

@@ -2,7 +2,8 @@ import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { Session } from "../../session"
import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage"
import { Database } from "../../storage/db"
import { SessionTable } from "../../session/session.sql"
import { Project } from "../../project/project"
import { Instance } from "../../project/instance"
@@ -87,25 +88,8 @@ async function getCurrentProject(): Promise<Project.Info> {
}
async function getAllSessions(): Promise<Session.Info[]> {
const sessions: Session.Info[] = []
const projectKeys = await Storage.list(["project"])
const projects = await Promise.all(projectKeys.map((key) => Storage.read<Project.Info>(key)))
for (const project of projects) {
if (!project) continue
const sessionKeys = await Storage.list(["session", project.id])
const projectSessions = await Promise.all(sessionKeys.map((key) => Storage.read<Session.Info>(key)))
for (const session of projectSessions) {
if (session) {
sessions.push(session)
}
}
}
return sessions
const rows = Database.use((db) => db.select().from(SessionTable).all())
return rows.map((row) => Session.fromRow(row))
}
export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {

View File

@@ -3,6 +3,7 @@ import { Clipboard } from "@tui/util/clipboard"
import { TextAttributes } from "@opentui/core"
import { RouteProvider, useRoute } from "@tui/context/route"
import { Switch, Match, createEffect, untrack, ErrorBoundary, createSignal, onMount, batch, Show, on } from "solid-js"
import { win32DisableProcessedInput, win32IgnoreCtrlC, win32InstallCtrlCGuard } from "./win32"
import { Installation } from "@/installation"
import { Flag } from "@/flag/flag"
import { DialogProvider, useDialog } from "@tui/ui/dialog"
@@ -110,8 +111,18 @@ export function tui(input: {
}) {
// promise to prevent immediate exit
return new Promise<void>(async (resolve) => {
const unguard = win32InstallCtrlCGuard()
win32DisableProcessedInput()
win32IgnoreCtrlC()
const mode = await getTerminalBackgroundColor()
// Re-clear after getTerminalBackgroundColor() — setRawMode(false) restores
// the original console mode which re-enables ENABLE_PROCESSED_INPUT.
win32DisableProcessedInput()
const onExit = async () => {
unguard?.()
await input.onExit?.()
resolve()
}

View File

@@ -83,6 +83,7 @@ function init() {
},
slashes() {
return visibleOptions().flatMap((option) => {
if (option.disabled) return []
const slash = option.slash
if (!slash) return []
return {

View File

@@ -7,6 +7,27 @@ import { useDialog } from "@tui/ui/dialog"
import { createDialogProviderOptions, DialogProvider } from "./dialog-provider"
import { useKeybind } from "../context/keybind"
import * as fuzzysort from "fuzzysort"
import type { Provider } from "@opencode-ai/sdk/v2"
function pickLatest(models: [string, Provider["models"][string]][]) {
const picks: Record<string, [string, Provider["models"][string]]> = {}
for (const item of models) {
const model = item[0]
const info = item[1]
const key = info.family ?? model
const prev = picks[key]
if (!prev) {
picks[key] = item
continue
}
if (info.release_date !== prev[1].release_date) {
if (info.release_date > prev[1].release_date) picks[key] = item
continue
}
if (model > prev[0]) picks[key] = item
}
return Object.values(picks)
}
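Simplified sketch of the selection rule above, using hypothetical entries with only the fields the rule reads:
const sample: [string, { family?: string; release_date: string }][] = [
  ["gpt-5", { family: "gpt", release_date: "2026-01-01" }],
  ["gpt-4.1", { family: "gpt", release_date: "2025-04-01" }],
  ["solo-model", { release_date: "2024-06-01" }], // no family, so it is keyed by its own id
]
// Grouped by family (or id when family is missing), keeping the newest release_date and breaking
// ties with the lexicographically larger model id, this reduces to [["gpt-5", ...], ["solo-model", ...]]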
export function useConnected() {
const sync = useSync()
@@ -21,6 +42,7 @@ export function DialogModel(props: { providerID?: string }) {
const dialog = useDialog()
const keybind = useKeybind()
const [query, setQuery] = createSignal("")
const [all, setAll] = createSignal(false)
const connected = useConnected()
const providers = createDialogProviderOptions()
@@ -72,8 +94,8 @@ export function DialogModel(props: { providerID?: string }) {
(provider) => provider.id !== "opencode",
(provider) => provider.name,
),
flatMap((provider) =>
pipe(
flatMap((provider) => {
const items = pipe(
provider.models,
entries(),
filter(([_, info]) => info.status !== "deprecated"),
@@ -104,8 +126,8 @@ export function DialogModel(props: { providerID?: string }) {
(x) => x.footer !== "Free",
(x) => x.title,
),
),
),
)
}),
)
const popularProviders = !connected()
@@ -154,6 +176,13 @@ export function DialogModel(props: { providerID?: string }) {
local.model.toggleFavorite(option.value as { providerID: string; modelID: string })
},
},
{
keybind: keybind.all.model_show_all_toggle?.[0],
title: all() ? "Show latest only" : "Show all models",
onTrigger: () => {
setAll((value) => !value)
},
},
]}
onFilter={setQuery}
flat={true}

View File

@@ -9,7 +9,7 @@ import type { AgentPart, FilePart, TextPart } from "@opencode-ai/sdk/v2"
export type PromptInfo = {
input: string
mode?: "normal" | "shell"
mode?: "normal" | "shell" | "handoff"
parts: (
| Omit<FilePart, "id" | "messageID" | "sessionID">
| Omit<AgentPart, "id" | "messageID" | "sessionID">

View File

@@ -119,7 +119,7 @@ export function Prompt(props: PromptProps) {
const [store, setStore] = createStore<{
prompt: PromptInfo
mode: "normal" | "shell"
mode: "normal" | "shell" | "handoff"
extmarkToPartIndex: Map<number, number>
interrupt: number
placeholder: number
@@ -338,6 +338,20 @@ export function Prompt(props: PromptProps) {
))
},
},
{
title: "Handoff",
value: "prompt.handoff",
disabled: props.sessionID === undefined,
category: "Prompt",
slash: {
name: "handoff",
},
onSelect: () => {
input.clear()
setStore("mode", "handoff")
setStore("prompt", { input: "", parts: [] })
},
},
]
})
@@ -515,17 +529,45 @@ export function Prompt(props: PromptProps) {
async function submit() {
if (props.disabled) return
if (autocomplete?.visible) return
const selectedModel = local.model.current()
if (!selectedModel) {
promptModelWarning()
return
}
if (store.mode === "handoff") {
const result = await sdk.client.session.handoff({
sessionID: props.sessionID!,
goal: store.prompt.input,
model: {
providerID: selectedModel.providerID,
modelID: selectedModel.modelID,
},
})
if (result.data) {
route.navigate({
type: "home",
initialPrompt: {
input: result.data.text,
parts:
result.data.files.map((file) => ({
type: "file",
url: file,
filename: file,
mime: "text/plain",
})) ?? [],
},
})
}
return
}
if (!store.prompt.input) return
const trimmed = store.prompt.input.trim()
if (trimmed === "exit" || trimmed === "quit" || trimmed === ":q") {
exit()
return
}
const selectedModel = local.model.current()
if (!selectedModel) {
promptModelWarning()
return
}
const sessionID = props.sessionID
? props.sessionID
: await (async () => {
@@ -726,6 +768,7 @@ export function Prompt(props: PromptProps) {
const highlight = createMemo(() => {
if (keybind.leader) return theme.border
if (store.mode === "shell") return theme.primary
if (store.mode === "handoff") return theme.warning
return local.agent.color(local.agent.current().name)
})
@@ -797,7 +840,11 @@ export function Prompt(props: PromptProps) {
flexGrow={1}
>
<textarea
placeholder={props.sessionID ? undefined : `Ask anything... "${PLACEHOLDERS[store.placeholder]}"`}
placeholder={iife(() => {
if (store.mode === "handoff") return "Goal for the new session"
if (props.sessionID) return undefined
return `Ask anything... "${PLACEHOLDERS[store.placeholder]}"`
})}
textColor={keybind.leader ? theme.textMuted : theme.text}
focusedTextColor={keybind.leader ? theme.textMuted : theme.text}
minHeight={1}
@@ -854,7 +901,7 @@ export function Prompt(props: PromptProps) {
e.preventDefault()
return
}
if (store.mode === "shell") {
if (store.mode === "shell" || store.mode === "handoff") {
if ((e.name === "backspace" && input.visualCursor.offset === 0) || e.name === "escape") {
setStore("mode", "normal")
e.preventDefault()
@@ -975,7 +1022,11 @@ export function Prompt(props: PromptProps) {
/>
<box flexDirection="row" flexShrink={0} paddingTop={1} gap={1}>
<text fg={highlight()}>
{store.mode === "shell" ? "Shell" : Locale.titlecase(local.agent.current().name)}{" "}
<Switch>
<Match when={store.mode === "normal"}>{Locale.titlecase(local.agent.current().name)}</Match>
<Match when={store.mode === "shell"}>Shell</Match>
<Match when={store.mode === "handoff"}>Handoff</Match>
</Switch>
</text>
<Show when={store.mode === "normal"}>
<box flexDirection="row" gap={1}>
@@ -1122,6 +1173,11 @@ export function Prompt(props: PromptProps) {
esc <span style={{ fg: theme.textMuted }}>exit shell mode</span>
</text>
</Match>
<Match when={store.mode === "handoff"}>
<text fg={theme.text}>
esc <span style={{ fg: theme.textMuted }}>exit handoff mode</span>
</text>
</Match>
</Switch>
</box>
</Show>

View File

@@ -1,4 +1,4 @@
import { createStore } from "solid-js/store"
import { createStore, reconcile } from "solid-js/store"
import { createSimpleContext } from "./helper"
import type { PromptInfo } from "../component/prompt/history"
@@ -32,7 +32,7 @@ export const { use: useRoute, provider: RouteProvider } = createSimpleContext({
},
navigate(route: Route) {
console.log("navigate", route)
setStore(route)
setStore(reconcile(route))
},
}
},

View File

@@ -299,6 +299,24 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
break
}
case "message.part.delta": {
const parts = store.part[event.properties.messageID]
if (!parts) break
const result = Binary.search(parts, event.properties.partID, (p) => p.id)
if (!result.found) break
setStore(
"part",
event.properties.messageID,
produce((draft) => {
const part = draft[result.index]
const field = event.properties.field as keyof typeof part
const existing = part[field] as string | undefined
;(part[field] as string) = (existing ?? "") + event.properties.delta
}),
)
break
}
case "message.part.removed": {
const parts = store.part[event.properties.messageID]
const result = Binary.search(parts, event.properties.partID, (p) => p.id)

View File

@@ -77,11 +77,11 @@ export function Home() {
let prompt: PromptRef
const args = useArgs()
onMount(() => {
if (once) return
if (route.initialPrompt) {
prompt.set(route.initialPrompt)
once = true
} else if (args.prompt) {
return
}
if (!once && args.prompt) {
prompt.set({ input: args.prompt, parts: [] })
once = true
prompt.submit()
@@ -96,7 +96,26 @@ export function Home() {
<box flexGrow={1} justifyContent="center" alignItems="center" paddingLeft={2} paddingRight={2} gap={1}>
<box height={3} />
<Logo />
<box width="100%" maxWidth={75} zIndex={1000} paddingTop={1}>
<Show when={!route.initialPrompt}>
<box width="100%" maxWidth={75} zIndex={1000} paddingTop={1}>
<Prompt
ref={(r) => {
prompt = r
promptRef.set(r)
}}
hint={Hint}
/>
</box>
</Show>
<box height={3} width="100%" maxWidth={75} alignItems="center" paddingTop={2}>
<Show when={showTips()}>
<Tips />
</Show>
</box>
<Toast />
</box>
<Show when={route.initialPrompt}>
<box paddingLeft={2} paddingRight={2}>
<Prompt
ref={(r) => {
prompt = r
@@ -105,13 +124,7 @@ export function Home() {
hint={Hint}
/>
</box>
<box height={3} width="100%" maxWidth={75} alignItems="center" paddingTop={2}>
<Show when={showTips()}>
<Tips />
</Show>
</box>
<Toast />
</box>
</Show>
<box paddingTop={1} paddingBottom={1} paddingLeft={2} paddingRight={2} flexDirection="row" flexShrink={0} gap={2}>
<text fg={theme.textMuted}>{directory()}</text>
<box gap={1} flexDirection="row" flexShrink={0}>

View File

@@ -141,7 +141,7 @@ export function Session() {
})
const dimensions = useTerminalDimensions()
const [sidebar, setSidebar] = kv.signal<"auto" | "hide">("sidebar", "hide")
const [sidebar, setSidebar] = kv.signal<"auto" | "hide">("sidebar", "auto")
const [sidebarOpen, setSidebarOpen] = createSignal(false)
const [conceal, setConceal] = createSignal(true)
const [showThinking, setShowThinking] = kv.signal("thinking_visibility", true)
@@ -2027,8 +2027,8 @@ function ApplyPatch(props: ToolProps<typeof ApplyPatchTool>) {
</For>
</Match>
<Match when={true}>
<InlineTool icon="%" pending="Preparing apply_patch..." complete={false} part={props.part}>
apply_patch
<InlineTool icon="%" pending="Preparing patch..." complete={false} part={props.part}>
Patch
</InlineTool>
</Match>
</Switch>

View File

@@ -9,6 +9,7 @@ import { Log } from "@/util/log"
import { withNetworkOptions, resolveNetworkOptions } from "@/cli/network"
import type { Event } from "@opencode-ai/sdk/v2"
import type { EventSource } from "./context/sdk"
import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
declare global {
const OPENCODE_WORKER_PATH: string
@@ -77,6 +78,14 @@ export const TuiThreadCommand = cmd({
describe: "agent to use",
}),
handler: async (args) => {
// Keep ENABLE_PROCESSED_INPUT cleared even if other code flips it.
// (Important when running under `bun run` wrappers on Windows.)
win32InstallCtrlCGuard()
// Must be the very first thing — disables CTRL_C_EVENT before any Worker
// spawn or async work so the OS cannot kill the process group.
win32DisableProcessedInput()
if (args.fork && !args.continue && !args.session) {
UI.error("--fork requires --continue or --session")
process.exit(1)

View File

@@ -0,0 +1,118 @@
import { dlopen, ptr } from "bun:ffi"
const STD_INPUT_HANDLE = -10
const ENABLE_PROCESSED_INPUT = 0x0001
const kernel = () =>
dlopen("kernel32.dll", {
GetStdHandle: { args: ["i32"], returns: "ptr" },
GetConsoleMode: { args: ["ptr", "ptr"], returns: "i32" },
SetConsoleMode: { args: ["ptr", "u32"], returns: "i32" },
SetConsoleCtrlHandler: { args: ["ptr", "i32"], returns: "i32" },
})
let k32: ReturnType<typeof kernel> | undefined
function load() {
if (process.platform !== "win32") return false
try {
k32 ??= kernel()
return true
} catch {
return false
}
}
/**
* Clear ENABLE_PROCESSED_INPUT on the console stdin handle.
*/
export function win32DisableProcessedInput() {
if (process.platform !== "win32") return
if (!process.stdin.isTTY) return
if (!load()) return
const handle = k32!.symbols.GetStdHandle(STD_INPUT_HANDLE)
const buf = new Uint32Array(1)
if (k32!.symbols.GetConsoleMode(handle, ptr(buf)) === 0) return
const mode = buf[0]!
if ((mode & ENABLE_PROCESSED_INPUT) === 0) return
k32!.symbols.SetConsoleMode(handle, mode & ~ENABLE_PROCESSED_INPUT)
}
/**
* Tell Windows to ignore CTRL_C_EVENT for this process.
*
* SetConsoleCtrlHandler(NULL, TRUE) makes the process ignore Ctrl+C
* signals at the OS level. Belt-and-suspenders alongside disabling
* ENABLE_PROCESSED_INPUT.
*/
export function win32IgnoreCtrlC() {
if (process.platform !== "win32") return
if (!process.stdin.isTTY) return
if (!load()) return
k32!.symbols.SetConsoleCtrlHandler(null, 1)
}
let unhook: (() => void) | undefined
/**
* Keep ENABLE_PROCESSED_INPUT disabled.
*
* On Windows, Ctrl+C becomes a CTRL_C_EVENT (instead of stdin input) when
* ENABLE_PROCESSED_INPUT is set. Various runtimes can re-apply console modes
* (sometimes on a later tick), and the flag is console-global, not per-process.
*
* We combine:
* - A `setRawMode(...)` hook to re-clear after known raw-mode toggles.
* - A low-frequency poll as a backstop for native/external mode changes.
*/
export function win32InstallCtrlCGuard() {
if (process.platform !== "win32") return
if (!process.stdin.isTTY) return
if (!load()) return
if (unhook) return unhook
const stdin = process.stdin as any
const original = stdin.setRawMode
const handle = k32!.symbols.GetStdHandle(STD_INPUT_HANDLE)
const buf = new Uint32Array(1)
const enforce = () => {
if (k32!.symbols.GetConsoleMode(handle, ptr(buf)) === 0) return
const mode = buf[0]!
if ((mode & ENABLE_PROCESSED_INPUT) === 0) return
k32!.symbols.SetConsoleMode(handle, mode & ~ENABLE_PROCESSED_INPUT)
}
// Some runtimes can re-apply console modes on the next tick; enforce twice.
const later = () => {
enforce()
setImmediate(enforce)
}
if (typeof original === "function") {
stdin.setRawMode = (mode: boolean) => {
const result = original.call(stdin, mode)
later()
return result
}
}
// Ensure it's cleared immediately too (covers any earlier mode changes).
later()
const interval = setInterval(enforce, 100)
unhook = () => {
clearInterval(interval)
if (typeof original === "function") {
stdin.setRawMode = original
}
unhook = undefined
}
return unhook
}
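A simplified sketch of the intended call order, mirroring how thread.ts and the tui entrypoint in this changeset wire these helpers together:
const unguard = win32InstallCtrlCGuard() // keep ENABLE_PROCESSED_INPUT cleared from here on
win32DisableProcessedInput() // clear it immediately for anything that already flipped it
win32IgnoreCtrlC() // and ignore CTRL_C_EVENT at the OS level as a fallback
// ...run the TUI...
unguard?.() // restore the original setRawMode and stop the poll on exit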

View File

@@ -57,6 +57,8 @@ Use best judgement when processing input.
**Performance** - Only flag if obviously problematic.
- O(n²) on unbounded data, N+1 queries, blocking I/O on hot paths
**Behavior Changes** - If a behavioral change is introduced, raise it (especially if it's possibly unintentional).
---
## Before You Flag Something

View File

@@ -778,6 +778,7 @@ export namespace Config {
stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"),
model_provider_list: z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"),
model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"),
model_show_all_toggle: z.string().optional().default("ctrl+o").describe("Toggle showing all models"),
session_share: z.string().optional().default("none").describe("Share current session"),
session_unshare: z.string().optional().default("none").describe("Unshare current session"),
session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"),
@@ -1161,6 +1162,12 @@ export namespace Config {
.object({
auto: z.boolean().optional().describe("Enable automatic compaction when context is full (default: true)"),
prune: z.boolean().optional().describe("Enable pruning of old tool outputs (default: true)"),
reserved: z
.number()
.int()
.min(0)
.optional()
.describe("Token buffer for compaction. Leaves enough window to avoid overflow during compaction."),
})
.optional(),
experimental: z

View File

@@ -46,7 +46,7 @@ export namespace Flag {
export const OPENCODE_EXPERIMENTAL_LSP_TY = truthy("OPENCODE_EXPERIMENTAL_LSP_TY")
export const OPENCODE_EXPERIMENTAL_LSP_TOOL = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_LSP_TOOL")
export const OPENCODE_DISABLE_FILETIME_CHECK = truthy("OPENCODE_DISABLE_FILETIME_CHECK")
export const OPENCODE_EXPERIMENTAL_PLAN_MODE = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE")
export const OPENCODE_EXPERIMENTAL_MARKDOWN = truthy("OPENCODE_EXPERIMENTAL_MARKDOWN")
export const OPENCODE_MODELS_URL = process.env["OPENCODE_MODELS_URL"]
export const OPENCODE_MODELS_PATH = process.env["OPENCODE_MODELS_PATH"]

View File

@@ -23,9 +23,14 @@ import { AttachCommand } from "./cli/cmd/tui/attach"
import { TuiThreadCommand } from "./cli/cmd/tui/thread"
import { AcpCommand } from "./cli/cmd/acp"
import { EOL } from "os"
import { win32DisableProcessedInput, win32IgnoreCtrlC } from "./cli/cmd/tui/win32"
import { WebCommand } from "./cli/cmd/web"
import { PrCommand } from "./cli/cmd/pr"
import { SessionCommand } from "./cli/cmd/session"
import path from "path"
import { Global } from "./global"
import { JsonMigration } from "./storage/json-migration"
import { Database } from "./storage/db"
process.on("unhandledRejection", (e) => {
Log.Default.error("rejection", {
@@ -39,6 +44,14 @@ process.on("uncaughtException", (e) => {
})
})
// Disable Windows CTRL_C_EVENT as early as possible. When running under
// `bun run` (e.g. `bun dev`), the parent bun process shares this console
// and would be killed by the OS before any JS signal handler fires.
win32DisableProcessedInput()
// Belt-and-suspenders: even if something re-enables ENABLE_PROCESSED_INPUT
// later (opentui raw mode, libuv, etc.), ignore the generated event.
win32IgnoreCtrlC()
const cli = yargs(hideBin(process.argv))
.parserConfiguration({ "populate--": true })
.scriptName("opencode")
@@ -74,6 +87,37 @@ const cli = yargs(hideBin(process.argv))
version: Installation.VERSION,
args: process.argv.slice(2),
})
const marker = path.join(Global.Path.data, "opencode.db")
if (!(await Bun.file(marker).exists())) {
console.log("Performing one time database migration, may take a few minutes...")
const tty = process.stdout.isTTY
const width = 36
const orange = "\x1b[38;5;214m"
const muted = "\x1b[0;2m"
const reset = "\x1b[0m"
let last = -1
if (tty) process.stdout.write("\x1b[?25l")
try {
await JsonMigration.run(Database.Client().$client, {
progress: (event) => {
if (!tty) return
const percent = Math.floor((event.current / event.total) * 100)
if (percent === last && event.current !== event.total) return
last = percent
const fill = Math.round((percent / 100) * width)
const bar = `${"■".repeat(fill)}${"・".repeat(width - fill)}`
process.stdout.write(
`\r${orange}${bar} ${percent.toString().padStart(3)}%${reset} ${muted}${event.label.padEnd(12)} ${event.current}/${event.total}${reset}`,
)
if (event.current === event.total) process.stdout.write("\n")
},
})
} finally {
if (tty) process.stdout.write("\x1b[?25h")
}
console.log("Database migration complete.")
}
})
.usage("\n" + UI.logo())
.completion("completion", "generate shell completion script")

View File

@@ -3,7 +3,8 @@ import { BusEvent } from "@/bus/bus-event"
import { Config } from "@/config/config"
import { Identifier } from "@/id/id"
import { Instance } from "@/project/instance"
import { Storage } from "@/storage/storage"
import { Database, eq } from "@/storage/db"
import { PermissionTable } from "@/session/session.sql"
import { fn } from "@/util/fn"
import { Log } from "@/util/log"
import { Wildcard } from "@/util/wildcard"
@@ -105,9 +106,12 @@ export namespace PermissionNext {
),
}
const state = Instance.state(async () => {
const state = Instance.state(() => {
const projectID = Instance.project.id
const stored = await Storage.read<Ruleset>(["permission", projectID]).catch(() => [] as Ruleset)
const row = Database.use((db) =>
db.select().from(PermissionTable).where(eq(PermissionTable.project_id, projectID)).get(),
)
const stored = row?.data ?? ([] as Ruleset)
const pending: Record<
string,
@@ -222,7 +226,8 @@ export namespace PermissionNext {
// TODO: we don't persist the permission ruleset yet; waiting until there's
// UI to manage it
// await Storage.write(["permission", Instance.project.id], s.approved)
// db().insert(PermissionTable).values({ projectID: Instance.project.id, data: s.approved })
// .onConflictDoUpdate({ target: PermissionTable.projectID, set: { data: s.approved } }).run()
return
}
},
@@ -275,6 +280,7 @@ export namespace PermissionNext {
}
export async function list() {
return state().then((x) => Object.values(x.pending).map((x) => x.info))
const s = await state()
return Object.values(s.pending).map((x) => x.info)
}
}

View File

@@ -1,5 +1,4 @@
import { Plugin } from "../plugin"
import { Share } from "../share/share"
import { Format } from "../format"
import { LSP } from "../lsp"
import { FileWatcher } from "../file/watcher"
@@ -17,7 +16,6 @@ import { Truncate } from "../tool/truncation"
export async function InstanceBootstrap() {
Log.Default.info("bootstrapping", { directory: Instance.directory })
await Plugin.init()
Share.init()
ShareNext.init()
Format.init()
await LSP.init()

View File

@@ -0,0 +1,14 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"
import { Timestamps } from "@/storage/schema.sql"
export const ProjectTable = sqliteTable("project", {
id: text().primaryKey(),
worktree: text().notNull(),
vcs: text(),
name: text(),
icon_url: text(),
icon_color: text(),
...Timestamps,
time_initialized: integer(),
sandboxes: text({ mode: "json" }).notNull().$type<string[]>(),
})
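A minimal lookup sketch following the Database.use pattern that appears throughout this changeset; the import paths are assumptions based on the aliases used elsewhere in the diff:
import { Database, eq } from "@/storage/db"
import { ProjectTable } from "@/project/project.sql"
// Returns typeof ProjectTable.$inferSelect | undefined; Project.fromRow maps a row to the Info shape
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get())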

View File

@@ -1,18 +1,17 @@
import z from "zod"
import fs from "fs/promises"
import { Filesystem } from "../util/filesystem"
import path from "path"
import { $ } from "bun"
import { Storage } from "../storage/storage"
import { Database, eq } from "../storage/db"
import { ProjectTable } from "./project.sql"
import { SessionTable } from "../session/session.sql"
import { Log } from "../util/log"
import { Flag } from "@/flag/flag"
import { Session } from "../session"
import { work } from "../util/queue"
import { fn } from "@opencode-ai/util/fn"
import { BusEvent } from "@/bus/bus-event"
import { iife } from "@/util/iife"
import { GlobalBus } from "@/bus/global"
import { existsSync } from "fs"
export namespace Project {
const log = Log.create({ service: "project" })
@@ -50,66 +49,85 @@ export namespace Project {
Updated: BusEvent.define("project.updated", Info),
}
type Row = typeof ProjectTable.$inferSelect
export function fromRow(row: Row): Info {
const icon =
row.icon_url || row.icon_color
? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined }
: undefined
return {
id: row.id,
worktree: row.worktree,
vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined,
name: row.name ?? undefined,
icon,
time: {
created: row.time_created,
updated: row.time_updated,
initialized: row.time_initialized ?? undefined,
},
sandboxes: row.sandboxes,
}
}
export async function fromDirectory(directory: string) {
log.info("fromDirectory", { directory })
const { id, sandbox, worktree, vcs } = await iife(async () => {
const data = await iife(async () => {
const matches = Filesystem.up({ targets: [".git"], start: directory })
const git = await matches.next().then((x) => x.value)
await matches.return()
if (git) {
let sandbox = path.dirname(git)
const sandbox = path.dirname(git)
const bin = Bun.which("git")
const gitBinary = Bun.which("git")
// cached id calculation
let id = await Bun.file(path.join(git, "opencode"))
const cached = await Bun.file(path.join(git, "opencode"))
.text()
.then((x) => x.trim())
.catch(() => undefined)
if (!gitBinary) {
if (!bin) {
return {
id: id ?? "global",
id: cached ?? "global",
worktree: sandbox,
sandbox: sandbox,
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
}
}
// generate id from root commit
if (!id) {
const roots = await $`git rev-list --max-parents=0 --all`
.quiet()
.nothrow()
.cwd(sandbox)
.text()
.then((x) =>
x
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted(),
)
.catch(() => undefined)
if (!roots) {
return {
id: "global",
worktree: sandbox,
sandbox: sandbox,
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
}
}
id = roots[0]
if (id) {
void Bun.file(path.join(git, "opencode"))
.write(id)
const roots = cached
? undefined
: await $`git rev-list --max-parents=0 --all`
.quiet()
.nothrow()
.cwd(sandbox)
.text()
.then((x) =>
x
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted(),
)
.catch(() => undefined)
if (!cached && !roots) {
return {
id: "global",
worktree: sandbox,
sandbox: sandbox,
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
}
}
const id = cached ?? roots?.[0]
if (!cached && id) {
void Bun.file(path.join(git, "opencode"))
.write(id)
.catch(() => undefined)
}
if (!id) {
return {
id: "global",
@@ -136,33 +154,31 @@ export namespace Project {
}
}
sandbox = top
const worktree = await $`git rev-parse --git-common-dir`
const tree = await $`git rev-parse --git-common-dir`
.quiet()
.nothrow()
.cwd(sandbox)
.cwd(top)
.text()
.then((x) => {
const dirname = path.dirname(x.trim())
if (dirname === ".") return sandbox
if (dirname === ".") return top
return dirname
})
.catch(() => undefined)
if (!worktree) {
if (!tree) {
return {
id,
sandbox,
worktree: sandbox,
sandbox: top,
worktree: top,
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
}
}
return {
id,
sandbox,
worktree,
sandbox: top,
worktree: tree,
vcs: "git",
}
}
@@ -175,47 +191,78 @@ export namespace Project {
}
})
let existing = await Storage.read<Info>(["project", id]).catch(() => undefined)
if (!existing) {
existing = {
id,
worktree,
vcs: vcs as Info["vcs"],
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, data.id)).get())
const existing = await iife(async () => {
if (row) return fromRow(row)
const fresh: Info = {
id: data.id,
worktree: data.worktree,
vcs: data.vcs as Info["vcs"],
sandboxes: [],
time: {
created: Date.now(),
updated: Date.now(),
},
}
if (id !== "global") {
await migrateFromGlobal(id, worktree)
if (data.id !== "global") {
await migrateFromGlobal(data.id, data.worktree)
}
}
// migrate old projects before sandboxes
if (!existing.sandboxes) existing.sandboxes = []
return fresh
})
if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing)
const result: Info = {
...existing,
worktree,
vcs: vcs as Info["vcs"],
worktree: data.worktree,
vcs: data.vcs as Info["vcs"],
time: {
...existing.time,
updated: Date.now(),
},
}
if (sandbox !== result.worktree && !result.sandboxes.includes(sandbox)) result.sandboxes.push(sandbox)
result.sandboxes = result.sandboxes.filter((x) => existsSync(x))
await Storage.write<Info>(["project", id], result)
if (data.sandbox !== result.worktree && !result.sandboxes.includes(data.sandbox))
result.sandboxes.push(data.sandbox)
const sandboxes: string[] = []
for (const x of result.sandboxes) {
const stat = await Bun.file(x)
.stat()
.catch(() => undefined)
if (stat) sandboxes.push(x)
}
result.sandboxes = sandboxes
const insert = {
id: result.id,
worktree: result.worktree,
vcs: result.vcs ?? null,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_created: result.time.created,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
}
const updateSet = {
worktree: result.worktree,
vcs: result.vcs ?? null,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
}
Database.use((db) =>
db.insert(ProjectTable).values(insert).onConflictDoUpdate({ target: ProjectTable.id, set: updateSet }).run(),
)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
},
})
return { project: result, sandbox }
return { project: result, sandbox: data.sandbox }
}
export async function discover(input: Info) {
@@ -248,43 +295,54 @@ export namespace Project {
return
}
async function migrateFromGlobal(newProjectID: string, worktree: string) {
const globalProject = await Storage.read<Info>(["project", "global"]).catch(() => undefined)
if (!globalProject) return
async function migrateFromGlobal(id: string, worktree: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get())
if (!row) return
const globalSessions = await Storage.list(["session", "global"]).catch(() => [])
if (globalSessions.length === 0) return
const sessions = Database.use((db) =>
db.select().from(SessionTable).where(eq(SessionTable.project_id, "global")).all(),
)
if (sessions.length === 0) return
log.info("migrating sessions from global", { newProjectID, worktree, count: globalSessions.length })
log.info("migrating sessions from global", { newProjectID: id, worktree, count: sessions.length })
await work(10, globalSessions, async (key) => {
const sessionID = key[key.length - 1]
const session = await Storage.read<Session.Info>(key).catch(() => undefined)
if (!session) return
if (session.directory && session.directory !== worktree) return
await work(10, sessions, async (row) => {
// Skip sessions that belong to a different directory
if (row.directory && row.directory !== worktree) return
session.projectID = newProjectID
log.info("migrating session", { sessionID, from: "global", to: newProjectID })
await Storage.write(["session", newProjectID, sessionID], session)
await Storage.remove(key)
log.info("migrating session", { sessionID: row.id, from: "global", to: id })
Database.use((db) => db.update(SessionTable).set({ project_id: id }).where(eq(SessionTable.id, row.id)).run())
}).catch((error) => {
log.error("failed to migrate sessions from global to project", { error, projectId: newProjectID })
log.error("failed to migrate sessions from global to project", { error, projectId: id })
})
}
export async function setInitialized(projectID: string) {
await Storage.update<Info>(["project", projectID], (draft) => {
draft.time.initialized = Date.now()
})
export function setInitialized(id: string) {
Database.use((db) =>
db
.update(ProjectTable)
.set({
time_initialized: Date.now(),
})
.where(eq(ProjectTable.id, id))
.run(),
)
}
export async function list() {
const keys = await Storage.list(["project"])
const projects = await Promise.all(keys.map((x) => Storage.read<Info>(x)))
return projects.map((project) => ({
...project,
sandboxes: project.sandboxes?.filter((x) => existsSync(x)),
}))
export function list() {
return Database.use((db) =>
db
.select()
.from(ProjectTable)
.all()
.map((row) => fromRow(row)),
)
}
export function get(id: string): Info | undefined {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return undefined
return fromRow(row)
}
export const update = fn(
@@ -295,77 +353,89 @@ export namespace Project {
commands: Info.shape.commands.optional(),
}),
async (input) => {
const result = await Storage.update<Info>(["project", input.projectID], (draft) => {
if (input.name !== undefined) draft.name = input.name
if (input.icon !== undefined) {
draft.icon = {
...draft.icon,
}
if (input.icon.url !== undefined) draft.icon.url = input.icon.url
if (input.icon.override !== undefined) draft.icon.override = input.icon.override || undefined
if (input.icon.color !== undefined) draft.icon.color = input.icon.color
}
if (input.commands?.start !== undefined) {
const start = input.commands.start || undefined
draft.commands = {
...(draft.commands ?? {}),
}
draft.commands.start = start
if (!draft.commands.start) draft.commands = undefined
}
draft.time.updated = Date.now()
})
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({
name: input.name,
icon_url: input.icon?.url,
icon_color: input.icon?.color,
time_updated: Date.now(),
})
.where(eq(ProjectTable.id, input.projectID))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${input.projectID}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
},
)
export async function sandboxes(projectID: string) {
const project = await Storage.read<Info>(["project", projectID]).catch(() => undefined)
if (!project?.sandboxes) return []
export async function sandboxes(id: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return []
const data = fromRow(row)
const valid: string[] = []
for (const dir of project.sandboxes) {
const stat = await fs.stat(dir).catch(() => undefined)
for (const dir of data.sandboxes) {
const stat = await Bun.file(dir)
.stat()
.catch(() => undefined)
if (stat?.isDirectory()) valid.push(dir)
}
return valid
}
export async function addSandbox(projectID: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => {
const sandboxes = draft.sandboxes ?? []
if (!sandboxes.includes(directory)) sandboxes.push(directory)
draft.sandboxes = sandboxes
draft.time.updated = Date.now()
})
export async function addSandbox(id: string, directory: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sandboxes = [...row.sandboxes]
if (!sandboxes.includes(directory)) sandboxes.push(directory)
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
}
export async function removeSandbox(projectID: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => {
const sandboxes = draft.sandboxes ?? []
draft.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory)
draft.time.updated = Date.now()
})
export async function removeSandbox(id: string, directory: string) {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sandboxes = row.sandboxes.filter((s) => s !== directory)
const result = Database.use((db) =>
db
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
const data = fromRow(result)
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
}
}

View File

@@ -211,7 +211,12 @@ export namespace Provider {
const awsWebIdentityTokenFile = Env.get("AWS_WEB_IDENTITY_TOKEN_FILE")
if (!profile && !awsAccessKeyId && !awsBearerToken && !awsWebIdentityTokenFile) return { autoload: false }
const containerCreds = Boolean(
process.env.AWS_CONTAINER_CREDENTIALS_RELATIVE_URI || process.env.AWS_CONTAINER_CREDENTIALS_FULL_URI,
)
if (!profile && !awsAccessKeyId && !awsBearerToken && !awsWebIdentityTokenFile && !containerCreds)
return { autoload: false }
const providerOptions: AmazonBedrockProviderSettings = {
region: defaultRegion,

View File

@@ -5,6 +5,7 @@ import type { JSONSchema } from "zod/v4/core"
import type { Provider } from "./provider"
import type { ModelsDev } from "./models"
import { iife } from "@/util/iife"
import { Flag } from "@/flag/flag"
type Modality = NonNullable<ModelsDev.Model["modalities"]>["input"][number]
@@ -17,6 +18,8 @@ function mimeToModality(mime: string): Modality | undefined {
}
export namespace ProviderTransform {
export const OUTPUT_TOKEN_MAX = Flag.OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX || 32_000
// Maps npm package to the key the AI SDK expects for providerOptions
function sdkKey(npm: string): string | undefined {
switch (npm) {
@@ -723,29 +726,8 @@ export namespace ProviderTransform {
return { [key]: options }
}
export function maxOutputTokens(
npm: string,
options: Record<string, any>,
modelLimit: number,
globalLimit: number,
): number {
const modelCap = modelLimit || globalLimit
const standardLimit = Math.min(modelCap, globalLimit)
if (npm === "@ai-sdk/anthropic" || npm === "@ai-sdk/google-vertex/anthropic") {
const thinking = options?.["thinking"]
const budgetTokens = typeof thinking?.["budgetTokens"] === "number" ? thinking["budgetTokens"] : 0
const enabled = thinking?.["type"] === "enabled"
if (enabled && budgetTokens > 0) {
// Return text tokens so that text + thinking <= model cap, preferring 32k text when possible.
if (budgetTokens + standardLimit <= modelCap) {
return standardLimit
}
return modelCap - budgetTokens
}
}
return standardLimit
export function maxOutputTokens(model: Provider.Model): number {
return Math.min(model.limit.output, OUTPUT_TOKEN_MAX) || OUTPUT_TOKEN_MAX
}
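Worked examples of the cap above, assuming OUTPUT_TOKEN_MAX is the default 32_000 (the experimental flag can override it):
// model.limit.output = 64_000 -> Math.min(64_000, 32_000) = 32_000
// model.limit.output = 8_192  -> Math.min(8_192, 32_000)  = 8_192
// model.limit.output = 0      -> Math.min(0, 32_000) = 0, which is falsy, so the fallback 32_000 applies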
export function schema(model: Provider.Model, schema: JSONSchema.BaseSchema | JSONSchema7): JSONSchema7 {

View File

@@ -1,6 +1,6 @@
import { resolver } from "hono-openapi"
import z from "zod"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
export const ERRORS = {
400: {
@@ -25,7 +25,7 @@ export const ERRORS = {
description: "Not found",
content: {
"application/json": {
schema: resolver(Storage.NotFoundError.Schema),
schema: resolver(NotFoundError.Schema),
},
},
},

View File

@@ -3,7 +3,7 @@ import { describeRoute, validator, resolver } from "hono-openapi"
import { upgradeWebSocket } from "hono/bun"
import z from "zod"
import { Pty } from "@/pty"
import { Storage } from "../../storage/storage"
import { NotFoundError } from "../../storage/db"
import { errors } from "../error"
import { lazy } from "../../util/lazy"
@@ -76,7 +76,7 @@ export const PtyRoutes = lazy(() =>
async (c) => {
const info = Pty.get(c.req.valid("param").ptyID)
if (!info) {
throw new Storage.NotFoundError({ message: "Session not found" })
throw new NotFoundError({ message: "Session not found" })
}
return c.json(info)
},

View File

@@ -7,6 +7,7 @@ import { MessageV2 } from "../../session/message-v2"
import { SessionPrompt } from "../../session/prompt"
import { SessionCompaction } from "../../session/compaction"
import { SessionRevert } from "../../session/revert"
import { SessionHandoff } from "../../session/handoff"
import { SessionStatus } from "@/session/status"
import { SessionSummary } from "@/session/summary"
import { Todo } from "../../session/todo"
@@ -276,18 +277,15 @@ export const SessionRoutes = lazy(() =>
const sessionID = c.req.valid("param").sessionID
const updates = c.req.valid("json")
const updatedSession = await Session.update(
sessionID,
(session) => {
if (updates.title !== undefined) {
session.title = updates.title
}
if (updates.time?.archived !== undefined) session.time.archived = updates.time.archived
},
{ touch: false },
)
let session = await Session.get(sessionID)
if (updates.title !== undefined) {
session = await Session.setTitle({ sessionID, title: updates.title })
}
if (updates.time?.archived !== undefined) {
session = await Session.setArchived({ sessionID, time: updates.time.archived })
}
return c.json(updatedSession)
return c.json(session)
},
)
.post(
@@ -935,5 +933,41 @@ export const SessionRoutes = lazy(() =>
})
return c.json(true)
},
)
.post(
"/:sessionID/handoff",
describeRoute({
summary: "Handoff session",
description: "Extract context and relevant files for another agent to continue the conversation.",
operationId: "session.handoff",
responses: {
200: {
description: "Handoff data extracted",
content: {
"application/json": {
schema: resolver(z.object({ text: z.string(), files: z.string().array() })),
},
},
},
...errors(400, 404),
},
}),
validator(
"param",
z.object({
sessionID: z.string().meta({ description: "Session ID" }),
}),
),
validator("json", SessionHandoff.handoff.schema.omit({ sessionID: true })),
async (c) => {
const params = c.req.valid("param")
const body = c.req.valid("json")
const result = await SessionHandoff.handoff({
sessionID: params.sessionID,
model: body.model,
goal: body.goal,
})
return c.json(result)
},
),
)
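Client-side sketch of calling this endpoint, mirroring the TUI usage earlier in this changeset; the session ID, goal, and model values are hypothetical:
const result = await sdk.client.session.handoff({
  sessionID: "ses_123", // hypothetical id
  goal: "continue wiring the sqlite migration",
  model: { providerID: "anthropic", modelID: "claude-sonnet-4-5" }, // hypothetical model
})
// result.data is { text: string, files: string[] }: a handoff summary plus up to 10 workspace-relative paths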

View File

@@ -31,7 +31,7 @@ import { ExperimentalRoutes } from "./routes/experimental"
import { ProviderRoutes } from "./routes/provider"
import { lazy } from "../util/lazy"
import { InstanceBootstrap } from "../project/bootstrap"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
import type { ContentfulStatusCode } from "hono/utils/http-status"
import { websocket } from "hono/bun"
import { HTTPException } from "hono/http-exception"
@@ -65,7 +65,7 @@ export namespace Server {
})
if (err instanceof NamedError) {
let status: ContentfulStatusCode
if (err instanceof Storage.NotFoundError) status = 404
if (err instanceof NotFoundError) status = 404
else if (err instanceof Provider.ModelNotFoundError) status = 400
else if (err.name.startsWith("Worktree")) status = 400
else status = 500

View File

@@ -6,7 +6,6 @@ import { Instance } from "../project/instance"
import { Provider } from "../provider/provider"
import { MessageV2 } from "./message-v2"
import z from "zod"
import { SessionPrompt } from "./prompt"
import { Token } from "../util/token"
import { Log } from "../util/log"
import { SessionProcessor } from "./processor"
@@ -14,6 +13,7 @@ import { fn } from "@/util/fn"
import { Agent } from "@/agent/agent"
import { Plugin } from "@/plugin"
import { Config } from "@/config/config"
import { ProviderTransform } from "@/provider/transform"
export namespace SessionCompaction {
const log = Log.create({ service: "session.compaction" })
@@ -27,15 +27,22 @@ export namespace SessionCompaction {
),
}
const COMPACTION_BUFFER = 20_000
export async function isOverflow(input: { tokens: MessageV2.Assistant["tokens"]; model: Provider.Model }) {
const config = await Config.get()
if (config.compaction?.auto === false) return false
const context = input.model.limit.context
if (context === 0) return false
const count = input.tokens.input + input.tokens.cache.read + input.tokens.output
const output = Math.min(input.model.limit.output, SessionPrompt.OUTPUT_TOKEN_MAX) || SessionPrompt.OUTPUT_TOKEN_MAX
const usable = input.model.limit.input || context - output
return count > usable
const count =
input.tokens.total ||
input.tokens.input + input.tokens.output + input.tokens.cache.read + input.tokens.cache.write
const reserved =
config.compaction?.reserved ?? Math.min(COMPACTION_BUFFER, ProviderTransform.maxOutputTokens(input.model))
const usable = input.model.limit.input ? input.model.limit.input - reserved : context - reserved
return count >= usable
}
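Worked example of the reserve arithmetic above, with hypothetical numbers:
// context = 200_000, limit.input unset, reserved = min(20_000, maxOutputTokens(model)) = 20_000
// usable = 200_000 - 20_000 = 180_000, so compaction triggers once the total token count reaches 180_000
// The buffer can be overridden via config, e.g. { "compaction": { "reserved": 40000 } }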
export const PRUNE_MINIMUM = 20_000
@@ -139,8 +146,34 @@ export namespace SessionCompaction {
{ sessionID: input.sessionID },
{ context: [], prompt: undefined },
)
const defaultPrompt =
"Provide a detailed prompt for continuing our conversation above. Focus on information that would be helpful for continuing the conversation, including what we did, what we're doing, which files we're working on, and what we're going to do next considering new session will not have access to our conversation."
const defaultPrompt = `Provide a detailed prompt for continuing our conversation above.
Focus on information that would be helpful for continuing the conversation, including what we did, what we're doing, which files we're working on, and what we're going to do next.
The summary that you construct will be used so that another agent can read it and continue the work.
When constructing the summary, try to stick to this template:
---
## Goal
[What goal(s) is the user trying to accomplish?]
## Instructions
- [What important instructions did the user give you that are relevant]
- [If there is a plan or spec, include information about it so the next agent can continue using it]
## Discoveries
[What notable things were learned during this conversation that would be useful for the next agent to know when continuing the work]
## Accomplished
[What work has been completed, what work is still in progress, and what work is left?]
## Relevant files / directories
[Construct a structured list of relevant files that have been read, edited, or created that pertain to the task at hand. If all the files in a directory are relevant, include the path to the directory.]
---`
const promptText = compacting.prompt ?? [defaultPrompt, ...compacting.context].join("\n\n")
const result = await processor.process({
user: userMessage,
@@ -181,7 +214,7 @@ export namespace SessionCompaction {
sessionID: input.sessionID,
type: "text",
synthetic: true,
text: "Continue if you have next steps",
text: "Continue if you have next steps, or stop and ask for clarification if you are unsure how to proceed.",
time: {
start: Date.now(),
end: Date.now(),

View File

@@ -0,0 +1,105 @@
import { fn } from "@/util/fn"
import z from "zod"
import { MessageV2 } from "./message-v2"
import { LLM } from "./llm"
import { Agent } from "@/agent/agent"
import { Provider } from "@/provider/provider"
import { iife } from "@/util/iife"
import { Identifier } from "@/id/id"
import PROMPT_HANDOFF from "./prompt/handoff.txt"
import { type Tool } from "ai"
import { SessionStatus } from "./status"
import { defer } from "@/util/defer"
export namespace SessionHandoff {
const HandoffTool: Tool = {
description:
"A tool to extract relevant information from the thread and select relevant files for another agent to continue the conversation. Use this tool to identify the most important context and files needed.",
inputSchema: z.object({
text: z.string().describe(PROMPT_HANDOFF),
files: z
.string()
.array()
.describe(
[
"An array of file or directory paths (workspace-relative) that are relevant to accomplishing the goal.",
"",
'IMPORTANT: Return as a JSON array of strings, e.g., ["packages/core/src/session/message-v2.ts", "packages/core/src/session/prompt/handoff.txt"]',
"",
"Rules:",
"- Maximum 10 files. Only include the most critical files needed for the task.",
"- You can include directories if multiple files from that directory are needed",
"- Prioritize by importance and relevance. PUT THE MOST IMPORTANT FILES FIRST.",
'- Return workspace-relative paths (e.g., "packages/core/src/session/message-v2.ts")',
"- Do not use absolute paths or invent files",
].join("\n"),
),
}),
async execute(_args, _ctx) {
return {}
},
}
export const handoff = fn(
z.object({
sessionID: z.string(),
model: z.object({ providerID: z.string(), modelID: z.string() }),
goal: z.string().optional(),
}),
async (input) => {
SessionStatus.set(input.sessionID, { type: "busy" })
using _ = defer(() => SessionStatus.set(input.sessionID, { type: "idle" }))
const messages = await MessageV2.filterCompacted(MessageV2.stream(input.sessionID))
const agent = await Agent.get("handoff")
const model = await iife(async () => {
if (agent.model) return Provider.getModel(agent.model.providerID, agent.model.modelID)
const small = await Provider.getSmallModel(input.model.providerID)
if (small) return small
return Provider.getModel(input.model.providerID, input.model.modelID)
})
const user = {
info: {
model: {
providerID: model.providerID,
modelID: model.id,
},
agent: agent.name,
sessionID: input.sessionID,
id: Identifier.ascending("user"),
role: "user",
time: {
created: Date.now(),
},
} satisfies MessageV2.User,
parts: [
{
type: "text",
text: PROMPT_HANDOFF + "\n\nMy request:\n" + (input.goal ?? "general summarization"),
id: Identifier.ascending("part"),
sessionID: input.sessionID,
messageID: Identifier.ascending("message"),
},
] satisfies MessageV2.TextPart[],
} satisfies MessageV2.WithParts
const abort = new AbortController()
const stream = await LLM.stream({
agent,
messages: MessageV2.toModelMessages([...messages, user], model),
sessionID: input.sessionID,
abort: abort.signal,
model,
system: [],
small: true,
user: user.info,
output: "tool",
tools: {
handoff: HandoffTool,
},
})
const [result] = await stream.toolCalls
if (!result) throw new Error("Handoff tool did not return a result")
return result.input
},
)
}

View File

@@ -4,13 +4,15 @@ import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import { Decimal } from "decimal.js"
import z from "zod"
import { type LanguageModelUsage, type ProviderMetadata } from "ai"
import { type ProviderMetadata } from "ai"
import { Config } from "../config/config"
import { Flag } from "../flag/flag"
import { Identifier } from "../id/id"
import { Installation } from "../installation"
import { Storage } from "../storage/storage"
import { Database, NotFoundError, eq, and, or, like } from "../storage/db"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import { Storage } from "@/storage/storage"
import { Log } from "../util/log"
import { MessageV2 } from "./message-v2"
import { Instance } from "../project/instance"
@@ -22,6 +24,8 @@ import { Snapshot } from "@/snapshot"
import type { Provider } from "@/provider/provider"
import { PermissionNext } from "@/permission/next"
import { Global } from "@/global"
import type { LanguageModelV2Usage } from "@ai-sdk/provider"
import { iife } from "@/util/iife"
export namespace Session {
const log = Log.create({ service: "session" })
@@ -39,6 +43,64 @@ export namespace Session {
).test(title)
}
type SessionRow = typeof SessionTable.$inferSelect
export function fromRow(row: SessionRow): Info {
const summary =
row.summary_additions !== null || row.summary_deletions !== null || row.summary_files !== null
? {
additions: row.summary_additions ?? 0,
deletions: row.summary_deletions ?? 0,
files: row.summary_files ?? 0,
diffs: row.summary_diffs ?? undefined,
}
: undefined
const share = row.share_url ? { url: row.share_url } : undefined
const revert = row.revert ?? undefined
return {
id: row.id,
slug: row.slug,
projectID: row.project_id,
directory: row.directory,
parentID: row.parent_id ?? undefined,
title: row.title,
version: row.version,
summary,
share,
revert,
permission: row.permission ?? undefined,
time: {
created: row.time_created,
updated: row.time_updated,
compacting: row.time_compacting ?? undefined,
archived: row.time_archived ?? undefined,
},
}
}
export function toRow(info: Info) {
return {
id: info.id,
project_id: info.projectID,
parent_id: info.parentID,
slug: info.slug,
directory: info.directory,
title: info.title,
version: info.version,
share_url: info.share?.url,
summary_additions: info.summary?.additions,
summary_deletions: info.summary?.deletions,
summary_files: info.summary?.files,
summary_diffs: info.summary?.diffs,
revert: info.revert ?? null,
permission: info.permission,
time_created: info.time.created,
time_updated: info.time.updated,
time_compacting: info.time.compacting,
time_archived: info.time.archived,
}
}
function getForkedTitle(title: string): string {
const match = title.match(/^(.+) \(fork #(\d+)\)$/)
if (match) {
@@ -92,16 +154,6 @@ export namespace Session {
})
export type Info = z.output<typeof Info>
export const ShareInfo = z
.object({
secret: z.string(),
url: z.string(),
})
.meta({
ref: "SessionShare",
})
export type ShareInfo = z.output<typeof ShareInfo>
export const Event = {
Created: BusEvent.define(
"session.created",
@@ -198,8 +250,17 @@ export namespace Session {
)
export const touch = fn(Identifier.schema("session"), async (sessionID) => {
await update(sessionID, (draft) => {
draft.time.updated = Date.now()
const now = Date.now()
Database.use((db) => {
const row = db
.update(SessionTable)
.set({ time_updated: now })
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
})
@@ -225,21 +286,19 @@ export namespace Session {
},
}
log.info("created", result)
await Storage.write(["session", Instance.project.id, result.id], result)
Bus.publish(Event.Created, {
info: result,
Database.use((db) => {
db.insert(SessionTable).values(toRow(result)).run()
Database.effect(() =>
Bus.publish(Event.Created, {
info: result,
}),
)
})
const cfg = await Config.get()
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
share(result.id)
.then((share) => {
update(result.id, (draft) => {
draft.share = share
})
})
.catch(() => {
// Silently ignore sharing errors during session creation
})
share(result.id).catch(() => {
// Silently ignore sharing errors during session creation
})
Bus.publish(Event.Updated, {
info: result,
})
@@ -254,12 +313,9 @@ export namespace Session {
}
export const get = fn(Identifier.schema("session"), async (id) => {
const read = await Storage.read<Info>(["session", Instance.project.id, id])
return read as Info
})
export const getShare = fn(Identifier.schema("session"), async (id) => {
return Storage.read<ShareInfo>(["share", id])
const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get())
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
return fromRow(row)
})
export const share = fn(Identifier.schema("session"), async (id) => {
@@ -269,15 +325,12 @@ export namespace Session {
}
const { ShareNext } = await import("@/share/share-next")
const share = await ShareNext.create(id)
await update(
id,
(draft) => {
draft.share = {
url: share.url,
}
},
{ touch: false },
)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: share.url }).where(eq(SessionTable.id, id)).returning().get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
return share
})
@@ -285,32 +338,155 @@ export namespace Session {
// Use ShareNext to remove the share (same as share function uses ShareNext to create)
const { ShareNext } = await import("@/share/share-next")
await ShareNext.remove(id)
await update(
id,
(draft) => {
draft.share = undefined
},
{ touch: false },
)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: null }).where(eq(SessionTable.id, id)).returning().get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
})
export async function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
const project = Instance.project
const result = await Storage.update<Info>(["session", project.id, id], (draft) => {
editor(draft)
if (options?.touch !== false) {
draft.time.updated = Date.now()
}
export const setTitle = fn(
z.object({
sessionID: Identifier.schema("session"),
title: z.string(),
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({ title: input.title })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
export const setArchived = fn(
z.object({
sessionID: Identifier.schema("session"),
time: z.number().optional(),
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({ time_archived: input.time })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
export const setPermission = fn(
z.object({
sessionID: Identifier.schema("session"),
permission: PermissionNext.Ruleset,
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({ permission: input.permission, time_updated: Date.now() })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
export const setRevert = fn(
z.object({
sessionID: Identifier.schema("session"),
revert: Info.shape.revert,
summary: Info.shape.summary,
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
revert: input.revert ?? null,
summary_additions: input.summary?.additions,
summary_deletions: input.summary?.deletions,
summary_files: input.summary?.files,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
export const clearRevert = fn(Identifier.schema("session"), async (sessionID) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
revert: null,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
Bus.publish(Event.Updated, {
info: result,
})
return result
}
})
export const setSummary = fn(
z.object({
sessionID: Identifier.schema("session"),
summary: Info.shape.summary,
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
summary_additions: input.summary?.additions,
summary_deletions: input.summary?.deletions,
summary_files: input.summary?.files,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
export const diff = fn(Identifier.schema("session"), async (sessionID) => {
const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
return diffs ?? []
try {
return await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
} catch {
return []
}
})
export const messages = fn(
@@ -329,25 +505,37 @@ export namespace Session {
},
)
export async function* list() {
export function* list() {
const project = Instance.project
for (const item of await Storage.list(["session", project.id])) {
const session = await Storage.read<Info>(item).catch(() => undefined)
if (!session) continue
yield session
const rel = path.relative(Instance.worktree, Instance.directory)
const suffix = path.sep + rel
const rows = Database.use((db) =>
db
.select()
.from(SessionTable)
.where(
and(
eq(SessionTable.project_id, project.id),
or(eq(SessionTable.directory, Instance.directory), like(SessionTable.directory, `%${suffix}`)),
),
)
.all(),
)
for (const row of rows) {
yield fromRow(row)
}
}
export const children = fn(Identifier.schema("session"), async (parentID) => {
const project = Instance.project
const result = [] as Session.Info[]
for (const item of await Storage.list(["session", project.id])) {
const session = await Storage.read<Info>(item).catch(() => undefined)
if (!session) continue
if (session.parentID !== parentID) continue
result.push(session)
}
return result
const rows = Database.use((db) =>
db
.select()
.from(SessionTable)
.where(and(eq(SessionTable.project_id, project.id), eq(SessionTable.parent_id, parentID)))
.all(),
)
return rows.map(fromRow)
})
export const remove = fn(Identifier.schema("session"), async (sessionID) => {
@@ -358,15 +546,14 @@ export namespace Session {
await remove(child.id)
}
await unshare(sessionID).catch(() => {})
for (const msg of await Storage.list(["message", sessionID])) {
for (const part of await Storage.list(["part", msg.at(-1)!])) {
await Storage.remove(part)
}
await Storage.remove(msg)
}
await Storage.remove(["session", project.id, sessionID])
Bus.publish(Event.Deleted, {
info: session,
// CASCADE delete handles messages and parts automatically
Database.use((db) => {
db.delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
Database.effect(() =>
Bus.publish(Event.Deleted, {
info: session,
}),
)
})
} catch (e) {
log.error(e)
@@ -374,9 +561,23 @@ export namespace Session {
})
export const updateMessage = fn(MessageV2.Info, async (msg) => {
await Storage.write(["message", msg.sessionID, msg.id], msg)
Bus.publish(MessageV2.Event.Updated, {
info: msg,
const time_created = msg.role === "user" ? msg.time.created : msg.time.created
const { id, sessionID, ...data } = msg
Database.use((db) => {
db.insert(MessageTable)
.values({
id,
session_id: sessionID,
time_created,
data,
})
.onConflictDoUpdate({ target: MessageTable.id, set: { data } })
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Updated, {
info: msg,
}),
)
})
return msg
})
@@ -387,10 +588,15 @@ export namespace Session {
messageID: Identifier.schema("message"),
}),
async (input) => {
await Storage.remove(["message", input.sessionID, input.messageID])
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
// CASCADE delete handles parts automatically
Database.use((db) => {
db.delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
}),
)
})
return input.messageID
},
@@ -403,70 +609,113 @@ export namespace Session {
partID: Identifier.schema("part"),
}),
async (input) => {
await Storage.remove(["part", input.messageID, input.partID])
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
Database.use((db) => {
db.delete(PartTable).where(eq(PartTable.id, input.partID)).run()
Database.effect(() =>
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
}),
)
})
return input.partID
},
)
const UpdatePartInput = z.union([
MessageV2.Part,
z.object({
part: MessageV2.TextPart,
delta: z.string(),
}),
z.object({
part: MessageV2.ReasoningPart,
delta: z.string(),
}),
])
const UpdatePartInput = MessageV2.Part
export const updatePart = fn(UpdatePartInput, async (input) => {
const part = "delta" in input ? input.part : input
const delta = "delta" in input ? input.delta : undefined
await Storage.write(["part", part.messageID, part.id], part)
Bus.publish(MessageV2.Event.PartUpdated, {
part,
delta,
export const updatePart = fn(UpdatePartInput, async (part) => {
const { id, messageID, sessionID, ...data } = part
const time = Date.now()
Database.use((db) => {
db.insert(PartTable)
.values({
id,
message_id: messageID,
session_id: sessionID,
time_created: time,
data,
})
.onConflictDoUpdate({ target: PartTable.id, set: { data } })
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.PartUpdated, {
part,
}),
)
})
return part
})
export const updatePartDelta = fn(
z.object({
sessionID: z.string(),
messageID: z.string(),
partID: z.string(),
field: z.string(),
delta: z.string(),
}),
async (input) => {
Bus.publish(MessageV2.Event.PartDelta, input)
},
)
export const getUsage = fn(
z.object({
model: z.custom<Provider.Model>(),
usage: z.custom<LanguageModelUsage>(),
usage: z.custom<LanguageModelV2Usage>(),
metadata: z.custom<ProviderMetadata>().optional(),
}),
(input) => {
const cacheReadInputTokens = input.usage.cachedInputTokens ?? 0
const cacheWriteInputTokens = (input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ??
// @ts-expect-error
input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ??
// @ts-expect-error
input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ??
0) as number
const excludesCachedTokens = !!(input.metadata?.["anthropic"] || input.metadata?.["bedrock"])
const adjustedInputTokens = excludesCachedTokens
? (input.usage.inputTokens ?? 0)
: (input.usage.inputTokens ?? 0) - cacheReadInputTokens - cacheWriteInputTokens
const safe = (value: number) => {
if (!Number.isFinite(value)) return 0
return value
}
const inputTokens = safe(input.usage.inputTokens ?? 0)
const outputTokens = safe(input.usage.outputTokens ?? 0)
const reasoningTokens = safe(input.usage.reasoningTokens ?? 0)
const cacheReadInputTokens = safe(input.usage.cachedInputTokens ?? 0)
const cacheWriteInputTokens = safe(
(input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ??
// @ts-expect-error
input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ??
// @ts-expect-error
input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ??
0) as number,
)
      // OpenRouter reports inputTokens as the total input count, including cached tokens.
      // As far as I know, other providers (OpenAI, Gemini, etc.) do the same; see the discussion in vercel/ai#8794.
      // Anthropic is the exception - its inputTokens does not include cached tokens.
      // OpenCode's cost calculation assumed every provider reports inputTokens the way Anthropic does
      // (getUsage was presumably written against Anthropic first), which produced incorrect costs for OpenRouter and others.
const excludesCachedTokens = !!(input.metadata?.["anthropic"] || input.metadata?.["bedrock"])
const adjustedInputTokens = safe(
excludesCachedTokens ? inputTokens : inputTokens - cacheReadInputTokens - cacheWriteInputTokens,
)
const total = iife(() => {
        // Anthropic doesn't provide total_tokens, and the AI SDK will vastly undercount
        // unless we compute the total from its components
if (
input.model.api.npm === "@ai-sdk/anthropic" ||
input.model.api.npm === "@ai-sdk/amazon-bedrock" ||
input.model.api.npm === "@ai-sdk/google-vertex/anthropic"
) {
return adjustedInputTokens + outputTokens + cacheReadInputTokens + cacheWriteInputTokens
}
return input.usage.totalTokens
})
const tokens = {
input: safe(adjustedInputTokens),
output: safe(input.usage.outputTokens ?? 0),
reasoning: safe(input.usage?.reasoningTokens ?? 0),
total,
input: adjustedInputTokens,
output: outputTokens,
reasoning: reasoningTokens,
cache: {
write: safe(cacheWriteInputTokens),
read: safe(cacheReadInputTokens),
write: cacheWriteInputTokens,
read: cacheReadInputTokens,
},
}
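The cached-token comment in getUsage above is easier to follow with numbers. An illustrative sketch with made-up values (not code from this PR): an OpenRouter-style provider folds cached tokens into inputTokens, so they are subtracted back out, while an Anthropic-style provider already excludes them.

// OpenRouter-style usage: inputTokens includes the 9,000 cached tokens
const openrouter = { inputTokens: 12_000, cachedInputTokens: 9_000, cacheWriteInputTokens: 0 }
const openrouterBilledInput =
  openrouter.inputTokens - openrouter.cachedInputTokens - openrouter.cacheWriteInputTokens // 3_000

// Anthropic-style usage: inputTokens already excludes cached tokens, so it is used as-is
const anthropic = { inputTokens: 3_000, cachedInputTokens: 9_000, cacheWriteInputTokens: 0 }
const anthropicBilledInput = anthropic.inputTokens // 3_000 - cache reads/writes are priced separately

Both paths land on the same billed input count, which is what the excludesCachedTokens branch is there to guarantee.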

View File

@@ -25,8 +25,7 @@ import { Auth } from "@/auth"
export namespace LLM {
const log = Log.create({ service: "llm" })
export const OUTPUT_TOKEN_MAX = Flag.OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX || 32_000
export const OUTPUT_TOKEN_MAX = ProviderTransform.OUTPUT_TOKEN_MAX
export type StreamInput = {
user: MessageV2.User
@@ -39,6 +38,7 @@ export namespace LLM {
small?: boolean
tools: Record<string, Tool>
retries?: number
output?: "tool"
}
export type StreamOutput = StreamTextResult<ToolSet, unknown>
@@ -149,14 +149,7 @@ export namespace LLM {
)
const maxOutputTokens =
isCodex || provider.id.includes("github-copilot")
? undefined
: ProviderTransform.maxOutputTokens(
input.model.api.npm,
params.options,
input.model.limit.output,
OUTPUT_TOKEN_MAX,
)
isCodex || provider.id.includes("github-copilot") ? undefined : ProviderTransform.maxOutputTokens(input.model)
const tools = await resolveTools(input)
@@ -215,6 +208,7 @@ export namespace LLM {
tools,
maxOutputTokens,
abortSignal: input.abort,
toolChoice: input.output === "tool" ? "required" : undefined,
headers: {
...(input.model.providerID.startsWith("opencode")
? {

View File

@@ -6,6 +6,10 @@ import { Identifier } from "../id/id"
import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot"
import { fn } from "@/util/fn"
import { Database, eq, desc, inArray } from "@/storage/db"
import { MessageTable, PartTable } from "./session.sql"
import { ProviderTransform } from "@/provider/transform"
import { STATUS_CODES } from "http"
import { Storage } from "@/storage/storage"
import { ProviderError } from "@/provider/error"
import { iife } from "@/util/iife"
@@ -210,6 +214,7 @@ export namespace MessageV2 {
snapshot: z.string().optional(),
cost: z.number(),
tokens: z.object({
total: z.number().optional(),
input: z.number(),
output: z.number(),
reasoning: z.number(),
@@ -383,6 +388,7 @@ export namespace MessageV2 {
summary: z.boolean().optional(),
cost: z.number(),
tokens: z.object({
total: z.number().optional(),
input: z.number(),
output: z.number(),
reasoning: z.number(),
@@ -421,7 +427,16 @@ export namespace MessageV2 {
"message.part.updated",
z.object({
part: Part,
delta: z.string().optional(),
}),
),
PartDelta: BusEvent.define(
"message.part.delta",
z.object({
sessionID: z.string(),
messageID: z.string(),
partID: z.string(),
field: z.string(),
delta: z.string(),
}),
),
PartRemoved: BusEvent.define(
@@ -666,23 +681,65 @@ export namespace MessageV2 {
}
export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
const list = await Array.fromAsync(await Storage.list(["message", sessionID]))
for (let i = list.length - 1; i >= 0; i--) {
yield await get({
sessionID,
messageID: list[i][2],
})
const size = 50
let offset = 0
while (true) {
const rows = Database.use((db) =>
db
.select()
.from(MessageTable)
.where(eq(MessageTable.session_id, sessionID))
.orderBy(desc(MessageTable.time_created))
.limit(size)
.offset(offset)
.all(),
)
if (rows.length === 0) break
const ids = rows.map((row) => row.id)
const partsByMessage = new Map<string, MessageV2.Part[]>()
if (ids.length > 0) {
const partRows = Database.use((db) =>
db
.select()
.from(PartTable)
.where(inArray(PartTable.message_id, ids))
.orderBy(PartTable.message_id, PartTable.id)
.all(),
)
for (const row of partRows) {
const part = {
...row.data,
id: row.id,
sessionID: row.session_id,
messageID: row.message_id,
} as MessageV2.Part
const list = partsByMessage.get(row.message_id)
if (list) list.push(part)
else partsByMessage.set(row.message_id, [part])
}
}
for (const row of rows) {
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
yield {
info,
parts: partsByMessage.get(row.id) ?? [],
}
}
offset += rows.length
if (rows.length < size) break
}
})
export const parts = fn(Identifier.schema("message"), async (messageID) => {
const result = [] as MessageV2.Part[]
for (const item of await Storage.list(["part", messageID])) {
const read = await Storage.read<MessageV2.Part>(item)
result.push(read)
}
result.sort((a, b) => (a.id > b.id ? 1 : -1))
return result
export const parts = fn(Identifier.schema("message"), async (message_id) => {
const rows = Database.use((db) =>
db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(),
)
return rows.map(
(row) => ({ ...row.data, id: row.id, sessionID: row.session_id, messageID: row.message_id }) as MessageV2.Part,
)
})
export const get = fn(
@@ -691,8 +748,11 @@ export namespace MessageV2 {
messageID: Identifier.schema("message"),
}),
async (input): Promise<WithParts> => {
const row = Database.use((db) => db.select().from(MessageTable).where(eq(MessageTable.id, input.messageID)).get())
if (!row) throw new Error(`Message not found: ${input.messageID}`)
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
return {
info: await Storage.read<MessageV2.Info>(["message", input.sessionID, input.messageID]),
info,
parts: await parts(input.messageID),
}
},
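Because the new MessageV2.stream yields messages newest-first in pages of 50 (with parts fetched per page via inArray), callers can stop early without loading an entire session. A hedged usage sketch, assuming stream() returns the async generator the handoff code above iterates (illustrative only):

import { MessageV2 } from "@/session/message-v2"

// walks backwards through history and stops at the first user message,
// touching at most one page of 50 rows in the common case
export async function lastUserMessage(sessionID: string) {
  for await (const msg of MessageV2.stream(sessionID)) {
    if (msg.info.role === "user") return msg
  }
  return undefined
}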

View File

@@ -63,17 +63,19 @@ export namespace SessionProcessor {
if (value.id in reasoningMap) {
continue
}
reasoningMap[value.id] = {
const reasoningPart = {
id: Identifier.ascending("part"),
messageID: input.assistantMessage.id,
sessionID: input.assistantMessage.sessionID,
type: "reasoning",
type: "reasoning" as const,
text: "",
time: {
start: Date.now(),
},
metadata: value.providerMetadata,
}
reasoningMap[value.id] = reasoningPart
await Session.updatePart(reasoningPart)
break
case "reasoning-delta":
@@ -81,7 +83,13 @@ export namespace SessionProcessor {
const part = reasoningMap[value.id]
part.text += value.text
if (value.providerMetadata) part.metadata = value.providerMetadata
if (part.text) await Session.updatePart({ part, delta: value.text })
await Session.updatePartDelta({
sessionID: part.sessionID,
messageID: part.messageID,
partID: part.id,
field: "text",
delta: value.text,
})
}
break
@@ -288,17 +296,20 @@ export namespace SessionProcessor {
},
metadata: value.providerMetadata,
}
await Session.updatePart(currentText)
break
case "text-delta":
if (currentText) {
currentText.text += value.text
if (value.providerMetadata) currentText.metadata = value.providerMetadata
if (currentText.text)
await Session.updatePart({
part: currentText,
delta: value.text,
})
await Session.updatePartDelta({
sessionID: currentText.sessionID,
messageID: currentText.messageID,
partID: currentText.id,
field: "text",
delta: value.text,
})
}
break
@@ -342,6 +353,9 @@ export namespace SessionProcessor {
stack: JSON.stringify(e.stack),
})
const error = MessageV2.fromError(e, { providerID: input.model.providerID })
if (MessageV2.ContextOverflowError.isInstance(error)) {
// TODO: Handle context overflow error
}
const retry = SessionRetry.retryable(error)
if (retry !== undefined) {
attempt++
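With the processor change above, streaming text no longer rewrites the whole part on every chunk: updatePart persists the part once, while updatePartDelta only publishes a lightweight message.part.delta event. A hedged sketch of a subscriber reassembling text from those deltas (illustrative only; the Bus and MessageV2 names are taken from the diffs above):

import { Bus } from "@/bus"
import { MessageV2 } from "@/session/message-v2"

const buffers = new Map<string, string>()
Bus.subscribe(MessageV2.Event.PartDelta, (evt) => {
  const { partID, field, delta } = evt.properties
  if (field !== "text") return
  // append each streamed chunk to the accumulated text for that part
  buffers.set(partID, (buffers.get(partID) ?? "") + delta)
})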

View File

@@ -52,7 +52,6 @@ globalThis.AI_SDK_LOG_WARNINGS = false
export namespace SessionPrompt {
const log = Log.create({ service: "session.prompt" })
export const OUTPUT_TOKEN_MAX = Flag.OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX || 32_000
const state = Instance.state(
() => {
@@ -165,9 +164,7 @@ export namespace SessionPrompt {
}
if (permissions.length > 0) {
session.permission = permissions
await Session.update(session.id, (draft) => {
draft.permission = permissions
})
await Session.setPermission({ sessionID: session.id, permission: permissions })
}
if (input.noReply === true) {
@@ -1236,33 +1233,7 @@ export namespace SessionPrompt {
const userMessage = input.messages.findLast((msg) => msg.info.role === "user")
if (!userMessage) return input.messages
// Original logic when experimental plan mode is disabled
if (!Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE) {
if (input.agent.name === "plan") {
userMessage.parts.push({
id: Identifier.ascending("part"),
messageID: userMessage.info.id,
sessionID: userMessage.info.sessionID,
type: "text",
text: PROMPT_PLAN,
synthetic: true,
})
}
const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.agent === "plan")
if (wasPlan && input.agent.name === "build") {
userMessage.parts.push({
id: Identifier.ascending("part"),
messageID: userMessage.info.id,
sessionID: userMessage.info.sessionID,
type: "text",
text: BUILD_SWITCH,
synthetic: true,
})
}
return input.messages
}
// New plan mode logic when flag is enabled
// Plan mode logic
const assistantMessage = input.messages.findLast((msg) => msg.info.role === "assistant")
// Switching from plan mode to build mode
@@ -1854,21 +1825,16 @@ NOTE: At any point in time through this workflow you should feel free to ask the
],
})
const text = await result.text.catch((err) => log.error("failed to generate title", { error: err }))
if (text)
return Session.update(
input.session.id,
(draft) => {
const cleaned = text
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
.split("\n")
.map((line) => line.trim())
.find((line) => line.length > 0)
if (!cleaned) return
if (text) {
const cleaned = text
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
.split("\n")
.map((line) => line.trim())
.find((line) => line.length > 0)
if (!cleaned) return
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
draft.title = title
},
{ touch: false },
)
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
return Session.setTitle({ sessionID: input.session.id, title })
}
}
}

View File

@@ -0,0 +1,17 @@
Extract relevant context from the conversation above for continuing this work. Write from my perspective (first person: "I did...", "I told you...").
Consider what would be useful to know based on my request below. Questions that might be relevant:
- What did I just do or implement?
- What instructions did I already give you which are still relevant (e.g. follow patterns in the codebase)?
- What files did I already tell you are important or that I am working on (and should continue working on)?
- Did I provide a plan or spec that should be included?
- What did I already tell you that's important (certain libraries, patterns, constraints, preferences)?
- What important technical details did I discover (APIs, methods, patterns)?
- What caveats, limitations, or open questions did I find?
Extract what matters for the specific request below. Don't answer questions that aren't relevant. Pick an appropriate length based on the complexity of the request.
Focus on capabilities and behavior, not file-by-file changes. Avoid excessive implementation details (variable names, storage keys, constants) unless critical.
Format: Plain text with bullets. No markdown headers, no bold/italic, no code fences. Use workspace-relative paths for files.

View File

@@ -59,9 +59,8 @@ export namespace SessionRetry {
}
export function retryable(error: ReturnType<NamedError["toObject"]>) {
// DO NOT retry context overflow errors
// context overflow errors should not be retried
if (MessageV2.ContextOverflowError.isInstance(error)) return undefined
if (MessageV2.APIError.isInstance(error)) {
if (!error.data.isRetryable) return undefined
if (error.data.responseBody?.includes("FreeUsageLimitError"))

View File

@@ -4,8 +4,9 @@ import { Snapshot } from "../snapshot"
import { MessageV2 } from "./message-v2"
import { Session } from "."
import { Log } from "../util/log"
import { splitWhen } from "remeda"
import { Storage } from "../storage/storage"
import { Database, eq } from "../storage/db"
import { MessageTable, PartTable } from "./session.sql"
import { Storage } from "@/storage/storage"
import { Bus } from "../bus"
import { SessionPrompt } from "./prompt"
import { SessionSummary } from "./summary"
@@ -65,13 +66,14 @@ export namespace SessionRevert {
sessionID: input.sessionID,
diff: diffs,
})
return Session.update(input.sessionID, (draft) => {
draft.revert = revert
draft.summary = {
return Session.setRevert({
sessionID: input.sessionID,
revert,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
}
},
})
}
return session
@@ -83,39 +85,54 @@ export namespace SessionRevert {
const session = await Session.get(input.sessionID)
if (!session.revert) return session
if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot)
const next = await Session.update(input.sessionID, (draft) => {
draft.revert = undefined
})
return next
return Session.clearRevert(input.sessionID)
}
export async function cleanup(session: Session.Info) {
if (!session.revert) return
const sessionID = session.id
let msgs = await Session.messages({ sessionID })
const msgs = await Session.messages({ sessionID })
const messageID = session.revert.messageID
const [preserve, remove] = splitWhen(msgs, (x) => x.info.id === messageID)
msgs = preserve
const preserve = [] as MessageV2.WithParts[]
const remove = [] as MessageV2.WithParts[]
let target: MessageV2.WithParts | undefined
for (const msg of msgs) {
if (msg.info.id < messageID) {
preserve.push(msg)
continue
}
if (msg.info.id > messageID) {
remove.push(msg)
continue
}
if (session.revert.partID) {
preserve.push(msg)
target = msg
continue
}
remove.push(msg)
}
for (const msg of remove) {
await Storage.remove(["message", sessionID, msg.info.id])
Database.use((db) => db.delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run())
await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id })
}
const last = preserve.at(-1)
if (session.revert.partID && last) {
if (session.revert.partID && target) {
const partID = session.revert.partID
const [preserveParts, removeParts] = splitWhen(last.parts, (x) => x.id === partID)
last.parts = preserveParts
for (const part of removeParts) {
await Storage.remove(["part", last.info.id, part.id])
await Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: sessionID,
messageID: last.info.id,
partID: part.id,
})
const removeStart = target.parts.findIndex((part) => part.id === partID)
if (removeStart >= 0) {
const preserveParts = target.parts.slice(0, removeStart)
const removeParts = target.parts.slice(removeStart)
target.parts = preserveParts
for (const part of removeParts) {
Database.use((db) => db.delete(PartTable).where(eq(PartTable.id, part.id)).run())
await Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: sessionID,
messageID: target.info.id,
partID: part.id,
})
}
}
}
await Session.update(sessionID, (draft) => {
draft.revert = undefined
})
await Session.clearRevert(sessionID)
}
}
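The new cleanup partitioning above splits messages around the revert point by ID order. A worked illustration with hypothetical IDs, compared lexicographically like the ascending identifiers the real code uses (not code from this PR):

const messages = ["msg_a", "msg_b", "msg_c", "msg_d"]
const revert = { messageID: "msg_c", partID: "prt_2" as string | undefined }

const preserve: string[] = []
const remove: string[] = []
for (const id of messages) {
  if (id < revert.messageID) preserve.push(id) // strictly before the revert point
  else if (id > revert.messageID) remove.push(id) // strictly after the revert point
  else (revert.partID ? preserve : remove).push(id) // the revert message itself survives only when a partID narrows the revert
}
// preserve = ["msg_a", "msg_b", "msg_c"], remove = ["msg_d"]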

View File

@@ -0,0 +1,88 @@
import { sqliteTable, text, integer, index, primaryKey } from "drizzle-orm/sqlite-core"
import { ProjectTable } from "../project/project.sql"
import type { MessageV2 } from "./message-v2"
import type { Snapshot } from "@/snapshot"
import type { PermissionNext } from "@/permission/next"
import { Timestamps } from "@/storage/schema.sql"
type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID">
type InfoData = Omit<MessageV2.Info, "id" | "sessionID">
export const SessionTable = sqliteTable(
"session",
{
id: text().primaryKey(),
project_id: text()
.notNull()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
parent_id: text(),
slug: text().notNull(),
directory: text().notNull(),
title: text().notNull(),
version: text().notNull(),
share_url: text(),
summary_additions: integer(),
summary_deletions: integer(),
summary_files: integer(),
summary_diffs: text({ mode: "json" }).$type<Snapshot.FileDiff[]>(),
revert: text({ mode: "json" }).$type<{ messageID: string; partID?: string; snapshot?: string; diff?: string }>(),
permission: text({ mode: "json" }).$type<PermissionNext.Ruleset>(),
...Timestamps,
time_compacting: integer(),
time_archived: integer(),
},
(table) => [index("session_project_idx").on(table.project_id), index("session_parent_idx").on(table.parent_id)],
)
export const MessageTable = sqliteTable(
"message",
{
id: text().primaryKey(),
session_id: text()
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<InfoData>(),
},
(table) => [index("message_session_idx").on(table.session_id)],
)
export const PartTable = sqliteTable(
"part",
{
id: text().primaryKey(),
message_id: text()
.notNull()
.references(() => MessageTable.id, { onDelete: "cascade" }),
session_id: text().notNull(),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<PartData>(),
},
(table) => [index("part_message_idx").on(table.message_id), index("part_session_idx").on(table.session_id)],
)
export const TodoTable = sqliteTable(
"todo",
{
session_id: text()
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
content: text().notNull(),
status: text().notNull(),
priority: text().notNull(),
position: integer().notNull(),
...Timestamps,
},
(table) => [
primaryKey({ columns: [table.session_id, table.position] }),
index("todo_session_idx").on(table.session_id),
],
)
export const PermissionTable = sqliteTable("permission", {
project_id: text()
.primaryKey()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<PermissionNext.Ruleset>(),
})

View File

@@ -90,12 +90,13 @@ export namespace SessionSummary {
async function summarizeSession(input: { sessionID: string; messages: MessageV2.WithParts[] }) {
const diffs = await computeDiff({ messages: input.messages })
await Session.update(input.sessionID, (draft) => {
draft.summary = {
await Session.setSummary({
sessionID: input.sessionID,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
}
},
})
await Storage.write(["session_diff", input.sessionID], diffs)
Bus.publish(Session.Event.Diff, {

View File

@@ -1,7 +1,8 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import z from "zod"
import { Storage } from "../storage/storage"
import { Database, eq, asc } from "../storage/db"
import { TodoTable } from "./session.sql"
export namespace Todo {
export const Info = z
@@ -9,7 +10,6 @@ export namespace Todo {
content: z.string().describe("Brief description of the task"),
status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"),
priority: z.string().describe("Priority level of the task: high, medium, low"),
id: z.string().describe("Unique identifier for the todo item"),
})
.meta({ ref: "Todo" })
export type Info = z.infer<typeof Info>
@@ -24,14 +24,33 @@ export namespace Todo {
),
}
export async function update(input: { sessionID: string; todos: Info[] }) {
await Storage.write(["todo", input.sessionID], input.todos)
export function update(input: { sessionID: string; todos: Info[] }) {
Database.transaction((db) => {
db.delete(TodoTable).where(eq(TodoTable.session_id, input.sessionID)).run()
if (input.todos.length === 0) return
db.insert(TodoTable)
.values(
input.todos.map((todo, position) => ({
session_id: input.sessionID,
content: todo.content,
status: todo.status,
priority: todo.priority,
position,
})),
)
.run()
})
Bus.publish(Event.Updated, input)
}
export async function get(sessionID: string) {
return Storage.read<Info[]>(["todo", sessionID])
.then((x) => x || [])
.catch(() => [])
export function get(sessionID: string) {
const rows = Database.use((db) =>
db.select().from(TodoTable).where(eq(TodoTable.session_id, sessionID)).orderBy(asc(TodoTable.position)).all(),
)
return rows.map((row) => ({
content: row.content,
status: row.status,
priority: row.priority,
}))
}
}
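A hedged round-trip through the new Todo storage above (assumes a session row with this ID already exists; values are made up and illustrative only):

import { Todo } from "@/session/todo"

Todo.update({
  sessionID: "ses_example",
  todos: [
    { content: "write migration", status: "completed", priority: "high" },
    { content: "wire up bus events", status: "in_progress", priority: "medium" },
  ],
})

// rows come back in insertion order thanks to the position column
const todos = Todo.get("ses_example")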

View File

@@ -4,7 +4,8 @@ import { ulid } from "ulid"
import { Provider } from "@/provider/provider"
import { Session } from "@/session"
import { MessageV2 } from "@/session/message-v2"
import { Storage } from "@/storage/storage"
import { Database, eq } from "@/storage/db"
import { SessionShareTable } from "./share.sql"
import { Log } from "@/util/log"
import type * as SDK from "@opencode-ai/sdk/v2"
@@ -77,17 +78,26 @@ export namespace ShareNext {
})
.then((x) => x.json())
.then((x) => x as { id: string; url: string; secret: string })
await Storage.write(["session_share", sessionID], result)
Database.use((db) =>
db
.insert(SessionShareTable)
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
.onConflictDoUpdate({
target: SessionShareTable.session_id,
set: { id: result.id, secret: result.secret, url: result.url },
})
.run(),
)
fullSync(sessionID)
return result
}
function get(sessionID: string) {
return Storage.read<{
id: string
secret: string
url: string
}>(["session_share", sessionID])
const row = Database.use((db) =>
db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
)
if (!row) return
return { id: row.id, secret: row.secret, url: row.url }
}
type Data =
@@ -132,7 +142,7 @@ export namespace ShareNext {
const queued = queue.get(sessionID)
if (!queued) return
queue.delete(sessionID)
const share = await get(sessionID).catch(() => undefined)
const share = get(sessionID)
if (!share) return
await fetch(`${await url()}/api/share/${share.id}/sync`, {
@@ -152,7 +162,7 @@ export namespace ShareNext {
export async function remove(sessionID: string) {
if (disabled) return
log.info("removing share", { sessionID })
const share = await get(sessionID)
const share = get(sessionID)
if (!share) return
await fetch(`${await url()}/api/share/${share.id}`, {
method: "DELETE",
@@ -163,7 +173,7 @@ export namespace ShareNext {
secret: share.secret,
}),
})
await Storage.remove(["session_share", sessionID])
Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
}
async function fullSync(sessionID: string) {

View File

@@ -0,0 +1,13 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { SessionTable } from "../session/session.sql"
import { Timestamps } from "@/storage/schema.sql"
export const SessionShareTable = sqliteTable("session_share", {
session_id: text()
.primaryKey()
.references(() => SessionTable.id, { onDelete: "cascade" }),
id: text().notNull(),
secret: text().notNull(),
url: text().notNull(),
...Timestamps,
})

View File

@@ -1,92 +0,0 @@
import { Bus } from "../bus"
import { Installation } from "../installation"
import { Session } from "../session"
import { MessageV2 } from "../session/message-v2"
import { Log } from "../util/log"
export namespace Share {
const log = Log.create({ service: "share" })
let queue: Promise<void> = Promise.resolve()
const pending = new Map<string, any>()
export async function sync(key: string, content: any) {
if (disabled) return
const [root, ...splits] = key.split("/")
if (root !== "session") return
const [sub, sessionID] = splits
if (sub === "share") return
const share = await Session.getShare(sessionID).catch(() => {})
if (!share) return
const { secret } = share
pending.set(key, content)
queue = queue
.then(async () => {
const content = pending.get(key)
if (content === undefined) return
pending.delete(key)
return fetch(`${URL}/share_sync`, {
method: "POST",
body: JSON.stringify({
sessionID: sessionID,
secret,
key: key,
content,
}),
})
})
.then((x) => {
if (x) {
log.info("synced", {
key: key,
status: x.status,
})
}
})
}
export function init() {
Bus.subscribe(Session.Event.Updated, async (evt) => {
await sync("session/info/" + evt.properties.info.id, evt.properties.info)
})
Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
await sync("session/message/" + evt.properties.info.sessionID + "/" + evt.properties.info.id, evt.properties.info)
})
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
await sync(
"session/part/" +
evt.properties.part.sessionID +
"/" +
evt.properties.part.messageID +
"/" +
evt.properties.part.id,
evt.properties.part,
)
})
}
export const URL =
process.env["OPENCODE_API"] ??
(Installation.isPreview() || Installation.isLocal() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")
const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
export async function create(sessionID: string) {
if (disabled) return { url: "", secret: "" }
return fetch(`${URL}/share_create`, {
method: "POST",
body: JSON.stringify({ sessionID: sessionID }),
})
.then((x) => x.json())
.then((x) => x as { url: string; secret: string })
}
export async function remove(sessionID: string, secret: string) {
if (disabled) return {}
return fetch(`${URL}/share_delete`, {
method: "POST",
body: JSON.stringify({ sessionID, secret }),
}).then((x) => x.json())
}
}

packages/opencode/src/sql.d.ts vendored (+4)
View File

@@ -0,0 +1,4 @@
declare module "*.sql" {
const content: string
export default content
}

View File

@@ -0,0 +1,140 @@
import { Database as BunDatabase } from "bun:sqlite"
import { drizzle, type SQLiteBunDatabase } from "drizzle-orm/bun-sqlite"
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
import { type SQLiteTransaction } from "drizzle-orm/sqlite-core"
export * from "drizzle-orm"
import { Context } from "../util/context"
import { lazy } from "../util/lazy"
import { Global } from "../global"
import { Log } from "../util/log"
import { NamedError } from "@opencode-ai/util/error"
import z from "zod"
import path from "path"
import { readFileSync, readdirSync } from "fs"
import fs from "fs/promises"
import { Instance } from "@/project/instance"
declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined
export const NotFoundError = NamedError.create(
"NotFoundError",
z.object({
message: z.string(),
}),
)
const log = Log.create({ service: "db" })
export namespace Database {
export type Transaction = SQLiteTransaction<"sync", void, Record<string, never>, Record<string, never>>
type Client = SQLiteBunDatabase
type Journal = { sql: string; timestamp: number }[]
function time(tag: string) {
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(tag)
if (!match) return 0
return Date.UTC(
Number(match[1]),
Number(match[2]) - 1,
Number(match[3]),
Number(match[4]),
Number(match[5]),
Number(match[6]),
)
}
function migrations(dir: string): Journal {
const dirs = readdirSync(dir, { withFileTypes: true })
.filter((entry) => entry.isDirectory())
.map((entry) => entry.name)
const sql = dirs
.map((name) => {
const file = path.join(dir, name, "migration.sql")
if (!Bun.file(file).size) return
return {
sql: readFileSync(file, "utf-8"),
timestamp: time(name),
}
})
.filter(Boolean) as Journal
return sql.sort((a, b) => a.timestamp - b.timestamp)
}
export const Client = lazy(() => {
log.info("opening database", { path: path.join(Global.Path.data, "opencode.db") })
const sqlite = new BunDatabase(path.join(Global.Path.data, "opencode.db"), { create: true })
sqlite.run("PRAGMA journal_mode = WAL")
sqlite.run("PRAGMA synchronous = NORMAL")
sqlite.run("PRAGMA busy_timeout = 5000")
sqlite.run("PRAGMA cache_size = -64000")
sqlite.run("PRAGMA foreign_keys = ON")
const db = drizzle({ client: sqlite })
// Apply schema migrations
const entries =
typeof OPENCODE_MIGRATIONS !== "undefined"
? OPENCODE_MIGRATIONS
: migrations(path.join(import.meta.dirname, "../../migration"))
if (entries.length > 0) {
log.info("applying migrations", {
count: entries.length,
mode: typeof OPENCODE_MIGRATIONS !== "undefined" ? "bundled" : "dev",
})
migrate(db, entries)
}
return db
})
export type TxOrDb = Transaction | Client
const ctx = Context.create<{
tx: TxOrDb
effects: (() => void | Promise<void>)[]
}>("database")
export function use<T>(callback: (trx: TxOrDb) => T): T {
try {
return callback(ctx.use().tx)
} catch (err) {
if (err instanceof Context.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const result = ctx.provide({ effects, tx: Client() }, () => callback(Client()))
for (const effect of effects) effect()
return result
}
throw err
}
}
export function effect(fn: () => any | Promise<any>) {
try {
ctx.use().effects.push(fn)
} catch {
fn()
}
}
export function transaction<T>(callback: (tx: TxOrDb) => T): T {
try {
return callback(ctx.use().tx)
} catch (err) {
if (err instanceof Context.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const result = Client().transaction((tx) => {
return ctx.provide({ tx, effects }, () => callback(tx))
})
for (const effect of effects) effect()
return result
}
throw err
}
}
}
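The Database.use / Database.effect pair above is the pattern the session code follows throughout this change: run the query, and queue side effects (usually Bus.publish) so they fire after the enclosing transaction commits, or immediately when there is none. A minimal sketch, with imports assumed from the diffs above (not code from this PR):

import { Database, NotFoundError, eq } from "@/storage/db"
import { SessionTable } from "@/session/session.sql"

export function touchExample(sessionID: string) {
  return Database.use((db) => {
    const row = db
      .update(SessionTable)
      .set({ time_updated: Date.now() })
      .where(eq(SessionTable.id, sessionID))
      .returning()
      .get()
    if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
    // deferred until after the surrounding transaction (if any) commits
    Database.effect(() => console.log("session touched", row.id))
    return row
  })
}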

View File

@@ -0,0 +1,436 @@
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { Global } from "../global"
import { Log } from "../util/log"
import { ProjectTable } from "../project/project.sql"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql"
import { SessionShareTable } from "../share/share.sql"
import path from "path"
import { existsSync } from "fs"
export namespace JsonMigration {
const log = Log.create({ service: "json-migration" })
export type Progress = {
current: number
total: number
label: string
}
type Options = {
progress?: (event: Progress) => void
}
export async function run(sqlite: Database, options?: Options) {
const storageDir = path.join(Global.Path.data, "storage")
if (!existsSync(storageDir)) {
log.info("storage directory does not exist, skipping migration")
return {
projects: 0,
sessions: 0,
messages: 0,
parts: 0,
todos: 0,
permissions: 0,
shares: 0,
errors: [] as string[],
}
}
log.info("starting json to sqlite migration", { storageDir })
const start = performance.now()
const db = drizzle({ client: sqlite })
// Optimize SQLite for bulk inserts
sqlite.exec("PRAGMA journal_mode = WAL")
sqlite.exec("PRAGMA synchronous = OFF")
sqlite.exec("PRAGMA cache_size = 10000")
sqlite.exec("PRAGMA temp_store = MEMORY")
const stats = {
projects: 0,
sessions: 0,
messages: 0,
parts: 0,
todos: 0,
permissions: 0,
shares: 0,
errors: [] as string[],
}
const orphans = {
sessions: 0,
todos: 0,
permissions: 0,
shares: 0,
}
const errs = stats.errors
const batchSize = 1000
const now = Date.now()
async function list(pattern: string) {
const items: string[] = []
const scan = new Bun.Glob(pattern)
for await (const file of scan.scan({ cwd: storageDir, absolute: true })) {
items.push(file)
}
return items
}
async function read(files: string[], start: number, end: number) {
const count = end - start
const tasks = new Array(count)
for (let i = 0; i < count; i++) {
tasks[i] = Bun.file(files[start + i]).json()
}
const results = await Promise.allSettled(tasks)
const items = new Array(count)
for (let i = 0; i < results.length; i++) {
const result = results[i]
if (result.status === "fulfilled") {
items[i] = result.value
continue
}
errs.push(`failed to read ${files[start + i]}: ${result.reason}`)
}
return items
}
function insert(values: any[], table: any, label: string) {
if (values.length === 0) return 0
try {
db.insert(table).values(values).onConflictDoNothing().run()
return values.length
} catch (e) {
errs.push(`failed to migrate ${label} batch: ${e}`)
return 0
}
}
// Pre-scan all files upfront to avoid repeated glob operations
log.info("scanning files...")
const [projectFiles, sessionFiles, messageFiles, partFiles, todoFiles, permFiles, shareFiles] = await Promise.all([
list("project/*.json"),
list("session/*/*.json"),
list("message/*/*.json"),
list("part/*/*.json"),
list("todo/*.json"),
list("permission/*.json"),
list("session_share/*.json"),
])
log.info("file scan complete", {
projects: projectFiles.length,
sessions: sessionFiles.length,
messages: messageFiles.length,
parts: partFiles.length,
todos: todoFiles.length,
permissions: permFiles.length,
shares: shareFiles.length,
})
const total = Math.max(
1,
projectFiles.length +
sessionFiles.length +
messageFiles.length +
partFiles.length +
todoFiles.length +
permFiles.length +
shareFiles.length,
)
const progress = options?.progress
let current = 0
const step = (label: string, count: number) => {
current = Math.min(total, current + count)
progress?.({ current, total, label })
}
progress?.({ current, total, label: "starting" })
sqlite.exec("BEGIN TRANSACTION")
// Migrate projects first (no FK deps)
const projectIds = new Set<string>()
const projectValues = [] as any[]
for (let i = 0; i < projectFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, projectFiles.length)
const batch = await read(projectFiles, i, end)
projectValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
if (!data?.id) {
errs.push(`project missing id: ${projectFiles[i + j]}`)
continue
}
projectIds.add(data.id)
projectValues.push({
id: data.id,
worktree: data.worktree ?? "/",
vcs: data.vcs,
name: data.name ?? undefined,
icon_url: data.icon?.url,
icon_color: data.icon?.color,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
time_initialized: data.time?.initialized,
sandboxes: data.sandboxes ?? [],
})
}
stats.projects += insert(projectValues, ProjectTable, "project")
step("projects", end - i)
}
log.info("migrated projects", { count: stats.projects, duration: Math.round(performance.now() - start) })
// Migrate sessions (depends on projects)
const sessionIds = new Set<string>()
const sessionValues = [] as any[]
for (let i = 0; i < sessionFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, sessionFiles.length)
const batch = await read(sessionFiles, i, end)
sessionValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
if (!data?.id || !data?.projectID) {
errs.push(`session missing id or projectID: ${sessionFiles[i + j]}`)
continue
}
if (!projectIds.has(data.projectID)) {
orphans.sessions++
continue
}
sessionIds.add(data.id)
sessionValues.push({
id: data.id,
project_id: data.projectID,
parent_id: data.parentID ?? null,
slug: data.slug ?? "",
directory: data.directory ?? "",
title: data.title ?? "",
version: data.version ?? "",
share_url: data.share?.url ?? null,
summary_additions: data.summary?.additions ?? null,
summary_deletions: data.summary?.deletions ?? null,
summary_files: data.summary?.files ?? null,
summary_diffs: data.summary?.diffs ?? null,
revert: data.revert ?? null,
permission: data.permission ?? null,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
time_compacting: data.time?.compacting ?? null,
time_archived: data.time?.archived ?? null,
})
}
stats.sessions += insert(sessionValues, SessionTable, "session")
step("sessions", end - i)
}
log.info("migrated sessions", { count: stats.sessions })
if (orphans.sessions > 0) {
log.warn("skipped orphaned sessions", { count: orphans.sessions })
}
// Migrate messages using pre-scanned file map
const allMessageFiles = [] as string[]
const allMessageSessions = [] as string[]
const messageSessions = new Map<string, string>()
for (const file of messageFiles) {
const sessionID = path.basename(path.dirname(file))
if (!sessionIds.has(sessionID)) continue
allMessageFiles.push(file)
allMessageSessions.push(sessionID)
}
for (let i = 0; i < allMessageFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, allMessageFiles.length)
const batch = await read(allMessageFiles, i, end)
const values = new Array(batch.length)
let count = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const file = allMessageFiles[i + j]
const id = data.id ?? path.basename(file, ".json")
if (!id) {
errs.push(`message missing id: ${file}`)
continue
}
const sessionID = allMessageSessions[i + j]
messageSessions.set(id, sessionID)
const rest = data
delete rest.id
delete rest.sessionID
values[count++] = {
id,
session_id: sessionID,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
data: rest,
}
}
values.length = count
stats.messages += insert(values, MessageTable, "message")
step("messages", end - i)
}
log.info("migrated messages", { count: stats.messages })
// Migrate parts using pre-scanned file map
for (let i = 0; i < partFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, partFiles.length)
const batch = await read(partFiles, i, end)
const values = new Array(batch.length)
let count = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const file = partFiles[i + j]
const id = data.id ?? path.basename(file, ".json")
const messageID = data.messageID ?? path.basename(path.dirname(file))
if (!id || !messageID) {
errs.push(`part missing id/messageID/sessionID: ${file}`)
continue
}
const sessionID = messageSessions.get(messageID)
if (!sessionID) {
errs.push(`part missing message session: ${file}`)
continue
}
if (!sessionIds.has(sessionID)) continue
const rest = data
delete rest.id
delete rest.messageID
delete rest.sessionID
values[count++] = {
id,
message_id: messageID,
session_id: sessionID,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
data: rest,
}
}
values.length = count
stats.parts += insert(values, PartTable, "part")
step("parts", end - i)
}
log.info("migrated parts", { count: stats.parts })
// Migrate todos
const todoSessions = todoFiles.map((file) => path.basename(file, ".json"))
for (let i = 0; i < todoFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, todoFiles.length)
const batch = await read(todoFiles, i, end)
const values = [] as any[]
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const sessionID = todoSessions[i + j]
if (!sessionIds.has(sessionID)) {
orphans.todos++
continue
}
if (!Array.isArray(data)) {
errs.push(`todo not an array: ${todoFiles[i + j]}`)
continue
}
for (let position = 0; position < data.length; position++) {
const todo = data[position]
if (!todo?.content || !todo?.status || !todo?.priority) continue
values.push({
session_id: sessionID,
content: todo.content,
status: todo.status,
priority: todo.priority,
position,
time_created: now,
time_updated: now,
})
}
}
stats.todos += insert(values, TodoTable, "todo")
step("todos", end - i)
}
log.info("migrated todos", { count: stats.todos })
if (orphans.todos > 0) {
log.warn("skipped orphaned todos", { count: orphans.todos })
}
// Migrate permissions
const permProjects = permFiles.map((file) => path.basename(file, ".json"))
const permValues = [] as any[]
for (let i = 0; i < permFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, permFiles.length)
const batch = await read(permFiles, i, end)
permValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const projectID = permProjects[i + j]
if (!projectIds.has(projectID)) {
orphans.permissions++
continue
}
permValues.push({ project_id: projectID, data })
}
stats.permissions += insert(permValues, PermissionTable, "permission")
step("permissions", end - i)
}
log.info("migrated permissions", { count: stats.permissions })
if (orphans.permissions > 0) {
log.warn("skipped orphaned permissions", { count: orphans.permissions })
}
// Migrate session shares
const shareSessions = shareFiles.map((file) => path.basename(file, ".json"))
const shareValues = [] as any[]
for (let i = 0; i < shareFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, shareFiles.length)
const batch = await read(shareFiles, i, end)
shareValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const sessionID = shareSessions[i + j]
if (!sessionIds.has(sessionID)) {
orphans.shares++
continue
}
if (!data?.id || !data?.secret || !data?.url) {
errs.push(`session_share missing id/secret/url: ${shareFiles[i + j]}`)
continue
}
shareValues.push({ session_id: sessionID, id: data.id, secret: data.secret, url: data.url })
}
stats.shares += insert(shareValues, SessionShareTable, "session_share")
step("shares", end - i)
}
log.info("migrated session shares", { count: stats.shares })
if (orphans.shares > 0) {
log.warn("skipped orphaned session shares", { count: orphans.shares })
}
sqlite.exec("COMMIT")
log.info("json migration complete", {
projects: stats.projects,
sessions: stats.sessions,
messages: stats.messages,
parts: stats.parts,
todos: stats.todos,
permissions: stats.permissions,
shares: stats.shares,
errorCount: stats.errors.length,
duration: Math.round(performance.now() - start),
})
if (stats.errors.length > 0) {
log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
}
progress?.({ current: total, total, label: "complete" })
return stats
}
}

View File

@@ -0,0 +1,10 @@
import { integer } from "drizzle-orm/sqlite-core"
export const Timestamps = {
time_created: integer()
.notNull()
.$default(() => Date.now()),
time_updated: integer()
.notNull()
.$onUpdate(() => Date.now()),
}

View File

@@ -114,7 +114,7 @@ export namespace ToolRegistry {
ApplyPatchTool,
...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [LspTool] : []),
...(config.experimental?.batch_tool === true ? [BatchTool] : []),
...(Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli" ? [PlanExitTool, PlanEnterTool] : []),
...(Flag.OPENCODE_CLIENT === "cli" ? [PlanExitTool, PlanEnterTool] : []),
...custom,
]
}
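The change above drops the experimental-flag gate, so the effective condition for registering the plan tools shrinks from flag-and-client to client only. A small sketch of the before/after condition, using the Flag names shown above:

// Before: both checks had to pass
const before = Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli"
// After: only the client check remains
const after = Flag.OPENCODE_CLIENT === "cli"
// Either way the result gates a conditional spread: ...(cond ? [PlanExitTool, PlanEnterTool] : [])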

View File

@@ -4,9 +4,14 @@ export function lazy<T>(fn: () => T) {
const result = (): T => {
if (loaded) return value as T
loaded = true
value = fn()
return value as T
try {
value = fn()
loaded = true
return value as T
} catch (e) {
// Don't mark as loaded if initialization failed
throw e
}
}
result.reset = () => {
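Since this hunk interleaves the removed and added bodies, here is a sketch of the helper as it reads after the change. The surrounding declarations and the reset body are not shown in the hunk, so they are assumed here.

export function lazy<T>(fn: () => T) {
  let loaded = false // assumed outer state, not shown in the hunk
  let value: T | undefined
  const result = (): T => {
    if (loaded) return value as T
    try {
      value = fn()
      loaded = true // only marked loaded once fn() succeeds, so a failed init can be retried
      return value as T
    } catch (e) {
      // Don't mark as loaded if initialization failed
      throw e
    }
  }
  result.reset = () => {
    // assumed reset behavior (body not shown in the hunk)
    loaded = false
    value = undefined
  }
  return result
}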

View File

@@ -7,7 +7,8 @@ import { Global } from "../global"
import { Instance } from "../project/instance"
import { InstanceBootstrap } from "../project/bootstrap"
import { Project } from "../project/project"
import { Storage } from "../storage/storage"
import { Database, eq } from "../storage/db"
import { ProjectTable } from "../project/project.sql"
import { fn } from "../util/fn"
import { Log } from "../util/log"
import { BusEvent } from "@/bus/bus-event"
@@ -307,7 +308,8 @@ export namespace Worktree {
}
async function runStartScripts(directory: string, input: { projectID: string; extra?: string }) {
const project = await Storage.read<Project.Info>(["project", input.projectID]).catch(() => undefined)
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get())
const project = row ? Project.fromRow(row) : undefined
const startup = project?.commands?.start?.trim() ?? ""
const ok = await runStartScript(directory, startup, "project")
if (!ok) return false
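The change swaps the JSON-storage read for a direct SQLite lookup. A small sketch of that lookup factored into a helper, using the imports shown in this hunk; the helper name and the guard are illustrative.

// Illustrative helper wrapping the lookup introduced above.
// .get() returns the first matching row or undefined, hence the fromRow guard.
function projectById(projectID: string): Project.Info | undefined {
  const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get())
  return row ? Project.fromRow(row) : undefined
}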

View File

@@ -122,12 +122,20 @@ function createFakeAgent() {
messages: async () => {
return { data: [] }
},
message: async () => {
message: async (params?: any) => {
// Return a message with parts that can be looked up by partID
return {
data: {
info: {
role: "assistant",
},
parts: [
{
id: params?.messageID ? `${params.messageID}_part` : "part_1",
type: "text",
text: "",
},
],
},
}
},
@@ -193,7 +201,7 @@ function createFakeAgent() {
}
describe("acp.agent event subscription", () => {
test("routes message.part.updated by the event sessionID (no cross-session pollution)", async () => {
test("routes message.part.delta by the event sessionID (no cross-session pollution)", async () => {
await using tmp = await tmpdir()
await Instance.provide({
directory: tmp.path,
@@ -207,14 +215,12 @@ describe("acp.agent event subscription", () => {
controller.push({
directory: cwd,
payload: {
type: "message.part.updated",
type: "message.part.delta",
properties: {
part: {
sessionID: sessionB,
messageID: "msg_1",
type: "text",
synthetic: false,
},
sessionID: sessionB,
messageID: "msg_1",
partID: "msg_1_part",
field: "text",
delta: "hello",
},
},
@@ -230,7 +236,7 @@ describe("acp.agent event subscription", () => {
})
})
test("keeps concurrent sessions isolated when message.part.updated events are interleaved", async () => {
test("keeps concurrent sessions isolated when message.part.delta events are interleaved", async () => {
await using tmp = await tmpdir()
await Instance.provide({
directory: tmp.path,
@@ -248,14 +254,12 @@ describe("acp.agent event subscription", () => {
controller.push({
directory: cwd,
payload: {
type: "message.part.updated",
type: "message.part.delta",
properties: {
part: {
sessionID: sessionId,
messageID,
type: "text",
synthetic: false,
},
sessionID: sessionId,
messageID,
partID: `${messageID}_part`,
field: "text",
delta,
},
},
@@ -402,14 +406,12 @@ describe("acp.agent event subscription", () => {
controller.push({
directory: cwd,
payload: {
type: "message.part.updated",
type: "message.part.delta",
properties: {
part: {
sessionID: sessionB,
messageID: "msg_b",
type: "text",
synthetic: false,
},
sessionID: sessionB,
messageID: "msg_b",
partID: "msg_b_part",
field: "text",
delta: "session_b_message",
},
},
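The tests above move from a full-part payload to a flat delta payload. A sketch of the event shape as the fixtures construct it, inferred from the tests rather than the real type definitions:

// Shape inferred from the fixtures above; the actual types may be broader.
type MessagePartDeltaEvent = {
  type: "message.part.delta"
  properties: {
    sessionID: string
    messageID: string
    partID: string
    field: "text" // the tests only exercise text deltas
    delta: string
  }
}

const example: MessagePartDeltaEvent = {
  type: "message.part.delta",
  properties: {
    sessionID: "session_b",
    messageID: "msg_b",
    partID: "msg_b_part",
    field: "text",
    delta: "session_b_message",
  },
}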

View File

@@ -2,7 +2,6 @@ import { test, expect } from "bun:test"
import os from "os"
import { PermissionNext } from "../../src/permission/next"
import { Instance } from "../../src/project/instance"
import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture"
// fromConfig tests

View File

@@ -1,63 +1,70 @@
// IMPORTANT: Set env vars BEFORE any imports from src/ directory
// xdg-basedir reads env vars at import time, so we must set these first
import os from "os"
import path from "path"
import fs from "fs/promises"
import fsSync from "fs"
import { afterAll } from "bun:test"
import os from "os";
import path from "path";
import fs from "fs/promises";
import fsSync from "fs";
import { afterAll } from "bun:test";
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid)
await fs.mkdir(dir, { recursive: true })
// Set XDG env vars FIRST, before any src/ imports
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid);
await fs.mkdir(dir, { recursive: true });
afterAll(() => {
fsSync.rmSync(dir, { recursive: true, force: true })
})
fsSync.rmSync(dir, { recursive: true, force: true });
});
process.env["XDG_DATA_HOME"] = path.join(dir, "share");
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache");
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config");
process.env["XDG_STATE_HOME"] = path.join(dir, "state");
process.env["OPENCODE_MODELS_PATH"] = path.join(
import.meta.dir,
"tool",
"fixtures",
"models-api.json",
);
// Set test home directory to isolate tests from user's actual home directory
// This prevents tests from picking up real user configs/skills from ~/.claude/skills
const testHome = path.join(dir, "home")
await fs.mkdir(testHome, { recursive: true })
process.env["OPENCODE_TEST_HOME"] = testHome
const testHome = path.join(dir, "home");
await fs.mkdir(testHome, { recursive: true });
process.env["OPENCODE_TEST_HOME"] = testHome;
// Set test managed config directory to isolate tests from system managed settings
const testManagedConfigDir = path.join(dir, "managed")
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir
process.env["XDG_DATA_HOME"] = path.join(dir, "share")
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache")
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config")
process.env["XDG_STATE_HOME"] = path.join(dir, "state")
process.env["OPENCODE_MODELS_PATH"] = path.join(import.meta.dir, "tool", "fixtures", "models-api.json")
const testManagedConfigDir = path.join(dir, "managed");
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir;
// Write the cache version file to prevent global/index.ts from clearing the cache
const cacheDir = path.join(dir, "cache", "opencode")
await fs.mkdir(cacheDir, { recursive: true })
await fs.writeFile(path.join(cacheDir, "version"), "14")
const cacheDir = path.join(dir, "cache", "opencode");
await fs.mkdir(cacheDir, { recursive: true });
await fs.writeFile(path.join(cacheDir, "version"), "14");
// Clear provider env vars to ensure clean test state
delete process.env["ANTHROPIC_API_KEY"]
delete process.env["OPENAI_API_KEY"]
delete process.env["GOOGLE_API_KEY"]
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"]
delete process.env["AZURE_OPENAI_API_KEY"]
delete process.env["AWS_ACCESS_KEY_ID"]
delete process.env["AWS_PROFILE"]
delete process.env["AWS_REGION"]
delete process.env["AWS_BEARER_TOKEN_BEDROCK"]
delete process.env["OPENROUTER_API_KEY"]
delete process.env["GROQ_API_KEY"]
delete process.env["MISTRAL_API_KEY"]
delete process.env["PERPLEXITY_API_KEY"]
delete process.env["TOGETHER_API_KEY"]
delete process.env["XAI_API_KEY"]
delete process.env["DEEPSEEK_API_KEY"]
delete process.env["FIREWORKS_API_KEY"]
delete process.env["CEREBRAS_API_KEY"]
delete process.env["SAMBANOVA_API_KEY"]
delete process.env["ANTHROPIC_API_KEY"];
delete process.env["OPENAI_API_KEY"];
delete process.env["GOOGLE_API_KEY"];
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"];
delete process.env["AZURE_OPENAI_API_KEY"];
delete process.env["AWS_ACCESS_KEY_ID"];
delete process.env["AWS_PROFILE"];
delete process.env["AWS_REGION"];
delete process.env["AWS_BEARER_TOKEN_BEDROCK"];
delete process.env["OPENROUTER_API_KEY"];
delete process.env["GROQ_API_KEY"];
delete process.env["MISTRAL_API_KEY"];
delete process.env["PERPLEXITY_API_KEY"];
delete process.env["TOGETHER_API_KEY"];
delete process.env["XAI_API_KEY"];
delete process.env["DEEPSEEK_API_KEY"];
delete process.env["FIREWORKS_API_KEY"];
delete process.env["CEREBRAS_API_KEY"];
delete process.env["SAMBANOVA_API_KEY"];
// Now safe to import from src/
const { Log } = await import("../src/util/log")
const { Log } = await import("../src/util/log");
Log.init({
print: false,
dev: true,
level: "DEBUG",
})
});
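The ordering the comments above insist on matters because static import statements are hoisted and evaluated before any other module-level code; only a dynamic import runs after the env assignments. A minimal sketch of the contrast, with an illustrative value:

// A static import would be hoisted and evaluated before the assignment below:
// import { something } from "some-env-reading-module"   // would run first and see stale env
process.env["XDG_DATA_HOME"] = "/tmp/example/share" // illustrative value
// A dynamic import only evaluates the module when this line executes,
// after the env vars are in place, which is why the preload imports Log this way.
const { Log } = await import("../src/util/log")
Log.init({ print: false, dev: true, level: "DEBUG" })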

View File

@@ -1,7 +1,6 @@
import { describe, expect, test } from "bun:test"
import { Project } from "../../src/project/project"
import { Log } from "../../src/util/log"
import { Storage } from "../../src/storage/storage"
import { $ } from "bun"
import path from "path"
import { tmpdir } from "../fixture/fixture"
@@ -55,37 +54,50 @@ describe("Project.fromDirectory with worktrees", () => {
test("should set worktree to root when called from a worktree", async () => {
await using tmp = await tmpdir({ git: true })
const worktreePath = path.join(tmp.path, "..", "worktree-test")
await $`git worktree add ${worktreePath} -b test-branch`.cwd(tmp.path).quiet()
const worktreePath = path.join(tmp.path, "..", path.basename(tmp.path) + "-worktree")
try {
await $`git worktree add ${worktreePath} -b test-branch-${Date.now()}`.cwd(tmp.path).quiet()
const { project, sandbox } = await Project.fromDirectory(worktreePath)
const { project, sandbox } = await Project.fromDirectory(worktreePath)
expect(project.worktree).toBe(tmp.path)
expect(sandbox).toBe(worktreePath)
expect(project.sandboxes).toContain(worktreePath)
expect(project.sandboxes).not.toContain(tmp.path)
await $`git worktree remove ${worktreePath}`.cwd(tmp.path).quiet()
expect(project.worktree).toBe(tmp.path)
expect(sandbox).toBe(worktreePath)
expect(project.sandboxes).toContain(worktreePath)
expect(project.sandboxes).not.toContain(tmp.path)
} finally {
await $`git worktree remove ${worktreePath}`
.cwd(tmp.path)
.quiet()
.catch(() => {})
}
})
test("should accumulate multiple worktrees in sandboxes", async () => {
await using tmp = await tmpdir({ git: true })
const worktree1 = path.join(tmp.path, "..", "worktree-1")
const worktree2 = path.join(tmp.path, "..", "worktree-2")
await $`git worktree add ${worktree1} -b branch-1`.cwd(tmp.path).quiet()
await $`git worktree add ${worktree2} -b branch-2`.cwd(tmp.path).quiet()
const worktree1 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt1")
const worktree2 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt2")
try {
await $`git worktree add ${worktree1} -b branch-${Date.now()}`.cwd(tmp.path).quiet()
await $`git worktree add ${worktree2} -b branch-${Date.now() + 1}`.cwd(tmp.path).quiet()
await Project.fromDirectory(worktree1)
const { project } = await Project.fromDirectory(worktree2)
await Project.fromDirectory(worktree1)
const { project } = await Project.fromDirectory(worktree2)
expect(project.worktree).toBe(tmp.path)
expect(project.sandboxes).toContain(worktree1)
expect(project.sandboxes).toContain(worktree2)
expect(project.sandboxes).not.toContain(tmp.path)
await $`git worktree remove ${worktree1}`.cwd(tmp.path).quiet()
await $`git worktree remove ${worktree2}`.cwd(tmp.path).quiet()
expect(project.worktree).toBe(tmp.path)
expect(project.sandboxes).toContain(worktree1)
expect(project.sandboxes).toContain(worktree2)
expect(project.sandboxes).not.toContain(tmp.path)
} finally {
await $`git worktree remove ${worktree1}`
.cwd(tmp.path)
.quiet()
.catch(() => {})
await $`git worktree remove ${worktree2}`
.cwd(tmp.path)
.quiet()
.catch(() => {})
}
})
})
@@ -99,11 +111,12 @@ describe("Project.discover", () => {
await Project.discover(project)
const updated = await Storage.read<Project.Info>(["project", project.id])
expect(updated.icon).toBeDefined()
expect(updated.icon?.url).toStartWith("data:")
expect(updated.icon?.url).toContain("base64")
expect(updated.icon?.color).toBeUndefined()
const updated = Project.get(project.id)
expect(updated).toBeDefined()
expect(updated!.icon).toBeDefined()
expect(updated!.icon?.url).toStartWith("data:")
expect(updated!.icon?.url).toContain("base64")
expect(updated!.icon?.color).toBeUndefined()
})
test("should not discover non-image files", async () => {
@@ -114,7 +127,8 @@ describe("Project.discover", () => {
await Project.discover(project)
const updated = await Storage.read<Project.Info>(["project", project.id])
expect(updated.icon).toBeUndefined()
const updated = Project.get(project.id)
expect(updated).toBeDefined()
expect(updated!.icon).toBeUndefined()
})
})
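The reworked tests share one cleanup pattern: unique worktree paths and branch names, plus removal in a finally block that swallows errors so a failing assertion is not masked. A hedged sketch of that pattern as a reusable helper; the helper itself is illustrative and not part of the change.

import { $ } from "bun"
import path from "path"

// Illustrative helper mirroring the try/finally pattern used in the tests above.
async function withWorktree<T>(repo: string, fn: (worktree: string) => Promise<T>): Promise<T> {
  const worktree = path.join(repo, "..", path.basename(repo) + "-wt")
  await $`git worktree add ${worktree} -b branch-${Date.now()}`.cwd(repo).quiet()
  try {
    return await fn(worktree)
  } finally {
    // best-effort cleanup; errors here should not hide the test failure
    await $`git worktree remove ${worktree}`
      .cwd(repo)
      .quiet()
      .catch(() => {})
  }
}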

Some files were not shown because too many files have changed in this diff.