Mirror of https://github.com/anomalyco/opencode.git, synced 2026-02-10 19:04:17 +00:00
Compare commits
109 Commits
Commit SHA1s:
9ced3cd8ed, 1f900fdaf8, 700a15523f, 1c73ab5a23, c2665dd6ff, 1e03a55acd, 65c9669283, 18b6257119,
c607c01fb9, 4c4e30cd71, 19ad7ad809, 87795384de, 0732ab3393, 2bccfd7462, 83853cc5e6, 4a73d51acd,
227202c2ab, 1599c29aa5, 0578be5669, e00532f631, be042ebb7d, ff68fcebd1, 1de66812bf, 173c16581d,
94d0c9940a, 7a1e6c5326, 4e77e52395, 76bcd22802, e06fcfdc43, 246430cb8f, f689fc7f75, 9b2fd57e6e,
0d365fa613, 601e631624, 2fef02f487, 4cc9104942, 0eaa6b5fc8, e563cff034, 3b2550106b, 3ec6bff038,
aab2a6df3b, 84171018f2, 280d7e6f91, 5952891b1e, d7c8a3f50d, ce353819e8, 2dae94e5a3, c6adc19e41,
ce56166510, 5911e4c06a, 42fb840f22, 4dcfdf6572, 25f3d6d5a9, e19a9e9614, bb4d978684, fcc903489b,
949e69a9bf, afec40e8da, 8c30f551e2, cb721497c1, 4ec6293054, b7a323355c, d4f053042c, 5f552534c7,
ad5b790bb3, ed87341c4f, 794ecab028, eeb235724b, 61084e7f6f, 200aef2eb3, f6e375a555, db908deee5,
7b72cc3a48, b8cbfd48ec, 498cbb2c26, d6fbd255b6, 2de1c82bf7, 34ebb3d051, 9c3e3c1ab5, 3ea499f04e,
ab13c1d1c4, 53b610c331, e3519356f2, 2619acc0ff, 1bc45dc266, 2e8feb1c78, 00e60899cc, 30a918e9d4,
ac16068140, 19a41ab297, cd174d8cba, 246e901e42, 0ccef1b31f, 7706f5b6a8, 63e38555c9, f40685ab13,
a48a5a3462, 5e1639de2b, 2b05833c32, acdcf7fa88, bf0754caeb, 4d50a32979, 57edb0ddc5, a614b78c6d,
b9f5a34247, 81b47a44e2, 0c1c07467e, 105688bf90, 1e7b4768b1
2 .github/pull_request_template.md vendored
@@ -1,6 +1,6 @@
### What does this PR do?

-Please provide a description of the issue (if there is one), the changes you made to fix it, and why they work. It is expected that you understand why your changes work and if you do not understand why at least say as much so a maintainer knows how much to value the pr.
+Please provide a description of the issue (if there is one), the changes you made to fix it, and why they work. It is expected that you understand why your changes work and if you do not understand why at least say as much so a maintainer knows how much to value the PR.

**If you paste a large clearly AI generated description here your PR may be IGNORED or CLOSED!**
72 .github/workflows/publish.yml vendored
@@ -72,7 +72,7 @@ jobs:
      - name: Build
        id: build
        run: |
-          ./packages/opencode/script/build.ts
+          ./packages/opencode/script/build.ts --exclude-os=win32
        env:
          OPENCODE_VERSION: ${{ needs.version.outputs.version }}
          OPENCODE_RELEASE: ${{ needs.version.outputs.release }}

@@ -86,9 +86,57 @@ jobs:
    outputs:
      version: ${{ needs.version.outputs.version }}

+  # Windows CLI targets are built on a Windows runner using the anomalyco/bun fork.
+  # The fork Bun must be BOTH the compiler and base binary so that the serialized
+  # CompiledModuleGraphFile struct layout matches at runtime.
+  build-cli-windows:
+    needs: version
+    runs-on: blacksmith-4vcpu-windows-2025
+    if: github.repository == 'anomalyco/opencode'
+    env:
+      BUN_FORK_VERSION: "1.3.9-opencode.2"
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-tags: true
+
+      - name: Install Bun fork
+        shell: pwsh
+        run: |
+          $variant = "bun-windows-x64"
+          $url = "https://github.com/anomalyco/bun/releases/download/v${{ env.BUN_FORK_VERSION }}/$variant.tgz"
+          $tmp = "$env:RUNNER_TEMP\bun-fork"
+          New-Item -ItemType Directory -Force -Path $tmp | Out-Null
+          Write-Host "Downloading $url"
+          curl.exe -fSL -o "$tmp\$variant.tgz" $url
+          tar -xzf "$tmp\$variant.tgz" -C $tmp --strip-components=2
+          Copy-Item "$tmp\bun.exe" "$tmp\bunx.exe"
+          Write-Host "Bun fork installed at $tmp"
+          "$tmp" | Out-File -Append -FilePath $env:GITHUB_PATH
+
+      - name: Verify Bun fork
+        run: bun --version
+
+      - name: Install dependencies
+        run: bun install
+
+      - name: Build
+        run: bun run ./packages/opencode/script/build.ts --os=win32 --skip-release
+        env:
+          OPENCODE_VERSION: ${{ needs.version.outputs.version }}
+          OPENCODE_RELEASE: ${{ needs.version.outputs.release }}
+          OPENCODE_SKIP_BUN_VERSION_CHECK: "1"
+          GH_TOKEN: ${{ github.token }}
+
+      - uses: actions/upload-artifact@v4
+        with:
+          name: opencode-cli-windows
+          path: packages/opencode/dist
+
  build-tauri:
    needs:
      - build-cli
+      - build-cli-windows
      - version
    continue-on-error: false
    strategy:

@@ -206,6 +254,7 @@ jobs:
    needs:
      - version
      - build-cli
+      - build-cli-windows
      - build-tauri
    runs-on: blacksmith-4vcpu-ubuntu-2404
    steps:

@@ -243,6 +292,27 @@ jobs:
          name: opencode-cli
          path: packages/opencode/dist

+      - uses: actions/download-artifact@v4
+        with:
+          name: opencode-cli-windows
+          path: packages/opencode/dist
+
+      # Windows archives are created here (on Ubuntu where zip is available)
+      # because the Windows CI runner doesn't have zip installed.
+      - name: Create and upload Windows release archives
+        if: needs.version.outputs.release
+        run: |
+          cd packages/opencode
+          for dir in dist/opencode-windows-*/bin; do
+            key=$(basename $(dirname "$dir"))
+            (cd "$dir" && zip -r "../../../${key}.zip" *)
+          done
+          if ls dist/opencode-windows-*.zip 1>/dev/null 2>&1; then
+            gh release upload "v${{ needs.version.outputs.version }}" dist/opencode-windows-*.zip --clobber
+          fi
+        env:
+          GH_TOKEN: ${{ github.token }}
+
      - name: Cache apt packages (AUR)
        uses: actions/cache@v4
        with:
@@ -16,15 +16,12 @@ wip:

For anything in the packages/web use the docs: prefix.

For anything in the packages/app use the ignore: prefix.

prefer to explain WHY something was done from an end user perspective instead of
WHAT was done.

do not do generic messages like "improved agent experience" be very specific
about what user facing changes were made

if there are changes do a git pull --rebase
if there are conflicts DO NOT FIX THEM. notify me and I will fix them

## GIT DIFF
@@ -1,4 +1,4 @@
-Use this tool to assign and/or label a Github issue.
+Use this tool to assign and/or label a GitHub issue.

You can assign the following users:
- thdxr
@@ -110,3 +110,4 @@ const table = sqliteTable("session", {

- Avoid mocks as much as possible
- Test actual implementation, do not duplicate logic into tests
+- Tests cannot run from repo root (guard: `do-not-run-tests-from-root`); run from package dirs like `packages/opencode`.
@@ -275,7 +275,7 @@ async function assertOpencodeConnected() {
    body: {
      service: "github-workflow",
      level: "info",
-      message: "Prepare to react to Github Workflow event",
+      message: "Prepare to react to GitHub Workflow event",
    },
  })
  connected = true
@@ -40,6 +40,8 @@
    "@tailwindcss/vite": "4.1.11",
    "diff": "8.0.2",
    "dompurify": "3.3.1",
+    "drizzle-kit": "1.0.0-beta.12-a5629fb",
+    "drizzle-orm": "1.0.0-beta.12-a5629fb",
    "ai": "5.0.124",
    "hono": "4.10.7",
    "hono-openapi": "1.1.2",
@@ -1,2 +1,3 @@
[test]
root = "./src"
+preload = ["./happydom.ts"]
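The new `preload` entry makes bun:test load a DOM shim before any spec file runs, which is what lets the component tests below mock modules against a real `document`/`window`. The repo's `./happydom.ts` is not shown in this diff; a minimal sketch of what such a preload typically looks like, assuming the commonly used `@happy-dom/global-registrator` package, is:

```ts
// happydom.ts - hypothetical sketch of a bun:test preload; the actual file is not part of this diff.
// Registers happy-dom globals (window, document, localStorage, ...) before any test module is imported.
import { GlobalRegistrator } from "@happy-dom/global-registrator"

GlobalRegistrator.register()
```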
140 packages/app/e2e/projects/workspace-new-session.spec.ts Normal file
@@ -0,0 +1,140 @@
|
||||
import { base64Decode } from "@opencode-ai/util/encode"
|
||||
import type { Page } from "@playwright/test"
|
||||
import { test, expect } from "../fixtures"
|
||||
import { cleanupTestProject, openSidebar, sessionIDFromUrl, setWorkspacesEnabled } from "../actions"
|
||||
import { promptSelector, workspaceItemSelector, workspaceNewSessionSelector } from "../selectors"
|
||||
import { createSdk } from "../utils"
|
||||
|
||||
function slugFromUrl(url: string) {
|
||||
return /\/([^/]+)\/session(?:\/|$)/.exec(url)?.[1] ?? ""
|
||||
}
|
||||
|
||||
async function waitWorkspaceReady(page: Page, slug: string) {
|
||||
await openSidebar(page)
|
||||
await expect
|
||||
.poll(
|
||||
async () => {
|
||||
const item = page.locator(workspaceItemSelector(slug)).first()
|
||||
try {
|
||||
await item.hover({ timeout: 500 })
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
},
|
||||
{ timeout: 60_000 },
|
||||
)
|
||||
.toBe(true)
|
||||
}
|
||||
|
||||
async function createWorkspace(page: Page, root: string, seen: string[]) {
|
||||
await openSidebar(page)
|
||||
await page.getByRole("button", { name: "New workspace" }).first().click()
|
||||
|
||||
await expect
|
||||
.poll(
|
||||
() => {
|
||||
const slug = slugFromUrl(page.url())
|
||||
if (!slug) return ""
|
||||
if (slug === root) return ""
|
||||
if (seen.includes(slug)) return ""
|
||||
return slug
|
||||
},
|
||||
{ timeout: 45_000 },
|
||||
)
|
||||
.not.toBe("")
|
||||
|
||||
const slug = slugFromUrl(page.url())
|
||||
const directory = base64Decode(slug)
|
||||
if (!directory) throw new Error(`Failed to decode workspace slug: ${slug}`)
|
||||
return { slug, directory }
|
||||
}
|
||||
|
||||
async function openWorkspaceNewSession(page: Page, slug: string) {
|
||||
await waitWorkspaceReady(page, slug)
|
||||
|
||||
const item = page.locator(workspaceItemSelector(slug)).first()
|
||||
await item.hover()
|
||||
|
||||
const button = page.locator(workspaceNewSessionSelector(slug)).first()
|
||||
await expect(button).toBeVisible()
|
||||
await button.click({ force: true })
|
||||
|
||||
await expect.poll(() => slugFromUrl(page.url())).toBe(slug)
|
||||
await expect(page).toHaveURL(new RegExp(`/${slug}/session(?:[/?#]|$)`))
|
||||
}
|
||||
|
||||
async function createSessionFromWorkspace(page: Page, slug: string, text: string) {
|
||||
await openWorkspaceNewSession(page, slug)
|
||||
|
||||
const prompt = page.locator(promptSelector)
|
||||
await expect(prompt).toBeVisible()
|
||||
await prompt.click()
|
||||
await page.keyboard.type(text)
|
||||
await page.keyboard.press("Enter")
|
||||
|
||||
await expect.poll(() => slugFromUrl(page.url())).toBe(slug)
|
||||
await expect(page).toHaveURL(new RegExp(`/${slug}/session/[^/?#]+`), { timeout: 30_000 })
|
||||
|
||||
const sessionID = sessionIDFromUrl(page.url())
|
||||
if (!sessionID) throw new Error(`Failed to parse session id from url: ${page.url()}`)
|
||||
return sessionID
|
||||
}
|
||||
|
||||
async function sessionDirectory(directory: string, sessionID: string) {
|
||||
const info = await createSdk(directory)
|
||||
.session.get({ sessionID })
|
||||
.then((x) => x.data)
|
||||
.catch(() => undefined)
|
||||
if (!info) return ""
|
||||
return info.directory
|
||||
}
|
||||
|
||||
test("new sessions from sidebar workspace actions stay in selected workspace", async ({ page, withProject }) => {
|
||||
await page.setViewportSize({ width: 1400, height: 800 })
|
||||
|
||||
await withProject(async ({ directory, slug: root }) => {
|
||||
const workspaces = [] as { slug: string; directory: string }[]
|
||||
const sessions = [] as string[]
|
||||
|
||||
try {
|
||||
await openSidebar(page)
|
||||
await setWorkspacesEnabled(page, root, true)
|
||||
|
||||
const first = await createWorkspace(page, root, [])
|
||||
workspaces.push(first)
|
||||
await waitWorkspaceReady(page, first.slug)
|
||||
|
||||
const second = await createWorkspace(page, root, [first.slug])
|
||||
workspaces.push(second)
|
||||
await waitWorkspaceReady(page, second.slug)
|
||||
|
||||
const firstSession = await createSessionFromWorkspace(page, first.slug, `workspace one ${Date.now()}`)
|
||||
sessions.push(firstSession)
|
||||
|
||||
const secondSession = await createSessionFromWorkspace(page, second.slug, `workspace two ${Date.now()}`)
|
||||
sessions.push(secondSession)
|
||||
|
||||
const thirdSession = await createSessionFromWorkspace(page, first.slug, `workspace one again ${Date.now()}`)
|
||||
sessions.push(thirdSession)
|
||||
|
||||
await expect.poll(() => sessionDirectory(first.directory, firstSession)).toBe(first.directory)
|
||||
await expect.poll(() => sessionDirectory(second.directory, secondSession)).toBe(second.directory)
|
||||
await expect.poll(() => sessionDirectory(first.directory, thirdSession)).toBe(first.directory)
|
||||
} finally {
|
||||
const dirs = [directory, ...workspaces.map((workspace) => workspace.directory)]
|
||||
await Promise.all(
|
||||
sessions.map((sessionID) =>
|
||||
Promise.all(
|
||||
dirs.map((dir) =>
|
||||
createSdk(dir)
|
||||
.session.delete({ sessionID })
|
||||
.catch(() => undefined),
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
await Promise.all(workspaces.map((workspace) => cleanupTestProject(workspace.directory)))
|
||||
}
|
||||
})
|
||||
})
|
||||
@@ -48,6 +48,9 @@ export const workspaceItemSelector = (slug: string) =>
export const workspaceMenuTriggerSelector = (slug: string) =>
  `${sidebarNavSelector} [data-action="workspace-menu"][data-workspace="${slug}"]`

+export const workspaceNewSessionSelector = (slug: string) =>
+  `${sidebarNavSelector} [data-action="workspace-new-session"][data-workspace="${slug}"]`
+
export const listItemSelector = '[data-slot="list-item"]'

export const listItemKeyStartsWithSelector = (prefix: string) => `${listItemSelector}[data-key^="${prefix}"]`
126 packages/app/e2e/session/session-undo-redo.spec.ts Normal file
@@ -0,0 +1,126 @@
|
||||
import type { Page } from "@playwright/test"
|
||||
import { test, expect } from "../fixtures"
|
||||
import { withSession } from "../actions"
|
||||
import { createSdk, modKey } from "../utils"
|
||||
import { promptSelector } from "../selectors"
|
||||
|
||||
async function seedConversation(input: {
|
||||
page: Page
|
||||
sdk: ReturnType<typeof createSdk>
|
||||
sessionID: string
|
||||
token: string
|
||||
}) {
|
||||
const prompt = input.page.locator(promptSelector)
|
||||
await expect(prompt).toBeVisible()
|
||||
await prompt.click()
|
||||
await input.page.keyboard.type(`Reply with exactly: ${input.token}`)
|
||||
await input.page.keyboard.press("Enter")
|
||||
|
||||
let userMessageID: string | undefined
|
||||
await expect
|
||||
.poll(
|
||||
async () => {
|
||||
const messages = await input.sdk.session
|
||||
.messages({ sessionID: input.sessionID, limit: 50 })
|
||||
.then((r) => r.data ?? [])
|
||||
const users = messages.filter((m) => m.info.role === "user")
|
||||
if (users.length === 0) return false
|
||||
|
||||
const user = users.reduce((acc, item) => (item.info.id > acc.info.id ? item : acc))
|
||||
userMessageID = user.info.id
|
||||
|
||||
const assistantText = messages
|
||||
.filter((m) => m.info.role === "assistant")
|
||||
.flatMap((m) => m.parts)
|
||||
.filter((p) => p.type === "text")
|
||||
.map((p) => p.text)
|
||||
.join("\n")
|
||||
|
||||
return assistantText.includes(input.token)
|
||||
},
|
||||
{ timeout: 90_000 },
|
||||
)
|
||||
.toBe(true)
|
||||
|
||||
if (!userMessageID) throw new Error("Expected a user message id")
|
||||
return { prompt, userMessageID }
|
||||
}
|
||||
|
||||
test("slash undo sets revert and restores prior prompt", async ({ page, withProject }) => {
|
||||
test.setTimeout(120_000)
|
||||
|
||||
const token = `undo_${Date.now()}`
|
||||
|
||||
await withProject(async (project) => {
|
||||
const sdk = createSdk(project.directory)
|
||||
|
||||
await withSession(sdk, `e2e undo ${Date.now()}`, async (session) => {
|
||||
await project.gotoSession(session.id)
|
||||
|
||||
const seeded = await seedConversation({ page, sdk, sessionID: session.id, token })
|
||||
|
||||
await seeded.prompt.click()
|
||||
await page.keyboard.type("/undo")
|
||||
|
||||
const undo = page.locator('[data-slash-id="session.undo"]').first()
|
||||
await expect(undo).toBeVisible()
|
||||
await page.keyboard.press("Enter")
|
||||
|
||||
await expect
|
||||
.poll(async () => await sdk.session.get({ sessionID: session.id }).then((r) => r.data?.revert?.messageID), {
|
||||
timeout: 30_000,
|
||||
})
|
||||
.toBe(seeded.userMessageID)
|
||||
|
||||
await expect(seeded.prompt).toContainText(token)
|
||||
await expect(page.locator(`[data-message-id="${seeded.userMessageID}"]`)).toHaveCount(0)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test("slash redo clears revert and restores latest state", async ({ page, withProject }) => {
|
||||
test.setTimeout(120_000)
|
||||
|
||||
const token = `redo_${Date.now()}`
|
||||
|
||||
await withProject(async (project) => {
|
||||
const sdk = createSdk(project.directory)
|
||||
|
||||
await withSession(sdk, `e2e redo ${Date.now()}`, async (session) => {
|
||||
await project.gotoSession(session.id)
|
||||
|
||||
const seeded = await seedConversation({ page, sdk, sessionID: session.id, token })
|
||||
|
||||
await seeded.prompt.click()
|
||||
await page.keyboard.type("/undo")
|
||||
|
||||
const undo = page.locator('[data-slash-id="session.undo"]').first()
|
||||
await expect(undo).toBeVisible()
|
||||
await page.keyboard.press("Enter")
|
||||
|
||||
await expect
|
||||
.poll(async () => await sdk.session.get({ sessionID: session.id }).then((r) => r.data?.revert?.messageID), {
|
||||
timeout: 30_000,
|
||||
})
|
||||
.toBe(seeded.userMessageID)
|
||||
|
||||
await seeded.prompt.click()
|
||||
await page.keyboard.press(`${modKey}+A`)
|
||||
await page.keyboard.press("Backspace")
|
||||
await page.keyboard.type("/redo")
|
||||
|
||||
const redo = page.locator('[data-slash-id="session.redo"]').first()
|
||||
await expect(redo).toBeVisible()
|
||||
await page.keyboard.press("Enter")
|
||||
|
||||
await expect
|
||||
.poll(async () => await sdk.session.get({ sessionID: session.id }).then((r) => r.data?.revert?.messageID), {
|
||||
timeout: 30_000,
|
||||
})
|
||||
.toBeUndefined()
|
||||
|
||||
await expect(seeded.prompt).not.toContainText(token)
|
||||
await expect(page.locator(`[data-message-id="${seeded.userMessageID}"]`).first()).toBeVisible()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -89,7 +89,6 @@ let runner: ReturnType<typeof Bun.spawn> | undefined
let server: { stop: () => Promise<void> | void } | undefined
let inst: { Instance: { disposeAll: () => Promise<void> | void } } | undefined
let cleaned = false
-let internalError = false

const cleanup = async () => {
  if (cleaned) return

@@ -115,9 +114,8 @@ const shutdown = (code: number, reason: string) => {
}

const reportInternalError = (reason: string, error: unknown) => {
-  internalError = true
-  console.error(`e2e-local internal error: ${reason}`)
-  console.error(error)
+  console.warn(`e2e-local ignored server error: ${reason}`)
+  console.warn(error)
}

process.once("SIGINT", () => shutdown(130, "SIGINT"))

@@ -177,6 +175,4 @@ try {
  await cleanup()
}

-if (code === 0 && internalError) code = 1
-
process.exit(code)
@@ -6,6 +6,7 @@ let dirsToExpand: typeof import("./file-tree").dirsToExpand

beforeAll(async () => {
  mock.module("@solidjs/router", () => ({
    useNavigate: () => () => undefined,
    useParams: () => ({}),
  }))
  mock.module("@/context/file", () => ({
@@ -787,7 +787,7 @@ export const PromptInput: Component<PromptInputProps> = (props) => {
    },
    setMode: (mode) => setStore("mode", mode),
    setPopover: (popover) => setStore("popover", popover),
-    newSessionWorktree: props.newSessionWorktree,
+    newSessionWorktree: () => props.newSessionWorktree,
    onNewSessionWorktreeReset: props.onNewSessionWorktreeReset,
    onSubmit: props.onSubmit,
  })
175 packages/app/src/components/prompt-input/submit.test.ts Normal file
@@ -0,0 +1,175 @@
|
||||
import { beforeAll, beforeEach, describe, expect, mock, test } from "bun:test"
|
||||
import type { Prompt } from "@/context/prompt"
|
||||
|
||||
let createPromptSubmit: typeof import("./submit").createPromptSubmit
|
||||
|
||||
const createdClients: string[] = []
|
||||
const createdSessions: string[] = []
|
||||
const sentShell: string[] = []
|
||||
const syncedDirectories: string[] = []
|
||||
|
||||
let selected = "/repo/worktree-a"
|
||||
|
||||
const promptValue: Prompt = [{ type: "text", content: "ls", start: 0, end: 2 }]
|
||||
|
||||
const clientFor = (directory: string) => ({
|
||||
session: {
|
||||
create: async () => {
|
||||
createdSessions.push(directory)
|
||||
return { data: { id: `session-${createdSessions.length}` } }
|
||||
},
|
||||
shell: async () => {
|
||||
sentShell.push(directory)
|
||||
return { data: undefined }
|
||||
},
|
||||
prompt: async () => ({ data: undefined }),
|
||||
command: async () => ({ data: undefined }),
|
||||
abort: async () => ({ data: undefined }),
|
||||
},
|
||||
worktree: {
|
||||
create: async () => ({ data: { directory: `${directory}/new` } }),
|
||||
},
|
||||
})
|
||||
|
||||
beforeAll(async () => {
|
||||
const rootClient = clientFor("/repo/main")
|
||||
|
||||
mock.module("@solidjs/router", () => ({
|
||||
useNavigate: () => () => undefined,
|
||||
useParams: () => ({}),
|
||||
}))
|
||||
|
||||
mock.module("@opencode-ai/sdk/v2/client", () => ({
|
||||
createOpencodeClient: (input: { directory: string }) => {
|
||||
createdClients.push(input.directory)
|
||||
return clientFor(input.directory)
|
||||
},
|
||||
}))
|
||||
|
||||
mock.module("@opencode-ai/ui/toast", () => ({
|
||||
showToast: () => 0,
|
||||
}))
|
||||
|
||||
mock.module("@opencode-ai/util/encode", () => ({
|
||||
base64Encode: (value: string) => value,
|
||||
}))
|
||||
|
||||
mock.module("@/context/local", () => ({
|
||||
useLocal: () => ({
|
||||
model: {
|
||||
current: () => ({ id: "model", provider: { id: "provider" } }),
|
||||
variant: { current: () => undefined },
|
||||
},
|
||||
agent: {
|
||||
current: () => ({ name: "agent" }),
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
mock.module("@/context/prompt", () => ({
|
||||
usePrompt: () => ({
|
||||
current: () => promptValue,
|
||||
reset: () => undefined,
|
||||
set: () => undefined,
|
||||
context: {
|
||||
add: () => undefined,
|
||||
remove: () => undefined,
|
||||
items: () => [],
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
mock.module("@/context/layout", () => ({
|
||||
useLayout: () => ({
|
||||
handoff: {
|
||||
setTabs: () => undefined,
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
mock.module("@/context/sdk", () => ({
|
||||
useSDK: () => ({
|
||||
directory: "/repo/main",
|
||||
client: rootClient,
|
||||
url: "http://localhost:4096",
|
||||
}),
|
||||
}))
|
||||
|
||||
mock.module("@/context/sync", () => ({
|
||||
useSync: () => ({
|
||||
data: { command: [] },
|
||||
session: {
|
||||
optimistic: {
|
||||
add: () => undefined,
|
||||
remove: () => undefined,
|
||||
},
|
||||
},
|
||||
set: () => undefined,
|
||||
}),
|
||||
}))
|
||||
|
||||
mock.module("@/context/global-sync", () => ({
|
||||
useGlobalSync: () => ({
|
||||
child: (directory: string) => {
|
||||
syncedDirectories.push(directory)
|
||||
return [{}, () => undefined]
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
mock.module("@/context/platform", () => ({
|
||||
usePlatform: () => ({
|
||||
fetch: fetch,
|
||||
}),
|
||||
}))
|
||||
|
||||
mock.module("@/context/language", () => ({
|
||||
useLanguage: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
}))
|
||||
|
||||
const mod = await import("./submit")
|
||||
createPromptSubmit = mod.createPromptSubmit
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
createdClients.length = 0
|
||||
createdSessions.length = 0
|
||||
sentShell.length = 0
|
||||
syncedDirectories.length = 0
|
||||
selected = "/repo/worktree-a"
|
||||
})
|
||||
|
||||
describe("prompt submit worktree selection", () => {
|
||||
test("reads the latest worktree accessor value per submit", async () => {
|
||||
const submit = createPromptSubmit({
|
||||
info: () => undefined,
|
||||
imageAttachments: () => [],
|
||||
commentCount: () => 0,
|
||||
mode: () => "shell",
|
||||
working: () => false,
|
||||
editor: () => undefined,
|
||||
queueScroll: () => undefined,
|
||||
promptLength: (value) => value.reduce((sum, part) => sum + ("content" in part ? part.content.length : 0), 0),
|
||||
addToHistory: () => undefined,
|
||||
resetHistoryNavigation: () => undefined,
|
||||
setMode: () => undefined,
|
||||
setPopover: () => undefined,
|
||||
newSessionWorktree: () => selected,
|
||||
onNewSessionWorktreeReset: () => undefined,
|
||||
onSubmit: () => undefined,
|
||||
})
|
||||
|
||||
const event = { preventDefault: () => undefined } as unknown as Event
|
||||
|
||||
await submit.handleSubmit(event)
|
||||
selected = "/repo/worktree-b"
|
||||
await submit.handleSubmit(event)
|
||||
|
||||
expect(createdClients).toEqual(["/repo/worktree-a", "/repo/worktree-b"])
|
||||
expect(createdSessions).toEqual(["/repo/worktree-a", "/repo/worktree-b"])
|
||||
expect(sentShell).toEqual(["/repo/worktree-a", "/repo/worktree-b"])
|
||||
expect(syncedDirectories).toEqual(["/repo/worktree-a", "/repo/worktree-b"])
|
||||
})
|
||||
})
|
||||
@@ -37,7 +37,7 @@ type PromptSubmitInput = {
  resetHistoryNavigation: () => void
  setMode: (mode: "normal" | "shell") => void
  setPopover: (popover: "at" | "slash" | null) => void
-  newSessionWorktree?: string
+  newSessionWorktree?: Accessor<string | undefined>
  onNewSessionWorktreeReset?: () => void
  onSubmit?: () => void
}

@@ -137,7 +137,7 @@ export function createPromptSubmit(input: PromptSubmitInput) {
  const projectDirectory = sdk.directory
  const isNewSession = !params.id
-  const worktreeSelection = input.newSessionWorktree || "main"
+  const worktreeSelection = input.newSessionWorktree?.() || "main"

  let sessionDirectory = projectDirectory
  let client = sdk.client
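The switch from `string` to `Accessor<string | undefined>` is what the new submit.test.ts above exercises: each submit reads the worktree selected at that moment instead of the value captured when `createPromptSubmit` was first called. A minimal sketch of the difference, using plain closures rather than the app's SolidJS wiring (names here are illustrative, not from the repo):

```ts
// Sketch: passing a value snapshots it once; passing an accessor defers the read to each use.
type Accessor<T> = () => T

function makeSubmitWithValue(worktree: string | undefined) {
  // `worktree` is frozen at creation time; later selections are never observed.
  return () => worktree ?? "main"
}

function makeSubmitWithAccessor(worktree: Accessor<string | undefined>) {
  // The accessor is invoked on every submit, so it always reflects the latest selection.
  return () => worktree() ?? "main"
}

let selected: string | undefined = "/repo/worktree-a"
const stale = makeSubmitWithValue(selected)
const live = makeSubmitWithAccessor(() => selected)

selected = "/repo/worktree-b"
console.log(stale()) // "/repo/worktree-a" - stuck on the old selection
console.log(live())  // "/repo/worktree-b" - picks up the new selection
```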
@@ -6,6 +6,7 @@ let createCommentSessionForTest: typeof import("./comments").createCommentSessio

beforeAll(async () => {
  mock.module("@solidjs/router", () => ({
    useNavigate: () => () => undefined,
    useParams: () => ({}),
  }))
  mock.module("@opencode-ai/ui/context", () => ({
@@ -231,6 +231,24 @@ export function applyDirectoryEvent(input: {
      }
      break
    }
+    case "message.part.delta": {
+      const props = event.properties as { messageID: string; partID: string; field: string; delta: string }
+      const parts = input.store.part[props.messageID]
+      if (!parts) break
+      const result = Binary.search(parts, props.partID, (p) => p.id)
+      if (!result.found) break
+      input.setStore(
+        "part",
+        props.messageID,
+        produce((draft) => {
+          const part = draft[result.index]
+          const field = props.field as keyof typeof part
+          const existing = part[field] as string | undefined
+          ;(part[field] as string) = (existing ?? "") + props.delta
+        }),
+      )
+      break
+    }
    case "vcs.branch.updated": {
      const props = event.properties as { branch: string }
      const next = { branch: props.branch }
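The new `message.part.delta` case streams partial output into an existing message part: it locates the part by id in the sorted part list (binary search in the real code) and appends `delta` to the named field. A standalone sketch of that append semantics, using a plain array instead of the app's store (types and names below are illustrative only):

```ts
// Sketch of delta application: append event.delta to the matching part's field.
type Part = { id: string; text?: string }
type DeltaEvent = { messageID: string; partID: string; field: "text"; delta: string }

function applyDelta(parts: Part[], event: DeltaEvent) {
  const part = parts.find((p) => p.id === event.partID) // the real handler uses a binary search
  if (!part) return
  part[event.field] = (part[event.field] ?? "") + event.delta
}

const parts: Part[] = [{ id: "prt_1", text: "" }]
applyDelta(parts, { messageID: "msg_1", partID: "prt_1", field: "text", delta: "Hel" })
applyDelta(parts, { messageID: "msg_1", partID: "prt_1", field: "text", delta: "lo" })
console.log(parts[0].text) // "Hello"
```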
@@ -5,6 +5,7 @@ let getLegacyTerminalStorageKeys: (dir: string, legacySessionID?: string) => str

beforeAll(async () => {
  mock.module("@solidjs/router", () => ({
    useNavigate: () => () => undefined,
    useParams: () => ({}),
  }))
  mock.module("@opencode-ai/ui/context", () => ({
@@ -144,7 +144,7 @@ export const SessionItem = (props: SessionItemProps): JSX.Element => {

  const item = (
    <A
-      href={`${props.slug}/session/${props.session.id}`}
+      href={`/${props.slug}/session/${props.session.id}`}
      class={`flex items-center justify-between gap-3 min-w-0 text-left w-full focus:outline-none transition-[padding] ${props.mobile ? "pr-7" : ""} group-hover/session:pr-7 group-focus-within/session:pr-7 group-active/session:pr-7 ${props.dense ? "py-0.5" : "py-1"}`}
      onPointerEnter={scheduleHoverPrefetch}
      onPointerLeave={cancelHoverPrefetch}

@@ -285,7 +285,7 @@ export const NewSessionItem = (props: {
  const tooltip = () => props.mobile || !props.sidebarExpanded()
  const item = (
    <A
-      href={`${props.slug}/session`}
+      href={`/${props.slug}/session`}
      end
      class={`flex items-center justify-between gap-3 min-w-0 text-left w-full focus:outline-none ${props.dense ? "py-0.5" : "py-1"}`}
      onClick={() => {
@@ -99,4 +99,9 @@ describe("persist localStorage resilience", () => {

    expect(storage.getItem("direct-value")).toBe('{"value":5}')
  })
+
+  test("normalizer rejects malformed JSON payloads", () => {
+    const result = persistTesting.normalize({ value: "ok" }, '{"value":"\\x"}')
+    expect(result).toBeUndefined()
+  })
})
@@ -195,6 +195,14 @@ function parse(value: string) {
  }
}

+function normalize(defaults: unknown, raw: string, migrate?: (value: unknown) => unknown) {
+  const parsed = parse(raw)
+  if (parsed === undefined) return
+  const migrated = migrate ? migrate(parsed) : parsed
+  const merged = merge(defaults, migrated)
+  return JSON.stringify(merged)
+}
+
function workspaceStorage(dir: string) {
  const head = dir.slice(0, 12) || "workspace"
  const sum = checksum(dir) ?? "0"

@@ -291,6 +299,7 @@ function localStorageDirect(): SyncStorage {
export const PersistTesting = {
  localStorageDirect,
  localStorageWithPrefix,
+  normalize,
}

export const Persist = {

@@ -358,12 +367,11 @@ export function persisted<T>(
    getItem: (key) => {
      const raw = current.getItem(key)
      if (raw !== null) {
-        const parsed = parse(raw)
-        if (parsed === undefined) return raw
-
-        const migrated = config.migrate ? config.migrate(parsed) : parsed
-        const merged = merge(defaults, migrated)
-        const next = JSON.stringify(merged)
+        const next = normalize(defaults, raw, config.migrate)
+        if (next === undefined) {
+          current.removeItem(key)
+          return null
+        }
        if (raw !== next) current.setItem(key, next)
        return next
      }

@@ -372,16 +380,13 @@ export function persisted<T>(
        const legacyRaw = legacyStore.getItem(legacyKey)
        if (legacyRaw === null) continue

-        current.setItem(key, legacyRaw)
+        const next = normalize(defaults, legacyRaw, config.migrate)
+        if (next === undefined) {
+          legacyStore.removeItem(legacyKey)
+          continue
+        }
+        current.setItem(key, next)
        legacyStore.removeItem(legacyKey)

-        const parsed = parse(legacyRaw)
-        if (parsed === undefined) return legacyRaw
-
-        const migrated = config.migrate ? config.migrate(parsed) : parsed
-        const merged = merge(defaults, migrated)
-        const next = JSON.stringify(merged)
-        if (legacyRaw !== next) current.setItem(key, next)
        return next
      }

@@ -405,12 +410,11 @@ export function persisted<T>(
    getItem: async (key) => {
      const raw = await current.getItem(key)
      if (raw !== null) {
-        const parsed = parse(raw)
-        if (parsed === undefined) return raw
-
-        const migrated = config.migrate ? config.migrate(parsed) : parsed
-        const merged = merge(defaults, migrated)
-        const next = JSON.stringify(merged)
+        const next = normalize(defaults, raw, config.migrate)
+        if (next === undefined) {
+          await current.removeItem(key).catch(() => undefined)
+          return null
+        }
        if (raw !== next) await current.setItem(key, next)
        return next
      }

@@ -421,16 +425,13 @@ export function persisted<T>(
        const legacyRaw = await legacyStore.getItem(legacyKey)
        if (legacyRaw === null) continue

-        await current.setItem(key, legacyRaw)
+        const next = normalize(defaults, legacyRaw, config.migrate)
+        if (next === undefined) {
+          await legacyStore.removeItem(legacyKey).catch(() => undefined)
+          continue
+        }
+        await current.setItem(key, next)
        await legacyStore.removeItem(legacyKey)

-        const parsed = parse(legacyRaw)
-        if (parsed === undefined) return legacyRaw
-
-        const migrated = config.migrate ? config.migrate(parsed) : parsed
-        const merged = merge(defaults, migrated)
-        const next = JSON.stringify(merged)
-        if (legacyRaw !== next) await current.setItem(key, next)
        return next
      }
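The persist refactor above funnels every read path (current store, legacy store, sync and async) through one `normalize` step: parse the raw JSON, run the optional `migrate`, merge onto the defaults, and re-serialize; a parse failure yields `undefined`, which the callers treat as "drop the stored value". A self-contained sketch of that flow, with a simplified shallow `merge` rather than the repo's helper:

```ts
// Sketch of the normalize flow: parse -> migrate -> merge with defaults -> re-serialize.
function parse(value: string): unknown {
  try {
    return JSON.parse(value)
  } catch {
    return undefined // malformed payloads are rejected rather than propagated
  }
}

function merge(defaults: unknown, value: unknown): unknown {
  // Simplified shallow merge; the real helper is richer, but the shape of the result is the same.
  if (typeof defaults === "object" && defaults && typeof value === "object" && value) {
    return { ...defaults, ...value }
  }
  return value ?? defaults
}

function normalize(defaults: unknown, raw: string, migrate?: (value: unknown) => unknown) {
  const parsed = parse(raw)
  if (parsed === undefined) return // caller removes the bad entry and falls back to defaults
  const migrated = migrate ? migrate(parsed) : parsed
  return JSON.stringify(merge(defaults, migrated))
}

console.log(normalize({ value: "ok" }, '{"value":5}'))     // '{"value":5}'
console.log(normalize({ value: "ok" }, '{"value":"\\x"}')) // undefined: invalid JSON escape
```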
|
||||
@@ -351,8 +351,8 @@ export const dict = {
|
||||
"changelog.empty":
|
||||
"\u0644\u0645 \u064a\u062a\u0645 \u0627\u0644\u0639\u062b\u0648\u0631 \u0639\u0644\u0649 \u0623\u064a \u0625\u062f\u062e\u0627\u0644\u0627\u062a \u0641\u064a \u0633\u062c\u0644 \u0627\u0644\u062a\u063a\u064a\u064a\u0631\u0627\u062a.",
|
||||
"changelog.viewJson": "\u0639\u0631\u0636 JSON",
|
||||
"workspace.nav.zen": "Zen",
|
||||
"workspace.nav.apiKeys": "مفاتيح API",
|
||||
"workspace.nav.zen": "زين",
|
||||
"workspace.nav.apiKeys": "API المفاتيح",
|
||||
"workspace.nav.members": "أعضاء",
|
||||
"workspace.nav.billing": "الفواتير",
|
||||
"workspace.nav.settings": "إعدادات",
|
||||
@@ -365,14 +365,14 @@ export const dict = {
|
||||
"workspace.newUser.feature.quality.title": "أعلى جودة",
|
||||
"workspace.newUser.feature.quality.body":
|
||||
"الوصول إلى النماذج التي تم تكوينها لتحقيق الأداء الأمثل - لا يوجد تخفيضات أو توجيه إلى موفري الخدمة الأرخص.",
|
||||
"workspace.newUser.feature.lockin.title": "بدون احتجاز بمزوّد واحد",
|
||||
"workspace.newUser.feature.lockin.title": "لا يوجد قفل",
|
||||
"workspace.newUser.feature.lockin.body":
|
||||
"استخدم Zen مع أي وكيل ترميز، واستمر في استخدام موفري الخدمات الآخرين مع opencode وقتما تشاء.",
|
||||
"workspace.newUser.copyApiKey": "انسخ مفتاح API",
|
||||
"workspace.newUser.copyKey": "نسخ المفتاح",
|
||||
"workspace.newUser.copied": "منسوخ!",
|
||||
"workspace.newUser.step.enableBilling": "تمكين الفوترة",
|
||||
"workspace.newUser.step.login.before": "شغّل",
|
||||
"workspace.newUser.step.login.before": "يجري",
|
||||
"workspace.newUser.step.login.after": "وحدد opencode",
|
||||
"workspace.newUser.step.pasteKey": "الصق مفتاح API الخاص بك",
|
||||
"workspace.newUser.step.models.before": "ابدأ opencode ثم قم بالتشغيل",
|
||||
@@ -390,7 +390,7 @@ export const dict = {
|
||||
"workspace.providers.saving": "توفير...",
|
||||
"workspace.providers.save": "يحفظ",
|
||||
"workspace.providers.table.provider": "مزود",
|
||||
"workspace.providers.table.apiKey": "مفتاح API",
|
||||
"workspace.providers.table.apiKey": "API المفتاح",
|
||||
"workspace.usage.title": "تاريخ الاستخدام",
|
||||
"workspace.usage.subtitle": "استخدام وتكاليف API الأخيرة.",
|
||||
"workspace.usage.empty": "قم بإجراء أول مكالمة API للبدء.",
|
||||
@@ -398,25 +398,25 @@ export const dict = {
|
||||
"workspace.usage.table.model": "نموذج",
|
||||
"workspace.usage.table.input": "مدخل",
|
||||
"workspace.usage.table.output": "الإخراج",
|
||||
"workspace.usage.table.cost": "التكلفة",
|
||||
"workspace.usage.table.cost": "يكلف",
|
||||
"workspace.usage.breakdown.input": "مدخل",
|
||||
"workspace.usage.breakdown.cacheRead": "قراءة ذاكرة التخزين المؤقت",
|
||||
"workspace.usage.breakdown.cacheWrite": "كتابة ذاكرة التخزين المؤقت",
|
||||
"workspace.usage.breakdown.output": "الإخراج",
|
||||
"workspace.usage.breakdown.reasoning": "المنطق",
|
||||
"workspace.usage.subscription": "الاشتراك (${{amount}})",
|
||||
"workspace.cost.title": "التكلفة",
|
||||
"workspace.cost.title": "يكلف",
|
||||
"workspace.cost.subtitle": "تكاليف الاستخدام مقسمة حسب النموذج.",
|
||||
"workspace.cost.allModels": "جميع النماذج",
|
||||
"workspace.cost.allModels": "جميع الموديلات",
|
||||
"workspace.cost.allKeys": "جميع المفاتيح",
|
||||
"workspace.cost.deletedSuffix": "(محذوف)",
|
||||
"workspace.cost.empty": "لا توجد بيانات استخدام متاحة للفترة المحددة.",
|
||||
"workspace.cost.subscriptionShort": "اشتراك",
|
||||
"workspace.keys.title": "مفاتيح API",
|
||||
"workspace.cost.subscriptionShort": "الفرعية",
|
||||
"workspace.keys.title": "API المفاتيح",
|
||||
"workspace.keys.subtitle": "إدارة مفاتيح API الخاصة بك للوصول إلى خدمات opencode.",
|
||||
"workspace.keys.create": "قم بإنشاء مفتاح API",
|
||||
"workspace.keys.placeholder": "أدخل اسم المفتاح",
|
||||
"workspace.keys.empty": "أنشئ مفتاح API لبوابة opencode",
|
||||
"workspace.keys.empty": "قم بإنشاء مفتاح opencode للبوابة API",
|
||||
"workspace.keys.table.name": "اسم",
|
||||
"workspace.keys.table.key": "مفتاح",
|
||||
"workspace.keys.table.createdBy": "تم الإنشاء بواسطة",
|
||||
@@ -442,14 +442,14 @@ export const dict = {
|
||||
"workspace.members.table.email": "بريد إلكتروني",
|
||||
"workspace.members.table.role": "دور",
|
||||
"workspace.members.table.monthLimit": "حد الشهر",
|
||||
"workspace.members.role.admin": "مسؤول",
|
||||
"workspace.members.role.admin": "مسؤل",
|
||||
"workspace.members.role.adminDescription": "يمكن إدارة النماذج، والأعضاء، والفواتير",
|
||||
"workspace.members.role.member": "عضو",
|
||||
"workspace.members.role.memberDescription": "يمكنهم فقط إنشاء مفاتيح API لأنفسهم",
|
||||
"workspace.settings.title": "إعدادات",
|
||||
"workspace.settings.subtitle": "قم بتحديث اسم مساحة العمل الخاصة بك وتفضيلاتك.",
|
||||
"workspace.settings.workspaceName": "اسم مساحة العمل",
|
||||
"workspace.settings.defaultName": "الافتراضي",
|
||||
"workspace.settings.defaultName": "تقصير",
|
||||
"workspace.settings.updating": "جارٍ التحديث...",
|
||||
"workspace.settings.save": "يحفظ",
|
||||
"workspace.settings.edit": "يحرر",
|
||||
@@ -461,37 +461,37 @@ export const dict = {
|
||||
"workspace.billing.add": "أضف $",
|
||||
"workspace.billing.enterAmount": "أدخل المبلغ",
|
||||
"workspace.billing.loading": "تحميل...",
|
||||
"workspace.billing.addAction": "إضافة",
|
||||
"workspace.billing.addAction": "يضيف",
|
||||
"workspace.billing.addBalance": "إضافة الرصيد",
|
||||
"workspace.billing.linkedToStripe": "مرتبط بـ Stripe",
|
||||
"workspace.billing.manage": "إدارة",
|
||||
"workspace.billing.linkedToStripe": "مرتبطة بالشريط",
|
||||
"workspace.billing.manage": "يدير",
|
||||
"workspace.billing.enable": "تمكين الفوترة",
|
||||
"workspace.monthlyLimit.title": "الحد الشهري",
|
||||
"workspace.monthlyLimit.subtitle": "قم بتعيين حد الاستخدام الشهري لحسابك.",
|
||||
"workspace.monthlyLimit.placeholder": "50",
|
||||
"workspace.monthlyLimit.setting": "جارٍ التعيين...",
|
||||
"workspace.monthlyLimit.setting": "جلسة...",
|
||||
"workspace.monthlyLimit.set": "تعيين",
|
||||
"workspace.monthlyLimit.edit": "تحرير الحد",
|
||||
"workspace.monthlyLimit.noLimit": "لم يتم تعيين حد الاستخدام.",
|
||||
"workspace.monthlyLimit.currentUsage.beforeMonth": "الاستخدام الحالي ل",
|
||||
"workspace.monthlyLimit.currentUsage.beforeAmount": "هو $",
|
||||
"workspace.reload.title": "إعادة الشحن التلقائي",
|
||||
"workspace.reload.disabled.before": "إعادة الشحن التلقائي",
|
||||
"workspace.reload.disabled.state": "معطّل",
|
||||
"workspace.reload.disabled.after": "فعّلها لإعادة شحن الرصيد تلقائيًا عندما يكون منخفضًا.",
|
||||
"workspace.reload.enabled.before": "إعادة الشحن التلقائي",
|
||||
"workspace.reload.title": "إعادة التحميل التلقائي",
|
||||
"workspace.reload.disabled.before": "إعادة التحميل التلقائي هو",
|
||||
"workspace.reload.disabled.state": "عاجز",
|
||||
"workspace.reload.disabled.after": "تمكين إعادة التحميل تلقائيًا عندما يكون الرصيد منخفضًا.",
|
||||
"workspace.reload.enabled.before": "إعادة التحميل التلقائي هو",
|
||||
"workspace.reload.enabled.state": "ممكّن",
|
||||
"workspace.reload.enabled.middle": "سنعيد شحن رصيدك بمبلغ",
|
||||
"workspace.reload.enabled.middle": "سنقوم بإعادة التحميل",
|
||||
"workspace.reload.processingFee": "رسوم المعالجة",
|
||||
"workspace.reload.enabled.after": "عندما يصل الرصيد إلى",
|
||||
"workspace.reload.enabled.after": "عندما يصل التوازن",
|
||||
"workspace.reload.edit": "يحرر",
|
||||
"workspace.reload.enable": "تفعيل",
|
||||
"workspace.reload.enableAutoReload": "تفعيل إعادة الشحن التلقائي",
|
||||
"workspace.reload.reloadAmount": "مبلغ إعادة الشحن $",
|
||||
"workspace.reload.enable": "يُمكَِن",
|
||||
"workspace.reload.enableAutoReload": "تمكين إعادة التحميل التلقائي",
|
||||
"workspace.reload.reloadAmount": "إعادة تحميل $",
|
||||
"workspace.reload.whenBalanceReaches": "عندما يصل الرصيد إلى $",
|
||||
"workspace.reload.saving": "توفير...",
|
||||
"workspace.reload.save": "يحفظ",
|
||||
"workspace.reload.failedAt": "فشلت إعادة الشحن في",
|
||||
"workspace.reload.failedAt": "فشلت عملية إعادة التحميل عند",
|
||||
"workspace.reload.reason": "سبب:",
|
||||
"workspace.reload.updatePaymentMethod": "يرجى تحديث طريقة الدفع الخاصة بك والمحاولة مرة أخرى.",
|
||||
"workspace.reload.retrying": "جارٍ إعادة المحاولة...",
|
||||
@@ -500,11 +500,11 @@ export const dict = {
|
||||
"workspace.payments.subtitle": "معاملات الدفع الأخيرة.",
|
||||
"workspace.payments.table.date": "تاريخ",
|
||||
"workspace.payments.table.paymentId": "معرف الدفع",
|
||||
"workspace.payments.table.amount": "المبلغ",
|
||||
"workspace.payments.table.amount": "كمية",
|
||||
"workspace.payments.table.receipt": "إيصال",
|
||||
"workspace.payments.type.credit": "ائتمان",
|
||||
"workspace.payments.type.subscription": "الاشتراك",
|
||||
"workspace.payments.view": "عرض",
|
||||
"workspace.payments.view": "منظر",
|
||||
"workspace.black.loading": "تحميل...",
|
||||
"workspace.black.time.day": "يوم",
|
||||
"workspace.black.time.days": "أيام",
|
||||
@@ -521,8 +521,8 @@ export const dict = {
|
||||
"workspace.black.subscription.resetsIn": "إعادة تعيين في",
|
||||
"workspace.black.subscription.useBalance": "استخدم رصيدك المتوفر بعد الوصول إلى حدود الاستخدام",
|
||||
"workspace.black.waitlist.title": "قائمة الانتظار",
|
||||
"workspace.black.waitlist.joined": "أنت على قائمة الانتظار لخطة OpenCode Black بقيمة ${{plan}} شهريًا.",
|
||||
"workspace.black.waitlist.ready": "نحن مستعدون لتسجيلك في خطة OpenCode Black بقيمة ${{plan}} شهريًا.",
|
||||
"workspace.black.waitlist.joined": "أنت على قائمة الانتظار للخطة السوداء {{plan}} دولار شهريًا OpenCode.",
|
||||
"workspace.black.waitlist.ready": "نحن على استعداد لتسجيلك في خطة Black {{plan}} الشهرية OpenCode.",
|
||||
"workspace.black.waitlist.leave": "ترك قائمة الانتظار",
|
||||
"workspace.black.waitlist.leaving": "مغادرة...",
|
||||
"workspace.black.waitlist.left": "غادر",
|
||||
|
||||
@@ -294,18 +294,18 @@ export const dict = {
|
||||
"workspace.home.billing.currentBalance": "Nuværende saldo",
|
||||
"workspace.newUser.feature.tested.title": "Testede og verificerede modeller",
|
||||
"workspace.newUser.feature.tested.body":
|
||||
"Vi har benchmarket og testet modeller specifikt til kodningsagenter for at sikre den bedste ydeevne.",
|
||||
"Vi har benchmarket og testet modeller specifikt til kodningsmidler for at sikre den bedste ydeevne.",
|
||||
"workspace.newUser.feature.quality.title": "Højeste kvalitet",
|
||||
"workspace.newUser.feature.quality.body":
|
||||
"Få adgang til modeller konfigureret til optimal ydeevne - ingen nedgraderinger eller routing til billigere udbydere.",
|
||||
"workspace.newUser.feature.lockin.title": "Ingen indlåsning",
|
||||
"workspace.newUser.feature.lockin.body":
|
||||
"Brug Zen med en hvilken som helst kodningsagent, og fortsæt med at bruge andre udbydere med opencode, når du vil.",
|
||||
"workspace.newUser.copyApiKey": "Kopiér API-nøgle",
|
||||
"workspace.newUser.copyApiKey": "Kopiér nøglen API",
|
||||
"workspace.newUser.copyKey": "Kopier nøgle",
|
||||
"workspace.newUser.copied": "Kopieret!",
|
||||
"workspace.newUser.step.enableBilling": "Aktiver fakturering",
|
||||
"workspace.newUser.step.login.before": "Kør",
|
||||
"workspace.newUser.step.login.before": "Løbe",
|
||||
"workspace.newUser.step.login.after": "og vælg opencode",
|
||||
"workspace.newUser.step.pasteKey": "Indsæt din API nøgle",
|
||||
"workspace.newUser.step.models.before": "Start opencode og kør",
|
||||
@@ -316,12 +316,12 @@ export const dict = {
|
||||
"workspace.models.table.enabled": "Aktiveret",
|
||||
"workspace.providers.title": "Medbring din egen nøgle",
|
||||
"workspace.providers.subtitle": "Konfigurer dine egne API nøgler fra AI-udbydere.",
|
||||
"workspace.providers.placeholder": "Indtast {{provider}} API-nøgle ({{prefix}}...)",
|
||||
"workspace.providers.placeholder": "Indtast nøglen {{provider}} API ({{prefix}}...)",
|
||||
"workspace.providers.configure": "Konfigurer",
|
||||
"workspace.providers.edit": "Rediger",
|
||||
"workspace.providers.edit": "Redigere",
|
||||
"workspace.providers.delete": "Slet",
|
||||
"workspace.providers.saving": "Gemmer...",
|
||||
"workspace.providers.save": "Gem",
|
||||
"workspace.providers.save": "Spare",
|
||||
"workspace.providers.table.provider": "Udbyder",
|
||||
"workspace.providers.table.apiKey": "API Nøgle",
|
||||
"workspace.usage.title": "Brugshistorik",
|
||||
@@ -330,15 +330,15 @@ export const dict = {
|
||||
"workspace.usage.table.date": "Dato",
|
||||
"workspace.usage.table.model": "Model",
|
||||
"workspace.usage.table.input": "Input",
|
||||
"workspace.usage.table.output": "Output",
|
||||
"workspace.usage.table.cost": "Omkostning",
|
||||
"workspace.usage.table.output": "Produktion",
|
||||
"workspace.usage.table.cost": "Koste",
|
||||
"workspace.usage.breakdown.input": "Input",
|
||||
"workspace.usage.breakdown.cacheRead": "Cache læst",
|
||||
"workspace.usage.breakdown.cacheWrite": "Cache skriv",
|
||||
"workspace.usage.breakdown.output": "Output",
|
||||
"workspace.usage.breakdown.output": "Produktion",
|
||||
"workspace.usage.breakdown.reasoning": "Ræsonnement",
|
||||
"workspace.usage.subscription": "abonnement (${{amount}})",
|
||||
"workspace.cost.title": "Omkostninger",
|
||||
"workspace.cost.title": "Koste",
|
||||
"workspace.cost.subtitle": "Brugsomkostninger opdelt efter model.",
|
||||
"workspace.cost.allModels": "Alle modeller",
|
||||
"workspace.cost.allKeys": "Alle nøgler",
|
||||
@@ -354,7 +354,7 @@ export const dict = {
|
||||
"workspace.keys.table.key": "Nøgle",
|
||||
"workspace.keys.table.createdBy": "Skabt af",
|
||||
"workspace.keys.table.lastUsed": "Sidst brugt",
|
||||
"workspace.keys.copyApiKey": "Kopiér API-nøgle",
|
||||
"workspace.keys.copyApiKey": "Kopiér nøglen API",
|
||||
"workspace.keys.delete": "Slet",
|
||||
"workspace.members.title": "Medlemmer",
|
||||
"workspace.members.subtitle": "Administrer arbejdsområdemedlemmer og deres tilladelser.",
|
||||
@@ -368,10 +368,10 @@ export const dict = {
|
||||
"workspace.members.noLimit": "Ingen grænse",
|
||||
"workspace.members.noLimitLowercase": "ingen grænse",
|
||||
"workspace.members.invited": "inviteret",
|
||||
"workspace.members.edit": "Rediger",
|
||||
"workspace.members.edit": "Redigere",
|
||||
"workspace.members.delete": "Slet",
|
||||
"workspace.members.saving": "Gemmer...",
|
||||
"workspace.members.save": "Gem",
|
||||
"workspace.members.save": "Spare",
|
||||
"workspace.members.table.email": "E-mail",
|
||||
"workspace.members.table.role": "Rolle",
|
||||
"workspace.members.table.monthLimit": "Månedsgrænse",
|
||||
@@ -382,10 +382,10 @@ export const dict = {
|
||||
"workspace.settings.title": "Indstillinger",
|
||||
"workspace.settings.subtitle": "Opdater dit arbejdsområdes navn og præferencer.",
|
||||
"workspace.settings.workspaceName": "Arbejdsområdets navn",
|
||||
"workspace.settings.defaultName": "Standard",
|
||||
"workspace.settings.defaultName": "Misligholdelse",
|
||||
"workspace.settings.updating": "Opdaterer...",
|
||||
"workspace.settings.save": "Gem",
|
||||
"workspace.settings.edit": "Rediger",
|
||||
"workspace.settings.save": "Spare",
|
||||
"workspace.settings.edit": "Redigere",
|
||||
"workspace.billing.title": "Fakturering",
|
||||
"workspace.billing.subtitle.beforeLink": "Administrer betalingsmetoder.",
|
||||
"workspace.billing.contactUs": "Kontakt os",
|
||||
@@ -394,10 +394,10 @@ export const dict = {
|
||||
"workspace.billing.add": "Tilføj $",
|
||||
"workspace.billing.enterAmount": "Indtast beløb",
|
||||
"workspace.billing.loading": "Indlæser...",
|
||||
"workspace.billing.addAction": "Tilføj",
|
||||
"workspace.billing.addAction": "Tilføje",
|
||||
"workspace.billing.addBalance": "Tilføj balance",
|
||||
"workspace.billing.linkedToStripe": "Forbundet til Stripe",
|
||||
"workspace.billing.manage": "Administrer",
|
||||
"workspace.billing.manage": "Styre",
|
||||
"workspace.billing.enable": "Aktiver fakturering",
|
||||
"workspace.monthlyLimit.title": "Månedlig grænse",
|
||||
"workspace.monthlyLimit.subtitle": "Indstil en månedlig forbrugsgrænse for din konto.",
|
||||
@@ -408,23 +408,23 @@ export const dict = {
|
||||
"workspace.monthlyLimit.noLimit": "Ingen forbrugsgrænse angivet.",
|
||||
"workspace.monthlyLimit.currentUsage.beforeMonth": "Nuværende brug for",
|
||||
"workspace.monthlyLimit.currentUsage.beforeAmount": "er $",
|
||||
"workspace.reload.title": "Automatisk genopfyldning",
|
||||
"workspace.reload.disabled.before": "Automatisk genopfyldning er",
|
||||
"workspace.reload.disabled.state": "deaktiveret",
|
||||
"workspace.reload.disabled.after": "Aktiver for automatisk at genopfylde, når saldoen er lav.",
|
||||
"workspace.reload.enabled.before": "Automatisk genopfyldning er",
|
||||
"workspace.reload.title": "Automatisk genindlæsning",
|
||||
"workspace.reload.disabled.before": "Automatisk genindlæsning er",
|
||||
"workspace.reload.disabled.state": "handicappet",
|
||||
"workspace.reload.disabled.after": "Aktiver for automatisk at genindlæse, når balancen er lav.",
|
||||
"workspace.reload.enabled.before": "Automatisk genindlæsning er",
|
||||
"workspace.reload.enabled.state": "aktiveret",
|
||||
"workspace.reload.enabled.middle": "Vi genopfylder",
|
||||
"workspace.reload.enabled.middle": "Vi genindlæser",
|
||||
"workspace.reload.processingFee": "ekspeditionsgebyr",
|
||||
"workspace.reload.enabled.after": "når balancen er nået",
|
||||
"workspace.reload.edit": "Rediger",
|
||||
"workspace.reload.edit": "Redigere",
|
||||
"workspace.reload.enable": "Aktiver",
|
||||
"workspace.reload.enableAutoReload": "Aktiver automatisk genopfyldning",
|
||||
"workspace.reload.reloadAmount": "Genopfyld $",
|
||||
"workspace.reload.enableAutoReload": "Aktiver automatisk genindlæsning",
|
||||
"workspace.reload.reloadAmount": "Genindlæs $",
|
||||
"workspace.reload.whenBalanceReaches": "Når saldoen når $",
|
||||
"workspace.reload.saving": "Gemmer...",
|
||||
"workspace.reload.save": "Gem",
|
||||
"workspace.reload.failedAt": "Genopfyldning mislykkedes kl",
|
||||
"workspace.reload.save": "Spare",
|
||||
"workspace.reload.failedAt": "Genindlæsning mislykkedes kl",
|
||||
"workspace.reload.reason": "Årsag:",
|
||||
"workspace.reload.updatePaymentMethod": "Opdater din betalingsmetode, og prøv igen.",
|
||||
"workspace.reload.retrying": "Prøver igen...",
|
||||
@@ -434,10 +434,10 @@ export const dict = {
|
||||
"workspace.payments.table.date": "Dato",
|
||||
"workspace.payments.table.paymentId": "Betalings-id",
|
||||
"workspace.payments.table.amount": "Beløb",
|
||||
"workspace.payments.table.receipt": "Kvittering",
|
||||
"workspace.payments.table.receipt": "Modtagelse",
|
||||
"workspace.payments.type.credit": "kredit",
|
||||
"workspace.payments.type.subscription": "abonnement",
|
||||
"workspace.payments.view": "Vis",
|
||||
"workspace.payments.view": "Udsigt",
|
||||
"workspace.black.loading": "Indlæser...",
|
||||
"workspace.black.time.day": "dag",
|
||||
"workspace.black.time.days": "dage",
|
||||
@@ -458,8 +458,8 @@ export const dict = {
|
||||
"workspace.black.waitlist.ready": "Vi er klar til at tilmelde dig ${{plan}} per måned OpenCode Black plan.",
|
||||
"workspace.black.waitlist.leave": "Forlad venteliste",
|
||||
"workspace.black.waitlist.leaving": "Forlader...",
|
||||
"workspace.black.waitlist.left": "Forladt",
|
||||
"workspace.black.waitlist.enroll": "Tilmeld",
|
||||
"workspace.black.waitlist.left": "Venstre",
|
||||
"workspace.black.waitlist.enroll": "Indskrive",
|
||||
"workspace.black.waitlist.enrolling": "Tilmelder...",
|
||||
"workspace.black.waitlist.enrolled": "Tilmeldt",
|
||||
"workspace.black.waitlist.enrollNote":
|
||||
|
||||
@@ -306,27 +306,27 @@ export const dict = {
|
||||
"workspace.newUser.feature.lockin.title": "Kein Lock-in",
|
||||
"workspace.newUser.feature.lockin.body":
|
||||
"Verwenden Sie Zen mit einem beliebigen Codierungsagenten und nutzen Sie weiterhin andere Anbieter mit opencode, wann immer Sie möchten.",
|
||||
"workspace.newUser.copyApiKey": "API-Schlüssel kopieren",
|
||||
"workspace.newUser.copyApiKey": "Kopieren Sie den Schlüssel API",
|
||||
"workspace.newUser.copyKey": "Schlüssel kopieren",
|
||||
"workspace.newUser.copied": "Kopiert!",
|
||||
"workspace.newUser.step.enableBilling": "Abrechnung aktivieren",
|
||||
"workspace.newUser.step.login.before": "Führe",
|
||||
"workspace.newUser.step.login.before": "Laufen",
|
||||
"workspace.newUser.step.login.after": "und wählen Sie opencode",
|
||||
"workspace.newUser.step.pasteKey": "Fügen Sie Ihren API-Schlüssel ein",
|
||||
"workspace.newUser.step.models.before": "Starte opencode und führe",
|
||||
"workspace.newUser.step.models.before": "Starten Sie opencode und führen Sie es aus",
|
||||
"workspace.newUser.step.models.after": "um ein Modell auszuwählen",
|
||||
"workspace.models.title": "Modelle",
|
||||
"workspace.models.subtitle.beforeLink":
|
||||
"Verwalten Sie, auf welche Modelle Arbeitsbereichsmitglieder zugreifen können.",
|
||||
"workspace.models.table.model": "Modell",
|
||||
"workspace.models.table.enabled": "Aktiviert",
|
||||
"workspace.models.table.enabled": "Ermöglicht",
|
||||
"workspace.providers.title": "Bringen Sie Ihren eigenen Schlüssel mit",
|
||||
"workspace.providers.subtitle": "Konfigurieren Sie Ihre eigenen API-Schlüssel von KI-Anbietern.",
|
||||
"workspace.providers.placeholder": "Geben Sie den Schlüssel {{provider}} API ein ({{prefix}}...)",
|
||||
"workspace.providers.configure": "Konfigurieren",
|
||||
"workspace.providers.edit": "Bearbeiten",
|
||||
"workspace.providers.delete": "Löschen",
|
||||
"workspace.providers.saving": "Wird gespeichert...",
|
||||
"workspace.providers.saving": "Sparen...",
|
||||
"workspace.providers.save": "Speichern",
|
||||
"workspace.providers.table.provider": "Anbieter",
|
||||
"workspace.providers.table.apiKey": "API-Schlüssel",
|
||||
@@ -335,14 +335,14 @@ export const dict = {
|
||||
"workspace.usage.empty": "Machen Sie Ihren ersten API-Aufruf, um loszulegen.",
|
||||
"workspace.usage.table.date": "Datum",
|
||||
"workspace.usage.table.model": "Modell",
|
||||
"workspace.usage.table.input": "Input",
|
||||
"workspace.usage.table.output": "Output",
|
||||
"workspace.usage.table.input": "Eingang",
|
||||
"workspace.usage.table.output": "Ausgabe",
|
||||
"workspace.usage.table.cost": "Kosten",
|
||||
"workspace.usage.breakdown.input": "Input",
|
||||
"workspace.usage.breakdown.input": "Eingang",
|
||||
"workspace.usage.breakdown.cacheRead": "Cache-Lesen",
|
||||
"workspace.usage.breakdown.cacheWrite": "Cache-Schreiben",
|
||||
"workspace.usage.breakdown.output": "Output",
|
||||
"workspace.usage.breakdown.reasoning": "Reasoning",
|
||||
"workspace.usage.breakdown.output": "Ausgabe",
|
||||
"workspace.usage.breakdown.reasoning": "Argumentation",
|
||||
"workspace.usage.subscription": "Abonnement (${{amount}})",
|
||||
"workspace.cost.title": "Kosten",
|
||||
"workspace.cost.subtitle": "Nutzungskosten aufgeschlüsselt nach Modell.",
|
||||
@@ -360,12 +360,12 @@ export const dict = {
|
||||
"workspace.keys.table.key": "Schlüssel",
|
||||
"workspace.keys.table.createdBy": "Erstellt von",
|
||||
"workspace.keys.table.lastUsed": "Zuletzt verwendet",
|
||||
"workspace.keys.copyApiKey": "API-Schlüssel kopieren",
|
||||
"workspace.keys.copyApiKey": "Kopieren Sie den Schlüssel API",
|
||||
"workspace.keys.delete": "Löschen",
|
||||
"workspace.members.title": "Mitglieder",
|
||||
"workspace.members.subtitle": "Verwalten Sie Arbeitsbereichsmitglieder und ihre Berechtigungen.",
|
||||
"workspace.members.invite": "Mitglied einladen",
|
||||
"workspace.members.inviting": "Wird eingeladen...",
|
||||
"workspace.members.inviting": "Einladend...",
|
||||
"workspace.members.beta.beforeLink": "Während der Betaversion sind Arbeitsbereiche für Teams kostenlos.",
|
||||
"workspace.members.form.invitee": "Eingeladen",
|
||||
"workspace.members.form.emailPlaceholder": "Geben Sie Ihre E-Mail-Adresse ein",
|
||||
@@ -376,7 +376,7 @@ export const dict = {
|
||||
"workspace.members.invited": "eingeladen",
|
||||
"workspace.members.edit": "Bearbeiten",
|
||||
"workspace.members.delete": "Löschen",
|
||||
"workspace.members.saving": "Wird gespeichert...",
|
||||
"workspace.members.saving": "Sparen...",
|
||||
"workspace.members.save": "Speichern",
|
||||
"workspace.members.table.email": "E-Mail",
|
||||
"workspace.members.table.role": "Rolle",
|
||||
@@ -408,30 +408,30 @@ export const dict = {
|
||||
"workspace.monthlyLimit.title": "Monatliches Limit",
|
||||
"workspace.monthlyLimit.subtitle": "Legen Sie ein monatliches Nutzungslimit für Ihr Konto fest.",
|
||||
"workspace.monthlyLimit.placeholder": "50",
|
||||
"workspace.monthlyLimit.setting": "Wird gesetzt...",
|
||||
"workspace.monthlyLimit.set": "Festlegen",
|
||||
"workspace.monthlyLimit.setting": "Einstellung...",
|
||||
"workspace.monthlyLimit.set": "Satz",
|
||||
"workspace.monthlyLimit.edit": "Limit bearbeiten",
|
||||
"workspace.monthlyLimit.noLimit": "Kein Nutzungslimit festgelegt.",
|
||||
"workspace.monthlyLimit.currentUsage.beforeMonth": "Aktuelle Nutzung für",
|
||||
"workspace.monthlyLimit.currentUsage.beforeAmount": "ist $",
|
||||
"workspace.reload.title": "Automatische Aufladung",
|
||||
"workspace.reload.disabled.before": "Automatische Aufladung ist",
|
||||
"workspace.reload.title": "Automatisches Neuladen",
|
||||
"workspace.reload.disabled.before": "Automatisches Nachladen ist",
|
||||
"workspace.reload.disabled.state": "deaktiviert",
|
||||
"workspace.reload.disabled.after":
|
||||
"Aktivieren Sie diese Option, damit bei niedrigem Kontostand automatisch aufgeladen wird.",
|
||||
"workspace.reload.enabled.before": "Automatische Aufladung ist",
|
||||
"workspace.reload.enabled.state": "aktiviert",
|
||||
"workspace.reload.enabled.middle": "Wir laden auf",
|
||||
"Aktivieren Sie diese Option, um das Guthaben automatisch neu zu laden, wenn das Guthaben niedrig ist.",
|
||||
"workspace.reload.enabled.before": "Automatisches Nachladen ist",
|
||||
"workspace.reload.enabled.state": "ermöglicht",
|
||||
"workspace.reload.enabled.middle": "Wir laden nach",
|
||||
"workspace.reload.processingFee": "Bearbeitungsgebühr",
|
||||
"workspace.reload.enabled.after": "sobald der Kontostand",
|
||||
"workspace.reload.enabled.after": "wenn das Gleichgewicht erreicht ist",
|
||||
"workspace.reload.edit": "Bearbeiten",
|
||||
"workspace.reload.enable": "Aktivieren",
|
||||
"workspace.reload.enableAutoReload": "Automatische Aufladung aktivieren",
|
||||
"workspace.reload.reloadAmount": "Aufladebetrag $",
|
||||
"workspace.reload.whenBalanceReaches": "Wenn der Kontostand $ erreicht",
|
||||
"workspace.reload.saving": "Wird gespeichert...",
|
||||
"workspace.reload.enableAutoReload": "Aktivieren Sie das automatische Neuladen",
|
||||
"workspace.reload.reloadAmount": "$ neu laden",
|
||||
"workspace.reload.whenBalanceReaches": "Wenn der Saldo $ erreicht",
|
||||
"workspace.reload.saving": "Sparen...",
|
||||
"workspace.reload.save": "Speichern",
|
||||
"workspace.reload.failedAt": "Aufladung fehlgeschlagen am",
|
||||
"workspace.reload.failedAt": "Neuladen fehlgeschlagen bei",
|
||||
"workspace.reload.reason": "Grund:",
|
||||
"workspace.reload.updatePaymentMethod": "Bitte aktualisieren Sie Ihre Zahlungsmethode und versuchen Sie es erneut.",
|
||||
"workspace.reload.retrying": "Erneuter Versuch...",
|
||||
@@ -440,11 +440,11 @@ export const dict = {
|
||||
"workspace.payments.subtitle": "Letzte Zahlungsvorgänge.",
|
||||
"workspace.payments.table.date": "Datum",
|
||||
"workspace.payments.table.paymentId": "Zahlungs-ID",
|
||||
"workspace.payments.table.amount": "Betrag",
|
||||
"workspace.payments.table.amount": "Menge",
|
||||
"workspace.payments.table.receipt": "Quittung",
|
||||
"workspace.payments.type.credit": "Kredit",
|
||||
"workspace.payments.type.subscription": "Abonnement",
|
||||
"workspace.payments.view": "Anzeigen",
|
||||
"workspace.payments.view": "Sicht",
|
||||
"workspace.black.loading": "Laden...",
|
||||
"workspace.black.time.day": "Tag",
|
||||
"workspace.black.time.days": "Tage",
|
||||
@@ -454,21 +454,21 @@ export const dict = {
|
||||
"workspace.black.time.minutes": "Minuten",
|
||||
"workspace.black.time.fewSeconds": "ein paar Sekunden",
|
||||
"workspace.black.subscription.title": "Abonnement",
|
||||
"workspace.black.subscription.message": "Sie haben OpenCode Black für ${{plan}} pro Monat abonniert.",
|
||||
"workspace.black.subscription.message": "Sie haben OpenCode Black für {{plan}} pro Monat abonniert.",
|
||||
"workspace.black.subscription.manage": "Abonnement verwalten",
|
||||
"workspace.black.subscription.rollingUsage": "5-stündige Nutzung",
|
||||
"workspace.black.subscription.weeklyUsage": "Wöchentliche Nutzung",
|
||||
"workspace.black.subscription.resetsIn": "Zurückgesetzt in",
|
||||
"workspace.black.subscription.resetsIn": "Wird zurückgesetzt",
|
||||
"workspace.black.subscription.useBalance":
|
||||
"Nutzen Sie Ihr verfügbares Guthaben, nachdem Sie die Nutzungslimits erreicht haben",
|
||||
"workspace.black.waitlist.title": "Warteliste",
|
||||
"workspace.black.waitlist.joined":
|
||||
"Sie stehen auf der Warteliste für den OpenCode Black Tarif für ${{plan}} pro Monat.",
|
||||
"Sie stehen auf der Warteliste für den Black-Plan im Wert von ${{plan}} pro Monat OpenCode.",
|
||||
"workspace.black.waitlist.ready":
|
||||
"Wir können Sie jetzt in den OpenCode Black Tarif für ${{plan}} pro Monat aufnehmen.",
|
||||
"Wir sind bereit, Sie für den Black-Plan im Wert von ${{plan}} pro Monat OpenCode anzumelden.",
|
||||
"workspace.black.waitlist.leave": "Warteliste verlassen",
|
||||
"workspace.black.waitlist.leaving": "Verlassen...",
|
||||
"workspace.black.waitlist.left": "Verlassen",
|
||||
"workspace.black.waitlist.left": "Links",
|
||||
"workspace.black.waitlist.enroll": "Einschreiben",
|
||||
"workspace.black.waitlist.enrolling": "Anmeldung...",
|
||||
"workspace.black.waitlist.enrolled": "Eingeschrieben",
|
||||
|
||||
@@ -394,7 +394,7 @@ export const dict = {
|
||||
"workspace.settings.edit": "Edit",
|
||||
|
||||
"workspace.billing.title": "Billing",
|
||||
"workspace.billing.subtitle.beforeLink": "Manage payments methods.",
|
||||
"workspace.billing.subtitle.beforeLink": "Manage payment methods.",
|
||||
"workspace.billing.contactUs": "Contact us",
|
||||
"workspace.billing.subtitle.afterLink": "if you have any questions.",
|
||||
"workspace.billing.currentBalance": "Current Balance",
|
||||
|
||||
@@ -284,8 +284,8 @@ export const dict = {
|
||||
"changelog.hero.subtitle": "Nuovi aggiornamenti e miglioramenti per OpenCode",
|
||||
"changelog.empty": "Nessuna voce di changelog trovata.",
|
||||
"changelog.viewJson": "Visualizza JSON",
|
||||
"workspace.nav.zen": "Zen",
|
||||
"workspace.nav.apiKeys": "Chiavi API",
|
||||
"workspace.nav.zen": "zen",
|
||||
"workspace.nav.apiKeys": "API Chiavi",
|
||||
"workspace.nav.members": "Membri",
|
||||
"workspace.nav.billing": "Fatturazione",
|
||||
"workspace.nav.settings": "Impostazioni",
|
||||
@@ -299,14 +299,14 @@ export const dict = {
|
||||
"workspace.newUser.feature.quality.title": "Massima qualità",
|
||||
"workspace.newUser.feature.quality.body":
|
||||
"Modelli di accesso configurati per prestazioni ottimali: senza downgrade o instradamento verso fornitori più economici.",
|
||||
"workspace.newUser.feature.lockin.title": "Nessun lock-in",
|
||||
"workspace.newUser.feature.lockin.title": "Nessun blocco",
|
||||
"workspace.newUser.feature.lockin.body":
|
||||
"Utilizza Zen con qualsiasi agente di codifica e continua a utilizzare altri provider con opencode ogni volta che vuoi.",
|
||||
"workspace.newUser.copyApiKey": "Copia la chiave API",
|
||||
"workspace.newUser.copyKey": "Copia chiave",
|
||||
"workspace.newUser.copied": "Copiato!",
|
||||
"workspace.newUser.step.enableBilling": "Abilita fatturazione",
|
||||
"workspace.newUser.step.login.before": "Esegui",
|
||||
"workspace.newUser.step.login.before": "Correre",
|
||||
"workspace.newUser.step.login.after": "e seleziona opencode",
|
||||
"workspace.newUser.step.pasteKey": "Incolla la tua chiave API",
|
||||
"workspace.newUser.step.models.before": "Avvia opencode ed esegui",
|
||||
@@ -315,16 +315,16 @@ export const dict = {
|
||||
"workspace.models.subtitle.beforeLink": "Gestire i modelli a cui possono accedere i membri dell'area di lavoro.",
|
||||
"workspace.models.table.model": "Modello",
|
||||
"workspace.models.table.enabled": "Abilitato",
|
||||
"workspace.providers.title": "Bring Your Own Key (BYOK)",
|
||||
"workspace.providers.title": "Porta la tua chiave",
|
||||
"workspace.providers.subtitle": "Configura le tue chiavi API dai fornitori di intelligenza artificiale.",
|
||||
"workspace.providers.placeholder": "Inserisci la chiave {{provider}} API ({{prefix}}...)",
|
||||
"workspace.providers.configure": "Configura",
|
||||
"workspace.providers.edit": "Modificare",
|
||||
"workspace.providers.delete": "Eliminare",
|
||||
"workspace.providers.saving": "Salvataggio in corso...",
|
||||
"workspace.providers.saving": "Risparmio...",
|
||||
"workspace.providers.save": "Salva",
|
||||
"workspace.providers.table.provider": "Fornitore",
|
||||
"workspace.providers.table.apiKey": "Chiave API",
|
||||
"workspace.providers.table.apiKey": "API Chiave",
|
||||
"workspace.usage.title": "Cronologia dell'utilizzo",
|
||||
"workspace.usage.subtitle": "Utilizzo e costi recenti di API.",
|
||||
"workspace.usage.empty": "Effettua la tua prima chiamata API per iniziare.",
|
||||
@@ -346,7 +346,7 @@ export const dict = {
|
||||
"workspace.cost.deletedSuffix": "(eliminato)",
|
||||
"workspace.cost.empty": "Nessun dato di utilizzo disponibile per il periodo selezionato.",
|
||||
"workspace.cost.subscriptionShort": "sub",
|
||||
"workspace.keys.title": "Chiavi API",
|
||||
"workspace.keys.title": "API Chiavi",
|
||||
"workspace.keys.subtitle": "Gestisci le tue chiavi API per accedere ai servizi opencode.",
|
||||
"workspace.keys.create": "Crea chiave API",
|
||||
"workspace.keys.placeholder": "Inserisci il nome della chiave",
|
||||
@@ -360,7 +360,7 @@ export const dict = {
|
||||
"workspace.members.title": "Membri",
|
||||
"workspace.members.subtitle": "Gestire i membri dell'area di lavoro e le relative autorizzazioni.",
|
||||
"workspace.members.invite": "Invita membro",
|
||||
"workspace.members.inviting": "Invito in corso...",
|
||||
"workspace.members.inviting": "Invitante...",
|
||||
"workspace.members.beta.beforeLink": "Gli spazi di lavoro sono gratuiti per i team durante la beta.",
|
||||
"workspace.members.form.invitee": "Invitato",
|
||||
"workspace.members.form.emailPlaceholder": "Inserisci l'e-mail",
|
||||
@@ -371,12 +371,12 @@ export const dict = {
|
||||
"workspace.members.invited": "invitato",
|
||||
"workspace.members.edit": "Modificare",
|
||||
"workspace.members.delete": "Eliminare",
|
||||
"workspace.members.saving": "Salvataggio in corso...",
|
||||
"workspace.members.saving": "Risparmio...",
|
||||
"workspace.members.save": "Salva",
|
||||
"workspace.members.table.email": "E-mail",
|
||||
"workspace.members.table.role": "Ruolo",
|
||||
"workspace.members.table.monthLimit": "Limite mensile",
|
||||
"workspace.members.role.admin": "Admin",
|
||||
"workspace.members.role.admin": "Ammin",
|
||||
"workspace.members.role.adminDescription": "Può gestire modelli, membri e fatturazione",
|
||||
"workspace.members.role.member": "Membro",
|
||||
"workspace.members.role.memberDescription": "Possono generare chiavi API solo per se stessi",
|
||||
@@ -388,42 +388,42 @@ export const dict = {
|
||||
"workspace.settings.save": "Salva",
|
||||
"workspace.settings.edit": "Modificare",
|
||||
"workspace.billing.title": "Fatturazione",
|
||||
"workspace.billing.subtitle.beforeLink": "Gestisci i metodi di pagamento.",
|
||||
"workspace.billing.subtitle.beforeLink": "Gestire i metodi di pagamento.",
|
||||
"workspace.billing.contactUs": "Contattaci",
|
||||
"workspace.billing.subtitle.afterLink": "se hai qualche domanda",
|
||||
"workspace.billing.currentBalance": "Saldo attuale",
|
||||
"workspace.billing.add": "Aggiungi $",
|
||||
"workspace.billing.enterAmount": "Inserisci l'importo",
|
||||
"workspace.billing.loading": "Caricamento...",
|
||||
"workspace.billing.addAction": "Aggiungi",
|
||||
"workspace.billing.addAction": "Aggiungere",
|
||||
"workspace.billing.addBalance": "Aggiungi saldo",
|
||||
"workspace.billing.linkedToStripe": "Collegato a Stripe",
|
||||
"workspace.billing.manage": "Gestisci",
|
||||
"workspace.billing.manage": "Maneggio",
|
||||
"workspace.billing.enable": "Abilita fatturazione",
|
||||
"workspace.monthlyLimit.title": "Limite mensile",
|
||||
"workspace.monthlyLimit.subtitle": "Imposta un limite di utilizzo mensile per il tuo account.",
|
||||
"workspace.monthlyLimit.placeholder": "50",
|
||||
"workspace.monthlyLimit.setting": "Impostazione in corso...",
|
||||
"workspace.monthlyLimit.setting": "Collocamento...",
|
||||
"workspace.monthlyLimit.set": "Impostato",
|
||||
"workspace.monthlyLimit.edit": "Modifica limite",
|
||||
"workspace.monthlyLimit.noLimit": "Nessun limite di utilizzo impostato.",
|
||||
"workspace.monthlyLimit.currentUsage.beforeMonth": "Utilizzo attuale per",
|
||||
"workspace.monthlyLimit.currentUsage.beforeAmount": "è $",
|
||||
"workspace.reload.title": "Ricarica automatica",
|
||||
"workspace.reload.disabled.before": "La ricarica automatica è",
|
||||
"workspace.reload.disabled.before": "La ricarica automatica lo è",
|
||||
"workspace.reload.disabled.state": "disabilitato",
|
||||
"workspace.reload.disabled.after": "Abilita la ricarica automatica quando il saldo è basso.",
|
||||
"workspace.reload.enabled.before": "La ricarica automatica è",
|
||||
"workspace.reload.enabled.before": "La ricarica automatica lo è",
|
||||
"workspace.reload.enabled.state": "abilitato",
|
||||
"workspace.reload.enabled.middle": "Ricaricheremo",
|
||||
"workspace.reload.processingFee": "tassa di elaborazione",
|
||||
"workspace.reload.enabled.after": "quando il saldo raggiunge",
|
||||
"workspace.reload.enabled.after": "quando l'equilibrio raggiunge",
|
||||
"workspace.reload.edit": "Modificare",
|
||||
"workspace.reload.enable": "Abilitare",
|
||||
"workspace.reload.enableAutoReload": "Abilita ricarica automatica",
|
||||
"workspace.reload.reloadAmount": "Ricarica $",
|
||||
"workspace.reload.whenBalanceReaches": "Quando il saldo raggiunge $",
|
||||
"workspace.reload.saving": "Salvataggio in corso...",
|
||||
"workspace.reload.saving": "Risparmio...",
|
||||
"workspace.reload.save": "Salva",
|
||||
"workspace.reload.failedAt": "Ricarica non riuscita a",
|
||||
"workspace.reload.reason": "Motivo:",
|
||||
@@ -434,11 +434,11 @@ export const dict = {
|
||||
"workspace.payments.subtitle": "Transazioni di pagamento recenti.",
|
||||
"workspace.payments.table.date": "Data",
|
||||
"workspace.payments.table.paymentId": "ID pagamento",
|
||||
"workspace.payments.table.amount": "Importo",
|
||||
"workspace.payments.table.amount": "Quantità",
|
||||
"workspace.payments.table.receipt": "Ricevuta",
|
||||
"workspace.payments.type.credit": "credito",
|
||||
"workspace.payments.type.subscription": "sottoscrizione",
|
||||
"workspace.payments.view": "Visualizza",
|
||||
"workspace.payments.view": "Visualizzazione",
|
||||
"workspace.black.loading": "Caricamento...",
|
||||
"workspace.black.time.day": "giorno",
|
||||
"workspace.black.time.days": "giorni",
|
||||
@@ -452,14 +452,14 @@ export const dict = {
|
||||
"workspace.black.subscription.manage": "Gestisci abbonamento",
|
||||
"workspace.black.subscription.rollingUsage": "Utilizzo di 5 ore",
|
||||
"workspace.black.subscription.weeklyUsage": "Utilizzo settimanale",
|
||||
"workspace.black.subscription.resetsIn": "Si reimposta tra",
|
||||
"workspace.black.subscription.resetsIn": "Si reimposta",
|
||||
"workspace.black.subscription.useBalance": "Utilizza il saldo disponibile dopo aver raggiunto i limiti di utilizzo",
|
||||
"workspace.black.waitlist.title": "Lista d'attesa",
|
||||
"workspace.black.waitlist.joined": "Sei in lista d'attesa per il piano OpenCode Black da ${{plan}} al mese.",
|
||||
"workspace.black.waitlist.joined": "Sei in lista d'attesa per il piano nero ${{plan}} al mese OpenCode.",
|
||||
"workspace.black.waitlist.ready": "Siamo pronti per iscriverti al piano OpenCode Black da ${{plan}} al mese.",
|
||||
"workspace.black.waitlist.leave": "Lascia la lista d'attesa",
|
||||
"workspace.black.waitlist.leaving": "Uscita in corso...",
|
||||
"workspace.black.waitlist.left": "Uscito dalla lista d'attesa",
|
||||
"workspace.black.waitlist.leaving": "In partenza...",
|
||||
"workspace.black.waitlist.left": "Sinistra",
|
||||
"workspace.black.waitlist.enroll": "Iscriversi",
|
||||
"workspace.black.waitlist.enrolling": "Iscrizione...",
|
||||
"workspace.black.waitlist.enrolled": "Iscritto",
|
||||
|
||||
@@ -203,7 +203,7 @@ export const dict = {
|
||||
"zen.how.step2.link": "betale per forespørsel",
|
||||
"zen.how.step2.afterLink": "med null markeringer",
|
||||
"zen.how.step3.title": "Automatisk påfylling",
|
||||
"zen.how.step3.body": "når saldoen din når $5, legger vi automatisk til $20",
|
||||
"zen.how.step3.body": "når saldoen din når $5, fyller vi automatisk på $20",
|
||||
"zen.privacy.title": "Personvernet ditt er viktig for oss",
|
||||
"zen.privacy.beforeExceptions":
|
||||
"Alle Zen-modeller er vert i USA. Leverandører følger en nulloppbevaringspolicy og bruker ikke dataene dine til modelltrening, med",
|
||||
@@ -283,7 +283,7 @@ export const dict = {
|
||||
"changelog.empty": "Ingen endringsloggoppforinger funnet.",
|
||||
"changelog.viewJson": "Vis JSON",
|
||||
"workspace.nav.zen": "Zen",
|
||||
"workspace.nav.apiKeys": "API Taster",
|
||||
"workspace.nav.apiKeys": "API Nøkler",
|
||||
"workspace.nav.members": "Medlemmer",
|
||||
"workspace.nav.billing": "Fakturering",
|
||||
"workspace.nav.settings": "Innstillinger",
|
||||
@@ -320,7 +320,7 @@ export const dict = {
|
||||
"workspace.providers.edit": "Redigere",
|
||||
"workspace.providers.delete": "Slett",
|
||||
"workspace.providers.saving": "Lagrer...",
|
||||
"workspace.providers.save": "Spare",
|
||||
"workspace.providers.save": "Lagre",
|
||||
"workspace.providers.table.provider": "Leverandør",
|
||||
"workspace.providers.table.apiKey": "API nøkkel",
|
||||
"workspace.usage.title": "Brukshistorikk",
|
||||
@@ -330,21 +330,21 @@ export const dict = {
|
||||
"workspace.usage.table.model": "Modell",
|
||||
"workspace.usage.table.input": "Inndata",
|
||||
"workspace.usage.table.output": "Produksjon",
|
||||
"workspace.usage.table.cost": "Koste",
|
||||
"workspace.usage.table.cost": "Kostnad",
|
||||
"workspace.usage.breakdown.input": "Inndata",
|
||||
"workspace.usage.breakdown.cacheRead": "Cache lest",
|
||||
"workspace.usage.breakdown.cacheWrite": "Cache-skriving",
|
||||
"workspace.usage.breakdown.output": "Produksjon",
|
||||
"workspace.usage.breakdown.reasoning": "Argumentasjon",
|
||||
"workspace.usage.subscription": "abonnement (${{amount}})",
|
||||
"workspace.cost.title": "Koste",
|
||||
"workspace.cost.title": "Kostnad",
|
||||
"workspace.cost.subtitle": "Brukskostnader fordelt på modell.",
|
||||
"workspace.cost.allModels": "Alle modeller",
|
||||
"workspace.cost.allKeys": "Alle nøkler",
|
||||
"workspace.cost.deletedSuffix": "(slettet)",
|
||||
"workspace.cost.empty": "Ingen bruksdata tilgjengelig for den valgte perioden.",
|
||||
"workspace.cost.subscriptionShort": "sub",
|
||||
"workspace.keys.title": "API Taster",
|
||||
"workspace.keys.title": "API Nøkler",
|
||||
"workspace.keys.subtitle": "Administrer API-nøklene dine for å få tilgang til opencode-tjenester.",
|
||||
"workspace.keys.create": "Opprett API-nøkkel",
|
||||
"workspace.keys.placeholder": "Skriv inn nøkkelnavn",
|
||||
@@ -370,7 +370,7 @@ export const dict = {
|
||||
"workspace.members.edit": "Redigere",
|
||||
"workspace.members.delete": "Slett",
|
||||
"workspace.members.saving": "Lagrer...",
|
||||
"workspace.members.save": "Spare",
|
||||
"workspace.members.save": "Lagre",
|
||||
"workspace.members.table.email": "E-post",
|
||||
"workspace.members.table.role": "Rolle",
|
||||
"workspace.members.table.monthLimit": "Månedsgrense",
|
||||
@@ -383,7 +383,7 @@ export const dict = {
|
||||
"workspace.settings.workspaceName": "Navn på arbeidsområde",
|
||||
"workspace.settings.defaultName": "Misligholde",
|
||||
"workspace.settings.updating": "Oppdaterer...",
|
||||
"workspace.settings.save": "Spare",
|
||||
"workspace.settings.save": "Lagre",
|
||||
"workspace.settings.edit": "Redigere",
|
||||
"workspace.billing.title": "Fakturering",
|
||||
"workspace.billing.subtitle.beforeLink": "Administrer betalingsmåter.",
|
||||
@@ -407,22 +407,22 @@ export const dict = {
|
||||
"workspace.monthlyLimit.noLimit": "Ingen bruksgrense satt.",
|
||||
"workspace.monthlyLimit.currentUsage.beforeMonth": "Gjeldende bruk for",
|
||||
"workspace.monthlyLimit.currentUsage.beforeAmount": "er $",
|
||||
"workspace.reload.title": "Last inn automatisk",
|
||||
"workspace.reload.disabled.before": "Automatisk reload er",
|
||||
"workspace.reload.disabled.state": "funksjonshemmet",
|
||||
"workspace.reload.disabled.after": "Aktiver for å laste automatisk på nytt når balansen er lav.",
|
||||
"workspace.reload.enabled.before": "Automatisk reload er",
|
||||
"workspace.reload.title": "Automatisk påfylling",
|
||||
"workspace.reload.disabled.before": "Automatisk påfylling er",
|
||||
"workspace.reload.disabled.state": "deaktivert",
|
||||
"workspace.reload.disabled.after": "Aktiver for å automatisk påfylle på nytt når saldoen er lav.",
|
||||
"workspace.reload.enabled.before": "Automatisk påfylling er",
|
||||
"workspace.reload.enabled.state": "aktivert",
|
||||
"workspace.reload.enabled.middle": "Vi laster på nytt",
|
||||
"workspace.reload.enabled.middle": "Vi fyller på",
|
||||
"workspace.reload.processingFee": "behandlingsgebyr",
|
||||
"workspace.reload.enabled.after": "når balansen når",
|
||||
"workspace.reload.enabled.after": "når saldoen når",
|
||||
"workspace.reload.edit": "Redigere",
|
||||
"workspace.reload.enable": "Aktiver",
|
||||
"workspace.reload.enableAutoReload": "Aktiver automatisk reload",
|
||||
"workspace.reload.enableAutoReload": "Aktiver automatisk påfylling",
|
||||
"workspace.reload.reloadAmount": "Last inn $",
|
||||
"workspace.reload.whenBalanceReaches": "Når saldoen når $",
|
||||
"workspace.reload.saving": "Lagrer...",
|
||||
"workspace.reload.save": "Spare",
|
||||
"workspace.reload.save": "Lagre",
|
||||
"workspace.reload.failedAt": "Omlasting mislyktes kl",
|
||||
"workspace.reload.reason": "Grunn:",
|
||||
"workspace.reload.updatePaymentMethod": "Oppdater betalingsmåten og prøv på nytt.",
|
||||
@@ -436,7 +436,7 @@ export const dict = {
|
||||
"workspace.payments.table.receipt": "Kvittering",
|
||||
"workspace.payments.type.credit": "kreditt",
|
||||
"workspace.payments.type.subscription": "abonnement",
|
||||
"workspace.payments.view": "Utsikt",
|
||||
"workspace.payments.view": "Vis",
|
||||
"workspace.black.loading": "Laster inn...",
|
||||
"workspace.black.time.day": "dag",
|
||||
"workspace.black.time.days": "dager",
|
||||
|
||||
@@ -293,9 +293,9 @@ export const dict = {
|
||||
"changelog.hero.subtitle": "OpenCode \u7684\u65b0\u66f4\u65b0\u4e0e\u6539\u8fdb",
|
||||
"changelog.empty": "\u672a\u627e\u5230\u66f4\u65b0\u65e5\u5fd7\u6761\u76ee\u3002",
|
||||
"changelog.viewJson": "\u67e5\u770b JSON",
|
||||
"workspace.nav.zen": "Zen",
|
||||
"workspace.nav.zen": "禅",
|
||||
"workspace.nav.apiKeys": "API 键",
|
||||
"workspace.nav.members": "成员",
|
||||
"workspace.nav.members": "会员",
|
||||
"workspace.nav.billing": "计费",
|
||||
"workspace.nav.settings": "设置",
|
||||
"workspace.home.banner.beforeLink": "编码代理的可靠优化模型。",
|
||||
@@ -310,26 +310,26 @@ export const dict = {
|
||||
"workspace.newUser.feature.lockin.body":
|
||||
"将 Zen 与任何编码代理结合使用,并在需要时继续将其他提供程序与 opencode 结合使用。",
|
||||
"workspace.newUser.copyApiKey": "复制 API 密钥",
|
||||
"workspace.newUser.copyKey": "复制密钥",
|
||||
"workspace.newUser.copied": "已复制!",
|
||||
"workspace.newUser.copyKey": "复制钥匙",
|
||||
"workspace.newUser.copied": "复制了!",
|
||||
"workspace.newUser.step.enableBilling": "启用计费",
|
||||
"workspace.newUser.step.login.before": "运行",
|
||||
"workspace.newUser.step.login.before": "跑步",
|
||||
"workspace.newUser.step.login.after": "并选择 opencode",
|
||||
"workspace.newUser.step.pasteKey": "粘贴您的 API 密钥",
|
||||
"workspace.newUser.step.models.before": "启动 opencode 并运行",
|
||||
"workspace.newUser.step.models.after": "选择模型",
|
||||
"workspace.models.title": "模型",
|
||||
"workspace.newUser.step.models.after": "选择型号",
|
||||
"workspace.models.title": "型号",
|
||||
"workspace.models.subtitle.beforeLink": "管理工作区成员可以访问哪些模型。",
|
||||
"workspace.models.table.model": "模型",
|
||||
"workspace.models.table.enabled": "启用",
|
||||
"workspace.providers.title": "自带密钥",
|
||||
"workspace.providers.title": "带上你自己的钥匙",
|
||||
"workspace.providers.subtitle": "从 AI 提供商处配置您自己的 API 密钥。",
|
||||
"workspace.providers.placeholder": "输入 {{provider}} API 密钥({{prefix}}...)",
|
||||
"workspace.providers.configure": "配置",
|
||||
"workspace.providers.edit": "编辑",
|
||||
"workspace.providers.delete": "删除",
|
||||
"workspace.providers.saving": "保存...",
|
||||
"workspace.providers.save": "保存",
|
||||
"workspace.providers.save": "节省",
|
||||
"workspace.providers.table.provider": "提供者",
|
||||
"workspace.providers.table.apiKey": "API 密钥",
|
||||
"workspace.usage.title": "使用历史",
|
||||
@@ -348,25 +348,25 @@ export const dict = {
|
||||
"workspace.usage.subscription": "订阅 (${{amount}})",
|
||||
"workspace.cost.title": "成本",
|
||||
"workspace.cost.subtitle": "按型号细分的使用成本。",
|
||||
"workspace.cost.allModels": "所有模型",
|
||||
"workspace.cost.allKeys": "所有密钥",
|
||||
"workspace.cost.allModels": "所有型号",
|
||||
"workspace.cost.allKeys": "所有按键",
|
||||
"workspace.cost.deletedSuffix": "(已删除)",
|
||||
"workspace.cost.empty": "所选期间没有可用的使用数据。",
|
||||
"workspace.cost.subscriptionShort": "订",
|
||||
"workspace.cost.subscriptionShort": "子",
|
||||
"workspace.keys.title": "API 键",
|
||||
"workspace.keys.subtitle": "管理您的 API 密钥以访问 opencode 服务。",
|
||||
"workspace.keys.create": "创建 API 密钥",
|
||||
"workspace.keys.placeholder": "输入密钥名称",
|
||||
"workspace.keys.placeholder": "输入按键名称",
|
||||
"workspace.keys.empty": "创建 opencode 网关 API 密钥",
|
||||
"workspace.keys.table.name": "名称",
|
||||
"workspace.keys.table.key": "密钥",
|
||||
"workspace.keys.table.name": "姓名",
|
||||
"workspace.keys.table.key": "钥匙",
|
||||
"workspace.keys.table.createdBy": "创建者",
|
||||
"workspace.keys.table.lastUsed": "最后使用",
|
||||
"workspace.keys.copyApiKey": "复制 API 密钥",
|
||||
"workspace.keys.delete": "删除",
|
||||
"workspace.members.title": "成员",
|
||||
"workspace.members.title": "会员",
|
||||
"workspace.members.subtitle": "管理工作区成员及其权限。",
|
||||
"workspace.members.invite": "邀请成员",
|
||||
"workspace.members.invite": "邀请会员",
|
||||
"workspace.members.inviting": "邀请...",
|
||||
"workspace.members.beta.beforeLink": "测试期间,工作空间对团队免费。",
|
||||
"workspace.members.form.invitee": "受邀者",
|
||||
@@ -379,11 +379,11 @@ export const dict = {
|
||||
"workspace.members.edit": "编辑",
|
||||
"workspace.members.delete": "删除",
|
||||
"workspace.members.saving": "保存...",
|
||||
"workspace.members.save": "保存",
|
||||
"workspace.members.save": "节省",
|
||||
"workspace.members.table.email": "电子邮件",
|
||||
"workspace.members.table.role": "角色",
|
||||
"workspace.members.table.monthLimit": "月限额",
|
||||
"workspace.members.role.admin": "管理员",
|
||||
"workspace.members.table.monthLimit": "月份限制",
|
||||
"workspace.members.role.admin": "行政",
|
||||
"workspace.members.role.adminDescription": "可以管理模型、成员和计费",
|
||||
"workspace.members.role.member": "成员",
|
||||
"workspace.members.role.memberDescription": "只能为自己生成 API 密钥",
|
||||
@@ -392,7 +392,7 @@ export const dict = {
|
||||
"workspace.settings.workspaceName": "工作区名称",
|
||||
"workspace.settings.defaultName": "默认",
|
||||
"workspace.settings.updating": "更新中...",
|
||||
"workspace.settings.save": "保存",
|
||||
"workspace.settings.save": "节省",
|
||||
"workspace.settings.edit": "编辑",
|
||||
"workspace.billing.title": "计费",
|
||||
"workspace.billing.subtitle.beforeLink": "管理付款方式。",
|
||||
@@ -404,35 +404,35 @@ export const dict = {
|
||||
"workspace.billing.loading": "加载中...",
|
||||
"workspace.billing.addAction": "添加",
|
||||
"workspace.billing.addBalance": "添加余额",
|
||||
"workspace.billing.linkedToStripe": "已绑定 Stripe",
|
||||
"workspace.billing.linkedToStripe": "链接到条纹",
|
||||
"workspace.billing.manage": "管理",
|
||||
"workspace.billing.enable": "启用计费",
|
||||
"workspace.monthlyLimit.title": "每月限额",
|
||||
"workspace.monthlyLimit.subtitle": "为您的帐户设置每月使用限额。",
|
||||
"workspace.monthlyLimit.placeholder": "50",
|
||||
"workspace.monthlyLimit.setting": "设置中...",
|
||||
"workspace.monthlyLimit.set": "设置",
|
||||
"workspace.monthlyLimit.setting": "环境...",
|
||||
"workspace.monthlyLimit.set": "放",
|
||||
"workspace.monthlyLimit.edit": "编辑限制",
|
||||
"workspace.monthlyLimit.noLimit": "没有设置使用限制。",
|
||||
"workspace.monthlyLimit.currentUsage.beforeMonth": "当前",
|
||||
"workspace.monthlyLimit.currentUsage.beforeAmount": "的使用量为 $",
|
||||
"workspace.reload.title": "自动充值",
|
||||
"workspace.reload.disabled.before": "自动充值已",
|
||||
"workspace.reload.disabled.state": "停用",
|
||||
"workspace.reload.disabled.after": "启用后将在余额较低时自动充值。",
|
||||
"workspace.reload.enabled.before": "自动充值已",
|
||||
"workspace.monthlyLimit.currentUsage.beforeMonth": "当前使用情况为",
|
||||
"workspace.monthlyLimit.currentUsage.beforeAmount": "是 $",
|
||||
"workspace.reload.title": "自动重新加载",
|
||||
"workspace.reload.disabled.before": "自动重新加载是",
|
||||
"workspace.reload.disabled.state": "残疾人",
|
||||
"workspace.reload.disabled.after": "启用余额不足时自动充值。",
|
||||
"workspace.reload.enabled.before": "自动重新加载是",
|
||||
"workspace.reload.enabled.state": "已启用",
|
||||
"workspace.reload.enabled.middle": "我们将自动充值",
|
||||
"workspace.reload.processingFee": "手续费",
|
||||
"workspace.reload.enabled.middle": "我们将重新加载",
|
||||
"workspace.reload.processingFee": "加工费",
|
||||
"workspace.reload.enabled.after": "当余额达到",
|
||||
"workspace.reload.edit": "编辑",
|
||||
"workspace.reload.enable": "启用",
|
||||
"workspace.reload.enableAutoReload": "启用自动充值",
|
||||
"workspace.reload.reloadAmount": "充值 $",
|
||||
"workspace.reload.enable": "使能够",
|
||||
"workspace.reload.enableAutoReload": "启用自动重新加载",
|
||||
"workspace.reload.reloadAmount": "重新加载 $",
|
||||
"workspace.reload.whenBalanceReaches": "当余额达到 $",
|
||||
"workspace.reload.saving": "保存...",
|
||||
"workspace.reload.save": "保存",
|
||||
"workspace.reload.failedAt": "充值失败于",
|
||||
"workspace.reload.save": "节省",
|
||||
"workspace.reload.failedAt": "重新加载失败于",
|
||||
"workspace.reload.reason": "原因:",
|
||||
"workspace.reload.updatePaymentMethod": "请更新您的付款方式并重试。",
|
||||
"workspace.reload.retrying": "正在重试...",
|
||||
@@ -441,11 +441,11 @@ export const dict = {
|
||||
"workspace.payments.subtitle": "最近的付款交易。",
|
||||
"workspace.payments.table.date": "日期",
|
||||
"workspace.payments.table.paymentId": "付款ID",
|
||||
"workspace.payments.table.amount": "金额",
|
||||
"workspace.payments.table.amount": "数量",
|
||||
"workspace.payments.table.receipt": "收据",
|
||||
"workspace.payments.type.credit": "信用",
|
||||
"workspace.payments.type.subscription": "订阅",
|
||||
"workspace.payments.view": "查看",
|
||||
"workspace.payments.view": "看法",
|
||||
"workspace.black.loading": "加载中...",
|
||||
"workspace.black.time.day": "天",
|
||||
"workspace.black.time.days": "天",
|
||||
@@ -455,20 +455,20 @@ export const dict = {
|
||||
"workspace.black.time.minutes": "分钟",
|
||||
"workspace.black.time.fewSeconds": "几秒钟",
|
||||
"workspace.black.subscription.title": "订阅",
|
||||
"workspace.black.subscription.message": "您已订阅 OpenCode Black,费用为每月 ${{plan}}。",
|
||||
"workspace.black.subscription.message": "您已订阅 OpenCode Black,每月费用为 {{plan}} 美元。",
|
||||
"workspace.black.subscription.manage": "管理订阅",
|
||||
"workspace.black.subscription.rollingUsage": "5小时使用",
|
||||
"workspace.black.subscription.weeklyUsage": "每周使用量",
|
||||
"workspace.black.subscription.resetsIn": "重置于",
|
||||
"workspace.black.subscription.useBalance": "达到使用限额后使用您的可用余额",
|
||||
"workspace.black.waitlist.title": "候补名单",
|
||||
"workspace.black.waitlist.joined": "您已加入每月 ${{plan}} 的 OpenCode Black 方案候补名单。",
|
||||
"workspace.black.waitlist.ready": "我们已准备好将您加入每月 ${{plan}} 的 OpenCode Black 方案。",
|
||||
"workspace.black.waitlist.joined": "您正在等待每月 ${{plan}} OpenCode 黑色计划。",
|
||||
"workspace.black.waitlist.ready": "我们已准备好让您加入每月 {{plan}} 美元的 OpenCode 黑色计划。",
|
||||
"workspace.black.waitlist.leave": "离开候补名单",
|
||||
"workspace.black.waitlist.leaving": "离开...",
|
||||
"workspace.black.waitlist.left": "已退出",
|
||||
"workspace.black.waitlist.enroll": "加入",
|
||||
"workspace.black.waitlist.enrolling": "加入中...",
|
||||
"workspace.black.waitlist.enrolled": "已加入",
|
||||
"workspace.black.waitlist.left": "左边",
|
||||
"workspace.black.waitlist.enroll": "注册",
|
||||
"workspace.black.waitlist.enrolling": "正在报名...",
|
||||
"workspace.black.waitlist.enrolled": "已注册",
|
||||
"workspace.black.waitlist.enrollNote": "单击“注册”后,您的订阅将立即开始,并且将从您的卡中扣费。",
|
||||
} satisfies Dict
|
||||
|
||||
@@ -293,9 +293,9 @@ export const dict = {
|
||||
"changelog.hero.subtitle": "OpenCode \u7684\u65b0\u66f4\u65b0\u8207\u6539\u5584",
|
||||
"changelog.empty": "\u627e\u4e0d\u5230\u66f4\u65b0\u65e5\u8a8c\u9805\u76ee\u3002",
|
||||
"changelog.viewJson": "\u6aa2\u8996 JSON",
|
||||
"workspace.nav.zen": "Zen",
|
||||
"workspace.nav.zen": "禪",
|
||||
"workspace.nav.apiKeys": "API 鍵",
|
||||
"workspace.nav.members": "成員",
|
||||
"workspace.nav.members": "會員",
|
||||
"workspace.nav.billing": "計費",
|
||||
"workspace.nav.settings": "設定",
|
||||
"workspace.home.banner.beforeLink": "編碼代理的可靠優化模型。",
|
||||
@@ -310,26 +310,26 @@ export const dict = {
|
||||
"workspace.newUser.feature.lockin.body":
|
||||
"將 Zen 與任何編碼代理結合使用,並在需要時繼續將其他提供程序與 opencode 結合使用。",
|
||||
"workspace.newUser.copyApiKey": "複製 API 密鑰",
|
||||
"workspace.newUser.copyKey": "複製密鑰",
|
||||
"workspace.newUser.copied": "已複製!",
|
||||
"workspace.newUser.copyKey": "複製鑰匙",
|
||||
"workspace.newUser.copied": "複製了!",
|
||||
"workspace.newUser.step.enableBilling": "啟用計費",
|
||||
"workspace.newUser.step.login.before": "執行",
|
||||
"workspace.newUser.step.login.before": "跑步",
|
||||
"workspace.newUser.step.login.after": "並選擇 opencode",
|
||||
"workspace.newUser.step.pasteKey": "粘貼您的 API 密鑰",
|
||||
"workspace.newUser.step.models.before": "啟動 opencode 並運行",
|
||||
"workspace.newUser.step.models.after": "選擇模型",
|
||||
"workspace.models.title": "模型",
|
||||
"workspace.newUser.step.models.after": "選擇型號",
|
||||
"workspace.models.title": "型號",
|
||||
"workspace.models.subtitle.beforeLink": "管理工作區成員可以訪問哪些模型。",
|
||||
"workspace.models.table.model": "模型",
|
||||
"workspace.models.table.enabled": "啟用",
|
||||
"workspace.providers.title": "自帶密鑰",
|
||||
"workspace.providers.title": "帶上你自己的鑰匙",
|
||||
"workspace.providers.subtitle": "從 AI 提供商處配置您自己的 API 密鑰。",
|
||||
"workspace.providers.placeholder": "輸入 {{provider}} API 密鑰({{prefix}}...)",
|
||||
"workspace.providers.configure": "配置",
|
||||
"workspace.providers.edit": "編輯",
|
||||
"workspace.providers.delete": "刪除",
|
||||
"workspace.providers.saving": "保存...",
|
||||
"workspace.providers.save": "儲存",
|
||||
"workspace.providers.save": "節省",
|
||||
"workspace.providers.table.provider": "提供者",
|
||||
"workspace.providers.table.apiKey": "API 密鑰",
|
||||
"workspace.usage.title": "使用歷史",
|
||||
@@ -348,25 +348,25 @@ export const dict = {
|
||||
"workspace.usage.subscription": "訂閱 (${{amount}})",
|
||||
"workspace.cost.title": "成本",
|
||||
"workspace.cost.subtitle": "按型號細分的使用成本。",
|
||||
"workspace.cost.allModels": "所有模型",
|
||||
"workspace.cost.allKeys": "所有密鑰",
|
||||
"workspace.cost.allModels": "所有型號",
|
||||
"workspace.cost.allKeys": "所有按鍵",
|
||||
"workspace.cost.deletedSuffix": "(已刪除)",
|
||||
"workspace.cost.empty": "所選期間沒有可用的使用數據。",
|
||||
"workspace.cost.subscriptionShort": "訂",
|
||||
"workspace.cost.subscriptionShort": "子",
|
||||
"workspace.keys.title": "API 鍵",
|
||||
"workspace.keys.subtitle": "管理您的 API 密鑰以訪問 opencode 服務。",
|
||||
"workspace.keys.create": "創建 API 密鑰",
|
||||
"workspace.keys.placeholder": "輸入密鑰名稱",
|
||||
"workspace.keys.placeholder": "輸入按鍵名稱",
|
||||
"workspace.keys.empty": "創建 opencode 網關 API 密鑰",
|
||||
"workspace.keys.table.name": "名稱",
|
||||
"workspace.keys.table.key": "密鑰",
|
||||
"workspace.keys.table.name": "姓名",
|
||||
"workspace.keys.table.key": "鑰匙",
|
||||
"workspace.keys.table.createdBy": "創建者",
|
||||
"workspace.keys.table.lastUsed": "最後使用",
|
||||
"workspace.keys.copyApiKey": "複製 API 密鑰",
|
||||
"workspace.keys.delete": "刪除",
|
||||
"workspace.members.title": "成員",
|
||||
"workspace.members.title": "會員",
|
||||
"workspace.members.subtitle": "管理工作區成員及其權限。",
|
||||
"workspace.members.invite": "邀請成員",
|
||||
"workspace.members.invite": "邀請會員",
|
||||
"workspace.members.inviting": "邀請...",
|
||||
"workspace.members.beta.beforeLink": "測試期間,工作空間對團隊免費。",
|
||||
"workspace.members.form.invitee": "受邀者",
|
||||
@@ -379,11 +379,11 @@ export const dict = {
|
||||
"workspace.members.edit": "編輯",
|
||||
"workspace.members.delete": "刪除",
|
||||
"workspace.members.saving": "保存...",
|
||||
"workspace.members.save": "儲存",
|
||||
"workspace.members.save": "節省",
|
||||
"workspace.members.table.email": "電子郵件",
|
||||
"workspace.members.table.role": "角色",
|
||||
"workspace.members.table.monthLimit": "月限額",
|
||||
"workspace.members.role.admin": "管理員",
|
||||
"workspace.members.table.monthLimit": "月份限制",
|
||||
"workspace.members.role.admin": "行政",
|
||||
"workspace.members.role.adminDescription": "可以管理模型、成員和計費",
|
||||
"workspace.members.role.member": "成員",
|
||||
"workspace.members.role.memberDescription": "只能為自己生成 API 密鑰",
|
||||
@@ -392,7 +392,7 @@ export const dict = {
|
||||
"workspace.settings.workspaceName": "工作區名稱",
|
||||
"workspace.settings.defaultName": "預設",
|
||||
"workspace.settings.updating": "更新中...",
|
||||
"workspace.settings.save": "儲存",
|
||||
"workspace.settings.save": "節省",
|
||||
"workspace.settings.edit": "編輯",
|
||||
"workspace.billing.title": "計費",
|
||||
"workspace.billing.subtitle.beforeLink": "管理付款方式。",
|
||||
@@ -404,35 +404,35 @@ export const dict = {
|
||||
"workspace.billing.loading": "載入中...",
|
||||
"workspace.billing.addAction": "添加",
|
||||
"workspace.billing.addBalance": "添加餘額",
|
||||
"workspace.billing.linkedToStripe": "已連結 Stripe",
|
||||
"workspace.billing.linkedToStripe": "鏈接到條紋",
|
||||
"workspace.billing.manage": "管理",
|
||||
"workspace.billing.enable": "啟用計費",
|
||||
"workspace.monthlyLimit.title": "每月限額",
|
||||
"workspace.monthlyLimit.subtitle": "為您的帳戶設置每月使用限額。",
|
||||
"workspace.monthlyLimit.placeholder": "50",
|
||||
"workspace.monthlyLimit.setting": "設定中...",
|
||||
"workspace.monthlyLimit.set": "設定",
|
||||
"workspace.monthlyLimit.setting": "環境...",
|
||||
"workspace.monthlyLimit.set": "放",
|
||||
"workspace.monthlyLimit.edit": "編輯限制",
|
||||
"workspace.monthlyLimit.noLimit": "沒有設置使用限制。",
|
||||
"workspace.monthlyLimit.currentUsage.beforeMonth": "當前",
|
||||
"workspace.monthlyLimit.currentUsage.beforeAmount": "的使用量為 $",
|
||||
"workspace.reload.title": "自動儲值",
|
||||
"workspace.reload.disabled.before": "自動儲值已",
|
||||
"workspace.reload.disabled.state": "停用",
|
||||
"workspace.reload.disabled.after": "啟用後會在餘額偏低時自動儲值。",
|
||||
"workspace.reload.enabled.before": "自動儲值已",
|
||||
"workspace.monthlyLimit.currentUsage.beforeMonth": "當前使用情況為",
|
||||
"workspace.monthlyLimit.currentUsage.beforeAmount": "是 $",
|
||||
"workspace.reload.title": "自動重新加載",
|
||||
"workspace.reload.disabled.before": "自動重新加載是",
|
||||
"workspace.reload.disabled.state": "殘疾人",
|
||||
"workspace.reload.disabled.after": "啟用餘額不足時自動充值。",
|
||||
"workspace.reload.enabled.before": "自動重新加載是",
|
||||
"workspace.reload.enabled.state": "已啟用",
|
||||
"workspace.reload.enabled.middle": "我們將自動儲值",
|
||||
"workspace.reload.processingFee": "手續費",
|
||||
"workspace.reload.enabled.middle": "我們將重新加載",
|
||||
"workspace.reload.processingFee": "加工費",
|
||||
"workspace.reload.enabled.after": "當餘額達到",
|
||||
"workspace.reload.edit": "編輯",
|
||||
"workspace.reload.enable": "啟用",
|
||||
"workspace.reload.enableAutoReload": "啟用自動儲值",
|
||||
"workspace.reload.reloadAmount": "儲值 $",
|
||||
"workspace.reload.enable": "使能夠",
|
||||
"workspace.reload.enableAutoReload": "啟用自動重新加載",
|
||||
"workspace.reload.reloadAmount": "重新加載 $",
|
||||
"workspace.reload.whenBalanceReaches": "當餘額達到 $",
|
||||
"workspace.reload.saving": "保存...",
|
||||
"workspace.reload.save": "儲存",
|
||||
"workspace.reload.failedAt": "儲值失敗於",
|
||||
"workspace.reload.save": "節省",
|
||||
"workspace.reload.failedAt": "重新加載失敗於",
|
||||
"workspace.reload.reason": "原因:",
|
||||
"workspace.reload.updatePaymentMethod": "請更新您的付款方式並重試。",
|
||||
"workspace.reload.retrying": "正在重試...",
|
||||
@@ -441,11 +441,11 @@ export const dict = {
|
||||
"workspace.payments.subtitle": "最近的付款交易。",
|
||||
"workspace.payments.table.date": "日期",
|
||||
"workspace.payments.table.paymentId": "付款ID",
|
||||
"workspace.payments.table.amount": "金額",
|
||||
"workspace.payments.table.amount": "數量",
|
||||
"workspace.payments.table.receipt": "收據",
|
||||
"workspace.payments.type.credit": "信用",
|
||||
"workspace.payments.type.subscription": "訂閱",
|
||||
"workspace.payments.view": "查看",
|
||||
"workspace.payments.view": "看法",
|
||||
"workspace.black.loading": "載入中...",
|
||||
"workspace.black.time.day": "天",
|
||||
"workspace.black.time.days": "天",
|
||||
@@ -455,20 +455,20 @@ export const dict = {
|
||||
"workspace.black.time.minutes": "分鐘",
|
||||
"workspace.black.time.fewSeconds": "幾秒鐘",
|
||||
"workspace.black.subscription.title": "訂閱",
|
||||
"workspace.black.subscription.message": "您已訂閱 OpenCode Black,費用為每月 ${{plan}}。",
|
||||
"workspace.black.subscription.message": "您已訂閱 OpenCode Black,每月費用為 {{plan}} 美元。",
|
||||
"workspace.black.subscription.manage": "管理訂閱",
|
||||
"workspace.black.subscription.rollingUsage": "5小時使用",
|
||||
"workspace.black.subscription.weeklyUsage": "每週使用量",
|
||||
"workspace.black.subscription.resetsIn": "重置於",
|
||||
"workspace.black.subscription.useBalance": "達到使用限額後使用您的可用餘額",
|
||||
"workspace.black.waitlist.title": "候補名單",
|
||||
"workspace.black.waitlist.joined": "您已加入每月 ${{plan}} 的 OpenCode Black 方案候補名單。",
|
||||
"workspace.black.waitlist.ready": "我們已準備好將您加入每月 ${{plan}} 的 OpenCode Black 方案。",
|
||||
"workspace.black.waitlist.joined": "您正在等待每月 ${{plan}} OpenCode 黑色計劃。",
|
||||
"workspace.black.waitlist.ready": "我們已準備好讓您加入每月 {{plan}} 美元的 OpenCode 黑色計劃。",
|
||||
"workspace.black.waitlist.leave": "離開候補名單",
|
||||
"workspace.black.waitlist.leaving": "離開...",
|
||||
"workspace.black.waitlist.left": "已退出",
|
||||
"workspace.black.waitlist.enroll": "加入",
|
||||
"workspace.black.waitlist.enrolling": "加入中...",
|
||||
"workspace.black.waitlist.enrolled": "已加入",
|
||||
"workspace.black.waitlist.left": "左邊",
|
||||
"workspace.black.waitlist.enroll": "註冊",
|
||||
"workspace.black.waitlist.enrolling": "正在報名...",
|
||||
"workspace.black.waitlist.enrolled": "已註冊",
|
||||
"workspace.black.waitlist.enrollNote": "單擊“註冊”後,您的訂閱將立即開始,並且將從您的卡中扣費。",
|
||||
} satisfies Dict
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
"@opencode-ai/console-resource": "workspace:*",
|
||||
"@planetscale/database": "1.19.0",
|
||||
"aws4fetch": "1.0.20",
|
||||
"drizzle-orm": "0.41.0",
|
||||
"drizzle-orm": "catalog:",
|
||||
"postgres": "3.4.7",
|
||||
"stripe": "18.0.0",
|
||||
"ulid": "catalog:",
|
||||
@@ -43,7 +43,7 @@
|
||||
"@tsconfig/node22": "22.0.2",
|
||||
"@types/bun": "1.3.0",
|
||||
"@types/node": "catalog:",
|
||||
"drizzle-kit": "0.30.5",
|
||||
"drizzle-kit": "catalog:",
|
||||
"mysql2": "3.14.4",
|
||||
"typescript": "catalog:",
|
||||
"@typescript/native-preview": "catalog:"
|
||||
|
||||
@@ -4,7 +4,6 @@ export * from "drizzle-orm"
|
||||
import { Client } from "@planetscale/database"
|
||||
|
||||
import { MySqlTransaction, type MySqlTransactionConfig } from "drizzle-orm/mysql-core"
|
||||
import type { ExtractTablesWithRelations } from "drizzle-orm"
|
||||
import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from "drizzle-orm/planetscale-serverless"
|
||||
import { Context } from "../context"
|
||||
import { memo } from "../util/memo"
|
||||
@@ -14,7 +13,7 @@ export namespace Database {
|
||||
PlanetscaleQueryResultHKT,
|
||||
PlanetScalePreparedQueryHKT,
|
||||
Record<string, never>,
|
||||
ExtractTablesWithRelations<Record<string, never>>
|
||||
any
|
||||
>
|
||||
|
||||
const client = memo(() => {
|
||||
@@ -23,7 +22,7 @@ export namespace Database {
|
||||
username: Resource.Database.username,
|
||||
password: Resource.Database.password,
|
||||
})
|
||||
const db = drizzle(result, {})
|
||||
const db = drizzle({ client: result })
|
||||
return db
|
||||
})
|
||||
|
||||
|
||||
@@ -15,5 +15,9 @@ const dir = "src-tauri/target/opencode-binaries"
|
||||
|
||||
await $`mkdir -p ${dir}`
|
||||
await $`gh run download ${Bun.env.GITHUB_RUN_ID} -n opencode-cli`.cwd(dir)
|
||||
// Windows CLI binaries are built in a separate job using the bun fork.
|
||||
// nothrow() because this artifact only exists for the Windows target — non-Windows
|
||||
// Tauri builds won't have it and that's expected.
|
||||
await $`gh run download ${Bun.env.GITHUB_RUN_ID} -n opencode-cli-windows`.cwd(dir).nothrow()
|
||||
|
||||
await copyBinaryToSidecarFolder(windowsify(`${dir}/${sidecarConfig.ocBinary}/bin/opencode`))
|
||||
|
||||
@@ -116,6 +116,15 @@ function parseRecord(value: unknown) {
|
||||
return value as Record<string, unknown>
|
||||
}
|
||||
|
||||
function parseStored(value: unknown) {
|
||||
if (typeof value !== "string") return value
|
||||
try {
|
||||
return JSON.parse(value) as unknown
|
||||
} catch {
|
||||
return value
|
||||
}
|
||||
}
|
||||
|
||||
function pickLocale(value: unknown): Locale | null {
|
||||
const direct = parseLocale(value)
|
||||
if (direct) return direct
|
||||
@@ -169,7 +178,7 @@ export function initI18n(): Promise<Locale> {
|
||||
if (!store) return state.locale
|
||||
|
||||
const raw = await store.get("language").catch(() => null)
|
||||
const value = typeof raw === "string" ? JSON.parse(raw) : raw
|
||||
const value = parseStored(raw)
|
||||
const next = pickLocale(value) ?? state.locale
|
||||
|
||||
state.locale = next
|
||||
|
||||
@@ -1,27 +1,10 @@
|
||||
# opencode agent guidelines
|
||||
# opencode database guide
|
||||
|
||||
## Build/Test Commands
|
||||
## Database
|
||||
|
||||
- **Install**: `bun install`
|
||||
- **Run**: `bun run --conditions=browser ./src/index.ts`
|
||||
- **Typecheck**: `bun run typecheck` (npm run typecheck)
|
||||
- **Test**: `bun test` (runs all tests)
|
||||
- **Single test**: `bun test test/tool/tool.test.ts` (specific test file)
|
||||
|
||||
## Code Style
|
||||
|
||||
- **Runtime**: Bun with TypeScript ESM modules
|
||||
- **Imports**: Use relative imports for local modules, named imports preferred
|
||||
- **Types**: Zod schemas for validation, TypeScript interfaces for structure
|
||||
- **Naming**: camelCase for variables/functions, PascalCase for classes/namespaces
|
||||
- **Error handling**: Use Result patterns, avoid throwing exceptions in tools
|
||||
- **File structure**: Namespace-based organization (e.g., `Tool.define()`, `Session.create()`)
|
||||
|
||||
## Architecture
|
||||
|
||||
- **Tools**: Implement `Tool.Info` interface with `execute()` method
|
||||
- **Context**: Pass `sessionID` in tool context, use `App.provide()` for DI
|
||||
- **Validation**: All inputs validated with Zod schemas
|
||||
- **Logging**: Use `Log.create({ service: "name" })` pattern
|
||||
- **Storage**: Use `Storage` namespace for persistence
|
||||
- **API Client**: The TypeScript TUI (built with SolidJS + OpenTUI) communicates with the OpenCode server using `@opencode-ai/sdk`. When adding/modifying server endpoints in `packages/opencode/src/server/server.ts`, run `./script/generate.ts` to regenerate the SDK and related files.
|
||||
- **Schema**: Drizzle schema lives in `src/**/*.sql.ts`.
|
||||
- **Naming**: tables and columns use snake_case; join columns are `<entity>_id`; indexes are `<table>_<column>_idx`.
|
||||
- **Migrations**: generated by Drizzle Kit using `drizzle.config.ts` (schema: `./src/**/*.sql.ts`, output: `./migration`).
|
||||
- **Command**: `bun run db generate --name <slug>`.
|
||||
- **Output**: creates `migration/<timestamp>_<slug>/migration.sql` and `snapshot.json`.
|
||||
- **Tests**: migration tests should read the per-folder layout (no `_journal.json`).
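For reference, a minimal sketch of what a schema module under `src/**/*.sql.ts` could look like when it follows the conventions above. The column set and module name are illustrative assumptions, not the project's actual schema; the index name mirrors `session_project_idx` from the migration included in this diff.

```ts
// example.sql.ts: hypothetical schema module following the conventions above
import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core"

// snake_case table and column names; the join column to `project` is `project_id`
export const session = sqliteTable(
  "session",
  {
    id: text("id").primaryKey(),
    projectId: text("project_id").notNull(),
    title: text("title").notNull(),
    timeCreated: integer("time_created").notNull(),
    timeUpdated: integer("time_updated").notNull(),
  },
  (table) => [
    // index named after the table and indexed column
    index("session_project_idx").on(table.projectId),
  ],
)
```

Running `bun run db generate --name <slug>` against modules like this would then produce the per-folder `migration/<timestamp>_<slug>/migration.sql` and `snapshot.json` layout described above.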
|
||||
|
||||
@@ -5,6 +5,24 @@ const fs = require("fs")
|
||||
const path = require("path")
|
||||
const os = require("os")
|
||||
|
||||
// On Windows, ignore CTRL_C_EVENT in this shim process so only the child
|
||||
// (the real opencode binary) handles Ctrl+C gracefully. Without this, stock
|
||||
// Bun's shim dies instantly on Ctrl+C, killing the child mid-cleanup.
|
||||
// Uses bun:ffi to call Win32 SetConsoleCtrlHandler(NULL, TRUE).
|
||||
// No-op under Node.js where bun:ffi is unavailable.
|
||||
if (os.platform() === "win32") {
|
||||
try {
|
||||
var ffi = require("bun:ffi")
|
||||
var kernel32 = ffi.dlopen("kernel32.dll", {
|
||||
SetConsoleCtrlHandler: {
|
||||
args: [ffi.FFIType.ptr, ffi.FFIType.i32],
|
||||
returns: ffi.FFIType.i32,
|
||||
},
|
||||
})
|
||||
kernel32.symbols.SetConsoleCtrlHandler(null, 1)
|
||||
} catch {}
|
||||
}
|
||||
|
||||
function run(target) {
|
||||
const result = childProcess.spawnSync(target, process.argv.slice(2), {
|
||||
stdio: "inherit",
|
||||
|
||||
10
packages/opencode/drizzle.config.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { defineConfig } from "drizzle-kit"
|
||||
|
||||
export default defineConfig({
|
||||
dialect: "sqlite",
|
||||
schema: "./src/**/*.sql.ts",
|
||||
out: "./migration",
|
||||
dbCredentials: {
|
||||
url: "/home/thdxr/.local/share/opencode/opencode.db",
|
||||
},
|
||||
})
|
||||
@@ -0,0 +1,90 @@
|
||||
CREATE TABLE `project` (
|
||||
`id` text PRIMARY KEY,
|
||||
`worktree` text NOT NULL,
|
||||
`vcs` text,
|
||||
`name` text,
|
||||
`icon_url` text,
|
||||
`icon_color` text,
|
||||
`time_created` integer NOT NULL,
|
||||
`time_updated` integer NOT NULL,
|
||||
`time_initialized` integer,
|
||||
`sandboxes` text NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `message` (
|
||||
`id` text PRIMARY KEY,
|
||||
`session_id` text NOT NULL,
|
||||
`time_created` integer NOT NULL,
|
||||
`time_updated` integer NOT NULL,
|
||||
`data` text NOT NULL,
|
||||
CONSTRAINT `fk_message_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `part` (
|
||||
`id` text PRIMARY KEY,
|
||||
`message_id` text NOT NULL,
|
||||
`session_id` text NOT NULL,
|
||||
`time_created` integer NOT NULL,
|
||||
`time_updated` integer NOT NULL,
|
||||
`data` text NOT NULL,
|
||||
CONSTRAINT `fk_part_message_id_message_id_fk` FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON DELETE CASCADE
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `permission` (
|
||||
`project_id` text PRIMARY KEY,
|
||||
`time_created` integer NOT NULL,
|
||||
`time_updated` integer NOT NULL,
|
||||
`data` text NOT NULL,
|
||||
CONSTRAINT `fk_permission_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `session` (
|
||||
`id` text PRIMARY KEY,
|
||||
`project_id` text NOT NULL,
|
||||
`parent_id` text,
|
||||
`slug` text NOT NULL,
|
||||
`directory` text NOT NULL,
|
||||
`title` text NOT NULL,
|
||||
`version` text NOT NULL,
|
||||
`share_url` text,
|
||||
`summary_additions` integer,
|
||||
`summary_deletions` integer,
|
||||
`summary_files` integer,
|
||||
`summary_diffs` text,
|
||||
`revert` text,
|
||||
`permission` text,
|
||||
`time_created` integer NOT NULL,
|
||||
`time_updated` integer NOT NULL,
|
||||
`time_compacting` integer,
|
||||
`time_archived` integer,
|
||||
CONSTRAINT `fk_session_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `todo` (
|
||||
`session_id` text NOT NULL,
|
||||
`content` text NOT NULL,
|
||||
`status` text NOT NULL,
|
||||
`priority` text NOT NULL,
|
||||
`position` integer NOT NULL,
|
||||
`time_created` integer NOT NULL,
|
||||
`time_updated` integer NOT NULL,
|
||||
CONSTRAINT `todo_pk` PRIMARY KEY(`session_id`, `position`),
|
||||
CONSTRAINT `fk_todo_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `session_share` (
|
||||
`session_id` text PRIMARY KEY,
|
||||
`id` text NOT NULL,
|
||||
`secret` text NOT NULL,
|
||||
`url` text NOT NULL,
|
||||
`time_created` integer NOT NULL,
|
||||
`time_updated` integer NOT NULL,
|
||||
CONSTRAINT `fk_session_share_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-breakpoint
|
||||
CREATE INDEX `part_message_idx` ON `part` (`message_id`);--> statement-breakpoint
|
||||
CREATE INDEX `part_session_idx` ON `part` (`session_id`);--> statement-breakpoint
|
||||
CREATE INDEX `session_project_idx` ON `session` (`project_id`);--> statement-breakpoint
|
||||
CREATE INDEX `session_parent_idx` ON `session` (`parent_id`);--> statement-breakpoint
|
||||
CREATE INDEX `todo_session_idx` ON `todo` (`session_id`);
|
||||
@@ -0,0 +1,796 @@
|
||||
{
|
||||
"version": "7",
|
||||
"dialect": "sqlite",
|
||||
"id": "068758ed-a97a-46f6-8a59-6c639ae7c20c",
|
||||
"prevIds": ["00000000-0000-0000-0000-000000000000"],
|
||||
"ddl": [
|
||||
{
|
||||
"name": "project",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "message",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "part",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "permission",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "session",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "todo",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "session_share",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "worktree",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "vcs",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "name",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "icon_url",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "icon_color",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_initialized",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "sandboxes",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "data",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "message_id",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "data",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "project_id",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "data",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "project_id",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "parent_id",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "slug",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "directory",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "title",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "version",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "share_url",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_additions",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_deletions",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_files",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_diffs",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "revert",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "permission",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_compacting",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_archived",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "content",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "status",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "priority",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "position",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "secret",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "url",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id"],
|
||||
"tableTo": "session",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_message_session_id_session_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"columns": ["message_id"],
|
||||
"tableTo": "message",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_part_message_id_message_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"columns": ["project_id"],
|
||||
"tableTo": "project",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_permission_project_id_project_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"columns": ["project_id"],
|
||||
"tableTo": "project",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_session_project_id_project_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id"],
|
||||
"tableTo": "session",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_todo_session_id_session_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id"],
|
||||
"tableTo": "session",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_session_share_session_id_session_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id", "position"],
|
||||
"nameExplicit": false,
|
||||
"name": "todo_pk",
|
||||
"entityType": "pks",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"columns": ["id"],
|
||||
"nameExplicit": false,
|
||||
"name": "project_pk",
|
||||
"table": "project",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["id"],
|
||||
"nameExplicit": false,
|
||||
"name": "message_pk",
|
||||
"table": "message",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["id"],
|
||||
"nameExplicit": false,
|
||||
"name": "part_pk",
|
||||
"table": "part",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["project_id"],
|
||||
"nameExplicit": false,
|
||||
"name": "permission_pk",
|
||||
"table": "permission",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["id"],
|
||||
"nameExplicit": false,
|
||||
"name": "session_pk",
|
||||
"table": "session",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id"],
|
||||
"nameExplicit": false,
|
||||
"name": "session_share_pk",
|
||||
"table": "session_share",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "session_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "message_session_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "message_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "part_message_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "session_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "part_session_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "project_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "session_project_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "parent_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "session_parent_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "session_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "todo_session_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "todo"
|
||||
}
|
||||
],
|
||||
"renames": []
|
||||
}
|
||||
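The 796-line JSON file above is the drizzle-kit snapshot that pairs with the generated migration: it records every table, column, foreign key, primary key, and index so that the next `drizzle-kit generate` or `drizzle-kit check` can diff the live schema definitions against it. A hedged sketch of the kind of drizzle.config.ts that would produce it follows; the schema glob and output directory are assumptions, not confirmed paths from this repository.

// Hypothetical drizzle.config.ts; the real paths and options in this repo may differ.
import { defineConfig } from "drizzle-kit"

export default defineConfig({
  dialect: "sqlite",
  schema: "./src/**/*.sql.ts", // assumed: table definitions live in *.sql.ts files like project.sql.ts below
  out: "./migration",          // assumed: matches the migration/ directory that build.ts reads
})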
@@ -15,7 +15,8 @@
    "lint": "echo 'Running lint checks...' && bun test --coverage",
    "format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts",
    "docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;",
    "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'"
    "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'",
    "db": "bun drizzle-kit"
  },
  "bin": {
    "opencode": "./bin/opencode"
@@ -42,6 +43,8 @@
    "@types/turndown": "5.0.5",
    "@types/yargs": "17.0.33",
    "@typescript/native-preview": "catalog:",
    "drizzle-kit": "1.0.0-beta.12-a5629fb",
    "drizzle-orm": "1.0.0-beta.12-a5629fb",
    "typescript": "catalog:",
    "vscode-languageserver-types": "3.17.5",
    "why-is-node-running": "3.2.2",
@@ -100,6 +103,7 @@
    "clipboardy": "4.0.0",
    "decimal.js": "10.5.0",
    "diff": "catalog:",
    "drizzle-orm": "1.0.0-beta.12-a5629fb",
    "fuzzysort": "3.1.0",
    "gray-matter": "4.0.3",
    "hono": "catalog:",
@@ -122,5 +126,8 @@
    "yargs": "18.0.0",
    "zod": "catalog:",
    "zod-to-json-schema": "3.24.5"
  },
  "overrides": {
    "drizzle-orm": "1.0.0-beta.12-a5629fb"
  }
}
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
import solidPlugin from "../node_modules/@opentui/solid/scripts/solid-plugin"
|
||||
import path from "path"
|
||||
import fs from "fs"
|
||||
import os from "os"
|
||||
import { $ } from "bun"
|
||||
import { fileURLToPath } from "url"
|
||||
|
||||
@@ -25,9 +26,38 @@ await Bun.write(
|
||||
)
|
||||
console.log("Generated models-snapshot.ts")
|
||||
|
||||
// Load migrations from migration directories
|
||||
const migrationDirs = (await fs.promises.readdir(path.join(dir, "migration"), { withFileTypes: true }))
|
||||
.filter((entry) => entry.isDirectory() && /^\d{4}\d{2}\d{2}\d{2}\d{2}\d{2}/.test(entry.name))
|
||||
.map((entry) => entry.name)
|
||||
.sort()
|
||||
|
||||
const migrations = await Promise.all(
|
||||
migrationDirs.map(async (name) => {
|
||||
const file = path.join(dir, "migration", name, "migration.sql")
|
||||
const sql = await Bun.file(file).text()
|
||||
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(name)
|
||||
const timestamp = match
|
||||
? Date.UTC(
|
||||
Number(match[1]),
|
||||
Number(match[2]) - 1,
|
||||
Number(match[3]),
|
||||
Number(match[4]),
|
||||
Number(match[5]),
|
||||
Number(match[6]),
|
||||
)
|
||||
: 0
|
||||
return { sql, timestamp }
|
||||
}),
|
||||
)
|
||||
console.log(`Loaded ${migrations.length} migrations`)
|
||||
|
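Each migration directory name begins with a YYYYMMDDHHMMSS stamp, which build.ts parses into a UTC timestamp and bakes into the binary as `{ sql, timestamp }` pairs through the `OPENCODE_MIGRATIONS` define further down. The sketch below shows one way a runtime could replay those pairs in order; the actual application logic lives in the storage layer and is not shown in this diff, so treat the bookkeeping table and helper name as assumptions.

// Illustrative only: replaying the baked-in migrations against a bun:sqlite database.
import { Database } from "bun:sqlite"

declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[]

export function applyMigrations(db: Database) {
  // hypothetical bookkeeping table; the real schema may track applied migrations differently
  db.run("CREATE TABLE IF NOT EXISTS migration (timestamp INTEGER PRIMARY KEY)")
  const pending = [...OPENCODE_MIGRATIONS].sort((a, b) => a.timestamp - b.timestamp)
  for (const m of pending) {
    const done = db.query("SELECT 1 FROM migration WHERE timestamp = ?").get(m.timestamp)
    if (done) continue
    // drizzle separates statements with this marker, so split before executing
    for (const stmt of m.sql.split("--> statement-breakpoint")) db.run(stmt)
    db.query("INSERT INTO migration (timestamp) VALUES (?)").run(m.timestamp)
  }
}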
||||
const singleFlag = process.argv.includes("--single")
|
||||
const baselineFlag = process.argv.includes("--baseline")
|
||||
const skipInstall = process.argv.includes("--skip-install")
|
||||
const skipRelease = process.argv.includes("--skip-release")
|
||||
const osFilter = process.argv.filter((a) => a.startsWith("--os=")).map((a) => a.split("=")[1])
|
||||
const excludeOs = process.argv.filter((a) => a.startsWith("--exclude-os=")).map((a) => a.split("=")[1])
|
||||
|
||||
const allTargets: {
|
||||
os: string
|
||||
@@ -88,26 +118,18 @@ const allTargets: {
|
||||
},
|
||||
]
|
||||
|
||||
const targets = singleFlag
|
||||
? allTargets.filter((item) => {
|
||||
if (item.os !== process.platform || item.arch !== process.arch) {
|
||||
return false
|
||||
}
|
||||
|
||||
// When building for the current platform, prefer a single native binary by default.
|
||||
// Baseline binaries require additional Bun artifacts and can be flaky to download.
|
||||
if (item.avx2 === false) {
|
||||
return baselineFlag
|
||||
}
|
||||
|
||||
// also skip abi-specific builds for the same reason
|
||||
if (item.abi !== undefined) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
: allTargets
|
||||
const targets = allTargets.filter((item) => {
|
||||
if (osFilter.length && !osFilter.includes(item.os)) return false
|
||||
if (excludeOs.includes(item.os)) return false
|
||||
if (!singleFlag) return true
|
||||
if (item.os !== process.platform || item.arch !== process.arch) return false
|
||||
// When building for the current platform, prefer a single native binary by default.
|
||||
// Baseline binaries require additional Bun artifacts and can be flaky to download.
|
||||
if (item.avx2 === false) return baselineFlag
|
||||
// also skip abi-specific builds for the same reason
|
||||
if (item.abi !== undefined) return false
|
||||
return true
|
||||
})
|
||||
|
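The old `singleFlag` ternary is folded into a single filter that also honors repeated `--os=` flags and the `--exclude-os=` flag used for Windows builds. A standalone illustration of the same predicate follows; the target list here is made up.

const sample = [
  { os: "linux", arch: "x64" },
  { os: "darwin", arch: "arm64" },
  { os: "win32", arch: "x64" },
]
const osFilter = ["linux"]   // as collected from repeated --os=linux arguments
const excludeOs = ["win32"]  // as collected from --exclude-os=win32
const picked = sample.filter((t) => (!osFilter.length || osFilter.includes(t.os)) && !excludeOs.includes(t.os))
// picked -> [{ os: "linux", arch: "x64" }]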
||||
await $`rm -rf dist`
|
||||
|
||||
@@ -137,6 +159,38 @@ for (const item of targets) {
|
||||
const bunfsRoot = item.os === "win32" ? "B:/~BUN/root/" : "/$bunfs/root/"
|
||||
const workerRelativePath = path.relative(dir, parserWorker).replaceAll("\\", "/")
|
||||
|
||||
// Use anomalyco/bun fork for Windows targets (includes Ctrl+C fix from PR #25876)
|
||||
// https://github.com/anomalyco/bun/releases/tag/v1.3.9-opencode.2
|
||||
// BUN_COMPILE_TARGET_TARBALL_URL only works for cross-compilation, so we download
|
||||
// the fork binary and use executablePath to ensure it works for both native and cross builds.
|
||||
// When running natively on Windows (build-cli-windows job), the fork is already the
|
||||
// system Bun so executablePath is only needed for the baseline variant.
|
||||
const BUN_FORK_VERSION = "1.3.9-opencode.2"
|
||||
let forkExePath: string | undefined
|
||||
if (item.os === "win32") {
|
||||
const native = process.platform === "win32"
|
||||
if (!native || item.avx2 === false) {
|
||||
const variant = item.avx2 === false ? "bun-windows-x64-baseline" : "bun-windows-x64"
|
||||
const forkDir = path.join(os.tmpdir(), "bun-fork", BUN_FORK_VERSION, variant)
|
||||
const forkBin = path.join(forkDir, "bun.exe")
|
||||
if (!fs.existsSync(forkBin)) {
|
||||
const tgzUrl = `https://github.com/anomalyco/bun/releases/download/v${BUN_FORK_VERSION}/${variant}.tgz`
|
||||
console.log(`downloading bun fork ${BUN_FORK_VERSION} (${variant})...`)
|
||||
console.log(` ${tgzUrl}`)
|
||||
fs.mkdirSync(forkDir, { recursive: true })
|
||||
const tgzName = `${variant}.tgz`
|
||||
const tgzPath = path.join(forkDir, tgzName)
|
||||
const dl = Bun.spawnSync(["curl", "-fSL", "-o", tgzName, tgzUrl], { cwd: forkDir })
|
||||
if (dl.exitCode !== 0) throw new Error(`download failed: ${dl.stderr.toString()}`)
|
||||
const tar = Bun.spawnSync(["tar", "-xzf", tgzName, "--strip-components=2"], { cwd: forkDir })
|
||||
if (tar.exitCode !== 0) throw new Error(`tar extract failed: ${tar.stderr.toString()}`)
|
||||
fs.unlinkSync(tgzPath)
|
||||
}
|
||||
forkExePath = forkBin
|
||||
console.log(`using bun fork: ${forkBin}`)
|
||||
}
|
||||
}
|
||||
|
||||
await Bun.build({
|
||||
conditions: ["browser"],
|
||||
tsconfig: "./tsconfig.json",
|
||||
@@ -152,10 +206,12 @@ for (const item of targets) {
|
||||
outfile: `dist/${name}/bin/opencode`,
|
||||
execArgv: [`--user-agent=opencode/${Script.version}`, "--use-system-ca", "--"],
|
||||
windows: {},
|
||||
...(forkExePath ? { executablePath: forkExePath } : {}),
|
||||
},
|
||||
entrypoints: ["./src/index.ts", parserWorker, workerPath],
|
||||
define: {
|
||||
OPENCODE_VERSION: `'${Script.version}'`,
|
||||
OPENCODE_MIGRATIONS: JSON.stringify(migrations),
|
||||
OTUI_TREE_SITTER_WORKER_PATH: bunfsRoot + workerRelativePath,
|
||||
OPENCODE_WORKER_PATH: workerPath,
|
||||
OPENCODE_CHANNEL: `'${Script.channel}'`,
|
||||
@@ -179,15 +235,20 @@ for (const item of targets) {
|
||||
binaries[name] = Script.version
|
||||
}
|
||||
|
||||
if (Script.release) {
|
||||
if (Script.release && !skipRelease) {
|
||||
const archives: string[] = []
|
||||
for (const key of Object.keys(binaries)) {
|
||||
if (key.includes("linux")) {
|
||||
await $`tar -czf ../../${key}.tar.gz *`.cwd(`dist/${key}/bin`)
|
||||
archives.push(`./dist/${key}.tar.gz`)
|
||||
} else {
|
||||
await $`zip -r ../../${key}.zip *`.cwd(`dist/${key}/bin`)
|
||||
archives.push(`./dist/${key}.zip`)
|
||||
}
|
||||
}
|
||||
await $`gh release upload v${Script.version} ./dist/*.zip ./dist/*.tar.gz --clobber`
|
||||
if (archives.length) {
|
||||
await $`gh release upload v${Script.version} ${archives} --clobber`
|
||||
}
|
||||
}
|
||||
|
||||
export { binaries }
|
||||
|
||||
16
packages/opencode/script/check-migrations.ts
Normal file
@@ -0,0 +1,16 @@
#!/usr/bin/env bun

import { $ } from "bun"

// drizzle-kit check compares schema to migrations, exits non-zero if drift
const result = await $`bun drizzle-kit check`.quiet().nothrow()

if (result.exitCode !== 0) {
  console.error("Schema has changes not captured in migrations!")
  console.error("Run: bun drizzle-kit generate")
  console.error("")
  console.error(result.stderr.toString())
  process.exit(1)
}

console.log("Migrations are up to date")
|
||||
@@ -435,46 +435,68 @@ export namespace ACP {
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (part.type === "text") {
|
||||
const delta = props.delta
|
||||
if (delta && part.ignored !== true) {
|
||||
await this.connection
|
||||
.sessionUpdate({
|
||||
sessionId,
|
||||
update: {
|
||||
sessionUpdate: "agent_message_chunk",
|
||||
content: {
|
||||
type: "text",
|
||||
text: delta,
|
||||
},
|
||||
case "message.part.delta": {
|
||||
const props = event.properties
|
||||
const session = this.sessionManager.tryGet(props.sessionID)
|
||||
if (!session) return
|
||||
const sessionId = session.id
|
||||
|
||||
const message = await this.sdk.session
|
||||
.message(
|
||||
{
|
||||
sessionID: props.sessionID,
|
||||
messageID: props.messageID,
|
||||
directory: session.cwd,
|
||||
},
|
||||
{ throwOnError: true },
|
||||
)
|
||||
.then((x) => x.data)
|
||||
.catch((error) => {
|
||||
log.error("unexpected error when fetching message", { error })
|
||||
return undefined
|
||||
})
|
||||
|
||||
if (!message || message.info.role !== "assistant") return
|
||||
|
||||
const part = message.parts.find((p) => p.id === props.partID)
|
||||
if (!part) return
|
||||
|
||||
if (part.type === "text" && props.field === "text" && part.ignored !== true) {
|
||||
await this.connection
|
||||
.sessionUpdate({
|
||||
sessionId,
|
||||
update: {
|
||||
sessionUpdate: "agent_message_chunk",
|
||||
content: {
|
||||
type: "text",
|
||||
text: props.delta,
|
||||
},
|
||||
})
|
||||
.catch((error) => {
|
||||
log.error("failed to send text to ACP", { error })
|
||||
})
|
||||
}
|
||||
},
|
||||
})
|
||||
.catch((error) => {
|
||||
log.error("failed to send text delta to ACP", { error })
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (part.type === "reasoning") {
|
||||
const delta = props.delta
|
||||
if (delta) {
|
||||
await this.connection
|
||||
.sessionUpdate({
|
||||
sessionId,
|
||||
update: {
|
||||
sessionUpdate: "agent_thought_chunk",
|
||||
content: {
|
||||
type: "text",
|
||||
text: delta,
|
||||
},
|
||||
if (part.type === "reasoning" && props.field === "text") {
|
||||
await this.connection
|
||||
.sessionUpdate({
|
||||
sessionId,
|
||||
update: {
|
||||
sessionUpdate: "agent_thought_chunk",
|
||||
content: {
|
||||
type: "text",
|
||||
text: props.delta,
|
||||
},
|
||||
})
|
||||
.catch((error) => {
|
||||
log.error("failed to send reasoning to ACP", { error })
|
||||
})
|
||||
}
|
||||
},
|
||||
})
|
||||
.catch((error) => {
|
||||
log.error("failed to send reasoning delta to ACP", { error })
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
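Both the ACP bridge above and the TUI sync store further down consume the new `message.part.delta` event, appending each chunk to the named field of an existing part instead of re-reading the whole message. The shape below is inferred from those two call sites rather than taken from the canonical event definition.

// Inferred shape of the new event (assumption, pieced together from the handlers in this diff):
type MessagePartDeltaEvent = {
  type: "message.part.delta"
  properties: {
    sessionID: string
    messageID: string
    partID: string
    field: string // e.g. "text" on text and reasoning parts
    delta: string // chunk to append to the current value of that field
  }
}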
@@ -184,6 +184,18 @@ export namespace Agent {
        ),
        prompt: PROMPT_TITLE,
      },
      handoff: {
        name: "handoff",
        mode: "primary",
        options: {},
        native: true,
        hidden: true,
        temperature: 0.5,
        permission: PermissionNext.fromConfig({
          "*": "allow",
        }),
        prompt: "none",
      },
      summary: {
        name: "summary",
        mode: "primary",
|
||||
|
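The new `handoff` agent is a hidden built-in primary agent with everything permitted and no system prompt; it backs the handoff flow the TUI gains later in this changeset. A sketch of calling the endpoint the prompt component uses is below; the variable names and the model choice are hypothetical, and the exact SDK types may differ.

// Given an SDK client like the one the TUI holds:
const result = await sdk.client.session.handoff({
  sessionID: currentSessionID, // assumed: the session being handed off
  goal: "Carry on with the schema migration work",
  model: { providerID: "anthropic", modelID: "claude-sonnet-4" }, // hypothetical model choice
})
if (result.data) {
  // result.data.text becomes the prompt for the new session; result.data.files lists files to re-attach
  console.log(result.data.text, result.data.files)
}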
||||
@@ -3,7 +3,8 @@ import type { Session as SDKSession, Message, Part } from "@opencode-ai/sdk/v2"
|
||||
import { Session } from "../../session"
|
||||
import { cmd } from "./cmd"
|
||||
import { bootstrap } from "../bootstrap"
|
||||
import { Storage } from "../../storage/storage"
|
||||
import { Database } from "../../storage/db"
|
||||
import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { ShareNext } from "../../share/share-next"
|
||||
import { EOL } from "os"
|
||||
@@ -130,13 +131,35 @@ export const ImportCommand = cmd({
|
||||
return
|
||||
}
|
||||
|
||||
await Storage.write(["session", Instance.project.id, exportData.info.id], exportData.info)
|
||||
Database.use((db) => db.insert(SessionTable).values(Session.toRow(exportData.info)).onConflictDoNothing().run())
|
||||
|
||||
for (const msg of exportData.messages) {
|
||||
await Storage.write(["message", exportData.info.id, msg.info.id], msg.info)
|
||||
Database.use((db) =>
|
||||
db
|
||||
.insert(MessageTable)
|
||||
.values({
|
||||
id: msg.info.id,
|
||||
session_id: exportData.info.id,
|
||||
time_created: msg.info.time?.created ?? Date.now(),
|
||||
data: msg.info,
|
||||
})
|
||||
.onConflictDoNothing()
|
||||
.run(),
|
||||
)
|
||||
|
||||
for (const part of msg.parts) {
|
||||
await Storage.write(["part", msg.info.id, part.id], part)
|
||||
Database.use((db) =>
|
||||
db
|
||||
.insert(PartTable)
|
||||
.values({
|
||||
id: part.id,
|
||||
message_id: msg.info.id,
|
||||
session_id: exportData.info.id,
|
||||
data: part,
|
||||
})
|
||||
.onConflictDoNothing()
|
||||
.run(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
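The import command above now writes each session, message, and part twice: once to the legacy JSON storage and once to SQLite, with `onConflictDoNothing()` keeping re-imports idempotent. One way the per-message inserts could be batched is sketched below; drizzle exposes a synchronous `transaction` on the SQLite drivers, but this wrapper is illustrative, not the repository's code.

// Illustrative only: grouping a message and its parts into one transaction.
Database.use((db) =>
  db.transaction((tx) => {
    tx.insert(MessageTable)
      .values({
        id: msg.info.id,
        session_id: exportData.info.id,
        time_created: msg.info.time?.created ?? Date.now(),
        data: msg.info,
      })
      .onConflictDoNothing()
      .run()
    for (const part of msg.parts) {
      tx.insert(PartTable)
        .values({ id: part.id, message_id: msg.info.id, session_id: exportData.info.id, data: part })
        .onConflictDoNothing()
        .run()
    }
  }),
)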
@@ -2,7 +2,8 @@ import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { Session } from "../../session"
import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage"
import { Database } from "../../storage/db"
import { SessionTable } from "../../session/session.sql"
import { Project } from "../../project/project"
import { Instance } from "../../project/instance"

@@ -87,25 +88,8 @@ async function getCurrentProject(): Promise<Project.Info> {
}

async function getAllSessions(): Promise<Session.Info[]> {
  const sessions: Session.Info[] = []

  const projectKeys = await Storage.list(["project"])
  const projects = await Promise.all(projectKeys.map((key) => Storage.read<Project.Info>(key)))

  for (const project of projects) {
    if (!project) continue

    const sessionKeys = await Storage.list(["session", project.id])
    const projectSessions = await Promise.all(sessionKeys.map((key) => Storage.read<Session.Info>(key)))

    for (const session of projectSessions) {
      if (session) {
        sessions.push(session)
      }
    }
  }

  return sessions
  const rows = Database.use((db) => db.select().from(SessionTable).all())
  return rows.map((row) => Session.fromRow(row))
}

export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {
|
||||
|
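With sessions in SQLite, `getAllSessions` becomes a single table scan instead of walking per-project JSON blobs, and the day/project filters used by `aggregateSessionStats` could be pushed into the query itself. The following is a sketch of that filtered query, not the code in this changeset; `and`, `eq`, and `gte` are standard drizzle-orm exports.

// Sketch: filtering in SQL instead of in JS (assumes days and projectFilter from the caller).
import { and, eq, gte } from "drizzle-orm"

const since = days ? Date.now() - days * 24 * 60 * 60 * 1000 : 0
const rows = Database.use((db) =>
  db
    .select()
    .from(SessionTable)
    .where(and(gte(SessionTable.time_created, since), projectFilter ? eq(SessionTable.project_id, projectFilter) : undefined))
    .all(),
)
const sessions = rows.map((row) => Session.fromRow(row))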
||||
@@ -83,6 +83,7 @@ function init() {
    },
    slashes() {
      return visibleOptions().flatMap((option) => {
        if (option.disabled) return []
        const slash = option.slash
        if (!slash) return []
        return {
|
||||
|
||||
@@ -7,6 +7,27 @@ import { useDialog } from "@tui/ui/dialog"
|
||||
import { createDialogProviderOptions, DialogProvider } from "./dialog-provider"
|
||||
import { useKeybind } from "../context/keybind"
|
||||
import * as fuzzysort from "fuzzysort"
|
||||
import type { Provider } from "@opencode-ai/sdk/v2"
|
||||
|
||||
function pickLatest(models: [string, Provider["models"][string]][]) {
|
||||
const picks: Record<string, [string, Provider["models"][string]]> = {}
|
||||
for (const item of models) {
|
||||
const model = item[0]
|
||||
const info = item[1]
|
||||
const key = info.family ?? model
|
||||
const prev = picks[key]
|
||||
if (!prev) {
|
||||
picks[key] = item
|
||||
continue
|
||||
}
|
||||
if (info.release_date !== prev[1].release_date) {
|
||||
if (info.release_date > prev[1].release_date) picks[key] = item
|
||||
continue
|
||||
}
|
||||
if (model > prev[0]) picks[key] = item
|
||||
}
|
||||
return Object.values(picks)
|
||||
}
|
||||
|
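`pickLatest` groups the provider's models by `family` (falling back to the model id), keeps the entry with the newest `release_date` in each group, and breaks ties by the lexically larger id; the dialog uses this list unless "Show all models" is toggled. A conceptual example with made-up entries and trimmed-down metadata:

const models = [
  ["model-a-latest", { family: "model-a", release_date: "2025-06-01" }],
  ["model-a-2024", { family: "model-a", release_date: "2024-05-01" }],
  ["model-b", { release_date: "2025-01-15" }],
]
// pickLatest(models) keeps "model-a-latest" (newest release_date within the "model-a" family)
// and "model-b" (its own group); equal release_dates fall back to the larger model id.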
||||
export function useConnected() {
|
||||
const sync = useSync()
|
||||
@@ -22,6 +43,7 @@ export function DialogModel(props: { providerID?: string }) {
|
||||
const keybind = useKeybind()
|
||||
const [ref, setRef] = createSignal<DialogSelectRef<unknown>>()
|
||||
const [query, setQuery] = createSignal("")
|
||||
const [all, setAll] = createSignal(false)
|
||||
|
||||
const connected = useConnected()
|
||||
const providers = createDialogProviderOptions()
|
||||
@@ -117,12 +139,16 @@ export function DialogModel(props: { providerID?: string }) {
|
||||
(provider) => provider.id !== "opencode",
|
||||
(provider) => provider.name,
|
||||
),
|
||||
flatMap((provider) =>
|
||||
pipe(
|
||||
flatMap((provider) => {
|
||||
const items = pipe(
|
||||
provider.models,
|
||||
entries(),
|
||||
filter(([_, info]) => info.status !== "deprecated"),
|
||||
filter(([_, info]) => (props.providerID ? info.providerID === props.providerID : true)),
|
||||
)
|
||||
const list = all() ? items : pickLatest(items)
|
||||
return pipe(
|
||||
list,
|
||||
map(([model, info]) => {
|
||||
const value = {
|
||||
providerID: provider.id,
|
||||
@@ -168,8 +194,8 @@ export function DialogModel(props: { providerID?: string }) {
|
||||
(x) => x.footer !== "Free",
|
||||
(x) => x.title,
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
}),
|
||||
)
|
||||
|
||||
const popularProviders = !connected()
|
||||
@@ -222,6 +248,13 @@ export function DialogModel(props: { providerID?: string }) {
|
||||
local.model.toggleFavorite(option.value as { providerID: string; modelID: string })
|
||||
},
|
||||
},
|
||||
{
|
||||
keybind: keybind.all.model_show_all_toggle?.[0],
|
||||
title: all() ? "Show latest only" : "Show all models",
|
||||
onTrigger: () => {
|
||||
setAll((value) => !value)
|
||||
},
|
||||
},
|
||||
]}
|
||||
ref={setRef}
|
||||
onFilter={setQuery}
|
||||
|
||||
@@ -9,7 +9,7 @@ import type { AgentPart, FilePart, TextPart } from "@opencode-ai/sdk/v2"

export type PromptInfo = {
  input: string
  mode?: "normal" | "shell"
  mode?: "normal" | "shell" | "handoff"
  parts: (
    | Omit<FilePart, "id" | "messageID" | "sessionID">
    | Omit<AgentPart, "id" | "messageID" | "sessionID">
|
||||
|
||||
@@ -119,7 +119,7 @@ export function Prompt(props: PromptProps) {
|
||||
|
||||
const [store, setStore] = createStore<{
|
||||
prompt: PromptInfo
|
||||
mode: "normal" | "shell"
|
||||
mode: "normal" | "shell" | "handoff"
|
||||
extmarkToPartIndex: Map<number, number>
|
||||
interrupt: number
|
||||
placeholder: number
|
||||
@@ -338,6 +338,20 @@ export function Prompt(props: PromptProps) {
|
||||
))
|
||||
},
|
||||
},
|
||||
{
|
||||
title: "Handoff",
|
||||
value: "prompt.handoff",
|
||||
disabled: props.sessionID === undefined,
|
||||
category: "Prompt",
|
||||
slash: {
|
||||
name: "handoff",
|
||||
},
|
||||
onSelect: () => {
|
||||
input.clear()
|
||||
setStore("mode", "handoff")
|
||||
setStore("prompt", { input: "", parts: [] })
|
||||
},
|
||||
},
|
||||
]
|
||||
})
|
||||
|
||||
@@ -515,17 +529,45 @@ export function Prompt(props: PromptProps) {
|
||||
async function submit() {
|
||||
if (props.disabled) return
|
||||
if (autocomplete?.visible) return
|
||||
const selectedModel = local.model.current()
|
||||
if (!selectedModel) {
|
||||
promptModelWarning()
|
||||
return
|
||||
}
|
||||
|
||||
if (store.mode === "handoff") {
|
||||
const result = await sdk.client.session.handoff({
|
||||
sessionID: props.sessionID!,
|
||||
goal: store.prompt.input,
|
||||
model: {
|
||||
providerID: selectedModel.providerID,
|
||||
modelID: selectedModel.modelID,
|
||||
},
|
||||
})
|
||||
if (result.data) {
|
||||
route.navigate({
|
||||
type: "home",
|
||||
initialPrompt: {
|
||||
input: result.data.text,
|
||||
parts:
|
||||
result.data.files.map((file) => ({
|
||||
type: "file",
|
||||
url: file,
|
||||
filename: file,
|
||||
mime: "text/plain",
|
||||
})) ?? [],
|
||||
},
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (!store.prompt.input) return
|
||||
const trimmed = store.prompt.input.trim()
|
||||
if (trimmed === "exit" || trimmed === "quit" || trimmed === ":q") {
|
||||
exit()
|
||||
return
|
||||
}
|
||||
const selectedModel = local.model.current()
|
||||
if (!selectedModel) {
|
||||
promptModelWarning()
|
||||
return
|
||||
}
|
||||
const sessionID = props.sessionID
|
||||
? props.sessionID
|
||||
: await (async () => {
|
||||
@@ -726,6 +768,7 @@ export function Prompt(props: PromptProps) {
|
||||
const highlight = createMemo(() => {
|
||||
if (keybind.leader) return theme.border
|
||||
if (store.mode === "shell") return theme.primary
|
||||
if (store.mode === "handoff") return theme.warning
|
||||
return local.agent.color(local.agent.current().name)
|
||||
})
|
||||
|
||||
@@ -797,7 +840,11 @@ export function Prompt(props: PromptProps) {
|
||||
flexGrow={1}
|
||||
>
|
||||
<textarea
|
||||
placeholder={props.sessionID ? undefined : `Ask anything... "${PLACEHOLDERS[store.placeholder]}"`}
|
||||
placeholder={iife(() => {
|
||||
if (store.mode === "handoff") return "Goal for the new session"
|
||||
if (props.sessionID) return undefined
|
||||
return `Ask anything... "${PLACEHOLDERS[store.placeholder]}"`
|
||||
})}
|
||||
textColor={keybind.leader ? theme.textMuted : theme.text}
|
||||
focusedTextColor={keybind.leader ? theme.textMuted : theme.text}
|
||||
minHeight={1}
|
||||
@@ -854,7 +901,7 @@ export function Prompt(props: PromptProps) {
|
||||
e.preventDefault()
|
||||
return
|
||||
}
|
||||
if (store.mode === "shell") {
|
||||
if (store.mode === "shell" || store.mode === "handoff") {
|
||||
if ((e.name === "backspace" && input.visualCursor.offset === 0) || e.name === "escape") {
|
||||
setStore("mode", "normal")
|
||||
e.preventDefault()
|
||||
@@ -975,7 +1022,11 @@ export function Prompt(props: PromptProps) {
|
||||
/>
|
||||
<box flexDirection="row" flexShrink={0} paddingTop={1} gap={1}>
|
||||
<text fg={highlight()}>
|
||||
{store.mode === "shell" ? "Shell" : Locale.titlecase(local.agent.current().name)}{" "}
|
||||
<Switch>
|
||||
<Match when={store.mode === "normal"}>{Locale.titlecase(local.agent.current().name)}</Match>
|
||||
<Match when={store.mode === "shell"}>Shell</Match>
|
||||
<Match when={store.mode === "handoff"}>Handoff</Match>
|
||||
</Switch>
|
||||
</text>
|
||||
<Show when={store.mode === "normal"}>
|
||||
<box flexDirection="row" gap={1}>
|
||||
@@ -1122,6 +1173,11 @@ export function Prompt(props: PromptProps) {
|
||||
esc <span style={{ fg: theme.textMuted }}>exit shell mode</span>
|
||||
</text>
|
||||
</Match>
|
||||
<Match when={store.mode === "handoff"}>
|
||||
<text fg={theme.text}>
|
||||
esc <span style={{ fg: theme.textMuted }}>exit handoff mode</span>
|
||||
</text>
|
||||
</Match>
|
||||
</Switch>
|
||||
</box>
|
||||
</Show>
|
||||
|
||||
@@ -1,4 +1,4 @@
import { createStore } from "solid-js/store"
import { createStore, reconcile } from "solid-js/store"
import { createSimpleContext } from "./helper"
import type { PromptInfo } from "../component/prompt/history"

@@ -32,7 +32,7 @@ export const { use: useRoute, provider: RouteProvider } = createSimpleContext({
    },
    navigate(route: Route) {
      console.log("navigate", route)
      setStore(route)
      setStore(reconcile(route))
    },
  }
},
|
||||
|
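A plain `setStore(route)` merges the new route into the previous one, so keys from the old route (such as `initialPrompt`) can leak into the next navigation; `reconcile` diffs against the new object and drops stale keys. A standalone example of the difference, not the TUI's store:

import { createStore, reconcile } from "solid-js/store"

const [route, setRoute] = createStore<{ type: string; initialPrompt?: { input: string } }>({
  type: "home",
  initialPrompt: { input: "hello" },
})
setRoute({ type: "session" })         // merge: initialPrompt survives
setRoute(reconcile({ type: "home" })) // reconcile: initialPrompt is cleared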
||||
@@ -299,6 +299,24 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
          break
        }

        case "message.part.delta": {
          const parts = store.part[event.properties.messageID]
          if (!parts) break
          const result = Binary.search(parts, event.properties.partID, (p) => p.id)
          if (!result.found) break
          setStore(
            "part",
            event.properties.messageID,
            produce((draft) => {
              const part = draft[result.index]
              const field = event.properties.field as keyof typeof part
              const existing = part[field] as string | undefined
              ;(part[field] as string) = (existing ?? "") + event.properties.delta
            }),
          )
          break
        }

        case "message.part.removed": {
          const parts = store.part[event.properties.messageID]
          const result = Binary.search(parts, event.properties.partID, (p) => p.id)
|
||||
|
||||
@@ -77,11 +77,11 @@ export function Home() {
|
||||
let prompt: PromptRef
|
||||
const args = useArgs()
|
||||
onMount(() => {
|
||||
if (once) return
|
||||
if (route.initialPrompt) {
|
||||
prompt.set(route.initialPrompt)
|
||||
once = true
|
||||
} else if (args.prompt) {
|
||||
return
|
||||
}
|
||||
if (!once && args.prompt) {
|
||||
prompt.set({ input: args.prompt, parts: [] })
|
||||
once = true
|
||||
prompt.submit()
|
||||
@@ -96,7 +96,26 @@ export function Home() {
|
||||
<box flexGrow={1} justifyContent="center" alignItems="center" paddingLeft={2} paddingRight={2} gap={1}>
|
||||
<box height={3} />
|
||||
<Logo />
|
||||
<box width="100%" maxWidth={75} zIndex={1000} paddingTop={1}>
|
||||
<Show when={!route.initialPrompt}>
|
||||
<box width="100%" maxWidth={75} zIndex={1000} paddingTop={1}>
|
||||
<Prompt
|
||||
ref={(r) => {
|
||||
prompt = r
|
||||
promptRef.set(r)
|
||||
}}
|
||||
hint={Hint}
|
||||
/>
|
||||
</box>
|
||||
</Show>
|
||||
<box height={3} width="100%" maxWidth={75} alignItems="center" paddingTop={2}>
|
||||
<Show when={showTips()}>
|
||||
<Tips />
|
||||
</Show>
|
||||
</box>
|
||||
<Toast />
|
||||
</box>
|
||||
<Show when={route.initialPrompt}>
|
||||
<box paddingLeft={2} paddingRight={2}>
|
||||
<Prompt
|
||||
ref={(r) => {
|
||||
prompt = r
|
||||
@@ -105,13 +124,7 @@ export function Home() {
|
||||
hint={Hint}
|
||||
/>
|
||||
</box>
|
||||
<box height={3} width="100%" maxWidth={75} alignItems="center" paddingTop={2}>
|
||||
<Show when={showTips()}>
|
||||
<Tips />
|
||||
</Show>
|
||||
</box>
|
||||
<Toast />
|
||||
</box>
|
||||
</Show>
|
||||
<box paddingTop={1} paddingBottom={1} paddingLeft={2} paddingRight={2} flexDirection="row" flexShrink={0} gap={2}>
|
||||
<text fg={theme.textMuted}>{directory()}</text>
|
||||
<box gap={1} flexDirection="row" flexShrink={0}>
|
||||
|
||||
@@ -2027,8 +2027,8 @@ function ApplyPatch(props: ToolProps<typeof ApplyPatchTool>) {
        </For>
      </Match>
      <Match when={true}>
        <InlineTool icon="%" pending="Preparing apply_patch..." complete={false} part={props.part}>
          apply_patch
        <InlineTool icon="%" pending="Preparing patch..." complete={false} part={props.part}>
          Patch
        </InlineTool>
      </Match>
    </Switch>
|
||||
|
||||
@@ -778,6 +778,7 @@ export namespace Config {
      stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"),
      model_provider_list: z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"),
      model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"),
      model_show_all_toggle: z.string().optional().default("ctrl+o").describe("Toggle showing all models"),
      session_share: z.string().optional().default("none").describe("Share current session"),
      session_unshare: z.string().optional().default("none").describe("Unshare current session"),
      session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"),
|
||||
|
||||
@@ -46,7 +46,7 @@ export namespace Flag {
  export const OPENCODE_EXPERIMENTAL_LSP_TY = truthy("OPENCODE_EXPERIMENTAL_LSP_TY")
  export const OPENCODE_EXPERIMENTAL_LSP_TOOL = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_LSP_TOOL")
  export const OPENCODE_DISABLE_FILETIME_CHECK = truthy("OPENCODE_DISABLE_FILETIME_CHECK")
  export const OPENCODE_EXPERIMENTAL_PLAN_MODE = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE")

  export const OPENCODE_EXPERIMENTAL_MARKDOWN = truthy("OPENCODE_EXPERIMENTAL_MARKDOWN")
  export const OPENCODE_MODELS_URL = process.env["OPENCODE_MODELS_URL"]
  export const OPENCODE_MODELS_PATH = process.env["OPENCODE_MODELS_PATH"]
|
||||
|
||||
@@ -26,6 +26,10 @@ import { EOL } from "os"
|
||||
import { WebCommand } from "./cli/cmd/web"
|
||||
import { PrCommand } from "./cli/cmd/pr"
|
||||
import { SessionCommand } from "./cli/cmd/session"
|
||||
import path from "path"
|
||||
import { Global } from "./global"
|
||||
import { JsonMigration } from "./storage/json-migration"
|
||||
import { Database } from "./storage/db"
|
||||
|
||||
process.on("unhandledRejection", (e) => {
|
||||
Log.Default.error("rejection", {
|
||||
@@ -74,6 +78,37 @@ const cli = yargs(hideBin(process.argv))
|
||||
version: Installation.VERSION,
|
||||
args: process.argv.slice(2),
|
||||
})
|
||||
|
||||
const marker = path.join(Global.Path.data, "opencode.db")
|
||||
if (!(await Bun.file(marker).exists())) {
|
||||
console.log("Performing one time database migration, may take a few minutes...")
|
||||
const tty = process.stdout.isTTY
|
||||
const width = 36
|
||||
const orange = "\x1b[38;5;214m"
|
||||
const muted = "\x1b[0;2m"
|
||||
const reset = "\x1b[0m"
|
||||
let last = -1
|
||||
if (tty) process.stdout.write("\x1b[?25l")
|
||||
try {
|
||||
await JsonMigration.run(Database.Client().$client, {
|
||||
progress: (event) => {
|
||||
if (!tty) return
|
||||
const percent = Math.floor((event.current / event.total) * 100)
|
||||
if (percent === last && event.current !== event.total) return
|
||||
last = percent
|
||||
const fill = Math.round((percent / 100) * width)
|
||||
const bar = `${"■".repeat(fill)}${"・".repeat(width - fill)}`
|
||||
process.stdout.write(
|
||||
`\r${orange}${bar} ${percent.toString().padStart(3)}%${reset} ${muted}${event.label.padEnd(12)} ${event.current}/${event.total}${reset}`,
|
||||
)
|
||||
if (event.current === event.total) process.stdout.write("\n")
|
||||
},
|
||||
})
|
||||
} finally {
|
||||
if (tty) process.stdout.write("\x1b[?25h")
|
||||
}
|
||||
console.log("Database migration complete.")
|
||||
}
|
||||
})
|
||||
.usage("\n" + UI.logo())
|
||||
.completion("completion", "generate shell completion script")
|
||||
|
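The one-time JSON-to-SQLite migration in index.ts above is gated on the absence of the opencode.db marker file and reports progress through a callback that drives the orange progress bar. The event shape below is inferred from that progress-bar code; the real signature in storage/json-migration.ts may differ.

// Inferred contract (assumption), plus a minimal non-TTY consumer that logs every 10% instead of redrawing a bar.
type MigrationProgress = { label: string; current: number; total: number }

let lastBucket = -1
const onProgress = (event: MigrationProgress) => {
  const bucket = Math.floor((event.current / event.total) * 10)
  if (bucket === lastBucket) return
  lastBucket = bucket
  console.log(`${event.label}: ${event.current}/${event.total}`)
}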
||||
@@ -3,7 +3,8 @@ import { BusEvent } from "@/bus/bus-event"
|
||||
import { Config } from "@/config/config"
|
||||
import { Identifier } from "@/id/id"
|
||||
import { Instance } from "@/project/instance"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { Database, eq } from "@/storage/db"
|
||||
import { PermissionTable } from "@/session/session.sql"
|
||||
import { fn } from "@/util/fn"
|
||||
import { Log } from "@/util/log"
|
||||
import { Wildcard } from "@/util/wildcard"
|
||||
@@ -105,9 +106,12 @@ export namespace PermissionNext {
|
||||
),
|
||||
}
|
||||
|
||||
const state = Instance.state(async () => {
|
||||
const state = Instance.state(() => {
|
||||
const projectID = Instance.project.id
|
||||
const stored = await Storage.read<Ruleset>(["permission", projectID]).catch(() => [] as Ruleset)
|
||||
const row = Database.use((db) =>
|
||||
db.select().from(PermissionTable).where(eq(PermissionTable.project_id, projectID)).get(),
|
||||
)
|
||||
const stored = row?.data ?? ([] as Ruleset)
|
||||
|
||||
const pending: Record<
|
||||
string,
|
||||
@@ -222,7 +226,8 @@ export namespace PermissionNext {
|
||||
|
||||
// TODO: we don't save the permission ruleset to disk yet until there's
|
||||
// UI to manage it
|
||||
// await Storage.write(["permission", Instance.project.id], s.approved)
|
||||
// db().insert(PermissionTable).values({ projectID: Instance.project.id, data: s.approved })
|
||||
// .onConflictDoUpdate({ target: PermissionTable.projectID, set: { data: s.approved } }).run()
|
||||
return
|
||||
}
|
||||
},
|
||||
@@ -275,6 +280,7 @@ export namespace PermissionNext {
|
||||
}
|
||||
|
||||
export async function list() {
|
||||
return state().then((x) => Object.values(x.pending).map((x) => x.info))
|
||||
const s = await state()
|
||||
return Object.values(s.pending).map((x) => x.info)
|
||||
}
|
||||
}
|
||||
|
||||
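The permission state now reads its stored ruleset from the permission table instead of JSON storage, but writing it back is still commented out pending a management UI. If persistence were enabled, the commented-out code suggests an upsert roughly like the sketch below; note the column is `project_id` in the table even though the comment writes `projectID`, and this is an illustration, not code from the changeset.

// Hypothetical persistence of the approved ruleset, following the commented-out lines above.
Database.use((db) =>
  db
    .insert(PermissionTable)
    .values({ project_id: Instance.project.id, data: s.approved, time_created: Date.now(), time_updated: Date.now() })
    .onConflictDoUpdate({ target: PermissionTable.project_id, set: { data: s.approved, time_updated: Date.now() } })
    .run(),
)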
@@ -1,5 +1,4 @@
import { Plugin } from "../plugin"
import { Share } from "../share/share"
import { Format } from "../format"
import { LSP } from "../lsp"
import { FileWatcher } from "../file/watcher"
@@ -17,7 +16,6 @@ import { Truncate } from "../tool/truncation"
export async function InstanceBootstrap() {
  Log.Default.info("bootstrapping", { directory: Instance.directory })
  await Plugin.init()
  Share.init()
  ShareNext.init()
  Format.init()
  await LSP.init()
|
||||
|
||||
14
packages/opencode/src/project/project.sql.ts
Normal file
@@ -0,0 +1,14 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"
import { Timestamps } from "@/storage/schema.sql"

export const ProjectTable = sqliteTable("project", {
  id: text().primaryKey(),
  worktree: text().notNull(),
  vcs: text(),
  name: text(),
  icon_url: text(),
  icon_color: text(),
  ...Timestamps,
  time_initialized: integer(),
  sandboxes: text({ mode: "json" }).notNull().$type<string[]>(),
})
|
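Every table in the snapshot carries the same non-null `time_created` and `time_updated` integer columns, which the new tables pull in by spreading `Timestamps`. The shared helper from storage/schema.sql.ts is not shown in this diff; a likely shape, offered only as a sketch, is:

// Assumed definition of the shared Timestamps helper (the real schema.sql.ts may add defaults or $onUpdate hooks).
import { integer } from "drizzle-orm/sqlite-core"

export const Timestamps = {
  time_created: integer().notNull(),
  time_updated: integer().notNull(),
}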
||||
@@ -1,18 +1,17 @@
|
||||
import z from "zod"
|
||||
import fs from "fs/promises"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import path from "path"
|
||||
import { $ } from "bun"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { Database, eq } from "../storage/db"
|
||||
import { ProjectTable } from "./project.sql"
|
||||
import { SessionTable } from "../session/session.sql"
|
||||
import { Log } from "../util/log"
|
||||
import { Flag } from "@/flag/flag"
|
||||
import { Session } from "../session"
|
||||
import { work } from "../util/queue"
|
||||
import { fn } from "@opencode-ai/util/fn"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { iife } from "@/util/iife"
|
||||
import { GlobalBus } from "@/bus/global"
|
||||
import { existsSync } from "fs"
|
||||
|
||||
export namespace Project {
|
||||
const log = Log.create({ service: "project" })
|
||||
@@ -50,66 +49,85 @@ export namespace Project {
|
||||
Updated: BusEvent.define("project.updated", Info),
|
||||
}
|
||||
|
||||
type Row = typeof ProjectTable.$inferSelect
|
||||
|
||||
export function fromRow(row: Row): Info {
|
||||
const icon =
|
||||
row.icon_url || row.icon_color
|
||||
? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined }
|
||||
: undefined
|
||||
return {
|
||||
id: row.id,
|
||||
worktree: row.worktree,
|
||||
vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined,
|
||||
name: row.name ?? undefined,
|
||||
icon,
|
||||
time: {
|
||||
created: row.time_created,
|
||||
updated: row.time_updated,
|
||||
initialized: row.time_initialized ?? undefined,
|
||||
},
|
||||
sandboxes: row.sandboxes,
|
||||
}
|
||||
}
|
||||
|
||||
export async function fromDirectory(directory: string) {
|
||||
log.info("fromDirectory", { directory })
|
||||
|
||||
const { id, sandbox, worktree, vcs } = await iife(async () => {
|
||||
const data = await iife(async () => {
|
||||
const matches = Filesystem.up({ targets: [".git"], start: directory })
|
||||
const git = await matches.next().then((x) => x.value)
|
||||
await matches.return()
|
||||
if (git) {
|
||||
let sandbox = path.dirname(git)
|
||||
const sandbox = path.dirname(git)
|
||||
const bin = Bun.which("git")
|
||||
|
||||
const gitBinary = Bun.which("git")
|
||||
|
||||
// cached id calculation
|
||||
let id = await Bun.file(path.join(git, "opencode"))
|
||||
const cached = await Bun.file(path.join(git, "opencode"))
|
||||
.text()
|
||||
.then((x) => x.trim())
|
||||
.catch(() => undefined)
|
||||
|
||||
if (!gitBinary) {
|
||||
if (!bin) {
|
||||
return {
|
||||
id: id ?? "global",
|
||||
id: cached ?? "global",
|
||||
worktree: sandbox,
|
||||
sandbox: sandbox,
|
||||
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
|
||||
}
|
||||
}
|
||||
|
||||
// generate id from root commit
|
||||
if (!id) {
|
||||
const roots = await $`git rev-list --max-parents=0 --all`
|
||||
.quiet()
|
||||
.nothrow()
|
||||
.cwd(sandbox)
|
||||
.text()
|
||||
.then((x) =>
|
||||
x
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((x) => x.trim())
|
||||
.toSorted(),
|
||||
)
|
||||
.catch(() => undefined)
|
||||
|
||||
if (!roots) {
|
||||
return {
|
||||
id: "global",
|
||||
worktree: sandbox,
|
||||
sandbox: sandbox,
|
||||
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
|
||||
}
|
||||
}
|
||||
|
||||
id = roots[0]
|
||||
if (id) {
|
||||
void Bun.file(path.join(git, "opencode"))
|
||||
.write(id)
|
||||
const roots = cached
|
||||
? undefined
|
||||
: await $`git rev-list --max-parents=0 --all`
|
||||
.quiet()
|
||||
.nothrow()
|
||||
.cwd(sandbox)
|
||||
.text()
|
||||
.then((x) =>
|
||||
x
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((x) => x.trim())
|
||||
.toSorted(),
|
||||
)
|
||||
.catch(() => undefined)
|
||||
|
||||
if (!cached && !roots) {
|
||||
return {
|
||||
id: "global",
|
||||
worktree: sandbox,
|
||||
sandbox: sandbox,
|
||||
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
|
||||
}
|
||||
}
|
||||
|
||||
const id = cached ?? roots?.[0]
|
||||
if (!cached && id) {
|
||||
void Bun.file(path.join(git, "opencode"))
|
||||
.write(id)
|
||||
.catch(() => undefined)
|
||||
}
|
||||
|
||||
if (!id) {
|
||||
return {
|
||||
id: "global",
|
||||
@@ -136,33 +154,31 @@ export namespace Project {
|
||||
}
|
||||
}
|
||||
|
||||
sandbox = top
|
||||
|
||||
const worktree = await $`git rev-parse --git-common-dir`
|
||||
const tree = await $`git rev-parse --git-common-dir`
|
||||
.quiet()
|
||||
.nothrow()
|
||||
.cwd(sandbox)
|
||||
.cwd(top)
|
||||
.text()
|
||||
.then((x) => {
|
||||
const dirname = path.dirname(x.trim())
|
||||
if (dirname === ".") return sandbox
|
||||
if (dirname === ".") return top
|
||||
return dirname
|
||||
})
|
||||
.catch(() => undefined)
|
||||
|
||||
if (!worktree) {
|
||||
if (!tree) {
|
||||
return {
|
||||
id,
|
||||
sandbox,
|
||||
worktree: sandbox,
|
||||
sandbox: top,
|
||||
worktree: top,
|
||||
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id,
|
||||
sandbox,
|
||||
worktree,
|
||||
sandbox: top,
|
||||
worktree: tree,
|
||||
vcs: "git",
|
||||
}
|
||||
}
|
||||
@@ -175,47 +191,78 @@ export namespace Project {
|
||||
}
|
||||
})
|
||||
|
||||
let existing = await Storage.read<Info>(["project", id]).catch(() => undefined)
|
||||
if (!existing) {
|
||||
existing = {
|
||||
id,
|
||||
worktree,
|
||||
vcs: vcs as Info["vcs"],
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, data.id)).get())
|
||||
const existing = await iife(async () => {
|
||||
if (row) return fromRow(row)
|
||||
const fresh: Info = {
|
||||
id: data.id,
|
||||
worktree: data.worktree,
|
||||
vcs: data.vcs as Info["vcs"],
|
||||
sandboxes: [],
|
||||
time: {
|
||||
created: Date.now(),
|
||||
updated: Date.now(),
|
||||
},
|
||||
}
|
||||
if (id !== "global") {
|
||||
await migrateFromGlobal(id, worktree)
|
||||
if (data.id !== "global") {
|
||||
await migrateFromGlobal(data.id, data.worktree)
|
||||
}
|
||||
}
|
||||
|
||||
// migrate old projects before sandboxes
|
||||
if (!existing.sandboxes) existing.sandboxes = []
|
||||
return fresh
|
||||
})
|
||||
|
||||
if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing)
|
||||
|
||||
const result: Info = {
|
||||
...existing,
|
||||
worktree,
|
||||
vcs: vcs as Info["vcs"],
|
||||
worktree: data.worktree,
|
||||
vcs: data.vcs as Info["vcs"],
|
||||
time: {
|
||||
...existing.time,
|
||||
updated: Date.now(),
|
||||
},
|
||||
}
|
||||
if (sandbox !== result.worktree && !result.sandboxes.includes(sandbox)) result.sandboxes.push(sandbox)
|
||||
result.sandboxes = result.sandboxes.filter((x) => existsSync(x))
|
||||
await Storage.write<Info>(["project", id], result)
|
||||
if (data.sandbox !== result.worktree && !result.sandboxes.includes(data.sandbox))
|
||||
result.sandboxes.push(data.sandbox)
|
||||
const sandboxes: string[] = []
|
||||
for (const x of result.sandboxes) {
|
||||
const stat = await Bun.file(x)
|
||||
.stat()
|
||||
.catch(() => undefined)
|
||||
if (stat) sandboxes.push(x)
|
||||
}
|
||||
result.sandboxes = sandboxes
|
||||
const insert = {
|
||||
id: result.id,
|
||||
worktree: result.worktree,
|
||||
vcs: result.vcs ?? null,
|
||||
name: result.name,
|
||||
icon_url: result.icon?.url,
|
||||
icon_color: result.icon?.color,
|
||||
time_created: result.time.created,
|
||||
time_updated: result.time.updated,
|
||||
time_initialized: result.time.initialized,
|
||||
sandboxes: result.sandboxes,
|
||||
}
|
||||
const updateSet = {
|
||||
worktree: result.worktree,
|
||||
vcs: result.vcs ?? null,
|
||||
name: result.name,
|
||||
icon_url: result.icon?.url,
|
||||
icon_color: result.icon?.color,
|
||||
time_updated: result.time.updated,
|
||||
time_initialized: result.time.initialized,
|
||||
sandboxes: result.sandboxes,
|
||||
}
|
||||
Database.use((db) =>
|
||||
db.insert(ProjectTable).values(insert).onConflictDoUpdate({ target: ProjectTable.id, set: updateSet }).run(),
|
||||
)
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: result,
|
||||
},
|
||||
})
|
||||
return { project: result, sandbox }
|
||||
return { project: result, sandbox: data.sandbox }
|
||||
}
|
||||
|
||||
export async function discover(input: Info) {
|
||||
@@ -248,43 +295,54 @@ export namespace Project {
|
||||
return
|
||||
}
|
||||
|
||||
async function migrateFromGlobal(newProjectID: string, worktree: string) {
|
||||
const globalProject = await Storage.read<Info>(["project", "global"]).catch(() => undefined)
|
||||
if (!globalProject) return
|
||||
async function migrateFromGlobal(id: string, worktree: string) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get())
|
||||
if (!row) return
|
||||
|
||||
const globalSessions = await Storage.list(["session", "global"]).catch(() => [])
|
||||
if (globalSessions.length === 0) return
|
||||
const sessions = Database.use((db) =>
|
||||
db.select().from(SessionTable).where(eq(SessionTable.project_id, "global")).all(),
|
||||
)
|
||||
if (sessions.length === 0) return
|
||||
|
||||
log.info("migrating sessions from global", { newProjectID, worktree, count: globalSessions.length })
|
||||
log.info("migrating sessions from global", { newProjectID: id, worktree, count: sessions.length })
|
||||
|
||||
await work(10, globalSessions, async (key) => {
|
||||
const sessionID = key[key.length - 1]
|
||||
const session = await Storage.read<Session.Info>(key).catch(() => undefined)
|
||||
if (!session) return
|
||||
if (session.directory && session.directory !== worktree) return
|
||||
await work(10, sessions, async (row) => {
|
||||
// Skip sessions that belong to a different directory
|
||||
if (row.directory && row.directory !== worktree) return
|
||||
|
||||
session.projectID = newProjectID
|
||||
log.info("migrating session", { sessionID, from: "global", to: newProjectID })
|
||||
await Storage.write(["session", newProjectID, sessionID], session)
|
||||
await Storage.remove(key)
|
||||
log.info("migrating session", { sessionID: row.id, from: "global", to: id })
|
||||
Database.use((db) => db.update(SessionTable).set({ project_id: id }).where(eq(SessionTable.id, row.id)).run())
|
||||
}).catch((error) => {
|
||||
log.error("failed to migrate sessions from global to project", { error, projectId: newProjectID })
|
||||
log.error("failed to migrate sessions from global to project", { error, projectId: id })
|
||||
})
|
||||
}
|
||||
|
||||
export async function setInitialized(projectID: string) {
|
||||
await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
draft.time.initialized = Date.now()
|
||||
})
|
||||
export function setInitialized(id: string) {
|
||||
Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({
|
||||
time_initialized: Date.now(),
|
||||
})
|
||||
.where(eq(ProjectTable.id, id))
|
||||
.run(),
|
||||
)
|
||||
}
|
||||
|
||||
export async function list() {
|
||||
const keys = await Storage.list(["project"])
|
||||
const projects = await Promise.all(keys.map((x) => Storage.read<Info>(x)))
|
||||
return projects.map((project) => ({
|
||||
...project,
|
||||
sandboxes: project.sandboxes?.filter((x) => existsSync(x)),
|
||||
}))
|
||||
export function list() {
|
||||
return Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(ProjectTable)
|
||||
.all()
|
||||
.map((row) => fromRow(row)),
|
||||
)
|
||||
}
|
||||
|
||||
export function get(id: string): Info | undefined {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
if (!row) return undefined
|
||||
return fromRow(row)
|
||||
}
|
||||
|
||||
export const update = fn(
|
||||
@@ -295,77 +353,89 @@ export namespace Project {
|
||||
commands: Info.shape.commands.optional(),
|
||||
}),
|
||||
async (input) => {
|
||||
const result = await Storage.update<Info>(["project", input.projectID], (draft) => {
|
||||
if (input.name !== undefined) draft.name = input.name
|
||||
if (input.icon !== undefined) {
|
||||
draft.icon = {
|
||||
...draft.icon,
|
||||
}
|
||||
if (input.icon.url !== undefined) draft.icon.url = input.icon.url
|
||||
if (input.icon.override !== undefined) draft.icon.override = input.icon.override || undefined
|
||||
if (input.icon.color !== undefined) draft.icon.color = input.icon.color
|
||||
}
|
||||
|
||||
if (input.commands?.start !== undefined) {
|
||||
const start = input.commands.start || undefined
|
||||
draft.commands = {
|
||||
...(draft.commands ?? {}),
|
||||
}
|
||||
draft.commands.start = start
|
||||
if (!draft.commands.start) draft.commands = undefined
|
||||
}
|
||||
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
const result = Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({
|
||||
name: input.name,
|
||||
icon_url: input.icon?.url,
|
||||
icon_color: input.icon?.color,
|
||||
time_updated: Date.now(),
|
||||
})
|
||||
.where(eq(ProjectTable.id, input.projectID))
|
||||
.returning()
|
||||
.get(),
|
||||
)
|
||||
if (!result) throw new Error(`Project not found: ${input.projectID}`)
|
||||
const data = fromRow(result)
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: result,
|
||||
properties: data,
|
||||
},
|
||||
})
|
||||
return result
|
||||
return data
|
||||
},
|
||||
)
|
||||
|
||||
export async function sandboxes(projectID: string) {
|
||||
const project = await Storage.read<Info>(["project", projectID]).catch(() => undefined)
|
||||
if (!project?.sandboxes) return []
|
||||
export async function sandboxes(id: string) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
if (!row) return []
|
||||
const data = fromRow(row)
|
||||
const valid: string[] = []
|
||||
for (const dir of project.sandboxes) {
|
||||
const stat = await fs.stat(dir).catch(() => undefined)
|
||||
for (const dir of data.sandboxes) {
|
||||
const stat = await Bun.file(dir)
|
||||
.stat()
|
||||
.catch(() => undefined)
|
||||
if (stat?.isDirectory()) valid.push(dir)
|
||||
}
|
||||
return valid
|
||||
}
|
||||
|
||||
export async function addSandbox(projectID: string, directory: string) {
|
||||
const result = await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
const sandboxes = draft.sandboxes ?? []
|
||||
if (!sandboxes.includes(directory)) sandboxes.push(directory)
|
||||
draft.sandboxes = sandboxes
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
export async function addSandbox(id: string, directory: string) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
if (!row) throw new Error(`Project not found: ${id}`)
|
||||
const sandboxes = [...row.sandboxes]
|
||||
if (!sandboxes.includes(directory)) sandboxes.push(directory)
|
||||
const result = Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({ sandboxes, time_updated: Date.now() })
|
||||
.where(eq(ProjectTable.id, id))
|
||||
.returning()
|
||||
.get(),
|
||||
)
|
||||
if (!result) throw new Error(`Project not found: ${id}`)
|
||||
const data = fromRow(result)
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: result,
|
||||
properties: data,
|
||||
},
|
||||
})
|
||||
return result
|
||||
return data
|
||||
}
|
||||
|
||||
export async function removeSandbox(projectID: string, directory: string) {
|
||||
const result = await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
const sandboxes = draft.sandboxes ?? []
|
||||
draft.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory)
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
export async function removeSandbox(id: string, directory: string) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
if (!row) throw new Error(`Project not found: ${id}`)
|
||||
const sandboxes = row.sandboxes.filter((s) => s !== directory)
|
||||
const result = Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({ sandboxes, time_updated: Date.now() })
|
||||
.where(eq(ProjectTable.id, id))
|
||||
.returning()
|
||||
.get(),
|
||||
)
|
||||
if (!result) throw new Error(`Project not found: ${id}`)
|
||||
const data = fromRow(result)
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: result,
|
||||
properties: data,
|
||||
},
|
||||
})
|
||||
return result
|
||||
return data
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { NotFoundError } from "../storage/db"
|
||||
|
||||
export const ERRORS = {
|
||||
400: {
|
||||
@@ -25,7 +25,7 @@ export const ERRORS = {
|
||||
description: "Not found",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Storage.NotFoundError.Schema),
|
||||
schema: resolver(NotFoundError.Schema),
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -3,7 +3,7 @@ import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import { upgradeWebSocket } from "hono/bun"
|
||||
import z from "zod"
|
||||
import { Pty } from "@/pty"
|
||||
import { Storage } from "../../storage/storage"
|
||||
import { NotFoundError } from "../../storage/db"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
@@ -76,7 +76,7 @@ export const PtyRoutes = lazy(() =>
|
||||
async (c) => {
|
||||
const info = Pty.get(c.req.valid("param").ptyID)
|
||||
if (!info) {
|
||||
throw new Storage.NotFoundError({ message: "Session not found" })
|
||||
throw new NotFoundError({ message: "Session not found" })
|
||||
}
|
||||
return c.json(info)
|
||||
},
|
||||
|
||||
@@ -7,6 +7,7 @@ import { MessageV2 } from "../../session/message-v2"
|
||||
import { SessionPrompt } from "../../session/prompt"
|
||||
import { SessionCompaction } from "../../session/compaction"
|
||||
import { SessionRevert } from "../../session/revert"
|
||||
import { SessionHandoff } from "../../session/handoff"
|
||||
import { SessionStatus } from "@/session/status"
|
||||
import { SessionSummary } from "@/session/summary"
|
||||
import { Todo } from "../../session/todo"
|
||||
@@ -276,18 +277,15 @@ export const SessionRoutes = lazy(() =>
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const updates = c.req.valid("json")
|
||||
|
||||
const updatedSession = await Session.update(
|
||||
sessionID,
|
||||
(session) => {
|
||||
if (updates.title !== undefined) {
|
||||
session.title = updates.title
|
||||
}
|
||||
if (updates.time?.archived !== undefined) session.time.archived = updates.time.archived
|
||||
},
|
||||
{ touch: false },
|
||||
)
|
||||
let session = await Session.get(sessionID)
|
||||
if (updates.title !== undefined) {
|
||||
session = await Session.setTitle({ sessionID, title: updates.title })
|
||||
}
|
||||
if (updates.time?.archived !== undefined) {
|
||||
session = await Session.setArchived({ sessionID, time: updates.time.archived })
|
||||
}
|
||||
|
||||
return c.json(updatedSession)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
@@ -935,5 +933,41 @@ export const SessionRoutes = lazy(() =>
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/handoff",
|
||||
describeRoute({
|
||||
summary: "Handoff session",
|
||||
description: "Extract context and relevant files for another agent to continue the conversation.",
|
||||
operationId: "session.handoff",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Handoff data extracted",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.object({ text: z.string(), files: z.string().array() })),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionHandoff.handoff.schema.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
const body = c.req.valid("json")
|
||||
const result = await SessionHandoff.handoff({
|
||||
sessionID: params.sessionID,
|
||||
model: body.model,
|
||||
goal: body.goal,
|
||||
})
|
||||
return c.json(result)
|
||||
},
|
||||
),
|
||||
)
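For reference, a rough sketch of how a client could exercise the new handoff route; the mount prefix, host/port, and the provider/model IDs are illustrative placeholders, not taken from this diff:

// Hypothetical client call against the new POST .../:sessionID/handoff route.
// Base URL, route prefix, and model identifiers are assumptions for illustration only.
const res = await fetch(`http://localhost:4096/session/${sessionID}/handoff`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    model: { providerID: "anthropic", modelID: "claude-sonnet-4" },
    goal: "continue implementing the session handoff feature",
  }),
})
// Shape comes from the route's response schema: { text: string, files: string[] }
const { text, files } = (await res.json()) as { text: string; files: string[] }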
|
||||
|
||||
@@ -31,7 +31,7 @@ import { ExperimentalRoutes } from "./routes/experimental"
|
||||
import { ProviderRoutes } from "./routes/provider"
|
||||
import { lazy } from "../util/lazy"
|
||||
import { InstanceBootstrap } from "../project/bootstrap"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { NotFoundError } from "../storage/db"
|
||||
import type { ContentfulStatusCode } from "hono/utils/http-status"
|
||||
import { websocket } from "hono/bun"
|
||||
import { HTTPException } from "hono/http-exception"
|
||||
@@ -65,7 +65,7 @@ export namespace Server {
|
||||
})
|
||||
if (err instanceof NamedError) {
|
||||
let status: ContentfulStatusCode
|
||||
if (err instanceof Storage.NotFoundError) status = 404
|
||||
if (err instanceof NotFoundError) status = 404
|
||||
else if (err instanceof Provider.ModelNotFoundError) status = 400
|
||||
else if (err.name.startsWith("Worktree")) status = 400
|
||||
else status = 500
|
||||
|
||||
105
packages/opencode/src/session/handoff.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import { fn } from "@/util/fn"
|
||||
import z from "zod"
|
||||
import { MessageV2 } from "./message-v2"
|
||||
import { LLM } from "./llm"
|
||||
import { Agent } from "@/agent/agent"
|
||||
import { Provider } from "@/provider/provider"
|
||||
import { iife } from "@/util/iife"
|
||||
import { Identifier } from "@/id/id"
|
||||
import PROMPT_HANDOFF from "./prompt/handoff.txt"
|
||||
import { type Tool } from "ai"
|
||||
import { SessionStatus } from "./status"
|
||||
import { defer } from "@/util/defer"
|
||||
|
||||
export namespace SessionHandoff {
|
||||
const HandoffTool: Tool = {
|
||||
description:
|
||||
"A tool to extract relevant information from the thread and select relevant files for another agent to continue the conversation. Use this tool to identify the most important context and files needed.",
|
||||
inputSchema: z.object({
|
||||
text: z.string().describe(PROMPT_HANDOFF),
|
||||
files: z
|
||||
.string()
|
||||
.array()
|
||||
.describe(
|
||||
[
|
||||
"An array of file or directory paths (workspace-relative) that are relevant to accomplishing the goal.",
|
||||
"",
|
||||
'IMPORTANT: Return as a JSON array of strings, e.g., ["packages/core/src/session/message-v2.ts", "packages/core/src/session/prompt/handoff.txt"]',
|
||||
"",
|
||||
"Rules:",
|
||||
"- Maximum 10 files. Only include the most critical files needed for the task.",
|
||||
"- You can include directories if multiple files from that directory are needed",
|
||||
"- Prioritize by importance and relevance. PUT THE MOST IMPORTANT FILES FIRST.",
|
||||
'- Return workspace-relative paths (e.g., "packages/core/src/session/message-v2.ts")',
|
||||
"- Do not use absolute paths or invent files",
|
||||
].join("\n"),
|
||||
),
|
||||
}),
|
||||
async execute(_args, _ctx) {
|
||||
return {}
|
||||
},
|
||||
}
|
||||
|
||||
export const handoff = fn(
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
model: z.object({ providerID: z.string(), modelID: z.string() }),
|
||||
goal: z.string().optional(),
|
||||
}),
|
||||
async (input) => {
|
||||
SessionStatus.set(input.sessionID, { type: "busy" })
|
||||
using _ = defer(() => SessionStatus.set(input.sessionID, { type: "idle" }))
|
||||
const messages = await MessageV2.filterCompacted(MessageV2.stream(input.sessionID))
|
||||
const agent = await Agent.get("handoff")
|
||||
const model = await iife(async () => {
|
||||
if (agent.model) return Provider.getModel(agent.model.providerID, agent.model.modelID)
|
||||
const small = await Provider.getSmallModel(input.model.providerID)
|
||||
if (small) return small
|
||||
return Provider.getModel(input.model.providerID, input.model.modelID)
|
||||
})
|
||||
const user = {
|
||||
info: {
|
||||
model: {
|
||||
providerID: model.providerID,
|
||||
modelID: model.id,
|
||||
},
|
||||
agent: agent.name,
|
||||
sessionID: input.sessionID,
|
||||
id: Identifier.ascending("user"),
|
||||
role: "user",
|
||||
time: {
|
||||
created: Date.now(),
|
||||
},
|
||||
} satisfies MessageV2.User,
|
||||
parts: [
|
||||
{
|
||||
type: "text",
|
||||
text: PROMPT_HANDOFF + "\n\nMy request:\n" + (input.goal ?? "general summarization"),
|
||||
id: Identifier.ascending("part"),
|
||||
sessionID: input.sessionID,
|
||||
messageID: Identifier.ascending("message"),
|
||||
},
|
||||
] satisfies MessageV2.TextPart[],
|
||||
} satisfies MessageV2.WithParts
|
||||
const abort = new AbortController()
|
||||
const stream = await LLM.stream({
|
||||
agent,
|
||||
messages: MessageV2.toModelMessages([...messages, user], model),
|
||||
sessionID: input.sessionID,
|
||||
abort: abort.signal,
|
||||
model,
|
||||
system: [],
|
||||
small: true,
|
||||
user: user.info,
|
||||
output: "tool",
|
||||
tools: {
|
||||
handoff: HandoffTool,
|
||||
},
|
||||
})
|
||||
|
||||
const [result] = await stream.toolCalls
|
||||
if (!result) throw new Error("Handoff tool did not return a result")
|
||||
return result.input
|
||||
},
|
||||
)
|
||||
}
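As a rough illustration (not part of this commit), calling the handoff helper directly would look something like the following; the session, provider, and model IDs are placeholders:

// Hypothetical direct usage of SessionHandoff.handoff; IDs are placeholders.
const result = await SessionHandoff.handoff({
  sessionID: "ses_123",
  model: { providerID: "anthropic", modelID: "claude-sonnet-4" },
  goal: "summarize what was done so another agent can pick up the work",
})
// result is the forced tool call's input, i.e. { text: string, files: string[] }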
|
||||
@@ -10,7 +10,9 @@ import { Flag } from "../flag/flag"
|
||||
import { Identifier } from "../id/id"
|
||||
import { Installation } from "../installation"
|
||||
|
||||
import { Storage } from "../storage/storage"
|
||||
import { Database, NotFoundError, eq, and, or, like } from "../storage/db"
|
||||
import { SessionTable, MessageTable, PartTable } from "./session.sql"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { Log } from "../util/log"
|
||||
import { MessageV2 } from "./message-v2"
|
||||
import { Instance } from "../project/instance"
|
||||
@@ -39,6 +41,64 @@ export namespace Session {
|
||||
).test(title)
|
||||
}
|
||||
|
||||
type SessionRow = typeof SessionTable.$inferSelect
|
||||
|
||||
export function fromRow(row: SessionRow): Info {
|
||||
const summary =
|
||||
row.summary_additions !== null || row.summary_deletions !== null || row.summary_files !== null
|
||||
? {
|
||||
additions: row.summary_additions ?? 0,
|
||||
deletions: row.summary_deletions ?? 0,
|
||||
files: row.summary_files ?? 0,
|
||||
diffs: row.summary_diffs ?? undefined,
|
||||
}
|
||||
: undefined
|
||||
const share = row.share_url ? { url: row.share_url } : undefined
|
||||
const revert = row.revert ?? undefined
|
||||
return {
|
||||
id: row.id,
|
||||
slug: row.slug,
|
||||
projectID: row.project_id,
|
||||
directory: row.directory,
|
||||
parentID: row.parent_id ?? undefined,
|
||||
title: row.title,
|
||||
version: row.version,
|
||||
summary,
|
||||
share,
|
||||
revert,
|
||||
permission: row.permission ?? undefined,
|
||||
time: {
|
||||
created: row.time_created,
|
||||
updated: row.time_updated,
|
||||
compacting: row.time_compacting ?? undefined,
|
||||
archived: row.time_archived ?? undefined,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function toRow(info: Info) {
|
||||
return {
|
||||
id: info.id,
|
||||
project_id: info.projectID,
|
||||
parent_id: info.parentID,
|
||||
slug: info.slug,
|
||||
directory: info.directory,
|
||||
title: info.title,
|
||||
version: info.version,
|
||||
share_url: info.share?.url,
|
||||
summary_additions: info.summary?.additions,
|
||||
summary_deletions: info.summary?.deletions,
|
||||
summary_files: info.summary?.files,
|
||||
summary_diffs: info.summary?.diffs,
|
||||
revert: info.revert ?? null,
|
||||
permission: info.permission,
|
||||
time_created: info.time.created,
|
||||
time_updated: info.time.updated,
|
||||
time_compacting: info.time.compacting,
|
||||
time_archived: info.time.archived,
|
||||
}
|
||||
}
|
||||
|
||||
function getForkedTitle(title: string): string {
|
||||
const match = title.match(/^(.+) \(fork #(\d+)\)$/)
|
||||
if (match) {
|
||||
@@ -92,16 +152,6 @@ export namespace Session {
|
||||
})
|
||||
export type Info = z.output<typeof Info>
|
||||
|
||||
export const ShareInfo = z
|
||||
.object({
|
||||
secret: z.string(),
|
||||
url: z.string(),
|
||||
})
|
||||
.meta({
|
||||
ref: "SessionShare",
|
||||
})
|
||||
export type ShareInfo = z.output<typeof ShareInfo>
|
||||
|
||||
export const Event = {
|
||||
Created: BusEvent.define(
|
||||
"session.created",
|
||||
@@ -198,8 +248,17 @@ export namespace Session {
|
||||
)
|
||||
|
||||
export const touch = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
await update(sessionID, (draft) => {
|
||||
draft.time.updated = Date.now()
|
||||
const now = Date.now()
|
||||
Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({ time_updated: now })
|
||||
.where(eq(SessionTable.id, sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -225,21 +284,19 @@ export namespace Session {
|
||||
},
|
||||
}
|
||||
log.info("created", result)
|
||||
await Storage.write(["session", Instance.project.id, result.id], result)
|
||||
Bus.publish(Event.Created, {
|
||||
info: result,
|
||||
Database.use((db) => {
|
||||
db.insert(SessionTable).values(toRow(result)).run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(Event.Created, {
|
||||
info: result,
|
||||
}),
|
||||
)
|
||||
})
|
||||
const cfg = await Config.get()
|
||||
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
|
||||
share(result.id)
|
||||
.then((share) => {
|
||||
update(result.id, (draft) => {
|
||||
draft.share = share
|
||||
})
|
||||
})
|
||||
.catch(() => {
|
||||
// Silently ignore sharing errors during session creation
|
||||
})
|
||||
share(result.id).catch(() => {
|
||||
// Silently ignore sharing errors during session creation
|
||||
})
|
||||
Bus.publish(Event.Updated, {
|
||||
info: result,
|
||||
})
|
||||
@@ -254,12 +311,9 @@ export namespace Session {
|
||||
}
|
||||
|
||||
export const get = fn(Identifier.schema("session"), async (id) => {
|
||||
const read = await Storage.read<Info>(["session", Instance.project.id, id])
|
||||
return read as Info
|
||||
})
|
||||
|
||||
export const getShare = fn(Identifier.schema("session"), async (id) => {
|
||||
return Storage.read<ShareInfo>(["share", id])
|
||||
const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get())
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
|
||||
return fromRow(row)
|
||||
})
|
||||
|
||||
export const share = fn(Identifier.schema("session"), async (id) => {
|
||||
@@ -269,15 +323,12 @@ export namespace Session {
|
||||
}
|
||||
const { ShareNext } = await import("@/share/share-next")
|
||||
const share = await ShareNext.create(id)
|
||||
await update(
|
||||
id,
|
||||
(draft) => {
|
||||
draft.share = {
|
||||
url: share.url,
|
||||
}
|
||||
},
|
||||
{ touch: false },
|
||||
)
|
||||
Database.use((db) => {
|
||||
const row = db.update(SessionTable).set({ share_url: share.url }).where(eq(SessionTable.id, id)).returning().get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
})
|
||||
return share
|
||||
})
|
||||
|
||||
@@ -285,32 +336,155 @@ export namespace Session {
|
||||
// Use ShareNext to remove the share (mirroring how share() uses ShareNext to create it)
|
||||
const { ShareNext } = await import("@/share/share-next")
|
||||
await ShareNext.remove(id)
|
||||
await update(
|
||||
id,
|
||||
(draft) => {
|
||||
draft.share = undefined
|
||||
},
|
||||
{ touch: false },
|
||||
)
|
||||
Database.use((db) => {
|
||||
const row = db.update(SessionTable).set({ share_url: null }).where(eq(SessionTable.id, id)).returning().get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
})
|
||||
})
|
||||
|
||||
export async function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
|
||||
const project = Instance.project
|
||||
const result = await Storage.update<Info>(["session", project.id, id], (draft) => {
|
||||
editor(draft)
|
||||
if (options?.touch !== false) {
|
||||
draft.time.updated = Date.now()
|
||||
}
|
||||
export const setTitle = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
title: z.string(),
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({ title: input.title })
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
export const setArchived = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
time: z.number().optional(),
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({ time_archived: input.time })
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
export const setPermission = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
permission: PermissionNext.Ruleset,
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({ permission: input.permission, time_updated: Date.now() })
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
export const setRevert = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
revert: Info.shape.revert,
|
||||
summary: Info.shape.summary,
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({
|
||||
revert: input.revert ?? null,
|
||||
summary_additions: input.summary?.additions,
|
||||
summary_deletions: input.summary?.deletions,
|
||||
summary_files: input.summary?.files,
|
||||
time_updated: Date.now(),
|
||||
})
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
export const clearRevert = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({
|
||||
revert: null,
|
||||
time_updated: Date.now(),
|
||||
})
|
||||
.where(eq(SessionTable.id, sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
Bus.publish(Event.Updated, {
|
||||
info: result,
|
||||
})
|
||||
return result
|
||||
}
|
||||
})
|
||||
|
||||
export const setSummary = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
summary: Info.shape.summary,
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({
|
||||
summary_additions: input.summary?.additions,
|
||||
summary_deletions: input.summary?.deletions,
|
||||
summary_files: input.summary?.files,
|
||||
time_updated: Date.now(),
|
||||
})
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
export const diff = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
|
||||
return diffs ?? []
|
||||
try {
|
||||
return await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
})
|
||||
|
||||
export const messages = fn(
|
||||
@@ -329,25 +503,37 @@ export namespace Session {
|
||||
},
|
||||
)
|
||||
|
||||
export async function* list() {
|
||||
export function* list() {
|
||||
const project = Instance.project
|
||||
for (const item of await Storage.list(["session", project.id])) {
|
||||
const session = await Storage.read<Info>(item).catch(() => undefined)
|
||||
if (!session) continue
|
||||
yield session
|
||||
const rel = path.relative(Instance.worktree, Instance.directory)
|
||||
const suffix = path.sep + rel
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(SessionTable)
|
||||
.where(
|
||||
and(
|
||||
eq(SessionTable.project_id, project.id),
|
||||
or(eq(SessionTable.directory, Instance.directory), like(SessionTable.directory, `%${suffix}`)),
|
||||
),
|
||||
)
|
||||
.all(),
|
||||
)
|
||||
for (const row of rows) {
|
||||
yield fromRow(row)
|
||||
}
|
||||
}
|
||||
|
||||
export const children = fn(Identifier.schema("session"), async (parentID) => {
|
||||
const project = Instance.project
|
||||
const result = [] as Session.Info[]
|
||||
for (const item of await Storage.list(["session", project.id])) {
|
||||
const session = await Storage.read<Info>(item).catch(() => undefined)
|
||||
if (!session) continue
|
||||
if (session.parentID !== parentID) continue
|
||||
result.push(session)
|
||||
}
|
||||
return result
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(SessionTable)
|
||||
.where(and(eq(SessionTable.project_id, project.id), eq(SessionTable.parent_id, parentID)))
|
||||
.all(),
|
||||
)
|
||||
return rows.map(fromRow)
|
||||
})
|
||||
|
||||
export const remove = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
@@ -358,15 +544,14 @@ export namespace Session {
|
||||
await remove(child.id)
|
||||
}
|
||||
await unshare(sessionID).catch(() => {})
|
||||
for (const msg of await Storage.list(["message", sessionID])) {
|
||||
for (const part of await Storage.list(["part", msg.at(-1)!])) {
|
||||
await Storage.remove(part)
|
||||
}
|
||||
await Storage.remove(msg)
|
||||
}
|
||||
await Storage.remove(["session", project.id, sessionID])
|
||||
Bus.publish(Event.Deleted, {
|
||||
info: session,
|
||||
// CASCADE delete handles messages and parts automatically
|
||||
Database.use((db) => {
|
||||
db.delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(Event.Deleted, {
|
||||
info: session,
|
||||
}),
|
||||
)
|
||||
})
|
||||
} catch (e) {
|
||||
log.error(e)
|
||||
@@ -374,9 +559,23 @@ export namespace Session {
|
||||
})
|
||||
|
||||
export const updateMessage = fn(MessageV2.Info, async (msg) => {
|
||||
await Storage.write(["message", msg.sessionID, msg.id], msg)
|
||||
Bus.publish(MessageV2.Event.Updated, {
|
||||
info: msg,
|
||||
const time_created = msg.time.created
|
||||
const { id, sessionID, ...data } = msg
|
||||
Database.use((db) => {
|
||||
db.insert(MessageTable)
|
||||
.values({
|
||||
id,
|
||||
session_id: sessionID,
|
||||
time_created,
|
||||
data,
|
||||
})
|
||||
.onConflictDoUpdate({ target: MessageTable.id, set: { data } })
|
||||
.run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(MessageV2.Event.Updated, {
|
||||
info: msg,
|
||||
}),
|
||||
)
|
||||
})
|
||||
return msg
|
||||
})
|
||||
@@ -387,10 +586,15 @@ export namespace Session {
|
||||
messageID: Identifier.schema("message"),
|
||||
}),
|
||||
async (input) => {
|
||||
await Storage.remove(["message", input.sessionID, input.messageID])
|
||||
Bus.publish(MessageV2.Event.Removed, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
// CASCADE delete handles parts automatically
|
||||
Database.use((db) => {
|
||||
db.delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(MessageV2.Event.Removed, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
}),
|
||||
)
|
||||
})
|
||||
return input.messageID
|
||||
},
|
||||
@@ -403,39 +607,58 @@ export namespace Session {
|
||||
partID: Identifier.schema("part"),
|
||||
}),
|
||||
async (input) => {
|
||||
await Storage.remove(["part", input.messageID, input.partID])
|
||||
Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
partID: input.partID,
|
||||
Database.use((db) => {
|
||||
db.delete(PartTable).where(eq(PartTable.id, input.partID)).run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
partID: input.partID,
|
||||
}),
|
||||
)
|
||||
})
|
||||
return input.partID
|
||||
},
|
||||
)
|
||||
|
||||
const UpdatePartInput = z.union([
|
||||
MessageV2.Part,
|
||||
z.object({
|
||||
part: MessageV2.TextPart,
|
||||
delta: z.string(),
|
||||
}),
|
||||
z.object({
|
||||
part: MessageV2.ReasoningPart,
|
||||
delta: z.string(),
|
||||
}),
|
||||
])
|
||||
const UpdatePartInput = MessageV2.Part
|
||||
|
||||
export const updatePart = fn(UpdatePartInput, async (input) => {
|
||||
const part = "delta" in input ? input.part : input
|
||||
const delta = "delta" in input ? input.delta : undefined
|
||||
await Storage.write(["part", part.messageID, part.id], part)
|
||||
Bus.publish(MessageV2.Event.PartUpdated, {
|
||||
part,
|
||||
delta,
|
||||
export const updatePart = fn(UpdatePartInput, async (part) => {
|
||||
const { id, messageID, sessionID, ...data } = part
|
||||
const time = Date.now()
|
||||
Database.use((db) => {
|
||||
db.insert(PartTable)
|
||||
.values({
|
||||
id,
|
||||
message_id: messageID,
|
||||
session_id: sessionID,
|
||||
time_created: time,
|
||||
data,
|
||||
})
|
||||
.onConflictDoUpdate({ target: PartTable.id, set: { data } })
|
||||
.run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(MessageV2.Event.PartUpdated, {
|
||||
part,
|
||||
}),
|
||||
)
|
||||
})
|
||||
return part
|
||||
})
|
||||
|
||||
export const updatePartDelta = fn(
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
messageID: z.string(),
|
||||
partID: z.string(),
|
||||
field: z.string(),
|
||||
delta: z.string(),
|
||||
}),
|
||||
async (input) => {
|
||||
Bus.publish(MessageV2.Event.PartDelta, input)
|
||||
},
|
||||
)
|
||||
|
||||
export const getUsage = fn(
|
||||
z.object({
|
||||
model: z.custom<Provider.Model>(),
|
||||
|
||||
@@ -39,6 +39,7 @@ export namespace LLM {
|
||||
small?: boolean
|
||||
tools: Record<string, Tool>
|
||||
retries?: number
|
||||
output?: "tool"
|
||||
}
|
||||
|
||||
export type StreamOutput = StreamTextResult<ToolSet, unknown>
|
||||
@@ -215,6 +216,7 @@ export namespace LLM {
|
||||
tools,
|
||||
maxOutputTokens,
|
||||
abortSignal: input.abort,
|
||||
toolChoice: input.output === "tool" ? "required" : undefined,
|
||||
headers: {
|
||||
...(input.model.providerID.startsWith("opencode")
|
||||
? {
|
||||
|
||||
@@ -6,6 +6,10 @@ import { Identifier } from "../id/id"
|
||||
import { LSP } from "../lsp"
|
||||
import { Snapshot } from "@/snapshot"
|
||||
import { fn } from "@/util/fn"
|
||||
import { Database, eq, desc, inArray } from "@/storage/db"
|
||||
import { MessageTable, PartTable } from "./session.sql"
|
||||
import { ProviderTransform } from "@/provider/transform"
|
||||
import { STATUS_CODES } from "http"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { ProviderError } from "@/provider/error"
|
||||
import { iife } from "@/util/iife"
|
||||
@@ -421,7 +425,16 @@ export namespace MessageV2 {
|
||||
"message.part.updated",
|
||||
z.object({
|
||||
part: Part,
|
||||
delta: z.string().optional(),
|
||||
}),
|
||||
),
|
||||
PartDelta: BusEvent.define(
|
||||
"message.part.delta",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
messageID: z.string(),
|
||||
partID: z.string(),
|
||||
field: z.string(),
|
||||
delta: z.string(),
|
||||
}),
|
||||
),
|
||||
PartRemoved: BusEvent.define(
|
||||
@@ -666,23 +679,65 @@ export namespace MessageV2 {
|
||||
}
|
||||
|
||||
export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
|
||||
const list = await Array.fromAsync(await Storage.list(["message", sessionID]))
|
||||
for (let i = list.length - 1; i >= 0; i--) {
|
||||
yield await get({
|
||||
sessionID,
|
||||
messageID: list[i][2],
|
||||
})
|
||||
const size = 50
|
||||
let offset = 0
|
||||
while (true) {
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(MessageTable)
|
||||
.where(eq(MessageTable.session_id, sessionID))
|
||||
.orderBy(desc(MessageTable.time_created))
|
||||
.limit(size)
|
||||
.offset(offset)
|
||||
.all(),
|
||||
)
|
||||
if (rows.length === 0) break
|
||||
|
||||
const ids = rows.map((row) => row.id)
|
||||
const partsByMessage = new Map<string, MessageV2.Part[]>()
|
||||
if (ids.length > 0) {
|
||||
const partRows = Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(PartTable)
|
||||
.where(inArray(PartTable.message_id, ids))
|
||||
.orderBy(PartTable.message_id, PartTable.id)
|
||||
.all(),
|
||||
)
|
||||
for (const row of partRows) {
|
||||
const part = {
|
||||
...row.data,
|
||||
id: row.id,
|
||||
sessionID: row.session_id,
|
||||
messageID: row.message_id,
|
||||
} as MessageV2.Part
|
||||
const list = partsByMessage.get(row.message_id)
|
||||
if (list) list.push(part)
|
||||
else partsByMessage.set(row.message_id, [part])
|
||||
}
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
|
||||
yield {
|
||||
info,
|
||||
parts: partsByMessage.get(row.id) ?? [],
|
||||
}
|
||||
}
|
||||
|
||||
offset += rows.length
|
||||
if (rows.length < size) break
|
||||
}
|
||||
})
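A minimal consumption sketch (assuming the fn wrapper returns the async generator unchanged, as the SessionHandoff usage above suggests):

// Rough sketch: iterate a session's messages newest-first, paging 50 rows at a time.
for await (const { info, parts } of MessageV2.stream(sessionID)) {
  console.log(info.id, info.role, parts.length)
}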
|
||||
|
||||
export const parts = fn(Identifier.schema("message"), async (messageID) => {
|
||||
const result = [] as MessageV2.Part[]
|
||||
for (const item of await Storage.list(["part", messageID])) {
|
||||
const read = await Storage.read<MessageV2.Part>(item)
|
||||
result.push(read)
|
||||
}
|
||||
result.sort((a, b) => (a.id > b.id ? 1 : -1))
|
||||
return result
|
||||
export const parts = fn(Identifier.schema("message"), async (message_id) => {
|
||||
const rows = Database.use((db) =>
|
||||
db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(),
|
||||
)
|
||||
return rows.map(
|
||||
(row) => ({ ...row.data, id: row.id, sessionID: row.session_id, messageID: row.message_id }) as MessageV2.Part,
|
||||
)
|
||||
})
|
||||
|
||||
export const get = fn(
|
||||
@@ -691,8 +746,11 @@ export namespace MessageV2 {
|
||||
messageID: Identifier.schema("message"),
|
||||
}),
|
||||
async (input): Promise<WithParts> => {
|
||||
const row = Database.use((db) => db.select().from(MessageTable).where(eq(MessageTable.id, input.messageID)).get())
|
||||
if (!row) throw new Error(`Message not found: ${input.messageID}`)
|
||||
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
|
||||
return {
|
||||
info: await Storage.read<MessageV2.Info>(["message", input.sessionID, input.messageID]),
|
||||
info,
|
||||
parts: await parts(input.messageID),
|
||||
}
|
||||
},
|
||||
|
||||
@@ -63,17 +63,19 @@ export namespace SessionProcessor {
|
||||
if (value.id in reasoningMap) {
|
||||
continue
|
||||
}
|
||||
reasoningMap[value.id] = {
|
||||
const reasoningPart = {
|
||||
id: Identifier.ascending("part"),
|
||||
messageID: input.assistantMessage.id,
|
||||
sessionID: input.assistantMessage.sessionID,
|
||||
type: "reasoning",
|
||||
type: "reasoning" as const,
|
||||
text: "",
|
||||
time: {
|
||||
start: Date.now(),
|
||||
},
|
||||
metadata: value.providerMetadata,
|
||||
}
|
||||
reasoningMap[value.id] = reasoningPart
|
||||
await Session.updatePart(reasoningPart)
|
||||
break
|
||||
|
||||
case "reasoning-delta":
|
||||
@@ -81,7 +83,13 @@ export namespace SessionProcessor {
|
||||
const part = reasoningMap[value.id]
|
||||
part.text += value.text
|
||||
if (value.providerMetadata) part.metadata = value.providerMetadata
|
||||
if (part.text) await Session.updatePart({ part, delta: value.text })
|
||||
await Session.updatePartDelta({
|
||||
sessionID: part.sessionID,
|
||||
messageID: part.messageID,
|
||||
partID: part.id,
|
||||
field: "text",
|
||||
delta: value.text,
|
||||
})
|
||||
}
|
||||
break
|
||||
|
||||
@@ -288,17 +296,20 @@ export namespace SessionProcessor {
|
||||
},
|
||||
metadata: value.providerMetadata,
|
||||
}
|
||||
await Session.updatePart(currentText)
|
||||
break
|
||||
|
||||
case "text-delta":
|
||||
if (currentText) {
|
||||
currentText.text += value.text
|
||||
if (value.providerMetadata) currentText.metadata = value.providerMetadata
|
||||
if (currentText.text)
|
||||
await Session.updatePart({
|
||||
part: currentText,
|
||||
delta: value.text,
|
||||
})
|
||||
await Session.updatePartDelta({
|
||||
sessionID: currentText.sessionID,
|
||||
messageID: currentText.messageID,
|
||||
partID: currentText.id,
|
||||
field: "text",
|
||||
delta: value.text,
|
||||
})
|
||||
}
|
||||
break
|
||||
|
||||
|
||||
@@ -165,9 +165,7 @@ export namespace SessionPrompt {
|
||||
}
|
||||
if (permissions.length > 0) {
|
||||
session.permission = permissions
|
||||
await Session.update(session.id, (draft) => {
|
||||
draft.permission = permissions
|
||||
})
|
||||
await Session.setPermission({ sessionID: session.id, permission: permissions })
|
||||
}
|
||||
|
||||
if (input.noReply === true) {
|
||||
@@ -1236,33 +1234,7 @@ export namespace SessionPrompt {
|
||||
const userMessage = input.messages.findLast((msg) => msg.info.role === "user")
|
||||
if (!userMessage) return input.messages
|
||||
|
||||
// Original logic when experimental plan mode is disabled
|
||||
if (!Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE) {
|
||||
if (input.agent.name === "plan") {
|
||||
userMessage.parts.push({
|
||||
id: Identifier.ascending("part"),
|
||||
messageID: userMessage.info.id,
|
||||
sessionID: userMessage.info.sessionID,
|
||||
type: "text",
|
||||
text: PROMPT_PLAN,
|
||||
synthetic: true,
|
||||
})
|
||||
}
|
||||
const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.agent === "plan")
|
||||
if (wasPlan && input.agent.name === "build") {
|
||||
userMessage.parts.push({
|
||||
id: Identifier.ascending("part"),
|
||||
messageID: userMessage.info.id,
|
||||
sessionID: userMessage.info.sessionID,
|
||||
type: "text",
|
||||
text: BUILD_SWITCH,
|
||||
synthetic: true,
|
||||
})
|
||||
}
|
||||
return input.messages
|
||||
}
|
||||
|
||||
// New plan mode logic when flag is enabled
|
||||
// Plan mode logic
|
||||
const assistantMessage = input.messages.findLast((msg) => msg.info.role === "assistant")
|
||||
|
||||
// Switching from plan mode to build mode
|
||||
@@ -1854,21 +1826,16 @@ NOTE: At any point in time through this workflow you should feel free to ask the
|
||||
],
|
||||
})
|
||||
const text = await result.text.catch((err) => log.error("failed to generate title", { error: err }))
|
||||
if (text)
|
||||
return Session.update(
|
||||
input.session.id,
|
||||
(draft) => {
|
||||
const cleaned = text
|
||||
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.find((line) => line.length > 0)
|
||||
if (!cleaned) return
|
||||
if (text) {
|
||||
const cleaned = text
|
||||
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.find((line) => line.length > 0)
|
||||
if (!cleaned) return
|
||||
|
||||
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
|
||||
draft.title = title
|
||||
},
|
||||
{ touch: false },
|
||||
)
|
||||
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
|
||||
return Session.setTitle({ sessionID: input.session.id, title })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,7 +56,7 @@ user: which file contains the implementation of foo?
|
||||
assistant: src/foo.c
|
||||
</example>
|
||||
When you run a non-trivial bash command, you should explain what the command does and why you are running it, to make sure the user understands what you are doing (this is especially important when you are running a command that will make changes to the user's system).
|
||||
Remember that your output will be displayed on a command line interface. Your responses can use Github-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
|
||||
Remember that your output will be displayed on a command line interface. Your responses can use GitHub-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
|
||||
Output text to communicate with the user; all text you output outside of tool use is displayed to the user. Only use tools to complete tasks. Never use tools like Bash or code comments as means to communicate with the user during the session.
|
||||
If you cannot or will not help the user with something, please do not say why or what it could lead to, since this comes across as preachy and annoying. Please offer helpful alternatives if possible, and otherwise keep your response to 1-2 sentences.
|
||||
Only use emojis if the user explicitly requests it. Avoid using emojis in all communication unless asked.
|
||||
|
||||
@@ -13,7 +13,7 @@ When the user directly asks about OpenCode (eg. "can OpenCode do...", "does Open
|
||||
|
||||
# Tone and style
|
||||
- Only use emojis if the user explicitly requests it. Avoid using emojis in all communication unless asked.
|
||||
- Your output will be displayed on a command line interface. Your responses should be short and concise. You can use Github-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
|
||||
- Your output will be displayed on a command line interface. Your responses should be short and concise. You can use GitHub-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
|
||||
- Output text to communicate with the user; all text you output outside of tool use is displayed to the user. Only use tools to complete tasks. Never use tools like Bash or code comments as means to communicate with the user during the session.
|
||||
- NEVER create files unless they're absolutely necessary for achieving your goal. ALWAYS prefer editing an existing file to creating a new one. This includes markdown files.
|
||||
|
||||
|
||||
@@ -76,7 +76,7 @@ Carefully read the issue and think hard about a plan to solve it before coding.
|
||||
- Create a todo list in markdown format to track your progress.
|
||||
- Each time you complete a step, check it off using `[x]` syntax.
|
||||
- Each time you check off a step, display the updated todo list to the user.
|
||||
- Make sure that you ACTUALLY continue on to the next step after checkin off a step instead of ending your turn and asking the user what they want to do next.
|
||||
- Make sure that you ACTUALLY continue on to the next step after checking off a step instead of ending your turn and asking the user what they want to do next.
|
||||
|
||||
## 6. Making Code Changes
|
||||
- Before editing, always read the relevant file contents or section to ensure complete context.
|
||||
|
||||
@@ -60,7 +60,7 @@ You are producing plain text that will later be styled by the CLI. Follow these
|
||||
|
||||
## Final answer structure and style guidelines
|
||||
|
||||
- Plain text; CLI handles styling. Use structure only when it helps scanability.
|
||||
- Plain text; CLI handles styling. Use structure only when it helps scannability.
|
||||
- Headers: optional; short Title Case (1-3 words) wrapped in **…**; no blank line before the first bullet; add only if they truly help.
|
||||
- Bullets: use - ; merge related points; keep to one line when possible; 4–6 per list ordered by importance; keep phrasing consistent.
|
||||
- Monospace: backticks for commands/paths/env vars/code ids and inline examples; use for literal keyword bullets; never combine with **.
|
||||
|
||||
@@ -39,7 +39,7 @@ incremental steps - use the todo tool to track your progress.
|
||||
- What are the edge cases?
|
||||
- What are the potential pitfalls?
|
||||
- How does this fit into the larger context of the codebase?
|
||||
- What are the dependencies and interactions with other parts of the codee
|
||||
- What are the dependencies and interactions with other parts of the code
|
||||
|
||||
## 2. Codebase Investigation
|
||||
- Explore relevant files and directories.
|
||||
|
||||
17
packages/opencode/src/session/prompt/handoff.txt
Normal file
@@ -0,0 +1,17 @@
|
||||
Extract relevant context from the conversation above for continuing this work. Write from my perspective (first person: "I did...", "I told you...").
|
||||
|
||||
Consider what would be useful to know based on my request below. Questions that might be relevant:
|
||||
|
||||
- What did I just do or implement?
|
||||
- What instructions did I already give you which are still relevant (e.g. follow patterns in the codebase)?
|
||||
- What files did I already tell you are important or that I am working on (and should continue working on)?
|
||||
- Did I provide a plan or spec that should be included?
|
||||
- What did I already tell you that's important (certain libraries, patterns, constraints, preferences)?
|
||||
- What important technical details did I discover (APIs, methods, patterns)?
|
||||
- What caveats, limitations, or open questions did I find?
|
||||
|
||||
Extract what matters for the specific request below. Don't answer questions that aren't relevant. Pick an appropriate length based on the complexity of the request.
|
||||
|
||||
Focus on capabilities and behavior, not file-by-file changes. Avoid excessive implementation details (variable names, storage keys, constants) unless critical.
|
||||
|
||||
Format: Plain text with bullets. No markdown headers, no bold/italic, no code fences. Use workspace-relative paths for files.
|
||||
@@ -12,7 +12,7 @@ When the user directly asks about opencode (eg 'can opencode do...', 'does openc
|
||||
|
||||
# Tone and style
|
||||
You should be concise, direct, and to the point. When you run a non-trivial bash command, you should explain what the command does and why you are running it, to make sure the user understands what you are doing (this is especially important when you are running a command that will make changes to the user's system).
|
||||
Remember that your output will be displayed on a command line interface. Your responses can use Github-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
|
||||
Remember that your output will be displayed on a command line interface. Your responses can use GitHub-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
|
||||
Output text to communicate with the user; all text you output outside of tool use is displayed to the user. Only use tools to complete tasks. Never use tools like Bash or code comments as means to communicate with the user during the session.
|
||||
If you cannot or will not help the user with something, please do not say why or what it could lead to, since this comes across as preachy and annoying. Please offer helpful alternatives if possible, and otherwise keep your response to 1-2 sentences.
|
||||
Only use emojis if the user explicitly requests it. Avoid using emojis in all communication unless asked.
|
||||
|
||||
@@ -2,7 +2,7 @@ You are opencode, an interactive CLI tool that helps users with software enginee
|
||||
|
||||
# Tone and style
|
||||
You should be concise, direct, and to the point. When you run a non-trivial bash command, you should explain what the command does and why you are running it, to make sure the user understands what you are doing (this is especially important when you are running a command that will make changes to the user's system).
|
||||
Remember that your output will be displayed on a command line interface. Your responses can use Github-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
|
||||
Remember that your output will be displayed on a command line interface. Your responses can use GitHub-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
|
||||
Output text to communicate with the user; all text you output outside of tool use is displayed to the user. Only use tools to complete tasks. Never use tools like Bash or code comments as means to communicate with the user during the session.
|
||||
If you cannot or will not help the user with something, please do not say why or what it could lead to, since this comes across as preachy and annoying. Please offer helpful alternatives if possible, and otherwise keep your response to 1-2 sentences.
|
||||
Only use emojis if the user explicitly requests it. Avoid using emojis in all communication unless asked.
|
||||
|
||||
@@ -4,8 +4,9 @@ import { Snapshot } from "../snapshot"
import { MessageV2 } from "./message-v2"
import { Session } from "."
import { Log } from "../util/log"
import { splitWhen } from "remeda"
import { Storage } from "../storage/storage"
import { Database, eq } from "../storage/db"
import { MessageTable, PartTable } from "./session.sql"
import { Storage } from "@/storage/storage"
import { Bus } from "../bus"
import { SessionPrompt } from "./prompt"
import { SessionSummary } from "./summary"
@@ -65,13 +66,14 @@ export namespace SessionRevert {
sessionID: input.sessionID,
diff: diffs,
})
return Session.update(input.sessionID, (draft) => {
draft.revert = revert
draft.summary = {
return Session.setRevert({
sessionID: input.sessionID,
revert,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
}
},
})
}
return session
@@ -83,39 +85,54 @@ export namespace SessionRevert {
|
||||
const session = await Session.get(input.sessionID)
|
||||
if (!session.revert) return session
|
||||
if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot)
|
||||
const next = await Session.update(input.sessionID, (draft) => {
|
||||
draft.revert = undefined
|
||||
})
|
||||
return next
|
||||
return Session.clearRevert(input.sessionID)
|
||||
}
|
||||
|
||||
export async function cleanup(session: Session.Info) {
|
||||
if (!session.revert) return
|
||||
const sessionID = session.id
|
||||
let msgs = await Session.messages({ sessionID })
|
||||
const msgs = await Session.messages({ sessionID })
|
||||
const messageID = session.revert.messageID
|
||||
const [preserve, remove] = splitWhen(msgs, (x) => x.info.id === messageID)
|
||||
msgs = preserve
|
||||
const preserve = [] as MessageV2.WithParts[]
|
||||
const remove = [] as MessageV2.WithParts[]
|
||||
let target: MessageV2.WithParts | undefined
|
||||
for (const msg of msgs) {
|
||||
if (msg.info.id < messageID) {
|
||||
preserve.push(msg)
|
||||
continue
|
||||
}
|
||||
if (msg.info.id > messageID) {
|
||||
remove.push(msg)
|
||||
continue
|
||||
}
|
||||
if (session.revert.partID) {
|
||||
preserve.push(msg)
|
||||
target = msg
|
||||
continue
|
||||
}
|
||||
remove.push(msg)
|
||||
}
|
||||
for (const msg of remove) {
|
||||
await Storage.remove(["message", sessionID, msg.info.id])
|
||||
Database.use((db) => db.delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run())
|
||||
await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id })
|
||||
}
|
||||
const last = preserve.at(-1)
|
||||
if (session.revert.partID && last) {
|
||||
if (session.revert.partID && target) {
|
||||
const partID = session.revert.partID
|
||||
const [preserveParts, removeParts] = splitWhen(last.parts, (x) => x.id === partID)
|
||||
last.parts = preserveParts
|
||||
for (const part of removeParts) {
|
||||
await Storage.remove(["part", last.info.id, part.id])
|
||||
await Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: sessionID,
|
||||
messageID: last.info.id,
|
||||
partID: part.id,
|
||||
})
|
||||
const removeStart = target.parts.findIndex((part) => part.id === partID)
|
||||
if (removeStart >= 0) {
|
||||
const preserveParts = target.parts.slice(0, removeStart)
|
||||
const removeParts = target.parts.slice(removeStart)
|
||||
target.parts = preserveParts
|
||||
for (const part of removeParts) {
|
||||
Database.use((db) => db.delete(PartTable).where(eq(PartTable.id, part.id)).run())
|
||||
await Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: sessionID,
|
||||
messageID: target.info.id,
|
||||
partID: part.id,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
await Session.update(sessionID, (draft) => {
|
||||
draft.revert = undefined
|
||||
})
|
||||
await Session.clearRevert(sessionID)
|
||||
}
|
||||
}
|
||||
|
||||
88
packages/opencode/src/session/session.sql.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import { sqliteTable, text, integer, index, primaryKey } from "drizzle-orm/sqlite-core"
|
||||
import { ProjectTable } from "../project/project.sql"
|
||||
import type { MessageV2 } from "./message-v2"
|
||||
import type { Snapshot } from "@/snapshot"
|
||||
import type { PermissionNext } from "@/permission/next"
|
||||
import { Timestamps } from "@/storage/schema.sql"
|
||||
|
||||
type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID">
|
||||
type InfoData = Omit<MessageV2.Info, "id" | "sessionID">
|
||||
|
||||
export const SessionTable = sqliteTable(
|
||||
"session",
|
||||
{
|
||||
id: text().primaryKey(),
|
||||
project_id: text()
|
||||
.notNull()
|
||||
.references(() => ProjectTable.id, { onDelete: "cascade" }),
|
||||
parent_id: text(),
|
||||
slug: text().notNull(),
|
||||
directory: text().notNull(),
|
||||
title: text().notNull(),
|
||||
version: text().notNull(),
|
||||
share_url: text(),
|
||||
summary_additions: integer(),
|
||||
summary_deletions: integer(),
|
||||
summary_files: integer(),
|
||||
summary_diffs: text({ mode: "json" }).$type<Snapshot.FileDiff[]>(),
|
||||
revert: text({ mode: "json" }).$type<{ messageID: string; partID?: string; snapshot?: string; diff?: string }>(),
|
||||
permission: text({ mode: "json" }).$type<PermissionNext.Ruleset>(),
|
||||
...Timestamps,
|
||||
time_compacting: integer(),
|
||||
time_archived: integer(),
|
||||
},
|
||||
(table) => [index("session_project_idx").on(table.project_id), index("session_parent_idx").on(table.parent_id)],
|
||||
)
|
||||
|
||||
export const MessageTable = sqliteTable(
|
||||
"message",
|
||||
{
|
||||
id: text().primaryKey(),
|
||||
session_id: text()
|
||||
.notNull()
|
||||
.references(() => SessionTable.id, { onDelete: "cascade" }),
|
||||
...Timestamps,
|
||||
data: text({ mode: "json" }).notNull().$type<InfoData>(),
|
||||
},
|
||||
(table) => [index("message_session_idx").on(table.session_id)],
|
||||
)
|
||||
|
||||
export const PartTable = sqliteTable(
|
||||
"part",
|
||||
{
|
||||
id: text().primaryKey(),
|
||||
message_id: text()
|
||||
.notNull()
|
||||
.references(() => MessageTable.id, { onDelete: "cascade" }),
|
||||
session_id: text().notNull(),
|
||||
...Timestamps,
|
||||
data: text({ mode: "json" }).notNull().$type<PartData>(),
|
||||
},
|
||||
(table) => [index("part_message_idx").on(table.message_id), index("part_session_idx").on(table.session_id)],
|
||||
)
|
||||
|
||||
export const TodoTable = sqliteTable(
|
||||
"todo",
|
||||
{
|
||||
session_id: text()
|
||||
.notNull()
|
||||
.references(() => SessionTable.id, { onDelete: "cascade" }),
|
||||
content: text().notNull(),
|
||||
status: text().notNull(),
|
||||
priority: text().notNull(),
|
||||
position: integer().notNull(),
|
||||
...Timestamps,
|
||||
},
|
||||
(table) => [
|
||||
primaryKey({ columns: [table.session_id, table.position] }),
|
||||
index("todo_session_idx").on(table.session_id),
|
||||
],
|
||||
)
|
||||
|
||||
export const PermissionTable = sqliteTable("permission", {
|
||||
project_id: text()
|
||||
.primaryKey()
|
||||
.references(() => ProjectTable.id, { onDelete: "cascade" }),
|
||||
...Timestamps,
|
||||
data: text({ mode: "json" }).notNull().$type<PermissionNext.Ruleset>(),
|
||||
})
|
||||
@@ -103,12 +103,13 @@ export namespace SessionSummary {
return files.has(x.file)
}),
)
await Session.update(input.sessionID, (draft) => {
draft.summary = {
await Session.setSummary({
sessionID: input.sessionID,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
}
},
})
await Storage.write(["session_diff", input.sessionID], diffs)
Bus.publish(Session.Event.Diff, {

@@ -1,7 +1,8 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import z from "zod"
import { Storage } from "../storage/storage"
import { Database, eq, asc } from "../storage/db"
import { TodoTable } from "./session.sql"

export namespace Todo {
export const Info = z
@@ -9,7 +10,6 @@ export namespace Todo {
content: z.string().describe("Brief description of the task"),
status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"),
priority: z.string().describe("Priority level of the task: high, medium, low"),
id: z.string().describe("Unique identifier for the todo item"),
})
.meta({ ref: "Todo" })
export type Info = z.infer<typeof Info>
@@ -24,14 +24,33 @@ export namespace Todo {
),
}

export async function update(input: { sessionID: string; todos: Info[] }) {
await Storage.write(["todo", input.sessionID], input.todos)
export function update(input: { sessionID: string; todos: Info[] }) {
Database.transaction((db) => {
db.delete(TodoTable).where(eq(TodoTable.session_id, input.sessionID)).run()
if (input.todos.length === 0) return
db.insert(TodoTable)
.values(
input.todos.map((todo, position) => ({
session_id: input.sessionID,
content: todo.content,
status: todo.status,
priority: todo.priority,
position,
})),
)
.run()
})
Bus.publish(Event.Updated, input)
}

export async function get(sessionID: string) {
return Storage.read<Info[]>(["todo", sessionID])
.then((x) => x || [])
.catch(() => [])
export function get(sessionID: string) {
const rows = Database.use((db) =>
db.select().from(TodoTable).where(eq(TodoTable.session_id, sessionID)).orderBy(asc(TodoTable.position)).all(),
)
return rows.map((row) => ({
content: row.content,
status: row.status,
priority: row.priority,
}))
}
}

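For orientation, a minimal usage sketch of the reworked Todo API above; the import path and session ID are illustrative, while the update/get signatures follow the hunk as shown:

import { Todo } from "@/session/todo"

// Replace the session's todo rows in one transaction; position is derived from array order.
Todo.update({
  sessionID: "ses_example",
  todos: [{ content: "write tests", status: "pending", priority: "high" }],
})

// Read rows back ordered by position.
const todos = Todo.get("ses_example")
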
@@ -4,7 +4,8 @@ import { ulid } from "ulid"
|
||||
import { Provider } from "@/provider/provider"
|
||||
import { Session } from "@/session"
|
||||
import { MessageV2 } from "@/session/message-v2"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { Database, eq } from "@/storage/db"
|
||||
import { SessionShareTable } from "./share.sql"
|
||||
import { Log } from "@/util/log"
|
||||
import type * as SDK from "@opencode-ai/sdk/v2"
|
||||
|
||||
@@ -77,17 +78,26 @@ export namespace ShareNext {
|
||||
})
|
||||
.then((x) => x.json())
|
||||
.then((x) => x as { id: string; url: string; secret: string })
|
||||
await Storage.write(["session_share", sessionID], result)
|
||||
Database.use((db) =>
|
||||
db
|
||||
.insert(SessionShareTable)
|
||||
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
|
||||
.onConflictDoUpdate({
|
||||
target: SessionShareTable.session_id,
|
||||
set: { id: result.id, secret: result.secret, url: result.url },
|
||||
})
|
||||
.run(),
|
||||
)
|
||||
fullSync(sessionID)
|
||||
return result
|
||||
}
|
||||
|
||||
function get(sessionID: string) {
|
||||
return Storage.read<{
|
||||
id: string
|
||||
secret: string
|
||||
url: string
|
||||
}>(["session_share", sessionID])
|
||||
const row = Database.use((db) =>
|
||||
db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
|
||||
)
|
||||
if (!row) return
|
||||
return { id: row.id, secret: row.secret, url: row.url }
|
||||
}
|
||||
|
||||
type Data =
|
||||
@@ -132,7 +142,7 @@ export namespace ShareNext {
|
||||
const queued = queue.get(sessionID)
|
||||
if (!queued) return
|
||||
queue.delete(sessionID)
|
||||
const share = await get(sessionID).catch(() => undefined)
|
||||
const share = get(sessionID)
|
||||
if (!share) return
|
||||
|
||||
await fetch(`${await url()}/api/share/${share.id}/sync`, {
|
||||
@@ -152,7 +162,7 @@ export namespace ShareNext {
|
||||
export async function remove(sessionID: string) {
|
||||
if (disabled) return
|
||||
log.info("removing share", { sessionID })
|
||||
const share = await get(sessionID)
|
||||
const share = get(sessionID)
|
||||
if (!share) return
|
||||
await fetch(`${await url()}/api/share/${share.id}`, {
|
||||
method: "DELETE",
|
||||
@@ -163,7 +173,7 @@ export namespace ShareNext {
|
||||
secret: share.secret,
|
||||
}),
|
||||
})
|
||||
await Storage.remove(["session_share", sessionID])
|
||||
Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
|
||||
}
|
||||
|
||||
async function fullSync(sessionID: string) {
|
||||
|
||||
13
packages/opencode/src/share/share.sql.ts
Normal file
@@ -0,0 +1,13 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { SessionTable } from "../session/session.sql"
import { Timestamps } from "@/storage/schema.sql"

export const SessionShareTable = sqliteTable("session_share", {
  session_id: text()
    .primaryKey()
    .references(() => SessionTable.id, { onDelete: "cascade" }),
  id: text().notNull(),
  secret: text().notNull(),
  url: text().notNull(),
  ...Timestamps,
})
@@ -1,92 +0,0 @@
|
||||
import { Bus } from "../bus"
|
||||
import { Installation } from "../installation"
|
||||
import { Session } from "../session"
|
||||
import { MessageV2 } from "../session/message-v2"
|
||||
import { Log } from "../util/log"
|
||||
|
||||
export namespace Share {
|
||||
const log = Log.create({ service: "share" })
|
||||
|
||||
let queue: Promise<void> = Promise.resolve()
|
||||
const pending = new Map<string, any>()
|
||||
|
||||
export async function sync(key: string, content: any) {
|
||||
if (disabled) return
|
||||
const [root, ...splits] = key.split("/")
|
||||
if (root !== "session") return
|
||||
const [sub, sessionID] = splits
|
||||
if (sub === "share") return
|
||||
const share = await Session.getShare(sessionID).catch(() => {})
|
||||
if (!share) return
|
||||
const { secret } = share
|
||||
pending.set(key, content)
|
||||
queue = queue
|
||||
.then(async () => {
|
||||
const content = pending.get(key)
|
||||
if (content === undefined) return
|
||||
pending.delete(key)
|
||||
|
||||
return fetch(`${URL}/share_sync`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
sessionID: sessionID,
|
||||
secret,
|
||||
key: key,
|
||||
content,
|
||||
}),
|
||||
})
|
||||
})
|
||||
.then((x) => {
|
||||
if (x) {
|
||||
log.info("synced", {
|
||||
key: key,
|
||||
status: x.status,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export function init() {
|
||||
Bus.subscribe(Session.Event.Updated, async (evt) => {
|
||||
await sync("session/info/" + evt.properties.info.id, evt.properties.info)
|
||||
})
|
||||
Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
|
||||
await sync("session/message/" + evt.properties.info.sessionID + "/" + evt.properties.info.id, evt.properties.info)
|
||||
})
|
||||
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
|
||||
await sync(
|
||||
"session/part/" +
|
||||
evt.properties.part.sessionID +
|
||||
"/" +
|
||||
evt.properties.part.messageID +
|
||||
"/" +
|
||||
evt.properties.part.id,
|
||||
evt.properties.part,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
export const URL =
|
||||
process.env["OPENCODE_API"] ??
|
||||
(Installation.isPreview() || Installation.isLocal() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")
|
||||
|
||||
const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
|
||||
|
||||
export async function create(sessionID: string) {
|
||||
if (disabled) return { url: "", secret: "" }
|
||||
return fetch(`${URL}/share_create`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ sessionID: sessionID }),
|
||||
})
|
||||
.then((x) => x.json())
|
||||
.then((x) => x as { url: string; secret: string })
|
||||
}
|
||||
|
||||
export async function remove(sessionID: string, secret: string) {
|
||||
if (disabled) return {}
|
||||
return fetch(`${URL}/share_delete`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ sessionID, secret }),
|
||||
}).then((x) => x.json())
|
||||
}
|
||||
}
|
||||
4
packages/opencode/src/sql.d.ts
vendored
Normal file
@@ -0,0 +1,4 @@
declare module "*.sql" {
  const content: string
  export default content
}
140
packages/opencode/src/storage/db.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
import { Database as BunDatabase } from "bun:sqlite"
|
||||
import { drizzle, type SQLiteBunDatabase } from "drizzle-orm/bun-sqlite"
|
||||
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
|
||||
import { type SQLiteTransaction } from "drizzle-orm/sqlite-core"
|
||||
export * from "drizzle-orm"
|
||||
import { Context } from "../util/context"
|
||||
import { lazy } from "../util/lazy"
|
||||
import { Global } from "../global"
|
||||
import { Log } from "../util/log"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import z from "zod"
|
||||
import path from "path"
|
||||
import { readFileSync, readdirSync } from "fs"
|
||||
import fs from "fs/promises"
|
||||
import { Instance } from "@/project/instance"
|
||||
|
||||
declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined
|
||||
|
||||
export const NotFoundError = NamedError.create(
|
||||
"NotFoundError",
|
||||
z.object({
|
||||
message: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
const log = Log.create({ service: "db" })
|
||||
|
||||
export namespace Database {
|
||||
export type Transaction = SQLiteTransaction<"sync", void, Record<string, never>, Record<string, never>>
|
||||
|
||||
type Client = SQLiteBunDatabase
|
||||
|
||||
type Journal = { sql: string; timestamp: number }[]
|
||||
|
||||
function time(tag: string) {
|
||||
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(tag)
|
||||
if (!match) return 0
|
||||
return Date.UTC(
|
||||
Number(match[1]),
|
||||
Number(match[2]) - 1,
|
||||
Number(match[3]),
|
||||
Number(match[4]),
|
||||
Number(match[5]),
|
||||
Number(match[6]),
|
||||
)
|
||||
}
|
||||
|
||||
function migrations(dir: string): Journal {
|
||||
const dirs = readdirSync(dir, { withFileTypes: true })
|
||||
.filter((entry) => entry.isDirectory())
|
||||
.map((entry) => entry.name)
|
||||
|
||||
const sql = dirs
|
||||
.map((name) => {
|
||||
const file = path.join(dir, name, "migration.sql")
|
||||
if (!Bun.file(file).size) return
|
||||
return {
|
||||
sql: readFileSync(file, "utf-8"),
|
||||
timestamp: time(name),
|
||||
}
|
||||
})
|
||||
.filter(Boolean) as Journal
|
||||
|
||||
return sql.sort((a, b) => a.timestamp - b.timestamp)
|
||||
}
|
||||
|
||||
export const Client = lazy(() => {
|
||||
log.info("opening database", { path: path.join(Global.Path.data, "opencode.db") })
|
||||
|
||||
const sqlite = new BunDatabase(path.join(Global.Path.data, "opencode.db"), { create: true })
|
||||
|
||||
sqlite.run("PRAGMA journal_mode = WAL")
|
||||
sqlite.run("PRAGMA synchronous = NORMAL")
|
||||
sqlite.run("PRAGMA busy_timeout = 5000")
|
||||
sqlite.run("PRAGMA cache_size = -64000")
|
||||
sqlite.run("PRAGMA foreign_keys = ON")
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
|
||||
// Apply schema migrations
|
||||
const entries =
|
||||
typeof OPENCODE_MIGRATIONS !== "undefined"
|
||||
? OPENCODE_MIGRATIONS
|
||||
: migrations(path.join(import.meta.dirname, "../../migration"))
|
||||
if (entries.length > 0) {
|
||||
log.info("applying migrations", {
|
||||
count: entries.length,
|
||||
mode: typeof OPENCODE_MIGRATIONS !== "undefined" ? "bundled" : "dev",
|
||||
})
|
||||
migrate(db, entries)
|
||||
}
|
||||
|
||||
return db
|
||||
})
|
||||
|
||||
export type TxOrDb = Transaction | Client
|
||||
|
||||
const ctx = Context.create<{
|
||||
tx: TxOrDb
|
||||
effects: (() => void | Promise<void>)[]
|
||||
}>("database")
|
||||
|
||||
export function use<T>(callback: (trx: TxOrDb) => T): T {
|
||||
try {
|
||||
return callback(ctx.use().tx)
|
||||
} catch (err) {
|
||||
if (err instanceof Context.NotFound) {
|
||||
const effects: (() => void | Promise<void>)[] = []
|
||||
const result = ctx.provide({ effects, tx: Client() }, () => callback(Client()))
|
||||
for (const effect of effects) effect()
|
||||
return result
|
||||
}
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
export function effect(fn: () => any | Promise<any>) {
|
||||
try {
|
||||
ctx.use().effects.push(fn)
|
||||
} catch {
|
||||
fn()
|
||||
}
|
||||
}
|
||||
|
||||
export function transaction<T>(callback: (tx: TxOrDb) => T): T {
|
||||
try {
|
||||
return callback(ctx.use().tx)
|
||||
} catch (err) {
|
||||
if (err instanceof Context.NotFound) {
|
||||
const effects: (() => void | Promise<void>)[] = []
|
||||
const result = Client().transaction((tx) => {
|
||||
return ctx.provide({ tx, effects }, () => callback(tx))
|
||||
})
|
||||
for (const effect of effects) effect()
|
||||
return result
|
||||
}
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
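A minimal usage sketch of the Database helpers defined above; the table import and session ID are illustrative placeholders, while use, transaction, and effect are as declared in this file:

import { Database, eq } from "@/storage/db"
import { TodoTable } from "@/session/session.sql"

// Runs against the ambient transaction if one is open, otherwise the shared client.
const rows = Database.use((db) =>
  db.select().from(TodoTable).where(eq(TodoTable.session_id, "ses_example")).all(),
)

// Groups statements atomically; effects registered inside run after the callback completes.
Database.transaction((tx) => {
  tx.delete(TodoTable).where(eq(TodoTable.session_id, "ses_example")).run()
  Database.effect(() => console.log("todos cleared"))
})
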
436
packages/opencode/src/storage/json-migration.ts
Normal file
@@ -0,0 +1,436 @@
|
||||
import { Database } from "bun:sqlite"
|
||||
import { drizzle } from "drizzle-orm/bun-sqlite"
|
||||
import { Global } from "../global"
|
||||
import { Log } from "../util/log"
|
||||
import { ProjectTable } from "../project/project.sql"
|
||||
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql"
|
||||
import { SessionShareTable } from "../share/share.sql"
|
||||
import path from "path"
|
||||
import { existsSync } from "fs"
|
||||
|
||||
export namespace JsonMigration {
|
||||
const log = Log.create({ service: "json-migration" })
|
||||
|
||||
export type Progress = {
|
||||
current: number
|
||||
total: number
|
||||
label: string
|
||||
}
|
||||
|
||||
type Options = {
|
||||
progress?: (event: Progress) => void
|
||||
}
|
||||
|
||||
export async function run(sqlite: Database, options?: Options) {
|
||||
const storageDir = path.join(Global.Path.data, "storage")
|
||||
|
||||
if (!existsSync(storageDir)) {
|
||||
log.info("storage directory does not exist, skipping migration")
|
||||
return {
|
||||
projects: 0,
|
||||
sessions: 0,
|
||||
messages: 0,
|
||||
parts: 0,
|
||||
todos: 0,
|
||||
permissions: 0,
|
||||
shares: 0,
|
||||
errors: [] as string[],
|
||||
}
|
||||
}
|
||||
|
||||
log.info("starting json to sqlite migration", { storageDir })
|
||||
const start = performance.now()
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
|
||||
// Optimize SQLite for bulk inserts
|
||||
sqlite.exec("PRAGMA journal_mode = WAL")
|
||||
sqlite.exec("PRAGMA synchronous = OFF")
|
||||
sqlite.exec("PRAGMA cache_size = 10000")
|
||||
sqlite.exec("PRAGMA temp_store = MEMORY")
|
||||
const stats = {
|
||||
projects: 0,
|
||||
sessions: 0,
|
||||
messages: 0,
|
||||
parts: 0,
|
||||
todos: 0,
|
||||
permissions: 0,
|
||||
shares: 0,
|
||||
errors: [] as string[],
|
||||
}
|
||||
const orphans = {
|
||||
sessions: 0,
|
||||
todos: 0,
|
||||
permissions: 0,
|
||||
shares: 0,
|
||||
}
|
||||
const errs = stats.errors
|
||||
|
||||
const batchSize = 1000
|
||||
const now = Date.now()
|
||||
|
||||
async function list(pattern: string) {
|
||||
const items: string[] = []
|
||||
const scan = new Bun.Glob(pattern)
|
||||
for await (const file of scan.scan({ cwd: storageDir, absolute: true })) {
|
||||
items.push(file)
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
async function read(files: string[], start: number, end: number) {
|
||||
const count = end - start
|
||||
const tasks = new Array(count)
|
||||
for (let i = 0; i < count; i++) {
|
||||
tasks[i] = Bun.file(files[start + i]).json()
|
||||
}
|
||||
const results = await Promise.allSettled(tasks)
|
||||
const items = new Array(count)
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
const result = results[i]
|
||||
if (result.status === "fulfilled") {
|
||||
items[i] = result.value
|
||||
continue
|
||||
}
|
||||
errs.push(`failed to read ${files[start + i]}: ${result.reason}`)
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
function insert(values: any[], table: any, label: string) {
|
||||
if (values.length === 0) return 0
|
||||
try {
|
||||
db.insert(table).values(values).onConflictDoNothing().run()
|
||||
return values.length
|
||||
} catch (e) {
|
||||
errs.push(`failed to migrate ${label} batch: ${e}`)
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
// Pre-scan all files upfront to avoid repeated glob operations
|
||||
log.info("scanning files...")
|
||||
const [projectFiles, sessionFiles, messageFiles, partFiles, todoFiles, permFiles, shareFiles] = await Promise.all([
|
||||
list("project/*.json"),
|
||||
list("session/*/*.json"),
|
||||
list("message/*/*.json"),
|
||||
list("part/*/*.json"),
|
||||
list("todo/*.json"),
|
||||
list("permission/*.json"),
|
||||
list("session_share/*.json"),
|
||||
])
|
||||
|
||||
log.info("file scan complete", {
|
||||
projects: projectFiles.length,
|
||||
sessions: sessionFiles.length,
|
||||
messages: messageFiles.length,
|
||||
parts: partFiles.length,
|
||||
todos: todoFiles.length,
|
||||
permissions: permFiles.length,
|
||||
shares: shareFiles.length,
|
||||
})
|
||||
|
||||
const total = Math.max(
|
||||
1,
|
||||
projectFiles.length +
|
||||
sessionFiles.length +
|
||||
messageFiles.length +
|
||||
partFiles.length +
|
||||
todoFiles.length +
|
||||
permFiles.length +
|
||||
shareFiles.length,
|
||||
)
|
||||
const progress = options?.progress
|
||||
let current = 0
|
||||
const step = (label: string, count: number) => {
|
||||
current = Math.min(total, current + count)
|
||||
progress?.({ current, total, label })
|
||||
}
|
||||
|
||||
progress?.({ current, total, label: "starting" })
|
||||
|
||||
sqlite.exec("BEGIN TRANSACTION")
|
||||
|
||||
// Migrate projects first (no FK deps)
|
||||
const projectIds = new Set<string>()
|
||||
const projectValues = [] as any[]
|
||||
for (let i = 0; i < projectFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, projectFiles.length)
|
||||
const batch = await read(projectFiles, i, end)
|
||||
projectValues.length = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
if (!data?.id) {
|
||||
errs.push(`project missing id: ${projectFiles[i + j]}`)
|
||||
continue
|
||||
}
|
||||
projectIds.add(data.id)
|
||||
projectValues.push({
|
||||
id: data.id,
|
||||
worktree: data.worktree ?? "/",
|
||||
vcs: data.vcs,
|
||||
name: data.name ?? undefined,
|
||||
icon_url: data.icon?.url,
|
||||
icon_color: data.icon?.color,
|
||||
time_created: data.time?.created ?? now,
|
||||
time_updated: data.time?.updated ?? now,
|
||||
time_initialized: data.time?.initialized,
|
||||
sandboxes: data.sandboxes ?? [],
|
||||
})
|
||||
}
|
||||
stats.projects += insert(projectValues, ProjectTable, "project")
|
||||
step("projects", end - i)
|
||||
}
|
||||
log.info("migrated projects", { count: stats.projects, duration: Math.round(performance.now() - start) })
|
||||
|
||||
// Migrate sessions (depends on projects)
|
||||
const sessionIds = new Set<string>()
|
||||
const sessionValues = [] as any[]
|
||||
for (let i = 0; i < sessionFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, sessionFiles.length)
|
||||
const batch = await read(sessionFiles, i, end)
|
||||
sessionValues.length = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
if (!data?.id || !data?.projectID) {
|
||||
errs.push(`session missing id or projectID: ${sessionFiles[i + j]}`)
|
||||
continue
|
||||
}
|
||||
if (!projectIds.has(data.projectID)) {
|
||||
orphans.sessions++
|
||||
continue
|
||||
}
|
||||
sessionIds.add(data.id)
|
||||
sessionValues.push({
|
||||
id: data.id,
|
||||
project_id: data.projectID,
|
||||
parent_id: data.parentID ?? null,
|
||||
slug: data.slug ?? "",
|
||||
directory: data.directory ?? "",
|
||||
title: data.title ?? "",
|
||||
version: data.version ?? "",
|
||||
share_url: data.share?.url ?? null,
|
||||
summary_additions: data.summary?.additions ?? null,
|
||||
summary_deletions: data.summary?.deletions ?? null,
|
||||
summary_files: data.summary?.files ?? null,
|
||||
summary_diffs: data.summary?.diffs ?? null,
|
||||
revert: data.revert ?? null,
|
||||
permission: data.permission ?? null,
|
||||
time_created: data.time?.created ?? now,
|
||||
time_updated: data.time?.updated ?? now,
|
||||
time_compacting: data.time?.compacting ?? null,
|
||||
time_archived: data.time?.archived ?? null,
|
||||
})
|
||||
}
|
||||
stats.sessions += insert(sessionValues, SessionTable, "session")
|
||||
step("sessions", end - i)
|
||||
}
|
||||
log.info("migrated sessions", { count: stats.sessions })
|
||||
if (orphans.sessions > 0) {
|
||||
log.warn("skipped orphaned sessions", { count: orphans.sessions })
|
||||
}
|
||||
|
||||
// Migrate messages using pre-scanned file map
|
||||
const allMessageFiles = [] as string[]
|
||||
const allMessageSessions = [] as string[]
|
||||
const messageSessions = new Map<string, string>()
|
||||
for (const file of messageFiles) {
|
||||
const sessionID = path.basename(path.dirname(file))
|
||||
if (!sessionIds.has(sessionID)) continue
|
||||
allMessageFiles.push(file)
|
||||
allMessageSessions.push(sessionID)
|
||||
}
|
||||
|
||||
for (let i = 0; i < allMessageFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, allMessageFiles.length)
|
||||
const batch = await read(allMessageFiles, i, end)
|
||||
const values = new Array(batch.length)
|
||||
let count = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const file = allMessageFiles[i + j]
|
||||
const id = data.id ?? path.basename(file, ".json")
|
||||
if (!id) {
|
||||
errs.push(`message missing id: ${file}`)
|
||||
continue
|
||||
}
|
||||
const sessionID = allMessageSessions[i + j]
|
||||
messageSessions.set(id, sessionID)
|
||||
const rest = data
|
||||
delete rest.id
|
||||
delete rest.sessionID
|
||||
values[count++] = {
|
||||
id,
|
||||
session_id: sessionID,
|
||||
time_created: data.time?.created ?? now,
|
||||
time_updated: data.time?.updated ?? now,
|
||||
data: rest,
|
||||
}
|
||||
}
|
||||
values.length = count
|
||||
stats.messages += insert(values, MessageTable, "message")
|
||||
step("messages", end - i)
|
||||
}
|
||||
log.info("migrated messages", { count: stats.messages })
|
||||
|
||||
// Migrate parts using pre-scanned file map
|
||||
for (let i = 0; i < partFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, partFiles.length)
|
||||
const batch = await read(partFiles, i, end)
|
||||
const values = new Array(batch.length)
|
||||
let count = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const file = partFiles[i + j]
|
||||
const id = data.id ?? path.basename(file, ".json")
|
||||
const messageID = data.messageID ?? path.basename(path.dirname(file))
|
||||
if (!id || !messageID) {
|
||||
errs.push(`part missing id/messageID/sessionID: ${file}`)
|
||||
continue
|
||||
}
|
||||
const sessionID = messageSessions.get(messageID)
|
||||
if (!sessionID) {
|
||||
errs.push(`part missing message session: ${file}`)
|
||||
continue
|
||||
}
|
||||
if (!sessionIds.has(sessionID)) continue
|
||||
const rest = data
|
||||
delete rest.id
|
||||
delete rest.messageID
|
||||
delete rest.sessionID
|
||||
values[count++] = {
|
||||
id,
|
||||
message_id: messageID,
|
||||
session_id: sessionID,
|
||||
time_created: data.time?.created ?? now,
|
||||
time_updated: data.time?.updated ?? now,
|
||||
data: rest,
|
||||
}
|
||||
}
|
||||
values.length = count
|
||||
stats.parts += insert(values, PartTable, "part")
|
||||
step("parts", end - i)
|
||||
}
|
||||
log.info("migrated parts", { count: stats.parts })
|
||||
|
||||
// Migrate todos
|
||||
const todoSessions = todoFiles.map((file) => path.basename(file, ".json"))
|
||||
for (let i = 0; i < todoFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, todoFiles.length)
|
||||
const batch = await read(todoFiles, i, end)
|
||||
const values = [] as any[]
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const sessionID = todoSessions[i + j]
|
||||
if (!sessionIds.has(sessionID)) {
|
||||
orphans.todos++
|
||||
continue
|
||||
}
|
||||
if (!Array.isArray(data)) {
|
||||
errs.push(`todo not an array: ${todoFiles[i + j]}`)
|
||||
continue
|
||||
}
|
||||
for (let position = 0; position < data.length; position++) {
|
||||
const todo = data[position]
|
||||
if (!todo?.content || !todo?.status || !todo?.priority) continue
|
||||
values.push({
|
||||
session_id: sessionID,
|
||||
content: todo.content,
|
||||
status: todo.status,
|
||||
priority: todo.priority,
|
||||
position,
|
||||
time_created: now,
|
||||
time_updated: now,
|
||||
})
|
||||
}
|
||||
}
|
||||
stats.todos += insert(values, TodoTable, "todo")
|
||||
step("todos", end - i)
|
||||
}
|
||||
log.info("migrated todos", { count: stats.todos })
|
||||
if (orphans.todos > 0) {
|
||||
log.warn("skipped orphaned todos", { count: orphans.todos })
|
||||
}
|
||||
|
||||
// Migrate permissions
|
||||
const permProjects = permFiles.map((file) => path.basename(file, ".json"))
|
||||
const permValues = [] as any[]
|
||||
for (let i = 0; i < permFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, permFiles.length)
|
||||
const batch = await read(permFiles, i, end)
|
||||
permValues.length = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const projectID = permProjects[i + j]
|
||||
if (!projectIds.has(projectID)) {
|
||||
orphans.permissions++
|
||||
continue
|
||||
}
|
||||
permValues.push({ project_id: projectID, data })
|
||||
}
|
||||
stats.permissions += insert(permValues, PermissionTable, "permission")
|
||||
step("permissions", end - i)
|
||||
}
|
||||
log.info("migrated permissions", { count: stats.permissions })
|
||||
if (orphans.permissions > 0) {
|
||||
log.warn("skipped orphaned permissions", { count: orphans.permissions })
|
||||
}
|
||||
|
||||
// Migrate session shares
|
||||
const shareSessions = shareFiles.map((file) => path.basename(file, ".json"))
|
||||
const shareValues = [] as any[]
|
||||
for (let i = 0; i < shareFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, shareFiles.length)
|
||||
const batch = await read(shareFiles, i, end)
|
||||
shareValues.length = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const sessionID = shareSessions[i + j]
|
||||
if (!sessionIds.has(sessionID)) {
|
||||
orphans.shares++
|
||||
continue
|
||||
}
|
||||
if (!data?.id || !data?.secret || !data?.url) {
|
||||
errs.push(`session_share missing id/secret/url: ${shareFiles[i + j]}`)
|
||||
continue
|
||||
}
|
||||
shareValues.push({ session_id: sessionID, id: data.id, secret: data.secret, url: data.url })
|
||||
}
|
||||
stats.shares += insert(shareValues, SessionShareTable, "session_share")
|
||||
step("shares", end - i)
|
||||
}
|
||||
log.info("migrated session shares", { count: stats.shares })
|
||||
if (orphans.shares > 0) {
|
||||
log.warn("skipped orphaned session shares", { count: orphans.shares })
|
||||
}
|
||||
|
||||
sqlite.exec("COMMIT")
|
||||
|
||||
log.info("json migration complete", {
|
||||
projects: stats.projects,
|
||||
sessions: stats.sessions,
|
||||
messages: stats.messages,
|
||||
parts: stats.parts,
|
||||
todos: stats.todos,
|
||||
permissions: stats.permissions,
|
||||
shares: stats.shares,
|
||||
errorCount: stats.errors.length,
|
||||
duration: Math.round(performance.now() - start),
|
||||
})
|
||||
|
||||
if (stats.errors.length > 0) {
|
||||
log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
|
||||
}
|
||||
|
||||
progress?.({ current: total, total, label: "complete" })
|
||||
|
||||
return stats
|
||||
}
|
||||
}
|
||||
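As a rough sketch, the one-time JSON import above would be driven like this; the database filename is illustrative (in the diff it is opened from Global.Path.data), while run() and its progress callback follow the signatures shown:

import { Database } from "bun:sqlite"
import { JsonMigration } from "@/storage/json-migration"

const sqlite = new Database("opencode.db", { create: true })
const stats = await JsonMigration.run(sqlite, {
  progress: (event) => console.log(`${event.label}: ${event.current}/${event.total}`),
})
console.log("migrated", stats.sessions, "sessions,", stats.errors.length, "errors")
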
10
packages/opencode/src/storage/schema.sql.ts
Normal file
@@ -0,0 +1,10 @@
import { integer } from "drizzle-orm/sqlite-core"

export const Timestamps = {
  time_created: integer()
    .notNull()
    .$default(() => Date.now()),
  time_updated: integer()
    .notNull()
    .$onUpdate(() => Date.now()),
}
@@ -87,7 +87,7 @@ Important notes:
- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit

# Creating pull requests
Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a Github URL use the gh command to get the information needed.
Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a GitHub URL use the gh command to get the information needed.

IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:

@@ -112,4 +112,4 @@ Important:
- Return the PR URL when you're done, so the user can see it

# Other common operations
- View comments on a Github PR: gh api repos/foo/bar/pulls/123/comments
- View comments on a GitHub PR: gh api repos/foo/bar/pulls/123/comments

@@ -114,7 +114,7 @@ export namespace ToolRegistry {
ApplyPatchTool,
...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [LspTool] : []),
...(config.experimental?.batch_tool === true ? [BatchTool] : []),
...(Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli" ? [PlanExitTool, PlanEnterTool] : []),
...(Flag.OPENCODE_CLIENT === "cli" ? [PlanExitTool, PlanEnterTool] : []),
...custom,
]
}

@@ -4,9 +4,14 @@ export function lazy<T>(fn: () => T) {

const result = (): T => {
if (loaded) return value as T
loaded = true
value = fn()
return value as T
try {
value = fn()
loaded = true
return value as T
} catch (e) {
// Don't mark as loaded if initialization failed
throw e
}
}

result.reset = () => {

@@ -7,7 +7,8 @@ import { Global } from "../global"
|
||||
import { Instance } from "../project/instance"
|
||||
import { InstanceBootstrap } from "../project/bootstrap"
|
||||
import { Project } from "../project/project"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { Database, eq } from "../storage/db"
|
||||
import { ProjectTable } from "../project/project.sql"
|
||||
import { fn } from "../util/fn"
|
||||
import { Log } from "../util/log"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
@@ -219,6 +220,44 @@ export namespace Worktree {
|
||||
return [outputText(result.stderr), outputText(result.stdout)].filter(Boolean).join("\n")
|
||||
}
|
||||
|
||||
function failed(result: { stdout?: Uint8Array; stderr?: Uint8Array }) {
|
||||
return [outputText(result.stderr), outputText(result.stdout)].filter(Boolean).flatMap((chunk) =>
|
||||
chunk
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.flatMap((line) => {
|
||||
const match = line.match(/^warning:\s+failed to remove\s+(.+):\s+/i)
|
||||
if (!match) return []
|
||||
const value = match[1]?.trim().replace(/^['"]|['"]$/g, "")
|
||||
if (!value) return []
|
||||
return [value]
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
async function prune(root: string, entries: string[]) {
|
||||
const base = await canonical(root)
|
||||
await Promise.all(
|
||||
entries.map(async (entry) => {
|
||||
const target = await canonical(path.resolve(root, entry))
|
||||
if (target === base) return
|
||||
if (!target.startsWith(`${base}${path.sep}`)) return
|
||||
await fs.rm(target, { recursive: true, force: true }).catch(() => undefined)
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
async function sweep(root: string) {
|
||||
const first = await $`git clean -ffdx`.quiet().nothrow().cwd(root)
|
||||
if (first.exitCode === 0) return first
|
||||
|
||||
const entries = failed(first)
|
||||
if (!entries.length) return first
|
||||
|
||||
await prune(root, entries)
|
||||
return $`git clean -ffdx`.quiet().nothrow().cwd(root)
|
||||
}
|
||||
|
||||
async function canonical(input: string) {
|
||||
const abs = path.resolve(input)
|
||||
const real = await fs.realpath(abs).catch(() => abs)
|
||||
@@ -269,7 +308,8 @@ export namespace Worktree {
|
||||
}
|
||||
|
||||
async function runStartScripts(directory: string, input: { projectID: string; extra?: string }) {
|
||||
const project = await Storage.read<Project.Info>(["project", input.projectID]).catch(() => undefined)
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get())
|
||||
const project = row ? Project.fromRow(row) : undefined
|
||||
const startup = project?.commands?.start?.trim() ?? ""
|
||||
const ok = await runStartScript(directory, startup, "project")
|
||||
if (!ok) return false
|
||||
@@ -536,7 +576,7 @@ export namespace Worktree {
|
||||
throw new ResetFailedError({ message: errorText(resetToTarget) || "Failed to reset worktree to target" })
|
||||
}
|
||||
|
||||
const clean = await $`git clean -fdx`.quiet().nothrow().cwd(worktreePath)
|
||||
const clean = await sweep(worktreePath)
|
||||
if (clean.exitCode !== 0) {
|
||||
throw new ResetFailedError({ message: errorText(clean) || "Failed to clean worktree" })
|
||||
}
|
||||
|
||||
@@ -122,12 +122,20 @@ function createFakeAgent() {
|
||||
messages: async () => {
|
||||
return { data: [] }
|
||||
},
|
||||
message: async () => {
|
||||
message: async (params?: any) => {
|
||||
// Return a message with parts that can be looked up by partID
|
||||
return {
|
||||
data: {
|
||||
info: {
|
||||
role: "assistant",
|
||||
},
|
||||
parts: [
|
||||
{
|
||||
id: params?.messageID ? `${params.messageID}_part` : "part_1",
|
||||
type: "text",
|
||||
text: "",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
},
|
||||
@@ -193,7 +201,7 @@ function createFakeAgent() {
|
||||
}
|
||||
|
||||
describe("acp.agent event subscription", () => {
|
||||
test("routes message.part.updated by the event sessionID (no cross-session pollution)", async () => {
|
||||
test("routes message.part.delta by the event sessionID (no cross-session pollution)", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
@@ -207,14 +215,12 @@ describe("acp.agent event subscription", () => {
|
||||
controller.push({
|
||||
directory: cwd,
|
||||
payload: {
|
||||
type: "message.part.updated",
|
||||
type: "message.part.delta",
|
||||
properties: {
|
||||
part: {
|
||||
sessionID: sessionB,
|
||||
messageID: "msg_1",
|
||||
type: "text",
|
||||
synthetic: false,
|
||||
},
|
||||
sessionID: sessionB,
|
||||
messageID: "msg_1",
|
||||
partID: "msg_1_part",
|
||||
field: "text",
|
||||
delta: "hello",
|
||||
},
|
||||
},
|
||||
@@ -230,7 +236,7 @@ describe("acp.agent event subscription", () => {
|
||||
})
|
||||
})
|
||||
|
||||
test("keeps concurrent sessions isolated when message.part.updated events are interleaved", async () => {
|
||||
test("keeps concurrent sessions isolated when message.part.delta events are interleaved", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
@@ -248,14 +254,12 @@ describe("acp.agent event subscription", () => {
|
||||
controller.push({
|
||||
directory: cwd,
|
||||
payload: {
|
||||
type: "message.part.updated",
|
||||
type: "message.part.delta",
|
||||
properties: {
|
||||
part: {
|
||||
sessionID: sessionId,
|
||||
messageID,
|
||||
type: "text",
|
||||
synthetic: false,
|
||||
},
|
||||
sessionID: sessionId,
|
||||
messageID,
|
||||
partID: `${messageID}_part`,
|
||||
field: "text",
|
||||
delta,
|
||||
},
|
||||
},
|
||||
@@ -402,14 +406,12 @@ describe("acp.agent event subscription", () => {
|
||||
controller.push({
|
||||
directory: cwd,
|
||||
payload: {
|
||||
type: "message.part.updated",
|
||||
type: "message.part.delta",
|
||||
properties: {
|
||||
part: {
|
||||
sessionID: sessionB,
|
||||
messageID: "msg_b",
|
||||
type: "text",
|
||||
synthetic: false,
|
||||
},
|
||||
sessionID: sessionB,
|
||||
messageID: "msg_b",
|
||||
partID: "msg_b_part",
|
||||
field: "text",
|
||||
delta: "session_b_message",
|
||||
},
|
||||
},
|
||||
|
||||
@@ -2,7 +2,6 @@ import { test, expect } from "bun:test"
import os from "os"
import { PermissionNext } from "../../src/permission/next"
import { Instance } from "../../src/project/instance"
import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture"

// fromConfig tests

@@ -1,63 +1,70 @@
|
||||
// IMPORTANT: Set env vars BEFORE any imports from src/ directory
|
||||
// xdg-basedir reads env vars at import time, so we must set these first
|
||||
import os from "os"
|
||||
import path from "path"
|
||||
import fs from "fs/promises"
|
||||
import fsSync from "fs"
|
||||
import { afterAll } from "bun:test"
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
import fsSync from "fs";
|
||||
import { afterAll } from "bun:test";
|
||||
|
||||
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid)
|
||||
await fs.mkdir(dir, { recursive: true })
|
||||
// Set XDG env vars FIRST, before any src/ imports
|
||||
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid);
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
afterAll(() => {
|
||||
fsSync.rmSync(dir, { recursive: true, force: true })
|
||||
})
|
||||
fsSync.rmSync(dir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
process.env["XDG_DATA_HOME"] = path.join(dir, "share");
|
||||
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache");
|
||||
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config");
|
||||
process.env["XDG_STATE_HOME"] = path.join(dir, "state");
|
||||
process.env["OPENCODE_MODELS_PATH"] = path.join(
|
||||
import.meta.dir,
|
||||
"tool",
|
||||
"fixtures",
|
||||
"models-api.json",
|
||||
);
|
||||
|
||||
// Set test home directory to isolate tests from user's actual home directory
|
||||
// This prevents tests from picking up real user configs/skills from ~/.claude/skills
|
||||
const testHome = path.join(dir, "home")
|
||||
await fs.mkdir(testHome, { recursive: true })
|
||||
process.env["OPENCODE_TEST_HOME"] = testHome
|
||||
const testHome = path.join(dir, "home");
|
||||
await fs.mkdir(testHome, { recursive: true });
|
||||
process.env["OPENCODE_TEST_HOME"] = testHome;
|
||||
|
||||
// Set test managed config directory to isolate tests from system managed settings
|
||||
const testManagedConfigDir = path.join(dir, "managed")
|
||||
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir
|
||||
|
||||
process.env["XDG_DATA_HOME"] = path.join(dir, "share")
|
||||
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache")
|
||||
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config")
|
||||
process.env["XDG_STATE_HOME"] = path.join(dir, "state")
|
||||
process.env["OPENCODE_MODELS_PATH"] = path.join(import.meta.dir, "tool", "fixtures", "models-api.json")
|
||||
const testManagedConfigDir = path.join(dir, "managed");
|
||||
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir;
|
||||
|
||||
// Write the cache version file to prevent global/index.ts from clearing the cache
|
||||
const cacheDir = path.join(dir, "cache", "opencode")
|
||||
await fs.mkdir(cacheDir, { recursive: true })
|
||||
await fs.writeFile(path.join(cacheDir, "version"), "14")
|
||||
const cacheDir = path.join(dir, "cache", "opencode");
|
||||
await fs.mkdir(cacheDir, { recursive: true });
|
||||
await fs.writeFile(path.join(cacheDir, "version"), "14");
|
||||
|
||||
// Clear provider env vars to ensure clean test state
|
||||
delete process.env["ANTHROPIC_API_KEY"]
|
||||
delete process.env["OPENAI_API_KEY"]
|
||||
delete process.env["GOOGLE_API_KEY"]
|
||||
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"]
|
||||
delete process.env["AZURE_OPENAI_API_KEY"]
|
||||
delete process.env["AWS_ACCESS_KEY_ID"]
|
||||
delete process.env["AWS_PROFILE"]
|
||||
delete process.env["AWS_REGION"]
|
||||
delete process.env["AWS_BEARER_TOKEN_BEDROCK"]
|
||||
delete process.env["OPENROUTER_API_KEY"]
|
||||
delete process.env["GROQ_API_KEY"]
|
||||
delete process.env["MISTRAL_API_KEY"]
|
||||
delete process.env["PERPLEXITY_API_KEY"]
|
||||
delete process.env["TOGETHER_API_KEY"]
|
||||
delete process.env["XAI_API_KEY"]
|
||||
delete process.env["DEEPSEEK_API_KEY"]
|
||||
delete process.env["FIREWORKS_API_KEY"]
|
||||
delete process.env["CEREBRAS_API_KEY"]
|
||||
delete process.env["SAMBANOVA_API_KEY"]
|
||||
delete process.env["ANTHROPIC_API_KEY"];
|
||||
delete process.env["OPENAI_API_KEY"];
|
||||
delete process.env["GOOGLE_API_KEY"];
|
||||
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"];
|
||||
delete process.env["AZURE_OPENAI_API_KEY"];
|
||||
delete process.env["AWS_ACCESS_KEY_ID"];
|
||||
delete process.env["AWS_PROFILE"];
|
||||
delete process.env["AWS_REGION"];
|
||||
delete process.env["AWS_BEARER_TOKEN_BEDROCK"];
|
||||
delete process.env["OPENROUTER_API_KEY"];
|
||||
delete process.env["GROQ_API_KEY"];
|
||||
delete process.env["MISTRAL_API_KEY"];
|
||||
delete process.env["PERPLEXITY_API_KEY"];
|
||||
delete process.env["TOGETHER_API_KEY"];
|
||||
delete process.env["XAI_API_KEY"];
|
||||
delete process.env["DEEPSEEK_API_KEY"];
|
||||
delete process.env["FIREWORKS_API_KEY"];
|
||||
delete process.env["CEREBRAS_API_KEY"];
|
||||
delete process.env["SAMBANOVA_API_KEY"];
|
||||
|
||||
// Now safe to import from src/
|
||||
const { Log } = await import("../src/util/log")
|
||||
const { Log } = await import("../src/util/log");
|
||||
|
||||
Log.init({
|
||||
print: false,
|
||||
dev: true,
|
||||
level: "DEBUG",
|
||||
})
|
||||
});
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import { Project } from "../../src/project/project"
|
||||
import { Log } from "../../src/util/log"
|
||||
import { Storage } from "../../src/storage/storage"
|
||||
import { $ } from "bun"
|
||||
import path from "path"
|
||||
import { tmpdir } from "../fixture/fixture"
|
||||
@@ -55,37 +54,50 @@ describe("Project.fromDirectory with worktrees", () => {
|
||||
test("should set worktree to root when called from a worktree", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
|
||||
const worktreePath = path.join(tmp.path, "..", "worktree-test")
|
||||
await $`git worktree add ${worktreePath} -b test-branch`.cwd(tmp.path).quiet()
|
||||
const worktreePath = path.join(tmp.path, "..", path.basename(tmp.path) + "-worktree")
|
||||
try {
|
||||
await $`git worktree add ${worktreePath} -b test-branch-${Date.now()}`.cwd(tmp.path).quiet()
|
||||
|
||||
const { project, sandbox } = await Project.fromDirectory(worktreePath)
|
||||
const { project, sandbox } = await Project.fromDirectory(worktreePath)
|
||||
|
||||
expect(project.worktree).toBe(tmp.path)
|
||||
expect(sandbox).toBe(worktreePath)
|
||||
expect(project.sandboxes).toContain(worktreePath)
|
||||
expect(project.sandboxes).not.toContain(tmp.path)
|
||||
|
||||
await $`git worktree remove ${worktreePath}`.cwd(tmp.path).quiet()
|
||||
expect(project.worktree).toBe(tmp.path)
|
||||
expect(sandbox).toBe(worktreePath)
|
||||
expect(project.sandboxes).toContain(worktreePath)
|
||||
expect(project.sandboxes).not.toContain(tmp.path)
|
||||
} finally {
|
||||
await $`git worktree remove ${worktreePath}`
|
||||
.cwd(tmp.path)
|
||||
.quiet()
|
||||
.catch(() => {})
|
||||
}
|
||||
})
|
||||
|
||||
test("should accumulate multiple worktrees in sandboxes", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
|
||||
const worktree1 = path.join(tmp.path, "..", "worktree-1")
|
||||
const worktree2 = path.join(tmp.path, "..", "worktree-2")
|
||||
await $`git worktree add ${worktree1} -b branch-1`.cwd(tmp.path).quiet()
|
||||
await $`git worktree add ${worktree2} -b branch-2`.cwd(tmp.path).quiet()
|
||||
const worktree1 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt1")
|
||||
const worktree2 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt2")
|
||||
try {
|
||||
await $`git worktree add ${worktree1} -b branch-${Date.now()}`.cwd(tmp.path).quiet()
|
||||
await $`git worktree add ${worktree2} -b branch-${Date.now() + 1}`.cwd(tmp.path).quiet()
|
||||
|
||||
await Project.fromDirectory(worktree1)
|
||||
const { project } = await Project.fromDirectory(worktree2)
|
||||
await Project.fromDirectory(worktree1)
|
||||
const { project } = await Project.fromDirectory(worktree2)
|
||||
|
||||
expect(project.worktree).toBe(tmp.path)
|
||||
expect(project.sandboxes).toContain(worktree1)
|
||||
expect(project.sandboxes).toContain(worktree2)
|
||||
expect(project.sandboxes).not.toContain(tmp.path)
|
||||
|
||||
await $`git worktree remove ${worktree1}`.cwd(tmp.path).quiet()
|
||||
await $`git worktree remove ${worktree2}`.cwd(tmp.path).quiet()
|
||||
expect(project.worktree).toBe(tmp.path)
|
||||
expect(project.sandboxes).toContain(worktree1)
|
||||
expect(project.sandboxes).toContain(worktree2)
|
||||
expect(project.sandboxes).not.toContain(tmp.path)
|
||||
} finally {
|
||||
await $`git worktree remove ${worktree1}`
|
||||
.cwd(tmp.path)
|
||||
.quiet()
|
||||
.catch(() => {})
|
||||
await $`git worktree remove ${worktree2}`
|
||||
.cwd(tmp.path)
|
||||
.quiet()
|
||||
.catch(() => {})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
@@ -99,11 +111,12 @@ describe("Project.discover", () => {
|
||||
|
||||
await Project.discover(project)
|
||||
|
||||
const updated = await Storage.read<Project.Info>(["project", project.id])
|
||||
expect(updated.icon).toBeDefined()
|
||||
expect(updated.icon?.url).toStartWith("data:")
|
||||
expect(updated.icon?.url).toContain("base64")
|
||||
expect(updated.icon?.color).toBeUndefined()
|
||||
const updated = Project.get(project.id)
|
||||
expect(updated).toBeDefined()
|
||||
expect(updated!.icon).toBeDefined()
|
||||
expect(updated!.icon?.url).toStartWith("data:")
|
||||
expect(updated!.icon?.url).toContain("base64")
|
||||
expect(updated!.icon?.color).toBeUndefined()
|
||||
})
|
||||
|
||||
test("should not discover non-image files", async () => {
|
||||
@@ -114,7 +127,8 @@ describe("Project.discover", () => {
|
||||
|
||||
await Project.discover(project)
|
||||
|
||||
const updated = await Storage.read<Project.Info>(["project", project.id])
|
||||
expect(updated.icon).toBeUndefined()
|
||||
const updated = Project.get(project.id)
|
||||
expect(updated).toBeDefined()
|
||||
expect(updated!.icon).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
644
packages/opencode/test/storage/json-migration.test.ts
Normal file
@@ -0,0 +1,644 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
import path from "path"
import fs from "fs/promises"
import { readFileSync, readdirSync } from "fs"
import { JsonMigration } from "../../src/storage/json-migration"
import { Global } from "../../src/global"
import { ProjectTable } from "../../src/project/project.sql"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../../src/session/session.sql"
import { SessionShareTable } from "../../src/share/share.sql"

// Test fixtures
const fixtures = {
  project: {
    id: "proj_test123abc",
    name: "Test Project",
    worktree: "/test/path",
    vcs: "git" as const,
    sandboxes: [],
  },
  session: {
    id: "ses_test456def",
    projectID: "proj_test123abc",
    slug: "test-session",
    directory: "/test/path",
    title: "Test Session",
    version: "1.0.0",
    time: { created: 1700000000000, updated: 1700000001000 },
  },
  message: {
    id: "msg_test789ghi",
    sessionID: "ses_test456def",
    role: "user" as const,
    agent: "default",
    model: { providerID: "openai", modelID: "gpt-4" },
    time: { created: 1700000000000 },
  },
  part: {
    id: "prt_testabc123",
    messageID: "msg_test789ghi",
    sessionID: "ses_test456def",
    type: "text" as const,
    text: "Hello, world!",
  },
}

// Helper to create test storage directory structure
async function setupStorageDir() {
  const storageDir = path.join(Global.Path.data, "storage")
  await fs.rm(storageDir, { recursive: true, force: true })
  await fs.mkdir(path.join(storageDir, "project"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "session", "proj_test123abc"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "message", "ses_test456def"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "part", "msg_test789ghi"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "session_diff"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "todo"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "permission"), { recursive: true })
  await fs.mkdir(path.join(storageDir, "session_share"), { recursive: true })
  // Create legacy marker to indicate JSON storage exists
  await Bun.write(path.join(storageDir, "migration"), "1")
  return storageDir
}

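// Helpers to write legacy project/session JSON records into the storage tree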
async function writeProject(storageDir: string, project: Record<string, unknown>) {
  await Bun.write(path.join(storageDir, "project", `${project.id}.json`), JSON.stringify(project))
}

async function writeSession(storageDir: string, projectID: string, session: Record<string, unknown>) {
  await Bun.write(path.join(storageDir, "session", projectID, `${session.id}.json`), JSON.stringify(session))
}

// Helper to create in-memory test database with schema
function createTestDb() {
  const sqlite = new Database(":memory:")
  sqlite.exec("PRAGMA foreign_keys = ON")

  // Apply schema migrations using drizzle migrate
  const dir = path.join(import.meta.dirname, "../../migration")
  const entries = readdirSync(dir, { withFileTypes: true })
  const migrations = entries
    .filter((entry) => entry.isDirectory())
    .map((entry) => ({
      sql: readFileSync(path.join(dir, entry.name, "migration.sql"), "utf-8"),
      timestamp: Number(entry.name.split("_")[0]),
    }))
    .sort((a, b) => a.timestamp - b.timestamp)
  migrate(drizzle({ client: sqlite }), migrations)

  return sqlite
}

describe("JSON to SQLite migration", () => {
  let storageDir: string
  let sqlite: Database

  beforeEach(async () => {
    storageDir = await setupStorageDir()
    sqlite = createTestDb()
  })

  afterEach(async () => {
    sqlite.close()
    await fs.rm(storageDir, { recursive: true, force: true })
  })

  test("migrates project", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/test/path",
      vcs: "git",
      name: "Test Project",
      time: { created: 1700000000000, updated: 1700000001000 },
      sandboxes: ["/test/sandbox"],
    })

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.projects).toBe(1)

    const db = drizzle({ client: sqlite })
    const projects = db.select().from(ProjectTable).all()
    expect(projects.length).toBe(1)
    expect(projects[0].id).toBe("proj_test123abc")
    expect(projects[0].worktree).toBe("/test/path")
    expect(projects[0].name).toBe("Test Project")
    expect(projects[0].sandboxes).toEqual(["/test/sandbox"])
  })

  test("migrates session with individual columns", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/test/path",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })

    await writeSession(storageDir, "proj_test123abc", {
      id: "ses_test456def",
      projectID: "proj_test123abc",
      slug: "test-session",
      directory: "/test/dir",
      title: "Test Session Title",
      version: "1.0.0",
      time: { created: 1700000000000, updated: 1700000001000 },
      summary: { additions: 10, deletions: 5, files: 3 },
      share: { url: "https://example.com/share" },
    })

    await JsonMigration.run(sqlite)

    const db = drizzle({ client: sqlite })
    const sessions = db.select().from(SessionTable).all()
    expect(sessions.length).toBe(1)
    expect(sessions[0].id).toBe("ses_test456def")
    expect(sessions[0].project_id).toBe("proj_test123abc")
    expect(sessions[0].slug).toBe("test-session")
    expect(sessions[0].title).toBe("Test Session Title")
    expect(sessions[0].summary_additions).toBe(10)
    expect(sessions[0].summary_deletions).toBe(5)
    expect(sessions[0].share_url).toBe("https://example.com/share")
  })

  test("migrates messages and parts", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
    await Bun.write(
      path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
      JSON.stringify({ ...fixtures.message }),
    )
    await Bun.write(
      path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
      JSON.stringify({ ...fixtures.part }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.messages).toBe(1)
    expect(stats?.parts).toBe(1)

    const db = drizzle({ client: sqlite })
    const messages = db.select().from(MessageTable).all()
    expect(messages.length).toBe(1)
    expect(messages[0].id).toBe("msg_test789ghi")

    const parts = db.select().from(PartTable).all()
    expect(parts.length).toBe(1)
    expect(parts[0].id).toBe("prt_testabc123")
  })

  test("migrates legacy parts without ids in body", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
    await Bun.write(
      path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
      JSON.stringify({
        role: "user",
        agent: "default",
        model: { providerID: "openai", modelID: "gpt-4" },
        time: { created: 1700000000000 },
      }),
    )
    await Bun.write(
      path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
      JSON.stringify({
        type: "text",
        text: "Hello, world!",
      }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.messages).toBe(1)
    expect(stats?.parts).toBe(1)

    const db = drizzle({ client: sqlite })
    const messages = db.select().from(MessageTable).all()
    expect(messages.length).toBe(1)
    expect(messages[0].id).toBe("msg_test789ghi")
    expect(messages[0].session_id).toBe("ses_test456def")
    expect(messages[0].data).not.toHaveProperty("id")
    expect(messages[0].data).not.toHaveProperty("sessionID")

    const parts = db.select().from(PartTable).all()
    expect(parts.length).toBe(1)
    expect(parts[0].id).toBe("prt_testabc123")
    expect(parts[0].message_id).toBe("msg_test789ghi")
    expect(parts[0].session_id).toBe("ses_test456def")
    expect(parts[0].data).not.toHaveProperty("id")
    expect(parts[0].data).not.toHaveProperty("messageID")
    expect(parts[0].data).not.toHaveProperty("sessionID")
  })

  test("skips orphaned sessions (no parent project)", async () => {
    await Bun.write(
      path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
      JSON.stringify({
        id: "ses_orphan",
        projectID: "proj_nonexistent",
        slug: "orphan",
        directory: "/",
        title: "Orphan",
        version: "1.0.0",
        time: { created: Date.now(), updated: Date.now() },
      }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.sessions).toBe(0)
  })

  test("is idempotent (running twice doesn't duplicate)", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })

    await JsonMigration.run(sqlite)
    await JsonMigration.run(sqlite)

    const db = drizzle({ client: sqlite })
    const projects = db.select().from(ProjectTable).all()
    expect(projects.length).toBe(1) // Still only 1 due to onConflictDoNothing
  })

  test("migrates todos", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    // Create todo file (named by sessionID, contains array of todos)
    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([
        {
          id: "todo_1",
          content: "First todo",
          status: "pending",
          priority: "high",
        },
        {
          id: "todo_2",
          content: "Second todo",
          status: "completed",
          priority: "medium",
        },
      ]),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.todos).toBe(2)

    const db = drizzle({ client: sqlite })
    const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
    expect(todos.length).toBe(2)
    expect(todos[0].content).toBe("First todo")
    expect(todos[0].status).toBe("pending")
    expect(todos[0].priority).toBe("high")
    expect(todos[0].position).toBe(0)
    expect(todos[1].content).toBe("Second todo")
    expect(todos[1].position).toBe(1)
  })

  test("todos are ordered by position", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([
        { content: "Third", status: "pending", priority: "low" },
        { content: "First", status: "pending", priority: "high" },
        { content: "Second", status: "in_progress", priority: "medium" },
      ]),
    )

    await JsonMigration.run(sqlite)

    const db = drizzle({ client: sqlite })
    const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()

    expect(todos.length).toBe(3)
    expect(todos[0].content).toBe("Third")
    expect(todos[0].position).toBe(0)
    expect(todos[1].content).toBe("First")
    expect(todos[1].position).toBe(1)
    expect(todos[2].content).toBe("Second")
    expect(todos[2].position).toBe(2)
  })

  test("migrates permissions", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })

    // Create permission file (named by projectID, contains array of rules)
    const permissionData = [
      { permission: "file.read", pattern: "/test/file1.ts", action: "allow" as const },
      { permission: "file.write", pattern: "/test/file2.ts", action: "ask" as const },
      { permission: "command.run", pattern: "npm install", action: "deny" as const },
    ]
    await Bun.write(path.join(storageDir, "permission", "proj_test123abc.json"), JSON.stringify(permissionData))

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.permissions).toBe(1)

    const db = drizzle({ client: sqlite })
    const permissions = db.select().from(PermissionTable).all()
    expect(permissions.length).toBe(1)
    expect(permissions[0].project_id).toBe("proj_test123abc")
    expect(permissions[0].data).toEqual(permissionData)
  })

  test("migrates session shares", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    // Create session share file (named by sessionID)
    await Bun.write(
      path.join(storageDir, "session_share", "ses_test456def.json"),
      JSON.stringify({
        id: "share_123",
        secret: "supersecretkey",
        url: "https://share.example.com/ses_test456def",
      }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats?.shares).toBe(1)

    const db = drizzle({ client: sqlite })
    const shares = db.select().from(SessionShareTable).all()
    expect(shares.length).toBe(1)
    expect(shares[0].session_id).toBe("ses_test456def")
    expect(shares[0].id).toBe("share_123")
    expect(shares[0].secret).toBe("supersecretkey")
    expect(shares[0].url).toBe("https://share.example.com/ses_test456def")
  })

  test("returns empty stats when storage directory does not exist", async () => {
    await fs.rm(storageDir, { recursive: true, force: true })

    const stats = await JsonMigration.run(sqlite)

    expect(stats.projects).toBe(0)
    expect(stats.sessions).toBe(0)
    expect(stats.messages).toBe(0)
    expect(stats.parts).toBe(0)
    expect(stats.todos).toBe(0)
    expect(stats.permissions).toBe(0)
    expect(stats.shares).toBe(0)
    expect(stats.errors).toEqual([])
  })

  test("continues when a JSON file is unreadable and records an error", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await Bun.write(path.join(storageDir, "project", "broken.json"), "{ invalid json")

    const stats = await JsonMigration.run(sqlite)

    expect(stats.projects).toBe(1)
    expect(stats.errors.some((x) => x.includes("failed to read") && x.includes("broken.json"))).toBe(true)

    const db = drizzle({ client: sqlite })
    const projects = db.select().from(ProjectTable).all()
    expect(projects.length).toBe(1)
    expect(projects[0].id).toBe("proj_test123abc")
  })

  test("skips invalid todo entries while preserving source positions", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([
        { content: "keep-0", status: "pending", priority: "high" },
        { content: "drop-1", priority: "low" },
        { content: "keep-2", status: "completed", priority: "medium" },
      ]),
    )

    const stats = await JsonMigration.run(sqlite)
    expect(stats.todos).toBe(2)

    const db = drizzle({ client: sqlite })
    const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
    expect(todos.length).toBe(2)
    expect(todos[0].content).toBe("keep-0")
    expect(todos[0].position).toBe(0)
    expect(todos[1].content).toBe("keep-2")
    expect(todos[1].position).toBe(2)
  })

  test("skips orphaned todos, permissions, and shares", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/",
      time: { created: Date.now(), updated: Date.now() },
      sandboxes: [],
    })
    await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })

    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([{ content: "valid", status: "pending", priority: "high" }]),
    )
    await Bun.write(
      path.join(storageDir, "todo", "ses_missing.json"),
      JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
    )

    await Bun.write(
      path.join(storageDir, "permission", "proj_test123abc.json"),
      JSON.stringify([{ permission: "file.read" }]),
    )
    await Bun.write(
      path.join(storageDir, "permission", "proj_missing.json"),
      JSON.stringify([{ permission: "file.write" }]),
    )

    await Bun.write(
      path.join(storageDir, "session_share", "ses_test456def.json"),
      JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
    )
    await Bun.write(
      path.join(storageDir, "session_share", "ses_missing.json"),
      JSON.stringify({ id: "share_missing", secret: "secret", url: "https://missing.example.com" }),
    )

    const stats = await JsonMigration.run(sqlite)

    expect(stats.todos).toBe(1)
    expect(stats.permissions).toBe(1)
    expect(stats.shares).toBe(1)

    const db = drizzle({ client: sqlite })
    expect(db.select().from(TodoTable).all().length).toBe(1)
    expect(db.select().from(PermissionTable).all().length).toBe(1)
    expect(db.select().from(SessionShareTable).all().length).toBe(1)
  })

  test("handles mixed corruption and partial validity in one migration run", async () => {
    await writeProject(storageDir, {
      id: "proj_test123abc",
      worktree: "/ok",
      time: { created: 1700000000000, updated: 1700000001000 },
      sandboxes: [],
    })
    await Bun.write(
      path.join(storageDir, "project", "proj_missing_id.json"),
      JSON.stringify({ worktree: "/bad", sandboxes: [] }),
    )
    await Bun.write(path.join(storageDir, "project", "proj_broken.json"), "{ nope")

    await writeSession(storageDir, "proj_test123abc", {
      id: "ses_test456def",
      projectID: "proj_test123abc",
      slug: "ok",
      directory: "/ok",
      title: "Ok",
      version: "1",
      time: { created: 1700000000000, updated: 1700000001000 },
    })
    await Bun.write(
      path.join(storageDir, "session", "proj_test123abc", "ses_missing_project.json"),
      JSON.stringify({
        id: "ses_missing_project",
        slug: "bad",
        directory: "/bad",
        title: "Bad",
        version: "1",
      }),
    )
    await Bun.write(
      path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
      JSON.stringify({
        id: "ses_orphan",
        projectID: "proj_missing",
        slug: "orphan",
        directory: "/bad",
        title: "Orphan",
        version: "1",
      }),
    )

    await Bun.write(
      path.join(storageDir, "message", "ses_test456def", "msg_ok.json"),
      JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
    )
    await Bun.write(path.join(storageDir, "message", "ses_test456def", "msg_broken.json"), "{ nope")
    await Bun.write(
      path.join(storageDir, "message", "ses_missing", "msg_orphan.json"),
      JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
    )

    await Bun.write(
      path.join(storageDir, "part", "msg_ok", "part_ok.json"),
      JSON.stringify({ type: "text", text: "ok" }),
    )
    await Bun.write(
      path.join(storageDir, "part", "msg_missing", "part_missing_message.json"),
      JSON.stringify({ type: "text", text: "bad" }),
    )
    await Bun.write(path.join(storageDir, "part", "msg_ok", "part_broken.json"), "{ nope")

    await Bun.write(
      path.join(storageDir, "todo", "ses_test456def.json"),
      JSON.stringify([
        { content: "ok", status: "pending", priority: "high" },
        { content: "skip", status: "pending" },
      ]),
    )
    await Bun.write(
      path.join(storageDir, "todo", "ses_missing.json"),
      JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
    )
    await Bun.write(path.join(storageDir, "todo", "ses_broken.json"), "{ nope")

    await Bun.write(
      path.join(storageDir, "permission", "proj_test123abc.json"),
      JSON.stringify([{ permission: "file.read" }]),
    )
    await Bun.write(
      path.join(storageDir, "permission", "proj_missing.json"),
      JSON.stringify([{ permission: "file.write" }]),
    )
    await Bun.write(path.join(storageDir, "permission", "proj_broken.json"), "{ nope")

    await Bun.write(
      path.join(storageDir, "session_share", "ses_test456def.json"),
      JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
    )
    await Bun.write(
      path.join(storageDir, "session_share", "ses_missing.json"),
      JSON.stringify({ id: "share_orphan", secret: "secret", url: "https://missing.example.com" }),
    )
    await Bun.write(path.join(storageDir, "session_share", "ses_broken.json"), "{ nope")

    const stats = await JsonMigration.run(sqlite)

    expect(stats.projects).toBe(1)
    expect(stats.sessions).toBe(1)
    expect(stats.messages).toBe(1)
    expect(stats.parts).toBe(1)
    expect(stats.todos).toBe(1)
    expect(stats.permissions).toBe(1)
    expect(stats.shares).toBe(1)
    expect(stats.errors.length).toBeGreaterThanOrEqual(6)

    const db = drizzle({ client: sqlite })
    expect(db.select().from(ProjectTable).all().length).toBe(1)
    expect(db.select().from(SessionTable).all().length).toBe(1)
    expect(db.select().from(MessageTable).all().length).toBe(1)
    expect(db.select().from(PartTable).all().length).toBe(1)
    expect(db.select().from(TodoTable).all().length).toBe(1)
    expect(db.select().from(PermissionTable).all().length).toBe(1)
    expect(db.select().from(SessionShareTable).all().length).toBe(1)
  })
})
Some files were not shown because too many files have changed in this diff.