Compare commits

...

14 Commits

Author SHA1 Message Date
James Long
4e51c3611a feat(core): initial implementation of syncing 2026-03-16 10:39:25 -04:00
xinxin
9c585bb58b docs(providers): clarify npm choice for chat vs responses APIs (#16974)
Co-authored-by: wangxinxin <xinxin.wang@pharmbrain.com>
2026-03-11 10:35:16 -05:00
Aiden Cline
0f6bc8ae71 tweak: adjust way skills are presented to agent to increase likelihood of skill invocations. (#17053) 2026-03-11 10:24:55 -05:00
Shoubhit Dash
7291e28273 perf(app): trim session render work (#16987) 2026-03-11 18:19:17 +05:30
Filip
db57fe6193 fix(app): make error tool card respect settings (#17005) 2026-03-11 14:52:33 +05:30
Brendan Allan
802416639b ci: setup node in tauri build 2026-03-11 16:09:17 +08:00
opencode-agent[bot]
7ec398d855 chore: generate 2026-03-11 03:34:02 +00:00
Luke Parker
4ab35d2c5c fix(electron): hide Windows background consoles (#16842)
Co-authored-by: Brendan Allan <git@brendonovich.dev>
2026-03-11 13:33:06 +10:00
SOUMITRA-SAHA
b4ae030fc2 fix: add GOOGLE_VERTEX_LOCATION env var support for Vertex AI (#16922)
Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com>
2026-03-10 22:32:39 -05:00
Jack
0843964eb3 feat(web): use Feishu for Chinese community links (#16908)
Co-authored-by: Frank <frank@anoma.ly>
2026-03-11 11:07:13 +08:00
Kit Langton
a1b06d63c9 fix(account): resilient orgs fetch (#16944) 2026-03-11 00:39:07 +00:00
Dax Raad
1b6820bab5 sync 2026-03-10 20:13:56 -04:00
Adam
89bf199c07 chore(app): fix tests 2026-03-10 19:03:44 -05:00
Aiden Cline
5acfdd1c5d chore: kill old copilot 403 message that was used for old plugin migration (#16904) 2026-03-10 16:20:41 -05:00
43 changed files with 2179 additions and 215 deletions

View File

@@ -149,6 +149,10 @@ jobs:
- uses: ./.github/actions/setup-bun
- uses: actions/setup-node@v4
with:
node-version: "24"
- name: Cache apt packages
if: contains(matrix.settings.host, 'ubuntu')
uses: actions/cache@v4

View File

@@ -1,3 +1,4 @@
plans/
bun.lock
package.json
package-lock.json

View File

@@ -137,4 +137,4 @@ OpenCode 内置两种 Agent,可用 `Tab` 键快速切换:
---
**加入我们的社区** [Discord](https://discord.gg/opencode) | [X.com](https://x.com/opencode)
**加入我们的社区** [飞书](https://applink.feishu.cn/client/chat/chatter/add_by_link?link_token=de8k6664-1b5e-43f2-8efd-21d6772647b5&qr_code=true) | [X.com](https://x.com/opencode)

View File

@@ -137,4 +137,4 @@ OpenCode 內建了兩種 Agent,您可以使用 `Tab` 鍵快速切換。
---
**加入我們的社群** [Discord](https://discord.gg/opencode) | [X.com](https://x.com/opencode)
**加入我們的社群** [飞书](https://applink.feishu.cn/client/chat/chatter/add_by_link?link_token=de8k6664-1b5e-43f2-8efd-21d6772647b5&qr_code=true) | [X.com](https://x.com/opencode)

View File

@@ -9,14 +9,12 @@ test("/terminal toggles the terminal panel", async ({ page, gotoSession }) => {
await expect(terminal).not.toBeVisible()
await prompt.click()
await page.keyboard.type("/terminal")
await prompt.fill("/terminal")
await expect(page.locator('[data-slash-id="terminal.toggle"]').first()).toBeVisible()
await page.keyboard.press("Enter")
await expect(terminal).toBeVisible()
await prompt.click()
await page.keyboard.type("/terminal")
await prompt.fill("/terminal")
await expect(page.locator('[data-slash-id="terminal.toggle"]').first()).toBeVisible()
await page.keyboard.press("Enter")
await expect(terminal).not.toBeVisible()

View File

@@ -1,5 +1,6 @@
export const promptSelector = '[data-component="prompt-input"]'
export const terminalSelector = '[data-component="terminal"]'
export const terminalPanelSelector = '#terminal-panel[aria-hidden="false"]'
export const terminalSelector = `${terminalPanelSelector} [data-component="terminal"]`
export const sessionComposerDockSelector = '[data-component="session-prompt-dock"]'
export const questionDockSelector = '[data-component="dock-prompt"][data-kind="question"]'
export const permissionDockSelector = '[data-component="dock-prompt"][data-kind="permission"]'

View File

@@ -764,6 +764,7 @@ export function MessageTimeline(props: {
"min-w-0 w-full max-w-full": true,
"md:max-w-200 2xl:max-w-[1000px]": props.centered,
}}
style={{ "content-visibility": "auto", "contain-intrinsic-size": "auto 500px" }}
>
<Show when={commentCount() > 0}>
<div class="w-full px-4 md:px-5 pb-2">

View File

@@ -8,6 +8,12 @@ import { useI18n } from "~/context/i18n"
export function Footer() {
const language = useLanguage()
const i18n = useI18n()
const community = createMemo(() => {
const locale = language.locale()
return locale === "zh" || locale === "zht"
? ({ key: "footer.feishu", link: language.route("/feishu") } as const)
: ({ key: "footer.discord", link: language.route("/discord") } as const)
})
const githubData = createAsync(() => github())
const starCount = createMemo(() =>
githubData()?.stars
@@ -32,7 +38,7 @@ export function Footer() {
<a href={language.route("/changelog")}>{i18n.t("footer.changelog")}</a>
</div>
<div data-slot="cell">
<a href={language.route("/discord")}>{i18n.t("footer.discord")}</a>
<a href={community().link}>{i18n.t(community().key)}</a>
</div>
<div data-slot="cell">
<a href={config.social.twitter}>{i18n.t("footer.x")}</a>

View File

@@ -21,6 +21,7 @@ export const dict = {
"footer.github": "GitHub",
"footer.docs": "Docs",
"footer.changelog": "Changelog",
"footer.feishu": "Feishu",
"footer.discord": "Discord",
"footer.x": "X",

View File

@@ -24,6 +24,7 @@ export const dict = {
"footer.github": "GitHub",
"footer.docs": "文档",
"footer.changelog": "更新日志",
"footer.feishu": "飞书",
"footer.discord": "Discord",
"footer.x": "X",

View File

@@ -24,6 +24,7 @@ export const dict = {
"footer.github": "GitHub",
"footer.docs": "文件",
"footer.changelog": "更新日誌",
"footer.feishu": "飞书",
"footer.discord": "Discord",
"footer.x": "X",

View File

@@ -0,0 +1,7 @@
import { redirect } from "@solidjs/router"
// Community invite target for the /feishu route: the Feishu (飞书) group chat.
const FEISHU_INVITE_URL =
  "https://applink.feishu.cn/client/chat/chatter/add_by_link?link_token=de8k6664-1b5e-43f2-8efd-21d6772647b5&qr_code=true"

/** Redirects GET /feishu to the Feishu community chat invite link. */
export async function GET() {
  return redirect(FEISHU_INVITE_URL)
}

View File

@@ -107,7 +107,7 @@ export function syncCli() {
let version = ""
try {
version = execFileSync(installPath, ["--version"]).toString().trim()
version = execFileSync(installPath, ["--version"], { windowsHide: true }).toString().trim()
} catch {
return
}
@@ -147,7 +147,7 @@ export function spawnCommand(args: string, extraEnv: Record<string, string>) {
console.log(`[cli] Executing: ${cmd} ${cmdArgs.join(" ")}`)
const child = spawn(cmd, cmdArgs, {
env: envs,
detached: true,
detached: process.platform !== "win32",
windowsHide: true,
stdio: ["ignore", "pipe", "pipe"],
})

View File

@@ -0,0 +1,12 @@
-- Per-aggregate sequence counter: one row per aggregate id, holding the
-- latest applied event sequence number.
CREATE TABLE `event_sequence` (
	`aggregate_id` text PRIMARY KEY,
	`seq` integer NOT NULL
);
--> statement-breakpoint
-- Append-only event log; rows are deleted with their aggregate via CASCADE.
CREATE TABLE `event` (
	`seq` integer NOT NULL,
	`aggregateId` text NOT NULL,
	`name` text NOT NULL,
	`data` text NOT NULL,
	CONSTRAINT `fk_event_aggregateId_event_sequence_aggregate_id_fk` FOREIGN KEY (`aggregateId`) REFERENCES `event_sequence`(`aggregate_id`) ON DELETE CASCADE
);

File diff suppressed because it is too large Load Diff

View File

@@ -192,11 +192,17 @@ export class AccountService extends ServiceMap.Service<
const orgsByAccount = Effect.fn("AccountService.orgsByAccount")(function* () {
const accounts = yield* repo.list()
return yield* Effect.forEach(
const [errors, results] = yield* Effect.partition(
accounts,
(account) => orgs(account.id).pipe(Effect.map((orgs) => ({ account, orgs }))),
{ concurrency: 3 },
)
for (const error of errors) {
yield* Effect.logWarning("failed to fetch orgs for account").pipe(
Effect.annotateLogs({ error: String(error) }),
)
}
return results
})
const orgs = Effect.fn("AccountService.orgs")(function* (accountID: AccountID) {

View File

@@ -90,6 +90,10 @@ export namespace Installation {
return CHANNEL === "local"
}
/** True when running under the test runner (NODE_ENV is "test"). */
export function isTesting() {
  const env = process.env.NODE_ENV
  return env === "test"
}
export async function method() {
if (process.execPath.includes(path.join(".opencode", "bin"))) return "curl"
if (process.execPath.includes(path.join(".local", "bin"))) return "curl"

View File

@@ -114,6 +114,7 @@ export namespace LSP {
return {
process: spawn(item.command[0], item.command.slice(1), {
cwd: root,
windowsHide: true,
env: {
...process.env,
...item.env,

View File

@@ -1,4 +1,4 @@
import { spawn, type ChildProcessWithoutNullStreams } from "child_process"
import { spawn as launch, type ChildProcessWithoutNullStreams } from "child_process"
import path from "path"
import os from "os"
import { Global } from "../global"
@@ -14,6 +14,11 @@ import { Process } from "../util/process"
import { which } from "../util/which"
import { Module } from "@opencode-ai/util/module"
// Wrapper around child_process.spawn (imported above as `launch`) that forces
// `windowsHide: true` on every call, so spawned LSP servers never flash a
// console window on Windows. Handles both call shapes: spawn(cmd, args, opts)
// and spawn(cmd, opts). Cast to `typeof launch` so call sites type-check as
// the real spawn.
const spawn = ((cmd, args, opts) => {
  if (Array.isArray(args)) return launch(cmd, [...args], { ...(opts ?? {}), windowsHide: true })
  return launch(cmd, { ...(args ?? {}), windowsHide: true })
}) as typeof launch
export namespace LSPServer {
const log = Log.create({ service: "lsp.server" })
const pathExists = async (p: string) =>

View File

@@ -40,14 +40,6 @@ export namespace ProviderError {
return /^4(00|13)\s*(status code)?\s*\(no body\)/i.test(message)
}
function error(providerID: string, error: APICallError) {
if (providerID.includes("github-copilot") && error.statusCode === 403) {
return "Please reauthenticate with the copilot provider to ensure your credentials work properly with OpenCode."
}
return error.message
}
function message(providerID: string, e: APICallError) {
return iife(() => {
const msg = e.message
@@ -60,10 +52,6 @@ export namespace ProviderError {
return "Unknown error"
}
const transformed = error(providerID, e)
if (transformed !== msg) {
return transformed
}
if (!e.responseBody || (e.statusCode && msg !== STATUS_CODES[e.statusCode])) {
return msg
}

View File

@@ -67,7 +67,11 @@ export namespace Provider {
const project =
options["project"] ?? Env.get("GOOGLE_CLOUD_PROJECT") ?? Env.get("GCP_PROJECT") ?? Env.get("GCLOUD_PROJECT")
const location =
options["location"] ?? Env.get("GOOGLE_CLOUD_LOCATION") ?? Env.get("VERTEX_LOCATION") ?? "us-central1"
options["location"] ??
Env.get("GOOGLE_VERTEX_LOCATION") ??
Env.get("GOOGLE_CLOUD_LOCATION") ??
Env.get("VERTEX_LOCATION") ??
"us-central1"
const endpoint = location === "global" ? "aiplatform.googleapis.com" : `${location}-aiplatform.googleapis.com`
return {
@@ -437,7 +441,11 @@ export namespace Provider {
Env.get("GCLOUD_PROJECT")
const location =
provider.options?.location ?? Env.get("GOOGLE_CLOUD_LOCATION") ?? Env.get("VERTEX_LOCATION") ?? "us-central1"
provider.options?.location ??
Env.get("GOOGLE_VERTEX_LOCATION") ??
Env.get("GOOGLE_CLOUD_LOCATION") ??
Env.get("VERTEX_LOCATION") ??
"us-central1"
const autoload = Boolean(project)
if (!autoload) return { autoload: false }

View File

@@ -4,6 +4,7 @@ import { streamSSE } from "hono/streaming"
import z from "zod"
import { BusEvent } from "@/bus/bus-event"
import { GlobalBus } from "@/bus/global"
import { DatabaseEvent } from "@/storage/event"
import { Instance } from "../../project/instance"
import { Installation } from "@/installation"
import { Log } from "../../util/log"
@@ -107,6 +108,75 @@ export const GlobalRoutes = lazy(() =>
})
},
)
.get(
"/db-event",
describeRoute({
summary: "Get database global events",
description: "Subscribe to database global events from the OpenCode system using server-sent events.",
operationId: "global.db-event",
responses: {
200: {
description: "Event stream",
content: {
"text/event-stream": {
schema: resolver(
z
.object({
directory: z.string(),
payload: BusEvent.payloads(),
})
.meta({
ref: "GlobalEvent",
}),
),
},
},
},
},
}),
async (c) => {
log.info("global event connected")
c.header("X-Accel-Buffering", "no")
c.header("X-Content-Type-Options", "nosniff")
return streamSSE(c, async (stream) => {
stream.writeSSE({
data: JSON.stringify({
payload: {
type: "server.connected",
properties: {},
},
}),
})
async function handler(event: any) {
await stream.writeSSE({
data: JSON.stringify(event),
})
}
DatabaseEvent.Bus.on("event", handler)
// Send heartbeat every 10s to prevent stalled proxy streams.
const heartbeat = setInterval(() => {
stream.writeSSE({
data: JSON.stringify({
payload: {
type: "server.heartbeat",
properties: {},
},
}),
})
}, 10_000)
await new Promise<void>((resolve) => {
stream.onAbort(() => {
clearInterval(heartbeat)
DatabaseEvent.Bus.off("event", handler)
resolve()
log.info("db global event disconnected")
})
})
})
},
)
.get(
"/config",
describeRoute({

View File

@@ -10,9 +10,10 @@ import { Flag } from "../flag/flag"
import { Identifier } from "../id/id"
import { Installation } from "../installation"
import { Database, NotFoundError, eq, and, or, gte, isNull, desc, like, inArray, lt } from "../storage/db"
import { Database, NotFoundError, eq, and, gte, isNull, desc, like, inArray, lt } from "../storage/db"
import { DatabaseEvent } from "../storage/event"
import type { SQL } from "../storage/db"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import { SessionTable } from "./session.sql"
import { ProjectTable } from "../project/project.sql"
import { Storage } from "@/storage/storage"
import { Log } from "../util/log"
@@ -179,26 +180,42 @@ export namespace Session {
export type GlobalInfo = z.output<typeof GlobalInfo>
export const Event = {
Created: BusEvent.define(
Created: DatabaseEvent.define(
"session.created",
"v1",
z.object({
id: z.string(),
info: Info,
}),
),
Updated: BusEvent.define(
Shared: DatabaseEvent.define(
"session.shared",
"v1",
z.object({
id: z.string(),
url: z.string().optional(),
}),
),
Touch: DatabaseEvent.define("session.touch", "v1", z.object({ id: z.string(), time: z.number() })),
Updated: DatabaseEvent.define(
"session.updated",
"v1",
z.object({
id: z.string(),
info: Info,
}),
),
Deleted: BusEvent.define(
Deleted: DatabaseEvent.define(
"session.deleted",
"v1",
z.object({
id: z.string(),
info: Info,
}),
),
Diff: BusEvent.define(
Diff: DatabaseEvent.agg("sessionID").define(
"session.diff",
"v1",
z.object({
sessionID: z.string(),
diff: Snapshot.FileDiff.array(),
@@ -277,18 +294,8 @@ export namespace Session {
)
export const touch = fn(Identifier.schema("session"), async (sessionID) => {
const now = Date.now()
Database.use((db) => {
const row = db
.update(SessionTable)
.set({ time_updated: now })
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
const time = Date.now()
DatabaseEvent.run(Event.Touch, { id: sessionID, time })
})
export async function createNext(input: {
@@ -315,20 +322,16 @@ export namespace Session {
},
}
log.info("created", result)
Database.use((db) => {
db.insert(SessionTable).values(toRow(result)).run()
Database.effect(() =>
Bus.publish(Event.Created, {
info: result,
}),
)
})
DatabaseEvent.run(Event.Created, { id: result.id, info: result })
const cfg = await Config.get()
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
share(result.id).catch(() => {
// Silently ignore sharing errors during session creation
})
Bus.publish(Event.Updated, {
id: result.id,
info: result,
})
return result
@@ -354,12 +357,9 @@ export namespace Session {
}
const { ShareNext } = await import("@/share/share-next")
const share = await ShareNext.create(id)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: share.url }).where(eq(SessionTable.id, id)).returning().get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
DatabaseEvent.run(Event.Shared, { id, url: share.url })
return share
})
@@ -367,12 +367,8 @@ export namespace Session {
// Use ShareNext to remove the share (same as share function uses ShareNext to create)
const { ShareNext } = await import("@/share/share-next")
await ShareNext.remove(id)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: null }).where(eq(SessionTable.id, id)).returning().get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
DatabaseEvent.run(Event.Shared, { id, url: undefined })
})
export const setTitle = fn(
@@ -659,46 +655,25 @@ export namespace Session {
})
export const remove = fn(Identifier.schema("session"), async (sessionID) => {
const project = Instance.project
try {
const session = await get(sessionID)
for (const child of await children(sessionID)) {
await remove(child.id)
}
await unshare(sessionID).catch(() => {})
// CASCADE delete handles messages and parts automatically
Database.use((db) => {
db.delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
Database.effect(() =>
Bus.publish(Event.Deleted, {
info: session,
}),
)
})
DatabaseEvent.run(Event.Deleted, { id: sessionID, info: session })
} catch (e) {
log.error(e)
}
})
export const updateMessage = fn(MessageV2.Info, async (msg) => {
const time_created = msg.time.created
const { id, sessionID, ...data } = msg
Database.use((db) => {
db.insert(MessageTable)
.values({
id,
session_id: sessionID,
time_created,
data,
})
.onConflictDoUpdate({ target: MessageTable.id, set: { data } })
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Updated, {
info: msg,
}),
)
DatabaseEvent.run(MessageV2.Event.Updated, {
id: msg.sessionID,
info: msg,
})
return msg
})
@@ -708,17 +683,9 @@ export namespace Session {
messageID: Identifier.schema("message"),
}),
async (input) => {
// CASCADE delete handles parts automatically
Database.use((db) => {
db.delete(MessageTable)
.where(and(eq(MessageTable.id, input.messageID), eq(MessageTable.session_id, input.sessionID)))
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
}),
)
DatabaseEvent.run(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
})
return input.messageID
},
@@ -731,17 +698,10 @@ export namespace Session {
partID: Identifier.schema("part"),
}),
async (input) => {
Database.use((db) => {
db.delete(PartTable)
.where(and(eq(PartTable.id, input.partID), eq(PartTable.session_id, input.sessionID)))
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
}),
)
DatabaseEvent.run(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
})
return input.partID
},
@@ -750,24 +710,10 @@ export namespace Session {
const UpdatePartInput = MessageV2.Part
export const updatePart = fn(UpdatePartInput, async (part) => {
const { id, messageID, sessionID, ...data } = part
const time = Date.now()
Database.use((db) => {
db.insert(PartTable)
.values({
id,
message_id: messageID,
session_id: sessionID,
time_created: time,
data,
})
.onConflictDoUpdate({ target: PartTable.id, set: { data } })
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.PartUpdated, {
part: structuredClone(part),
}),
)
DatabaseEvent.run(MessageV2.Event.PartUpdated, {
sessionID: part.sessionID,
part: structuredClone(part),
time: Date.now(),
})
return part
})

View File

@@ -7,6 +7,7 @@ import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot"
import { fn } from "@/util/fn"
import { Database, eq, desc, inArray } from "@/storage/db"
import { DatabaseEvent } from "../storage/event"
import { MessageTable, PartTable } from "./session.sql"
import { ProviderTransform } from "@/provider/transform"
import { STATUS_CODES } from "http"
@@ -448,23 +449,29 @@ export namespace MessageV2 {
export type Info = z.infer<typeof Info>
export const Event = {
Updated: BusEvent.define(
Updated: DatabaseEvent.define(
"message.updated",
"v1",
z.object({
id: z.string(),
info: Info,
}),
),
Removed: BusEvent.define(
Removed: DatabaseEvent.agg("sessionID").define(
"message.removed",
"v1",
z.object({
sessionID: z.string(),
messageID: z.string(),
}),
),
PartUpdated: BusEvent.define(
PartUpdated: DatabaseEvent.agg("sessionID").define(
"message.part.updated",
"v1",
z.object({
sessionID: z.string(),
part: Part,
time: z.number(),
}),
),
PartDelta: BusEvent.define(
@@ -477,8 +484,9 @@ export namespace MessageV2 {
delta: z.string(),
}),
),
PartRemoved: BusEvent.define(
PartRemoved: DatabaseEvent.agg("sessionID").define(
"message.part.removed",
"v1",
z.object({
sessionID: z.string(),
messageID: z.string(),

View File

@@ -0,0 +1,93 @@
import { Bus } from "@/bus"
import { Database, NotFoundError, eq, and } from "../storage/db"
import { DatabaseEvent } from "@/storage/event"
import { Session } from "./index"
import { MessageV2 } from "./message-v2"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import { ProjectTable } from "../project/project.sql"
// Projector: session.created — inserts the session row. When the session
// references a project id that has no row yet, a placeholder project row is
// created first so the insert does not violate referential expectations.
DatabaseEvent.addProjector(Session.Event.Created, (db, data) => {
  const existing = db
    .select({ id: ProjectTable.id })
    .from(ProjectTable)
    .where(eq(ProjectTable.id, data.info.projectID))
    .get()
  if (!existing) {
    // Create a (temporary) project to make this work. In the future
    // we should separate sessions and projects
    db.insert(ProjectTable)
      .values({
        id: data.info.projectID,
        worktree: data.info.directory,
        sandboxes: [],
      })
      .run()
  }
  db.insert(SessionTable).values(Session.toRow(data.info)).run()
})
// Projector: session.shared — persists (or clears, when url is undefined) the
// session's share URL. Throws NotFoundError when the session row is missing so
// a bad event surfaces instead of silently no-opping.
// Fix: dropped the dead `const info = Session.fromRow(row)` — the value was
// never used (leftover from a removed Bus.publish, cf. the commented-out twin
// in the Touch projector).
DatabaseEvent.addProjector(Session.Event.Shared, (db, data) => {
  const row = db.update(SessionTable).set({ share_url: data.url }).where(eq(SessionTable.id, data.id)).returning().get()
  if (!row) throw new NotFoundError({ message: `Session not found: ${data.id}` })
})
// Projector: session.touch — bumps the session's time_updated timestamp.
// Throws NotFoundError when the session row does not exist.
// Fix: removed the commented-out dead code (a leftover Bus.publish sketch)
// that shipped in this new file.
DatabaseEvent.addProjector(Session.Event.Touch, (db, data) => {
  const row = db
    .update(SessionTable)
    .set({ time_updated: data.time })
    .where(eq(SessionTable.id, data.id))
    .returning()
    .get()
  if (!row) throw new NotFoundError({ message: `Session not found: ${data.id}` })
})
// Projector: session.deleted — removes the session row.
// NOTE(review): assumes message/part rows are cleaned up by CASCADE — confirm
// against session.sql.
DatabaseEvent.addProjector(Session.Event.Deleted, (db, data) => {
  db.delete(SessionTable).where(eq(SessionTable.id, data.id)).run()
})

// Projector: message.updated — upserts the message row keyed by message id.
DatabaseEvent.addProjector(MessageV2.Event.Updated, (db, data) => {
  const time_created = data.info.time.created
  // id/sessionID become columns; everything else is stored as the JSON payload.
  const { id, sessionID, ...rest } = data.info
  db.insert(MessageTable)
    .values({
      id,
      session_id: sessionID,
      time_created,
      data: rest,
    })
    .onConflictDoUpdate({ target: MessageTable.id, set: { data: rest } })
    .run()
})

// Projector: message.removed — deletes the message row, scoped to its session.
DatabaseEvent.addProjector(MessageV2.Event.Removed, (db, data) => {
  db.delete(MessageTable)
    .where(and(eq(MessageTable.id, data.messageID), eq(MessageTable.session_id, data.sessionID)))
    .run()
})

// Projector: message.part.removed — deletes the part row, scoped to its session.
DatabaseEvent.addProjector(MessageV2.Event.PartRemoved, (db, data) => {
  db.delete(PartTable)
    .where(and(eq(PartTable.id, data.partID), eq(PartTable.session_id, data.sessionID)))
    .run()
})

// Projector: message.part.updated — upserts the part row; id/messageID/sessionID
// become columns, remaining fields are stored as the JSON payload. time_created
// comes from the event's `time` field.
DatabaseEvent.addProjector(MessageV2.Event.PartUpdated, (db, data) => {
  const { id, messageID, sessionID, ...rest } = data.part
  db.insert(PartTable)
    .values({
      id,
      message_id: messageID,
      session_id: sessionID,
      time_created: data.time,
      data: rest,
    })
    .onConflictDoUpdate({ target: PartTable.id, set: { data: rest } })
    .run()
})

View File

@@ -650,7 +650,12 @@ export namespace SessionPrompt {
await Plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs })
// Build system prompt, adding structured output instruction if needed
const system = [...(await SystemPrompt.environment(model)), ...(await InstructionPrompt.system())]
const skills = await SystemPrompt.skills(agent)
const system = [
...(await SystemPrompt.environment(model)),
...(skills ? [skills] : []),
...(await InstructionPrompt.system()),
]
const format = lastUser.format ?? { type: "text" }
if (format.type === "json_schema") {
system.push(STRUCTURED_OUTPUT_SYSTEM_PROMPT)
@@ -1629,6 +1634,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
const proc = spawn(shell, args, {
cwd,
detached: process.platform !== "win32",
windowsHide: process.platform === "win32",
stdio: ["ignore", "pipe", "pipe"],
env: {
...process.env,

View File

@@ -10,6 +10,9 @@ import PROMPT_GEMINI from "./prompt/gemini.txt"
import PROMPT_CODEX from "./prompt/codex_header.txt"
import PROMPT_TRINITY from "./prompt/trinity.txt"
import type { Provider } from "@/provider/provider"
import type { Agent } from "@/agent/agent"
import { PermissionNext } from "@/permission/next"
import { Skill } from "@/skill"
export namespace SystemPrompt {
export function instructions() {
@@ -34,6 +37,7 @@ export namespace SystemPrompt {
`Here is some useful information about the environment you are running in:`,
`<env>`,
` Working directory: ${Instance.directory}`,
` Workspace root folder: ${Instance.worktree}`,
` Is directory a git repo: ${project.vcs === "git" ? "yes" : "no"}`,
` Platform: ${process.platform}`,
` Today's date: ${new Date().toDateString()}`,
@@ -51,4 +55,16 @@ export namespace SystemPrompt {
].join("\n"),
]
}
// Builds the system-prompt section that advertises available skills to the
// agent. Returns undefined when the "skill" permission is disabled for this
// agent, so callers can omit the section entirely.
export async function skills(agent: Agent.Info) {
  if (PermissionNext.disabled(["skill"], agent.permission).has("skill")) return
  const list = await Skill.available(agent)
  return [
    "Skills provide specialized instructions and workflows for specific tasks.",
    "Use the skill tool to load a skill when a task matches its description.",
    // Still emit guidance text when no skills exist, just with an empty list note.
    list.length === 0 ? "No skills are currently available." : "\n" + Skill.fmt(list),
  ].join("\n")
}
}

View File

@@ -15,7 +15,10 @@ export namespace Shell {
if (process.platform === "win32") {
await new Promise<void>((resolve) => {
const killer = spawn("taskkill", ["/pid", String(pid), "/f", "/t"], { stdio: "ignore" })
const killer = spawn("taskkill", ["/pid", String(pid), "/f", "/t"], {
stdio: "ignore",
windowsHide: true,
})
killer.once("exit", () => resolve())
killer.once("error", () => resolve())
})

View File

@@ -13,6 +13,9 @@ import { Bus } from "@/bus"
import { Session } from "@/session"
import { Discovery } from "./discovery"
import { Glob } from "../util/glob"
import { pathToFileURL } from "url"
import type { Agent } from "@/agent/agent"
import { PermissionNext } from "@/permission/next"
export namespace Skill {
const log = Log.create({ service: "skill" })
@@ -186,4 +189,24 @@ export namespace Skill {
export async function dirs() {
return state().then((x) => x.dirs)
}
// Lists skills usable by the given agent. With no agent, all skills are
// returned; otherwise skills whose "skill" permission evaluates to deny are
// filtered out.
export async function available(agent?: Agent.Info) {
  const list = await all()
  if (!agent) return list
  return list.filter((skill) => PermissionNext.evaluate("skill", skill.name, agent.permission).action !== "deny")
}
// Formats skills as an XML-ish block for embedding in the system prompt.
// Locations are emitted as file:// URLs via pathToFileURL.
export function fmt(list: Info[]) {
  return [
    "<available_skills>",
    ...list.flatMap((skill) => [
      ` <skill>`,
      ` <name>${skill.name}</name>`,
      ` <description>${skill.description}</description>`,
      ` <location>${pathToFileURL(skill.location).href}</location>`,
      ` </skill>`,
    ]),
    "</available_skills>",
  ].join("\n")
}
}

View File

@@ -29,6 +29,9 @@ const log = Log.create({ service: "db" })
export namespace Database {
export const Path = iife(() => {
if (Installation.isTesting()) {
return ":memory:"
}
const channel = Installation.CHANNEL
if (["latest", "beta"].includes(channel) || Flag.OPENCODE_DISABLE_CHANNEL_DB)
return path.join(Global.Path.data, "opencode.db")
@@ -153,19 +156,37 @@ export namespace Database {
}
}
export function transaction<T>(callback: (tx: TxOrDb) => T): T {
type NotPromise<T> = T extends Promise<any> ? never : T
export function _transaction<T>(
callback: (tx: TxOrDb) => NotPromise<T>,
options?: {
behavior?: "deferred" | "immediate" | "exclusive"
},
): NotPromise<T> {
try {
return callback(ctx.use().tx)
} catch (err) {
if (err instanceof Context.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const result = (Client().transaction as any)((tx: TxOrDb) => {
return ctx.provide({ tx, effects }, () => callback(tx))
})
const result = Client().transaction(
(tx: TxOrDb) => {
return ctx.provide({ tx, effects }, () => callback(tx))
},
{ behavior: options?.behavior },
)
for (const effect of effects) effect()
return result
return result as NotPromise<T>
}
throw err
}
}
export function transaction<T>(callback: (tx: TxOrDb) => NotPromise<T>) {
return _transaction(callback)
}
export function immediateTransaction<T>(callback: (tx: TxOrDb) => NotPromise<T>) {
return _transaction(callback, { behavior: "immediate" })
}
}

View File

@@ -0,0 +1,15 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"
// Per-aggregate sequence counter: one row per aggregate id, holding the
// latest applied event sequence number.
export const EventSequenceTable = sqliteTable("event_sequence", {
  aggregate_id: text().notNull().primaryKey(),
  seq: integer().notNull(),
})

// Append-only event log. Each row records one versioned event name plus its
// JSON payload; rows are deleted with their aggregate via ON DELETE CASCADE.
export const EventTable = sqliteTable("event", {
  seq: integer().notNull(),
  aggregateId: text()
    .notNull()
    .references(() => EventSequenceTable.aggregate_id, { onDelete: "cascade" }),
  name: text().notNull(),
  data: text({ mode: "json" }).$type<Record<string, unknown>>().notNull(),
})

View File

@@ -0,0 +1,176 @@
import z from "zod"
import type { ZodObject } from "zod"
import { BusEvent } from "@/bus/bus-event"
import { Instance } from "../project/instance"
import { Bus as ProjectBus } from "@/bus"
import { Database, eq, max } from "./db"
import { EventSequenceTable, EventTable } from "./event.sql"
import { EventEmitter } from "events"
// Minimal event-sourcing layer: events are defined with a zod payload and an
// aggregate field, projected into SQLite tables, sequence-numbered per
// aggregate, and re-published on an in-process emitter for SSE streaming.
export namespace DatabaseEvent {
  // A registered event definition: the BusEvent shape plus sourcing metadata.
  export type Definition = {
    type: string
    properties: ZodObject
    version: string
    // Name of the payload field whose value identifies the aggregate
    // (the per-entity sequence stream this event belongs to).
    aggregateField: string
  }

  // Keyed by "<type>.<version>"; populated by agg().define().
  const registry = new Map<string, Definition>()
  // Exactly one projector per definition; applies an event payload to the db.
  const projectors = new Map<Definition, (db: Database.TxOrDb, data: unknown) => void>()

  // Envelope emitted on the local Bus (and streamed to /db-event subscribers).
  export type BusEvent = {
    type: string
    data: {
      seq: number
      aggregateId: string
      data: Record<string, unknown>
    }
  }

  export const Bus = new EventEmitter<{
    event: [BusEvent]
  }>()

  // Registry/storage key: "<type>.<version>".
  function versionedName(type: string, version: string) {
    return `${type}.${version}`
  }

  // True when an Instance context is active; Instance.project throws otherwise.
  function hasInstance() {
    try {
      Instance.project
      return true
    } catch (err) {
      return false
    }
  }

  // Shorthand for events whose aggregate id lives in an "id" payload field.
  export function define<Type extends string, Properties extends ZodObject<{ id: z.ZodString }>>(
    type: Type,
    version: string,
    properties: Properties,
  ) {
    return agg("id").define(type, version, properties)
  }

  // Builder fixing which payload field identifies the aggregate; its define()
  // creates the BusEvent, attaches version/aggregateField, and registers it
  // under the versioned name.
  export function agg<F extends string>(aggregateField: F) {
    return {
      define<Type extends string, Properties extends ZodObject<Record<F, z.ZodString>>>(
        type: Type,
        version: string,
        properties: Properties,
      ) {
        const def = {
          ...BusEvent.define(type, properties),
          version,
          aggregateField,
        }
        registry.set(versionedName(def.type, def.version), def)
        return def
      },
    }
  }

  // Registers the projector that materializes this event into tables.
  // process() throws if an event arrives without one.
  export function addProjector<Def extends Definition>(
    event: Def,
    func: (db: Database.TxOrDb, data: z.output<Def["properties"]>) => void,
  ) {
    projectors.set(event, func as (db: Database.TxOrDb, data: unknown) => void)
  }

  // Applies one event: runs its projector, upserts the aggregate's sequence
  // counter, and appends to the event log — all inside a single transaction.
  function process<Def extends Definition>(
    event: Def,
    input: { seq: number; aggregateId: string; data: z.output<Def["properties"]> },
  ) {
    const projector = projectors.get(event)
    if (!projector) {
      throw new Error(`Projector not found for event: ${event.type}`)
    }
    // idempotent
    Database.transaction((tx) => {
      projector(tx, input.data)
      tx.insert(EventSequenceTable)
        .values({
          aggregate_id: input.aggregateId,
          seq: input.seq,
        })
        .onConflictDoUpdate({
          target: EventSequenceTable.aggregate_id,
          set: { seq: input.seq },
        })
        .run()
      tx.insert(EventTable)
        .values({
          seq: input.seq,
          aggregateId: input.aggregateId,
          name: versionedName(event.type, event.version),
          data: input.data as Record<string, unknown>,
        })
        .run()
    })
  }

  // TODO:
  //
  // * Support applying multiple events at one time. One transaction,
  //   and it validates all the sequence ids
  // * when loading events from db, apply zod validation to ensure shape

  // Applies an externally-produced event, enforcing strict per-aggregate
  // ordering; throws on unknown event types or sequence gaps.
  export function replay(event: BusEvent) {
    const def = registry.get(event.type)
    if (!def) {
      throw new Error(`Unknown event type: ${event.type}`)
    }
    const maxSeq = Database.use((db) =>
      db
        .select({ val: max(EventTable.seq) })
        .from(EventTable)
        .where(eq(EventTable.aggregateId, event.data.aggregateId))
        .get(),
    )
    // NOTE(review): run() issues the first seq for an aggregate as 1; confirm
    // the `: 0` branch here (no max row) agrees with that, and that a returned
    // row with a null `val` (aggregate with no events) is handled as intended.
    const expected = maxSeq ? maxSeq.val! + 1 : 0
    if (event.data.seq !== expected) {
      throw new Error(
        `Sequence mismatch for aggregate "${event.data.aggregateId}": expected ${expected}, got ${event.data.seq}`,
      )
    }
    process(def, event.data)
  }

  // Emits a locally-originated event: allocates the next sequence number,
  // projects + appends it, then (deferred to after commit via Database.effect)
  // publishes to the project bus (when an Instance is active) and the local
  // emitter.
  export function run<Def extends Definition>(event: Def, data: z.output<Def["properties"]>) {
    const agg = data[event.aggregateField] as string
    // This should never happen: we've enforced it via typescript
    if (agg == null) {
      throw new Error(`DatabaseEvent: "${event.aggregateField}" required but not found: ${JSON.stringify(event)}`)
    }
    // Immediate transaction: take the write lock up front so two concurrent
    // writers cannot read the same sequence number.
    Database.immediateTransaction((tx) => {
      const row = tx
        .select({ seq: EventSequenceTable.seq })
        .from(EventSequenceTable)
        .where(eq(EventSequenceTable.aggregate_id, agg))
        .get()
      const seq = (row?.seq ?? 0) + 1
      process(event, { seq, aggregateId: agg, data })
      Database.effect(() => {
        if (hasInstance()) {
          ProjectBus.publish(event, data)
        }
        Bus.emit("event", {
          type: versionedName(event.type, event.version),
          data: {
            seq: seq,
            aggregateId: agg,
            data: data,
          },
        })
      })
    })
  }
}

View File

@@ -0,0 +1,86 @@
import os from "os"
import path from "path"
import fs from "fs/promises"
// Set XDG env vars BEFORE any src/ imports to isolate from real data
const dir = path.join(os.tmpdir(), "opencode-test-projection-" + process.pid)
await fs.mkdir(dir, { recursive: true })
process.env["XDG_DATA_HOME"] = path.join(dir, "share")
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache")
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config")
process.env["XDG_STATE_HOME"] = path.join(dir, "state")
// Write the cache version file
const cache = path.join(dir, "cache", "opencode")
await fs.mkdir(cache, { recursive: true })
await fs.writeFile(path.join(cache, "version"), "14")
// Now safe to import src/
const { Log } = await import("@/util/log")
Log.init({ print: true, dev: true, level: "DEBUG" })
const { Instance } = await import("@/project/instance")
const { Database } = await import("@/storage/db")
const { GlobalBus } = await import("@/bus/global")
const { Bus } = await import("@/bus")
const { Session } = await import("@/session")
const { Server } = await import("@/server/server")
const { SessionPrompt } = await import("@/session/prompt")
// register projectors
await import("@/session/projectors")
/** Resolve after roughly `ms` milliseconds — a setTimeout-based sleep. */
async function wait(ms: number) {
  return new Promise((done) => setTimeout(done, ms))
}
/**
 * Demo driver: starts the HTTP server, then loops forever creating a
 * session every 5s and sending it a few prompts, so SSE consumers have
 * traffic to observe. Runs until the process is killed.
 */
async function run() {
  console.log("project id:", Instance.project.id)
  // start the server
  const server = Server.listen({
    port: 0,
    hostname: "127.0.0.1",
  })
  console.log("server listening on:", server.url.toString())
  console.log("SSE endpoint:", `${server.url}event`)
  const base = server.url.toString().replace(/\/$/, "")
  console.log("\nServer running. Try:")
  console.log(`  curl -N ${base}/event`)
  console.log("\nPress Ctrl+C to stop.\n")
  while (1) {
    await wait(5000)
    const session = await Session.create({
      title: "test session",
    })
    console.log("created session:", session.id, session.title)
    // send messages to the session
    async function prompt(text: string) {
      console.log(`\n--- sending: "${text}" ---`)
      await SessionPrompt.prompt({
        sessionID: session.id,
        parts: [{ type: "text", text }],
      })
      console.log(`--- done: "${text}" ---`)
    }
    await prompt("What is 2 + 2?")
    await wait(2500)
    await prompt("Now multiply that by 10")
    await wait(2500)
    await prompt("Summarize what we've discussed")
  }
  // NOTE(review): unreachable — the while (1) loop above never breaks.
  await new Promise(() => {})
}
await Instance.provide({
directory: "~/tmp/project-test7",
fn: run,
})

View File

@@ -0,0 +1,72 @@
import os from "os"
import path from "path"
import fs from "fs/promises"
// Set XDG env vars BEFORE any src/ imports to isolate from real data
const dir = path.join("/Users/james/tmp/opencode-test-replicate")
await fs.mkdir(dir, { recursive: true })
process.env["XDG_DATA_HOME"] = path.join(dir, "share")
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache")
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config")
process.env["XDG_STATE_HOME"] = path.join(dir, "state")
// Write the cache version file
const cache = path.join(dir, "cache", "opencode")
await fs.mkdir(cache, { recursive: true })
await fs.writeFile(path.join(cache, "version"), "14")
// Now safe to import src/
const { Log } = await import("@/util/log")
Log.init({ print: true, dev: true, level: "DEBUG" })
const { Instance } = await import("@/project/instance")
const { Database } = await import("@/storage/db")
const { DatabaseEvent } = await import("@/storage/event")
const { parseSSE } = await import("@/control-plane/sse")
// register projectors so apply can find them
await import("@/session/projectors")
const url = process.argv[2] || "http://127.0.0.1:4096/global/db-event"
const ac = new AbortController()
process.on("SIGINT", () => ac.abort())
process.on("SIGTERM", () => ac.abort())
async function run() {
const res = await fetch(url, {
headers: { accept: "text/event-stream" },
signal: ac.signal,
})
if (!res.ok) {
console.error("failed to connect:", res.status, await res.text())
process.exit(1)
}
if (!res.body) {
console.error("no response body")
process.exit(1)
}
console.log("connected, listening for events...\n")
await parseSSE(res.body, ac.signal, (event: any) => {
console.log("[sse]", JSON.stringify(event, null, 2))
if (event.type && event.data) {
try {
DatabaseEvent.replay(event)
console.log("[apply] ok:", event.type)
console.log("db path", Database.Path)
} catch (err) {
console.error("[apply] error:", err)
}
}
})
console.log("\ndisconnected")
Database.close()
// await fs.rm(dir, { recursive: true, force: true })
}
run()

View File

@@ -173,6 +173,7 @@ export const BashTool = Tool.define("bash", async () => {
},
stdio: ["ignore", "pipe", "pipe"],
detached: process.platform !== "win32",
windowsHide: process.platform === "win32",
})
let output = ""

View File

@@ -3,24 +3,14 @@ import { pathToFileURL } from "url"
import z from "zod"
import { Tool } from "./tool"
import { Skill } from "../skill"
import { PermissionNext } from "../permission/next"
import { Ripgrep } from "../file/ripgrep"
import { iife } from "@/util/iife"
export const SkillTool = Tool.define("skill", async (ctx) => {
const skills = await Skill.all()
// Filter skills by agent permissions if agent provided
const agent = ctx?.agent
const accessibleSkills = agent
? skills.filter((skill) => {
const rule = PermissionNext.evaluate("skill", skill.name, agent.permission)
return rule.action !== "deny"
})
: skills
const list = await Skill.available(ctx?.agent)
const description =
accessibleSkills.length === 0
list.length === 0
? "Load a specialized skill that provides domain-specific instructions and workflows. No skills are currently available."
: [
"Load a specialized skill that provides domain-specific instructions and workflows.",
@@ -34,18 +24,10 @@ export const SkillTool = Tool.define("skill", async (ctx) => {
"The following skills provide specialized sets of instructions for particular tasks",
"Invoke this tool to load a skill when a task matches one of the available skills listed below:",
"",
"<available_skills>",
...accessibleSkills.flatMap((skill) => [
` <skill>`,
` <name>${skill.name}</name>`,
` <description>${skill.description}</description>`,
` <location>${pathToFileURL(skill.location).href}</location>`,
` </skill>`,
]),
"</available_skills>",
Skill.fmt(list),
].join("\n")
const examples = accessibleSkills
const examples = list
.map((skill) => `'${skill.name}'`)
.slice(0, 3)
.join(", ")
@@ -62,7 +44,7 @@ export const SkillTool = Tool.define("skill", async (ctx) => {
const skill = await Skill.get(params.name)
if (!skill) {
const available = await Skill.all().then((x) => Object.keys(x).join(", "))
const available = await Skill.all().then((x) => x.map((skill) => skill.name).join(", "))
throw new Error(`Skill "${params.name}" not found. Available skills: ${available || "none"}`)
}

View File

@@ -60,6 +60,7 @@ export namespace Process {
cwd: opts.cwd,
env: opts.env === null ? {} : opts.env ? { ...process.env, ...opts.env } : undefined,
stdio: [opts.stdin ?? "ignore", opts.stdout ?? "ignore", opts.stderr ?? "ignore"],
windowsHide: process.platform === "win32",
})
let closed = false

View File

@@ -842,35 +842,6 @@ describe("session.message-v2.fromError", () => {
})
})
test("maps github-copilot 403 to reauth guidance", () => {
const error = new APICallError({
message: "forbidden",
url: "https://api.githubcopilot.com/v1/chat/completions",
requestBodyValues: {},
statusCode: 403,
responseHeaders: { "content-type": "application/json" },
responseBody: '{"error":"forbidden"}',
isRetryable: false,
})
const result = MessageV2.fromError(error, { providerID: "github-copilot" })
expect(result).toStrictEqual({
name: "APIError",
data: {
message:
"Please reauthenticate with the copilot provider to ensure your credentials work properly with OpenCode.",
statusCode: 403,
isRetryable: false,
responseHeaders: { "content-type": "application/json" },
responseBody: '{"error":"forbidden"}',
metadata: {
url: "https://api.githubcopilot.com/v1/chat/completions",
},
},
})
})
test("detects context overflow from APICallError provider messages", () => {
const cases = [
"prompt is too long: 213462 tokens > 200000 maximum",

View File

@@ -0,0 +1,114 @@
import { describe, test, expect, beforeEach } from "bun:test"
import { tmpdir } from "../fixture/fixture"
import z from "zod"
import { Bus } from "../../src/bus"
import { Instance } from "../../src/project/instance"
import { DatabaseEvent } from "../../src/storage/event"
import { Database } from "../../src/storage/db"
import { EventTable } from "../../src/storage/event.sql"
import { Identifier } from "../../src/id/id"
// Reset the database between tests so each case starts with an empty event log.
beforeEach(() => {
  Database.Client.reset()
})
describe("DatabaseEvent", () => {
  // Two definitions: one using the default aggregate field, one using a
  // custom field via agg(). NOTE(review): default field assumed to be "id"
  // from the payloads below — confirm against DatabaseEvent.define.
  const Created = DatabaseEvent.define("item.created", "v1", z.object({ id: z.string(), name: z.string() }))
  const Sent = DatabaseEvent.agg("item_id").define("item.sent", "v1", z.object({ item_id: z.string(), to: z.string() }))
  // No-op projectors: these tests exercise event bookkeeping, not projections.
  DatabaseEvent.addProjector(Created, () => {})
  DatabaseEvent.addProjector(Sent, () => {})
  describe("run", () => {
    test("inserts event row", () => {
      DatabaseEvent.run(Created, { id: "msg_1", name: "first" })
      const rows = Database.use((db) => db.select().from(EventTable).all())
      expect(rows).toHaveLength(1)
      // Event name is the versioned type; aggregateId comes from the payload's id.
      expect(rows[0].name).toBe("item.created.v1")
      expect(rows[0].aggregateId).toBe("msg_1")
    })
    test("increments seq per aggregate", () => {
      DatabaseEvent.run(Created, { id: "msg_1", name: "first" })
      DatabaseEvent.run(Created, { id: "msg_1", name: "second" })
      const rows = Database.use((db) => db.select().from(EventTable).all())
      // NOTE(review): debug leftover — consider removing this log.
      console.log(rows)
      expect(rows).toHaveLength(2)
      expect(rows[1].seq).toBe(rows[0].seq + 1)
    })
    test("uses custom aggregate field from agg()", () => {
      DatabaseEvent.run(Sent, { item_id: "msg_1", to: "james" })
      const rows = Database.use((db) => db.select().from(EventTable).all())
      expect(rows).toHaveLength(1)
      expect(rows[0].aggregateId).toBe("msg_1")
    })
    test("emits events", async () => {
      await using tmp = await tmpdir()
      // NOTE(review): Instance.provide is not awaited here — if it returns a
      // promise, assertions inside fn may run after the test ends; confirm.
      Instance.provide({
        directory: tmp.path,
        fn: async () => {
          const dbEvents: DatabaseEvent.BusEvent[] = []
          DatabaseEvent.Bus.on("event", (e) => dbEvents.push(e))
          const events: Array<any> = []
          const unsubscribe = Bus.subscribeAll((e) => events.push(e))
          DatabaseEvent.run(Created, { id: "msg_1", name: "test" })
          // run() should publish once on the general bus and once on the db-event bus.
          expect(events).toHaveLength(1)
          expect(events[0]).toEqual({
            type: "item.created",
            properties: {
              id: "msg_1",
              name: "test",
            },
          })
          expect(dbEvents).toHaveLength(1)
          // Clean up listeners so later tests are unaffected.
          DatabaseEvent.Bus.removeAllListeners("event")
          unsubscribe()
        },
      })
    })
  })
  describe("replay", () => {
    test("inserts event from external payload", () => {
      const id = Identifier.descending("message")
      // Sequence numbering is 1-based: the first event for an aggregate is seq 1.
      DatabaseEvent.replay({
        type: "item.created.v1",
        data: { seq: 1, aggregateId: id, data: { id, name: "replayed" } },
      })
      const rows = Database.use((db) => db.select().from(EventTable).all())
      expect(rows).toHaveLength(1)
      expect(rows[0].aggregateId).toBe(id)
    })
    test("throws on sequence mismatch", () => {
      const id = Identifier.descending("message")
      DatabaseEvent.replay({
        type: "item.created.v1",
        data: { seq: 1, aggregateId: id, data: { id, name: "first" } },
      })
      // A gap (expected 2, got 5) must be rejected.
      expect(() =>
        DatabaseEvent.replay({
          type: "item.created.v1",
          data: { seq: 5, aggregateId: id, data: { id, name: "bad" } },
        }),
      ).toThrow(/Sequence mismatch/)
    })
    test("throws on unknown event type", () => {
      expect(() =>
        DatabaseEvent.replay({
          type: "unknown.event.1",
          data: { seq: 0, aggregateId: "x", data: {} },
        }),
      ).toThrow(/Unknown event type/)
    })
  })
})

View File

@@ -437,8 +437,8 @@ function groupParts(parts: { messageID: string; part: PartType }[]) {
return result
}
function partByID(parts: readonly PartType[], partID: string) {
return parts.find((part) => part.id === partID)
function index<T extends { id: string }>(items: readonly T[]) {
return new Map(items.map((item) => [item.id, item] as const))
}
function renderable(part: PartType, showReasoningSummaries = true) {
@@ -474,6 +474,13 @@ export function AssistantParts(props: {
const data = useData()
const emptyParts: PartType[] = []
const emptyTools: ToolPart[] = []
const msgs = createMemo(() => index(props.messages))
const part = createMemo(
() =>
new Map(
props.messages.map((message) => [message.id, index(list(data.store.part?.[message.id], emptyParts))] as const),
),
)
const grouped = createMemo(
() =>
@@ -507,7 +514,7 @@ export function AssistantParts(props: {
const entry = entryAccessor()
if (entry.type !== "context") return emptyTools
return entry.refs
.map((ref) => partByID(list(data.store.part?.[ref.messageID], emptyParts), ref.partID))
.map((ref) => part().get(ref.messageID)?.get(ref.partID))
.filter((part): part is ToolPart => !!part && isContextGroupTool(part))
},
emptyTools,
@@ -527,23 +534,23 @@ export function AssistantParts(props: {
const message = createMemo(() => {
const entry = entryAccessor()
if (entry.type !== "part") return
return props.messages.find((item) => item.id === entry.ref.messageID)
return msgs().get(entry.ref.messageID)
})
const part = createMemo(() => {
const item = createMemo(() => {
const entry = entryAccessor()
if (entry.type !== "part") return
return partByID(list(data.store.part?.[entry.ref.messageID], emptyParts), entry.ref.partID)
return part().get(entry.ref.messageID)?.get(entry.ref.partID)
})
return (
<Show when={message()}>
<Show when={part()}>
<Show when={item()}>
<Part
part={part()!}
part={item()!}
message={message()!}
showAssistantCopyPartID={props.showAssistantCopyPartID}
turnDurationMs={props.turnDurationMs}
defaultOpen={partDefaultOpen(part()!, props.shellToolDefaultOpen, props.editToolDefaultOpen)}
defaultOpen={partDefaultOpen(item()!, props.shellToolDefaultOpen, props.editToolDefaultOpen)}
/>
</Show>
</Show>
@@ -695,6 +702,7 @@ export function AssistantMessageDisplay(props: {
showReasoningSummaries?: boolean
}) {
const emptyTools: ToolPart[] = []
const part = createMemo(() => index(props.parts))
const grouped = createMemo(
() =>
groupParts(
@@ -723,7 +731,7 @@ export function AssistantMessageDisplay(props: {
const entry = entryAccessor()
if (entry.type !== "context") return emptyTools
return entry.refs
.map((ref) => partByID(props.parts, ref.partID))
.map((ref) => part().get(ref.partID))
.filter((part): part is ToolPart => !!part && isContextGroupTool(part))
},
emptyTools,
@@ -739,16 +747,16 @@ export function AssistantMessageDisplay(props: {
</Match>
<Match when={entryType() === "part"}>
{(() => {
const part = createMemo(() => {
const item = createMemo(() => {
const entry = entryAccessor()
if (entry.type !== "part") return
return partByID(props.parts, entry.ref.partID)
return part().get(entry.ref.partID)
})
return (
<Show when={part()}>
<Show when={item()}>
<Part
part={part()!}
part={item()!}
message={props.message}
showAssistantCopyPartID={props.showAssistantCopyPartID}
/>
@@ -1190,7 +1198,7 @@ PART_MAPPING["tool"] = function ToolPartDisplay(props) {
</div>
)
}
return <ToolErrorCard tool={part().tool} error={error()} />
return <ToolErrorCard tool={part().tool} error={error()} defaultOpen={props.defaultOpen} />
}}
</Match>
<Match when={true}>

View File

@@ -9,13 +9,14 @@ import { useI18n } from "../context/i18n"
export interface ToolErrorCardProps extends Omit<ComponentProps<typeof Card>, "children" | "variant"> {
tool: string
error: string
defaultOpen?: boolean
}
export function ToolErrorCard(props: ToolErrorCardProps) {
const i18n = useI18n()
const [open, setOpen] = createSignal(true)
const [open, setOpen] = createSignal(props.defaultOpen ?? false)
const [copied, setCopied] = createSignal(false)
const [split, rest] = splitProps(props, ["tool", "error"])
const [split, rest] = splitProps(props, ["tool", "error", "defaultOpen"])
const name = createMemo(() => {
const map: Record<string, string> = {
read: "ui.tool.read",

View File

@@ -1890,7 +1890,7 @@ You can use any OpenAI-compatible provider with opencode. Most modern AI provide
```
Here are the configuration options:
- **npm**: AI SDK package to use, `@ai-sdk/openai-compatible` for OpenAI-compatible providers
- **npm**: AI SDK package to use, `@ai-sdk/openai-compatible` for OpenAI-compatible providers (for `/v1/chat/completions`). If your provider/model uses `/v1/responses`, use `@ai-sdk/openai`.
- **name**: Display name in UI.
- **models**: Available models.
- **options.baseURL**: API endpoint URL.
@@ -1957,5 +1957,5 @@ If you are having trouble with configuring a provider, check the following:
2. For custom providers, check the opencode config and:
- Make sure the provider ID used in the `/connect` command matches the ID in your opencode config.
- The right npm package is used for the provider. For example, use `@ai-sdk/cerebras` for Cerebras. And for all other OpenAI-compatible providers, use `@ai-sdk/openai-compatible`.
- The right npm package is used for the provider. For example, use `@ai-sdk/cerebras` for Cerebras. And for all other OpenAI-compatible providers, use `@ai-sdk/openai-compatible` (for `/v1/chat/completions`); if a model uses `/v1/responses`, use `@ai-sdk/openai`. For mixed setups under one provider, you can override per model via `provider.npm`.
- Check correct API endpoint is used in the `options.baseURL` field.

View File

@@ -1845,7 +1845,7 @@ Vercel AI Gateway 允许你通过统一端点访问来自 OpenAI、Anthropic、G
```
以下是配置选项说明:
- **npm**:要使用的 AI SDK 包,对于 OpenAI 兼容的提供商使用 `@ai-sdk/openai-compatible`
- **npm**:要使用的 AI SDK 包,对于 OpenAI 兼容的提供商使用 `@ai-sdk/openai-compatible`(适用于 `/v1/chat/completions`)。如果你的提供商/模型走 `/v1/responses`,请使用 `@ai-sdk/openai`。
- **name**:在 UI 中显示的名称。
- **models**:可用模型。
- **options.baseURL**API 端点 URL。
@@ -1911,5 +1911,5 @@ Vercel AI Gateway 允许你通过统一端点访问来自 OpenAI、Anthropic、G
2. 对于自定义提供商,请检查 OpenCode 配置并确认:
- `/connect` 命令中使用的提供商 ID 与 OpenCode 配置中的 ID 一致。
- 使用了正确的 npm 包。例如Cerebras 应使用 `@ai-sdk/cerebras`。对于其他所有 OpenAI 兼容的提供商,使用 `@ai-sdk/openai-compatible`。
- 使用了正确的 npm 包。例如Cerebras 应使用 `@ai-sdk/cerebras`。对于其他所有 OpenAI 兼容的提供商,使用 `@ai-sdk/openai-compatible``/v1/chat/completions`);如果模型走 `/v1/responses`,请使用 `@ai-sdk/openai`。同一 provider 混用时,可在模型下设置 `provider.npm` 覆盖默认值
- `options.baseURL` 字段中的 API 端点地址正确。