Compare commits

...

12 Commits

Author SHA1 Message Date
Shoubhit Dash
ba4cc8119e fix(session): disable todo dock auto-scroll 2026-04-03 14:09:31 +05:30
opencode-agent[bot]
500dcfc586 chore: update nix node_modules hashes 2026-04-03 03:53:46 +00:00
Luke Parker
7b8dc8065e fix(sdk): handle Windows opencode spawn and shutdown (#20772) 2026-04-03 13:18:50 +10:00
Kevin Flansburg
e89527c9f0 feat: Send x-session-affinity and x-parent-session-id headers (#20744) 2026-04-02 22:09:53 -05:00
Dax
aa2239d5de add automatic heap snapshots for high-memory cli processes (#20788) 2026-04-03 02:34:33 +00:00
opencode-agent[bot]
8daeacc989 chore: generate 2026-04-03 02:10:50 +00:00
Juan Pablo Carranza Hurtado
81d3ac3bf0 fix: prevent Tool.define() wrapper accumulation on object-defined tools (#16952)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-02 22:09:53 -04:00
Luke Parker
eb6f1dada8 fix: call models.dev once instead of twice on start (#20765) 2026-04-03 11:26:53 +10:00
Kit Langton
8e9e79d276 refactor(share): effectify share next (#20596) 2026-04-03 00:56:56 +00:00
Aiden Cline
38014fe448 fix: rm dynamic part from bash tool description again to restore cache hits across projects (#20771) 2026-04-03 00:16:40 +00:00
Kit Langton
8942fc21aa refactor(effect): prune unused facades (#20748) 2026-04-02 20:15:09 -04:00
ykswang
7f45943a9e fix(opencode): honor model limit.input overrides (#16306)
Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com>
2026-04-03 00:13:10 +00:00
43 changed files with 1100 additions and 564 deletions

View File

@@ -355,7 +355,7 @@
"bun-pty": "0.4.8",
"chokidar": "4.0.3",
"clipboardy": "4.0.0",
"cross-spawn": "^7.0.6",
"cross-spawn": "catalog:",
"decimal.js": "10.5.0",
"diff": "catalog:",
"drizzle-orm": "catalog:",
@@ -410,7 +410,7 @@
"@tsconfig/bun": "catalog:",
"@types/babel__core": "7.20.5",
"@types/bun": "catalog:",
"@types/cross-spawn": "6.0.6",
"@types/cross-spawn": "catalog:",
"@types/mime-types": "3.0.1",
"@types/npmcli__arborist": "6.3.3",
"@types/semver": "^7.5.8",
@@ -463,9 +463,13 @@
"packages/sdk/js": {
"name": "@opencode-ai/sdk",
"version": "1.3.13",
"dependencies": {
"cross-spawn": "catalog:",
},
"devDependencies": {
"@hey-api/openapi-ts": "0.90.10",
"@tsconfig/node22": "catalog:",
"@types/cross-spawn": "catalog:",
"@types/node": "catalog:",
"@typescript/native-preview": "catalog:",
"typescript": "catalog:",
@@ -634,11 +638,13 @@
"@tsconfig/bun": "1.0.9",
"@tsconfig/node22": "22.0.2",
"@types/bun": "1.3.11",
"@types/cross-spawn": "6.0.6",
"@types/luxon": "3.7.1",
"@types/node": "22.13.9",
"@types/semver": "7.7.1",
"@typescript/native-preview": "7.0.0-dev.20251207.1",
"ai": "6.0.138",
"cross-spawn": "7.0.6",
"diff": "8.0.2",
"dompurify": "3.3.1",
"drizzle-kit": "1.0.0-beta.19-d95b7a4",

View File

@@ -1,8 +1,8 @@
{
"nodeModules": {
"x86_64-linux": "sha256-DEwIpQ55Bdgxh6Pk39IO1+h+NWUKHQHALevTHlC/MoQ=",
"aarch64-linux": "sha256-iJak0E3DIVuBbudPjgoqaGeikruhXbnFceUugmOy4j0=",
"aarch64-darwin": "sha256-WBb54Gp8EcsEuLa0iMkOkV9EtsoQa66sCtfMqKm4L7w=",
"x86_64-darwin": "sha256-zBNXSUu/37CV5FvxGpjZHjNH/E47H0kTIWg7v/L3Qzo="
"x86_64-linux": "sha256-0jwPCu2Lod433GPQLHN8eEkhfpPviDFfkFJmuvkRdlE=",
"aarch64-linux": "sha256-Qi0IkGkaIBKZsPLTO8kaTbCVL0cEfVOm/Y/6VUVI9TY=",
"aarch64-darwin": "sha256-1eZBBLgYVkjg5RYN/etR1Mb5UjU3VelElBB5ug5hQdc=",
"x86_64-darwin": "sha256-jdXgA+kZb/foFHR40UiPif6rsA2GDVCCVHnJR3jBUGI="
}
}

View File

@@ -27,6 +27,7 @@
"catalog": {
"@effect/platform-node": "4.0.0-beta.43",
"@types/bun": "1.3.11",
"@types/cross-spawn": "6.0.6",
"@octokit/rest": "22.0.0",
"@hono/zod-validator": "0.4.2",
"ulid": "3.0.1",
@@ -47,6 +48,7 @@
"drizzle-orm": "1.0.0-beta.19-d95b7a4",
"effect": "4.0.0-beta.43",
"ai": "6.0.138",
"cross-spawn": "7.0.6",
"hono": "4.10.7",
"hono-openapi": "1.1.2",
"fuzzysort": "3.1.0",

View File

@@ -7,7 +7,7 @@ import { useSpring } from "@opencode-ai/ui/motion-spring"
import { TextReveal } from "@opencode-ai/ui/text-reveal"
import { TextStrikethrough } from "@opencode-ai/ui/text-strikethrough"
import { createResizeObserver } from "@solid-primitives/resize-observer"
import { Index, createEffect, createMemo, on, onCleanup } from "solid-js"
import { Index, createEffect, createMemo, onCleanup } from "solid-js"
import { createStore } from "solid-js/store"
import { composerEnabled, composerProbe } from "@/testing/session-composer"
import { useLanguage } from "@/context/language"
@@ -210,76 +210,25 @@ export function SessionTodoDock(props: {
opacity: `${Math.max(0, Math.min(1, 1 - hide()))}`,
}}
>
<TodoList todos={props.todos} open={!store.collapsed} />
<TodoList todos={props.todos} />
</div>
</div>
</DockTray>
)
}
function TodoList(props: { todos: Todo[]; open: boolean }) {
function TodoList(props: { todos: Todo[] }) {
const [store, setStore] = createStore({
stuck: false,
scrolling: false,
})
let scrollRef!: HTMLDivElement
let timer: number | undefined
const inProgress = createMemo(() => props.todos.findIndex((todo) => todo.status === "in_progress"))
const ensure = () => {
if (!props.open) return
if (store.scrolling) return
if (!scrollRef || scrollRef.offsetParent === null) return
const el = scrollRef.querySelector("[data-in-progress]")
if (!(el instanceof HTMLElement)) return
const topFade = 16
const bottomFade = 44
const container = scrollRef.getBoundingClientRect()
const rect = el.getBoundingClientRect()
const top = rect.top - container.top + scrollRef.scrollTop
const bottom = rect.bottom - container.top + scrollRef.scrollTop
const viewTop = scrollRef.scrollTop + topFade
const viewBottom = scrollRef.scrollTop + scrollRef.clientHeight - bottomFade
if (top < viewTop) {
scrollRef.scrollTop = Math.max(0, top - topFade)
} else if (bottom > viewBottom) {
scrollRef.scrollTop = bottom - (scrollRef.clientHeight - bottomFade)
}
setStore("stuck", scrollRef.scrollTop > 0)
}
createEffect(
on([() => props.open, inProgress], () => {
if (!props.open || inProgress() < 0) return
requestAnimationFrame(ensure)
}),
)
onCleanup(() => {
if (!timer) return
window.clearTimeout(timer)
})
return (
<div class="relative">
<div
class="px-3 pb-11 flex flex-col gap-1.5 max-h-42 overflow-y-auto no-scrollbar"
ref={scrollRef}
style={{ "overflow-anchor": "none" }}
onScroll={(e) => {
setStore("stuck", e.currentTarget.scrollTop > 0)
setStore("scrolling", true)
if (timer) window.clearTimeout(timer)
timer = window.setTimeout(() => {
setStore("scrolling", false)
if (inProgress() < 0) return
requestAnimationFrame(ensure)
}, 250)
}}
>
<Index each={props.todos}>

View File

@@ -51,7 +51,7 @@
"@tsconfig/bun": "catalog:",
"@types/babel__core": "7.20.5",
"@types/bun": "catalog:",
"@types/cross-spawn": "6.0.6",
"@types/cross-spawn": "catalog:",
"@types/mime-types": "3.0.1",
"@types/npmcli__arborist": "6.3.3",
"@types/semver": "^7.5.8",
@@ -118,7 +118,7 @@
"bun-pty": "0.4.8",
"chokidar": "4.0.3",
"clipboardy": "4.0.0",
"cross-spawn": "^7.0.6",
"cross-spawn": "catalog:",
"decimal.js": "10.5.0",
"diff": "catalog:",
"drizzle-orm": "catalog:",

View File

@@ -417,11 +417,6 @@ export namespace Account {
return Option.getOrUndefined(await runPromise((service) => service.active()))
}
export async function config(accountID: AccountID, orgID: OrgID): Promise<Record<string, unknown> | undefined> {
const cfg = await runPromise((service) => service.config(accountID, orgID))
return Option.getOrUndefined(cfg)
}
export async function token(accountID: AccountID): Promise<AccessToken | undefined> {
const t = await runPromise((service) => service.token(accountID))
return Option.getOrUndefined(t)

View File

@@ -28,7 +28,7 @@ export const ModelsCommand = cmd({
},
handler: async (args) => {
if (args.refresh) {
await ModelsDev.refresh()
await ModelsDev.refresh(true)
UI.println(UI.Style.TEXT_SUCCESS_BOLD + "Models cache refreshed" + UI.Style.TEXT_NORMAL)
}

View File

@@ -303,7 +303,7 @@ export const ProvidersLoginCommand = cmd({
prompts.outro("Done")
return
}
await ModelsDev.refresh().catch(() => {})
await ModelsDev.refresh(true).catch(() => {})
const config = await Config.get()

View File

@@ -13,6 +13,7 @@ import { Flag } from "@/flag/flag"
import { setTimeout as sleep } from "node:timers/promises"
import { writeHeapSnapshot } from "node:v8"
import { WorkspaceID } from "@/control-plane/schema"
import { Heap } from "@/cli/heap"
await Log.init({
print: process.argv.includes("--print-logs"),
@@ -23,6 +24,8 @@ await Log.init({
})(),
})
Heap.start()
process.on("unhandledRejection", (e) => {
Log.Default.error("rejection", {
e: e instanceof Error ? e.message : e,

View File

@@ -0,0 +1,59 @@
import path from "path"
import { writeHeapSnapshot } from "node:v8"
import { Flag } from "@/flag/flag"
import { Global } from "@/global"
import { Log } from "@/util/log"
const log = Log.create({ service: "heap" })
const MINUTE = 60_000
const LIMIT = 2 * 1024 * 1024 * 1024
export namespace Heap {
  // Module-level state: a single monitor per process.
  let timer: Timer | undefined
  let lock = false
  let armed = true

  /**
   * Begin periodic RSS monitoring and dump a heap snapshot into the log
   * directory the first time usage crosses LIMIT. The monitor re-arms once
   * usage falls back under the limit, so sustained high memory produces one
   * snapshot per excursion rather than one per tick. No-op unless the
   * OPENCODE_AUTO_HEAP_SNAPSHOT flag is set; idempotent across calls.
   */
  export function start() {
    if (!Flag.OPENCODE_AUTO_HEAP_SNAPSHOT) return
    if (timer) return
    const check = async () => {
      if (lock) return
      const usage = process.memoryUsage()
      if (usage.rss <= LIMIT) {
        // Back below the threshold: allow the next breach to snapshot again.
        armed = true
        return
      }
      if (!armed) return
      lock = true
      armed = false
      const stamp = new Date().toISOString().replace(/[:.]/g, "")
      const file = path.join(Global.Path.log, `heap-${process.pid}-${stamp}.heapsnapshot`)
      log.warn("heap usage exceeded limit", {
        rss: usage.rss,
        heap: usage.heapUsed,
        file,
      })
      try {
        // writeHeapSnapshot is synchronous; run it off the current tick.
        await Promise.resolve().then(() => writeHeapSnapshot(file))
      } catch (err) {
        log.error("failed to write heap snapshot", {
          error: err instanceof Error ? err.message : String(err),
          file,
        })
      }
      lock = false
    }
    timer = setInterval(() => {
      void check()
    }, MINUTE)
    // Don't keep the process alive just for the monitor.
    timer.unref?.()
  }
}

View File

@@ -124,20 +124,24 @@ export namespace Command {
source: "mcp",
description: prompt.description,
get template() {
return new Promise<string>(async (resolve, reject) => {
const template = await MCP.getPrompt(
prompt.client,
prompt.name,
prompt.arguments
? Object.fromEntries(prompt.arguments.map((argument, i) => [argument.name, `$${i + 1}`]))
: {},
).catch(reject)
resolve(
template?.messages
.map((message) => (message.content.type === "text" ? message.content.text : ""))
.join("\n") || "",
)
})
return Effect.runPromise(
mcp
.getPrompt(
prompt.client,
prompt.name,
prompt.arguments
? Object.fromEntries(prompt.arguments.map((argument, i) => [argument.name, `$${i + 1}`]))
: {},
)
.pipe(
Effect.map(
(template) =>
template?.messages
.map((message) => (message.content.type === "text" ? message.content.text : ""))
.join("\n") || "",
),
),
)
},
hints: prompt.arguments?.map((_, i) => `$${i + 1}`) ?? [],
}
@@ -185,10 +189,6 @@ export namespace Command {
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function get(name: string) {
return runPromise((svc) => svc.get(name))
}
export async function list() {
return runPromise((svc) => svc.list())
}

View File

@@ -12,6 +12,7 @@ function falsy(key: string) {
export namespace Flag {
export const OPENCODE_AUTO_SHARE = truthy("OPENCODE_AUTO_SHARE")
export const OPENCODE_AUTO_HEAP_SNAPSHOT = truthy("OPENCODE_AUTO_HEAP_SNAPSHOT")
export const OPENCODE_GIT_BASH_PATH = process.env["OPENCODE_GIT_BASH_PATH"]
export const OPENCODE_CONFIG = process.env["OPENCODE_CONFIG"]
export declare const OPENCODE_PURE: boolean

View File

@@ -35,6 +35,7 @@ import { JsonMigration } from "./storage/json-migration"
import { Database } from "./storage/db"
import { errorMessage } from "./util/error"
import { PluginCommand } from "./cli/cmd/plug"
import { Heap } from "./cli/heap"
process.on("unhandledRejection", (e) => {
Log.Default.error("rejection", {
@@ -96,6 +97,8 @@ const cli = yargs(args)
})(),
})
Heap.start()
process.env.AGENT = "1"
process.env.OPENCODE = "1"
process.env.OPENCODE_PID = String(process.pid)

View File

@@ -341,10 +341,6 @@ export namespace Installation {
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function info(): Promise<Info> {
return runPromise((svc) => svc.info())
}
export async function method(): Promise<Method> {
return runPromise((svc) => svc.method())
}

View File

@@ -168,14 +168,6 @@ export namespace McpAuth {
export const updateCodeVerifier = async (mcpName: string, codeVerifier: string) =>
runPromise((svc) => svc.updateCodeVerifier(mcpName, codeVerifier))
export const clearCodeVerifier = async (mcpName: string) => runPromise((svc) => svc.clearCodeVerifier(mcpName))
export const updateOAuthState = async (mcpName: string, oauthState: string) =>
runPromise((svc) => svc.updateOAuthState(mcpName, oauthState))
export const getOAuthState = async (mcpName: string) => runPromise((svc) => svc.getOAuthState(mcpName))
export const clearOAuthState = async (mcpName: string) => runPromise((svc) => svc.clearOAuthState(mcpName))
export const isTokenExpired = async (mcpName: string) => runPromise((svc) => svc.isTokenExpired(mcpName))
}

View File

@@ -889,8 +889,6 @@ export namespace MCP {
export const status = async () => runPromise((svc) => svc.status())
export const clients = async () => runPromise((svc) => svc.clients())
export const tools = async () => runPromise((svc) => svc.tools())
export const prompts = async () => runPromise((svc) => svc.prompts())
@@ -906,9 +904,6 @@ export namespace MCP {
export const getPrompt = async (clientName: string, name: string, args?: Record<string, string>) =>
runPromise((svc) => svc.getPrompt(clientName, name, args))
export const readResource = async (clientName: string, resourceUri: string) =>
runPromise((svc) => svc.readResource(clientName, resourceUri))
export const startAuth = async (mcpName: string) => runPromise((svc) => svc.startAuth(mcpName))
export const authenticate = async (mcpName: string) => runPromise((svc) => svc.authenticate(mcpName))

View File

@@ -140,6 +140,7 @@ export namespace Permission {
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const bus = yield* Bus.Service
const state = yield* InstanceState.make<State>(
Effect.fn("Permission.state")(function* (ctx) {
const row = Database.use((db) =>
@@ -191,7 +192,7 @@ export namespace Permission {
const deferred = yield* Deferred.make<void, RejectedError | CorrectedError>()
pending.set(id, { info, deferred })
void Bus.publish(Event.Asked, info)
yield* bus.publish(Event.Asked, info)
return yield* Effect.ensuring(
Deferred.await(deferred),
Effect.sync(() => {
@@ -206,7 +207,7 @@ export namespace Permission {
if (!existing) return
pending.delete(input.requestID)
void Bus.publish(Event.Replied, {
yield* bus.publish(Event.Replied, {
sessionID: existing.info.sessionID,
requestID: existing.info.id,
reply: input.reply,
@@ -221,7 +222,7 @@ export namespace Permission {
for (const [id, item] of pending.entries()) {
if (item.info.sessionID !== existing.info.sessionID) continue
pending.delete(id)
void Bus.publish(Event.Replied, {
yield* bus.publish(Event.Replied, {
sessionID: item.info.sessionID,
requestID: item.info.id,
reply: "reject",
@@ -249,7 +250,7 @@ export namespace Permission {
)
if (!ok) continue
pending.delete(id)
void Bus.publish(Event.Replied, {
yield* bus.publish(Event.Replied, {
sessionID: item.info.sessionID,
requestID: item.info.id,
reply: "always",
@@ -306,7 +307,9 @@ export namespace Permission {
return result
}
export const { runPromise } = makeRuntime(Service, layer)
export const defaultLayer = layer.pipe(Layer.provide(Bus.layer))
export const { runPromise } = makeRuntime(Service, defaultLayer)
export async function ask(input: z.infer<typeof AskInput>) {
return runPromise((s) => s.ask(input))

View File

@@ -74,8 +74,8 @@ export namespace Plugin {
return result
}
function publishPluginError(message: string) {
Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })
// Fire-and-forget a session error event describing a plugin failure.
function publishPluginError(bus: Bus.Interface, message: string) {
  const error = new NamedError.Unknown({ message }).toObject()
  Effect.runFork(bus.publish(Session.Event.Error, { error }))
}
async function applyPlugin(load: PluginLoader.Loaded, input: PluginInput, hooks: Hooks[]) {
@@ -161,24 +161,24 @@ export namespace Plugin {
if (stage === "install") {
const parsed = parsePluginSpecifier(spec)
log.error("failed to install plugin", { pkg: parsed.pkg, version: parsed.version, error: message })
publishPluginError(`Failed to install plugin ${parsed.pkg}@${parsed.version}: ${message}`)
publishPluginError(bus, `Failed to install plugin ${parsed.pkg}@${parsed.version}: ${message}`)
return
}
if (stage === "compatibility") {
log.warn("plugin incompatible", { path: spec, error: message })
publishPluginError(`Plugin ${spec} skipped: ${message}`)
publishPluginError(bus, `Plugin ${spec} skipped: ${message}`)
return
}
if (stage === "entry") {
log.error("failed to resolve plugin server entry", { path: spec, error: message })
publishPluginError(`Failed to load plugin ${spec}: ${message}`)
publishPluginError(bus, `Failed to load plugin ${spec}: ${message}`)
return
}
log.error("failed to load plugin", { path: spec, target: resolved?.entry, error: message })
publishPluginError(`Failed to load plugin ${spec}: ${message}`)
publishPluginError(bus, `Failed to load plugin ${spec}: ${message}`)
},
},
}),

View File

@@ -6,6 +6,8 @@ import { Installation } from "../installation"
import { Flag } from "../flag/flag"
import { lazy } from "@/util/lazy"
import { Filesystem } from "../util/filesystem"
import { Flock } from "@/util/flock"
import { Hash } from "@/util/hash"
// Try to import bundled snapshot (generated at build time)
// Falls back to undefined in dev mode when snapshot doesn't exist
@@ -13,7 +15,12 @@ import { Filesystem } from "../util/filesystem"
export namespace ModelsDev {
const log = Log.create({ service: "models.dev" })
const filepath = path.join(Global.Path.cache, "models.json")
const source = url()
const filepath = path.join(
Global.Path.cache,
source === "https://models.dev" ? "models.json" : `models-${Hash.fast(source)}.json`,
)
const ttl = 5 * 60 * 1000
export const Model = z.object({
id: z.string(),
@@ -85,6 +92,22 @@ export namespace ModelsDev {
return Flag.OPENCODE_MODELS_URL || "https://models.dev"
}
// The cached models.json counts as fresh while its mtime is inside the ttl
// window; a missing file (stat undefined) yields mtime 0, i.e. never fresh.
function fresh() {
  const modified = Number(Filesystem.stat(filepath)?.mtimeMs ?? 0)
  return Date.now() - modified < ttl
}
// A refresh may be skipped only when it is not forced and the on-disk
// cache is still fresh.
function skip(force: boolean) {
  if (force) return false
  return fresh()
}
// Fetch the models.dev API payload with a 10s timeout. Returns the HTTP ok
// flag alongside the raw body text so callers decide whether to persist it.
const fetchApi = async () => {
  const endpoint = `${url()}/api.json`
  const response = await fetch(endpoint, {
    signal: AbortSignal.timeout(10000),
    headers: { "User-Agent": Installation.USER_AGENT },
  })
  const text = await response.text()
  return { ok: response.ok, text }
}
export const Data = lazy(async () => {
const result = await Filesystem.readJson(Flag.OPENCODE_MODELS_PATH ?? filepath).catch(() => {})
if (result) return result
@@ -94,8 +117,17 @@ export namespace ModelsDev {
.catch(() => undefined)
if (snapshot) return snapshot
if (Flag.OPENCODE_DISABLE_MODELS_FETCH) return {}
const json = await fetch(`${url()}/api.json`).then((x) => x.text())
return JSON.parse(json)
return Flock.withLock(`models-dev:${filepath}`, async () => {
const result = await Filesystem.readJson(Flag.OPENCODE_MODELS_PATH ?? filepath).catch(() => {})
if (result) return result
const result2 = await fetchApi()
if (result2.ok) {
await Filesystem.write(filepath, result2.text).catch((e) => {
log.error("Failed to write models cache", { error: e })
})
}
return JSON.parse(result2.text)
})
})
export async function get() {
@@ -103,21 +135,19 @@ export namespace ModelsDev {
return result as Record<string, Provider>
}
export async function refresh() {
const result = await fetch(`${url()}/api.json`, {
headers: {
"User-Agent": Installation.USER_AGENT,
},
signal: AbortSignal.timeout(10 * 1000),
export async function refresh(force = false) {
if (skip(force)) return ModelsDev.Data.reset()
await Flock.withLock(`models-dev:${filepath}`, async () => {
if (skip(force)) return ModelsDev.Data.reset()
const result = await fetchApi()
if (!result.ok) return
await Filesystem.write(filepath, result.text)
ModelsDev.Data.reset()
}).catch((e) => {
log.error("Failed to fetch models.dev", {
error: e,
})
})
if (result && result.ok) {
await Filesystem.write(filepath, await result.text())
ModelsDev.Data.reset()
}
}
}

View File

@@ -1100,6 +1100,7 @@ export namespace Provider {
options: mergeDeep(existingModel?.options ?? {}, model.options ?? {}),
limit: {
context: model.limit?.context ?? existingModel?.limit?.context ?? 0,
input: model.limit?.input ?? existingModel?.limit?.input,
output: model.limit?.output ?? existingModel?.limit?.output ?? 0,
},
headers: mergeDeep(existingModel?.headers ?? {}, model.headers ?? {}),

View File

@@ -118,6 +118,8 @@ export namespace Pty {
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const bus = yield* Bus.Service
const plugin = yield* Plugin.Service
function teardown(session: Active) {
try {
session.process.kill()
@@ -157,7 +159,7 @@ export namespace Pty {
s.sessions.delete(id)
log.info("removing session", { id })
teardown(session)
void Bus.publish(Event.Deleted, { id: session.info.id })
yield* bus.publish(Event.Deleted, { id: session.info.id })
})
const list = Effect.fn("Pty.list")(function* () {
@@ -172,95 +174,95 @@ export namespace Pty {
const create = Effect.fn("Pty.create")(function* (input: CreateInput) {
const s = yield* InstanceState.get(state)
return yield* Effect.promise(async () => {
const id = PtyID.ascending()
const command = input.command || Shell.preferred()
const args = input.args || []
if (Shell.login(command)) {
args.push("-l")
}
const id = PtyID.ascending()
const command = input.command || Shell.preferred()
const args = input.args || []
if (Shell.login(command)) {
args.push("-l")
}
const cwd = input.cwd || s.dir
const shellEnv = await Plugin.trigger("shell.env", { cwd }, { env: {} })
const env = {
...process.env,
...input.env,
...shellEnv.env,
TERM: "xterm-256color",
OPENCODE_TERMINAL: "1",
} as Record<string, string>
const cwd = input.cwd || s.dir
const shell = yield* plugin.trigger("shell.env", { cwd }, { env: {} })
const env = {
...process.env,
...input.env,
...shell.env,
TERM: "xterm-256color",
OPENCODE_TERMINAL: "1",
} as Record<string, string>
if (process.platform === "win32") {
env.LC_ALL = "C.UTF-8"
env.LC_CTYPE = "C.UTF-8"
env.LANG = "C.UTF-8"
}
log.info("creating session", { id, cmd: command, args, cwd })
if (process.platform === "win32") {
env.LC_ALL = "C.UTF-8"
env.LC_CTYPE = "C.UTF-8"
env.LANG = "C.UTF-8"
}
log.info("creating session", { id, cmd: command, args, cwd })
const spawn = await pty()
const proc = spawn(command, args, {
const spawn = yield* Effect.promise(() => pty())
const proc = yield* Effect.sync(() =>
spawn(command, args, {
name: "xterm-256color",
cwd,
env,
})
}),
)
const info = {
id,
title: input.title || `Terminal ${id.slice(-4)}`,
command,
args,
cwd,
status: "running",
pid: proc.pid,
} as const
const session: Active = {
info,
process: proc,
buffer: "",
bufferCursor: 0,
cursor: 0,
subscribers: new Map(),
}
s.sessions.set(id, session)
proc.onData(
Instance.bind((chunk) => {
session.cursor += chunk.length
const info = {
id,
title: input.title || `Terminal ${id.slice(-4)}`,
command,
args,
cwd,
status: "running",
pid: proc.pid,
} as const
const session: Active = {
info,
process: proc,
buffer: "",
bufferCursor: 0,
cursor: 0,
subscribers: new Map(),
}
s.sessions.set(id, session)
proc.onData(
Instance.bind((chunk) => {
session.cursor += chunk.length
for (const [key, ws] of session.subscribers.entries()) {
if (ws.readyState !== 1) {
session.subscribers.delete(key)
continue
}
if (ws.data !== key) {
session.subscribers.delete(key)
continue
}
try {
ws.send(chunk)
} catch {
session.subscribers.delete(key)
}
for (const [key, ws] of session.subscribers.entries()) {
if (ws.readyState !== 1) {
session.subscribers.delete(key)
continue
}
if (ws.data !== key) {
session.subscribers.delete(key)
continue
}
try {
ws.send(chunk)
} catch {
session.subscribers.delete(key)
}
}
session.buffer += chunk
if (session.buffer.length <= BUFFER_LIMIT) return
const excess = session.buffer.length - BUFFER_LIMIT
session.buffer = session.buffer.slice(excess)
session.bufferCursor += excess
}),
)
proc.onExit(
Instance.bind(({ exitCode }) => {
if (session.info.status === "exited") return
log.info("session exited", { id, exitCode })
session.info.status = "exited"
void Bus.publish(Event.Exited, { id, exitCode })
Effect.runFork(remove(id))
}),
)
await Bus.publish(Event.Created, { info })
return info
})
session.buffer += chunk
if (session.buffer.length <= BUFFER_LIMIT) return
const excess = session.buffer.length - BUFFER_LIMIT
session.buffer = session.buffer.slice(excess)
session.bufferCursor += excess
}),
)
proc.onExit(
Instance.bind(({ exitCode }) => {
if (session.info.status === "exited") return
log.info("session exited", { id, exitCode })
session.info.status = "exited"
Effect.runFork(bus.publish(Event.Exited, { id, exitCode }))
Effect.runFork(remove(id))
}),
)
yield* bus.publish(Event.Created, { info })
return info
})
const update = Effect.fn("Pty.update")(function* (id: PtyID, input: UpdateInput) {
@@ -273,7 +275,7 @@ export namespace Pty {
if (input.size) {
session.process.resize(input.size.cols, input.size.rows)
}
void Bus.publish(Event.Updated, { info: session.info })
yield* bus.publish(Event.Updated, { info: session.info })
return session.info
})
@@ -361,7 +363,9 @@ export namespace Pty {
}),
)
const { runPromise } = makeRuntime(Service, layer)
const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Plugin.defaultLayer))
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function list() {
return runPromise((svc) => svc.list())

View File

@@ -109,6 +109,7 @@ export namespace Question {
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const bus = yield* Bus.Service
const state = yield* InstanceState.make<State>(
Effect.fn("Question.state")(function* () {
const state = {
@@ -145,7 +146,7 @@ export namespace Question {
tool: input.tool,
}
pending.set(id, { info, deferred })
Bus.publish(Event.Asked, info)
yield* bus.publish(Event.Asked, info)
return yield* Effect.ensuring(
Deferred.await(deferred),
@@ -164,7 +165,7 @@ export namespace Question {
}
pending.delete(input.requestID)
log.info("replied", { requestID: input.requestID, answers: input.answers })
Bus.publish(Event.Replied, {
yield* bus.publish(Event.Replied, {
sessionID: existing.info.sessionID,
requestID: existing.info.id,
answers: input.answers,
@@ -181,7 +182,7 @@ export namespace Question {
}
pending.delete(requestID)
log.info("rejected", { requestID })
Bus.publish(Event.Rejected, {
yield* bus.publish(Event.Rejected, {
sessionID: existing.info.sessionID,
requestID: existing.info.id,
})
@@ -197,7 +198,9 @@ export namespace Question {
}),
)
const { runPromise } = makeRuntime(Service, layer)
const defaultLayer = layer.pipe(Layer.provide(Bus.layer))
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function ask(input: {
sessionID: SessionID

View File

@@ -248,18 +248,10 @@ export namespace Instruction {
return runPromise((svc) => svc.systemPaths())
}
export async function system() {
return runPromise((svc) => svc.system())
}
export function loaded(messages: MessageV2.WithParts[]) {
return extract(messages)
}
export async function find(dir: string) {
return runPromise((svc) => svc.find(dir))
}
export async function resolve(messages: MessageV2.WithParts[], filepath: string, messageID: MessageID) {
return runPromise((svc) => svc.resolve(messages, filepath, messageID))
}

View File

@@ -25,6 +25,7 @@ export namespace LLM {
export type StreamInput = {
user: MessageV2.User
sessionID: string
parentSessionID?: string
model: Provider.Model
agent: Agent.Info
permission?: Permission.Ruleset
@@ -301,6 +302,8 @@ export namespace LLM {
"x-opencode-client": Flag.OPENCODE_CLIENT,
}
: {
"x-session-affinity": input.sessionID,
...(input.parentSessionID ? { "x-parent-session-id": input.parentSessionID } : {}),
"User-Agent": `opencode/${Installation.VERSION}`,
}),
...input.model.headers,

View File

@@ -512,7 +512,7 @@ export namespace SessionProcessor {
Layer.provide(Snapshot.defaultLayer),
Layer.provide(Agent.defaultLayer),
Layer.provide(LLM.defaultLayer),
Layer.provide(Permission.layer),
Layer.provide(Permission.defaultLayer),
Layer.provide(Plugin.defaultLayer),
Layer.provide(SessionStatus.layer.pipe(Layer.provide(Bus.layer))),
Layer.provide(Bus.layer),

View File

@@ -1512,6 +1512,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
agent,
permission: session.permission,
sessionID,
parentSessionID: session.parentID,
system,
messages: [...modelMsgs, ...(isLastStep ? [{ role: "assistant" as const, content: MAX_STEPS }] : [])],
tools,
@@ -1715,7 +1716,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
Layer.provide(SessionCompaction.defaultLayer),
Layer.provide(SessionProcessor.defaultLayer),
Layer.provide(Command.defaultLayer),
Layer.provide(Permission.layer),
Layer.provide(Permission.defaultLayer),
Layer.provide(MCP.defaultLayer),
Layer.provide(LSP.defaultLayer),
Layer.provide(FileTime.defaultLayer),

View File

@@ -174,8 +174,4 @@ export namespace SessionSummary {
export async function diff(input: z.infer<typeof DiffInput>) {
return runPromise((svc) => svc.diff(input))
}
export async function computeDiff(input: { messages: MessageV2.WithParts[] }) {
return runPromise((svc) => svc.computeDiff(input))
}
}

View File

@@ -1,152 +1,47 @@
import { Bus } from "@/bus"
import { Account } from "@/account"
import { Config } from "@/config/config"
import { Provider } from "@/provider/provider"
import { ProviderID, ModelID } from "@/provider/schema"
import { Session } from "@/session"
import type { SessionID } from "@/session/schema"
import { MessageV2 } from "@/session/message-v2"
import { Database, eq } from "@/storage/db"
import { SessionShareTable } from "./share.sql"
import { Log } from "@/util/log"
import type * as SDK from "@opencode-ai/sdk/v2"
import { Effect, Exit, Layer, Option, Schema, Scope, ServiceMap, Stream } from "effect"
import { FetchHttpClient, HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http"
import { Account } from "@/account"
import { Bus } from "@/bus"
import { InstanceState } from "@/effect/instance-state"
import { makeRuntime } from "@/effect/run-service"
import { Provider } from "@/provider/provider"
import { ModelID, ProviderID } from "@/provider/schema"
import { Session } from "@/session"
import { MessageV2 } from "@/session/message-v2"
import type { SessionID } from "@/session/schema"
import { Database, eq } from "@/storage/db"
import { Config } from "@/config/config"
import { Log } from "@/util/log"
import { SessionShareTable } from "./share.sql"
export namespace ShareNext {
const log = Log.create({ service: "share-next" })
type ApiEndpoints = {
create: string
sync: (shareId: string) => string
remove: (shareId: string) => string
data: (shareId: string) => string
}
function apiEndpoints(resource: string): ApiEndpoints {
return {
create: `/api/${resource}`,
sync: (shareId) => `/api/${resource}/${shareId}/sync`,
remove: (shareId) => `/api/${resource}/${shareId}`,
data: (shareId) => `/api/${resource}/${shareId}/data`,
}
}
const legacyApi = apiEndpoints("share")
const consoleApi = apiEndpoints("shares")
export async function url() {
const req = await request()
return req.baseUrl
}
export async function request(): Promise<{
headers: Record<string, string>
api: ApiEndpoints
baseUrl: string
}> {
const headers: Record<string, string> = {}
const active = await Account.active()
if (!active?.active_org_id) {
const baseUrl = await Config.get().then((x) => x.enterprise?.url ?? "https://opncd.ai")
return { headers, api: legacyApi, baseUrl }
}
const token = await Account.token(active.id)
if (!token) {
throw new Error("No active account token available for sharing")
}
headers["authorization"] = `Bearer ${token}`
headers["x-org-id"] = active.active_org_id
return { headers, api: consoleApi, baseUrl: active.url }
}
const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
export async function init() {
if (disabled) return
Bus.subscribe(Session.Event.Updated, async (evt) => {
const session = await Session.get(evt.properties.sessionID)
await sync(session.id, [
{
type: "session",
data: session,
},
])
})
Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
const info = evt.properties.info
await sync(info.sessionID, [
{
type: "message",
data: evt.properties.info,
},
])
if (info.role === "user") {
await sync(info.sessionID, [
{
type: "model",
data: [await Provider.getModel(info.model.providerID, info.model.modelID).then((m) => m)],
},
])
}
})
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
await sync(evt.properties.part.sessionID, [
{
type: "part",
data: evt.properties.part,
},
])
})
Bus.subscribe(Session.Event.Diff, async (evt) => {
await sync(evt.properties.sessionID, [
{
type: "session_diff",
data: evt.properties.diff,
},
])
})
export type Api = {
create: string
sync: (shareID: string) => string
remove: (shareID: string) => string
data: (shareID: string) => string
}
export async function create(sessionID: SessionID) {
if (disabled) return { id: "", url: "", secret: "" }
log.info("creating share", { sessionID })
const req = await request()
const response = await fetch(`${req.baseUrl}${req.api.create}`, {
method: "POST",
headers: { ...req.headers, "Content-Type": "application/json" },
body: JSON.stringify({ sessionID: sessionID }),
})
if (!response.ok) {
const message = await response.text().catch(() => response.statusText)
throw new Error(`Failed to create share (${response.status}): ${message || response.statusText}`)
}
const result = (await response.json()) as { id: string; url: string; secret: string }
Database.use((db) =>
db
.insert(SessionShareTable)
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
.onConflictDoUpdate({
target: SessionShareTable.session_id,
set: { id: result.id, secret: result.secret, url: result.url },
})
.run(),
)
fullSync(sessionID)
return result
export type Req = {
headers: Record<string, string>
api: Api
baseUrl: string
}
function get(sessionID: SessionID) {
const row = Database.use((db) =>
db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
)
if (!row) return
return { id: row.id, secret: row.secret, url: row.url }
const ShareSchema = Schema.Struct({
id: Schema.String,
url: Schema.String,
secret: Schema.String,
})
export type Share = typeof ShareSchema.Type
type State = {
queue: Map<string, { data: Map<string, Data> }>
scope: Scope.Closeable
}
type Data =
@@ -171,6 +66,31 @@ export namespace ShareNext {
data: SDK.Model[]
}
export interface Interface {
readonly init: () => Effect.Effect<void, unknown>
readonly url: () => Effect.Effect<string, unknown>
readonly request: () => Effect.Effect<Req, unknown>
readonly create: (sessionID: SessionID) => Effect.Effect<Share, unknown>
readonly remove: (sessionID: SessionID) => Effect.Effect<void, unknown>
}
export class Service extends ServiceMap.Service<Service, Interface>()("@opencode/ShareNext") {}
const db = <T>(fn: (d: Parameters<typeof Database.use>[0] extends (trx: infer D) => any ? D : never) => T) =>
Effect.sync(() => Database.use(fn))
function api(resource: string): Api {
return {
create: `/api/${resource}`,
sync: (shareID) => `/api/${resource}/${shareID}/sync`,
remove: (shareID) => `/api/${resource}/${shareID}`,
data: (shareID) => `/api/${resource}/${shareID}/data`,
}
}
const legacyApi = api("share")
const consoleApi = api("shares")
function key(item: Data) {
switch (item.type) {
case "session":
@@ -186,102 +106,264 @@ export namespace ShareNext {
}
}
const queue = new Map<string, { timeout: NodeJS.Timeout; data: Map<string, Data> }>()
async function sync(sessionID: SessionID, data: Data[]) {
if (disabled) return
const existing = queue.get(sessionID)
if (existing) {
for (const item of data) {
existing.data.set(key(item), item)
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const account = yield* Account.Service
const bus = yield* Bus.Service
const cfg = yield* Config.Service
const http = yield* HttpClient.HttpClient
const httpOk = HttpClient.filterStatusOk(http)
const provider = yield* Provider.Service
const session = yield* Session.Service
function sync(sessionID: SessionID, data: Data[]): Effect.Effect<void> {
return Effect.gen(function* () {
if (disabled) return
const s = yield* InstanceState.get(state)
const existing = s.queue.get(sessionID)
if (existing) {
for (const item of data) {
existing.data.set(key(item), item)
}
return
}
const next = new Map(data.map((item) => [key(item), item]))
s.queue.set(sessionID, { data: next })
yield* flush(sessionID).pipe(
Effect.delay(1000),
Effect.catchCause((cause) =>
Effect.sync(() => {
log.error("share flush failed", { sessionID, cause })
}),
),
Effect.forkIn(s.scope),
)
})
}
return
}
const dataMap = new Map<string, Data>()
for (const item of data) {
dataMap.set(key(item), item)
}
const state: InstanceState<State> = yield* InstanceState.make<State>(
Effect.fn("ShareNext.state")(function* (_ctx) {
const cache: State = { queue: new Map(), scope: yield* Scope.make() }
const timeout = setTimeout(async () => {
const queued = queue.get(sessionID)
if (!queued) return
queue.delete(sessionID)
const share = get(sessionID)
if (!share) return
yield* Effect.addFinalizer(() =>
Scope.close(cache.scope, Exit.void).pipe(
Effect.andThen(
Effect.sync(() => {
cache.queue.clear()
}),
),
),
)
const req = await request()
const response = await fetch(`${req.baseUrl}${req.api.sync(share.id)}`, {
method: "POST",
headers: { ...req.headers, "Content-Type": "application/json" },
body: JSON.stringify({
secret: share.secret,
data: Array.from(queued.data.values()),
if (disabled) return cache
const watch = <D extends { type: string }>(def: D, fn: (evt: { properties: any }) => Effect.Effect<void>) =>
bus.subscribe(def as never).pipe(
Stream.runForEach((evt) =>
fn(evt).pipe(
Effect.catchCause((cause) =>
Effect.sync(() => {
log.error("share subscriber failed", { type: def.type, cause })
}),
),
),
),
Effect.forkScoped,
)
yield* watch(Session.Event.Updated, (evt) =>
Effect.gen(function* () {
const info = yield* session.get(evt.properties.sessionID)
yield* sync(info.id, [{ type: "session", data: info }])
}),
)
yield* watch(MessageV2.Event.Updated, (evt) =>
Effect.gen(function* () {
const info = evt.properties.info
yield* sync(info.sessionID, [{ type: "message", data: info }])
if (info.role !== "user") return
const model = yield* provider.getModel(info.model.providerID, info.model.modelID)
yield* sync(info.sessionID, [{ type: "model", data: [model] }])
}),
)
yield* watch(MessageV2.Event.PartUpdated, (evt) =>
sync(evt.properties.part.sessionID, [{ type: "part", data: evt.properties.part }]),
)
yield* watch(Session.Event.Diff, (evt) =>
sync(evt.properties.sessionID, [{ type: "session_diff", data: evt.properties.diff }]),
)
return cache
}),
)
const request = Effect.fn("ShareNext.request")(function* () {
const headers: Record<string, string> = {}
const active = yield* account.active()
if (Option.isNone(active) || !active.value.active_org_id) {
const baseUrl = (yield* cfg.get()).enterprise?.url ?? "https://opncd.ai"
return { headers, api: legacyApi, baseUrl } satisfies Req
}
const token = yield* account.token(active.value.id)
if (Option.isNone(token)) {
throw new Error("No active account token available for sharing")
}
headers.authorization = `Bearer ${token.value}`
headers["x-org-id"] = active.value.active_org_id
return { headers, api: consoleApi, baseUrl: active.value.url } satisfies Req
})
if (!response.ok) {
log.warn("failed to sync share", { sessionID, shareID: share.id, status: response.status })
}
}, 1000)
queue.set(sessionID, { timeout, data: dataMap })
const get = Effect.fnUntraced(function* (sessionID: SessionID) {
const row = yield* db((db) =>
db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
)
if (!row) return
return { id: row.id, secret: row.secret, url: row.url } satisfies Share
})
const flush = Effect.fn("ShareNext.flush")(function* (sessionID: SessionID) {
if (disabled) return
const s = yield* InstanceState.get(state)
const queued = s.queue.get(sessionID)
if (!queued) return
s.queue.delete(sessionID)
const share = yield* get(sessionID)
if (!share) return
const req = yield* request()
const res = yield* HttpClientRequest.post(`${req.baseUrl}${req.api.sync(share.id)}`).pipe(
HttpClientRequest.setHeaders(req.headers),
HttpClientRequest.bodyJson({ secret: share.secret, data: Array.from(queued.data.values()) }),
Effect.flatMap((r) => http.execute(r)),
)
if (res.status >= 400) {
log.warn("failed to sync share", { sessionID, shareID: share.id, status: res.status })
}
})
const full = Effect.fn("ShareNext.full")(function* (sessionID: SessionID) {
log.info("full sync", { sessionID })
const info = yield* session.get(sessionID)
const diffs = yield* session.diff(sessionID)
const messages = yield* Effect.sync(() => Array.from(MessageV2.stream(sessionID)))
const models = yield* Effect.forEach(
Array.from(
new Map(
messages
.filter((msg) => msg.info.role === "user")
.map((msg) => (msg.info as SDK.UserMessage).model)
.map((item) => [`${item.providerID}/${item.modelID}`, item] as const),
).values(),
),
(item) => provider.getModel(ProviderID.make(item.providerID), ModelID.make(item.modelID)),
{ concurrency: 8 },
)
yield* sync(sessionID, [
{ type: "session", data: info },
...messages.map((item) => ({ type: "message" as const, data: item.info })),
...messages.flatMap((item) => item.parts.map((part) => ({ type: "part" as const, data: part }))),
{ type: "session_diff", data: diffs },
{ type: "model", data: models },
])
})
const init = Effect.fn("ShareNext.init")(function* () {
if (disabled) return
yield* InstanceState.get(state)
})
const url = Effect.fn("ShareNext.url")(function* () {
return (yield* request()).baseUrl
})
const create = Effect.fn("ShareNext.create")(function* (sessionID: SessionID) {
if (disabled) return { id: "", url: "", secret: "" }
log.info("creating share", { sessionID })
const req = yield* request()
const result = yield* HttpClientRequest.post(`${req.baseUrl}${req.api.create}`).pipe(
HttpClientRequest.setHeaders(req.headers),
HttpClientRequest.bodyJson({ sessionID }),
Effect.flatMap((r) => httpOk.execute(r)),
Effect.flatMap(HttpClientResponse.schemaBodyJson(ShareSchema)),
)
yield* db((db) =>
db
.insert(SessionShareTable)
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
.onConflictDoUpdate({
target: SessionShareTable.session_id,
set: { id: result.id, secret: result.secret, url: result.url },
})
.run(),
)
const s = yield* InstanceState.get(state)
yield* full(sessionID).pipe(
Effect.catchCause((cause) =>
Effect.sync(() => {
log.error("share full sync failed", { sessionID, cause })
}),
),
Effect.forkIn(s.scope),
)
return result
})
const remove = Effect.fn("ShareNext.remove")(function* (sessionID: SessionID) {
if (disabled) return
log.info("removing share", { sessionID })
const share = yield* get(sessionID)
if (!share) return
const req = yield* request()
yield* HttpClientRequest.delete(`${req.baseUrl}${req.api.remove(share.id)}`).pipe(
HttpClientRequest.setHeaders(req.headers),
HttpClientRequest.bodyJson({ secret: share.secret }),
Effect.flatMap((r) => httpOk.execute(r)),
)
yield* db((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
})
return Service.of({ init, url, request, create, remove })
}),
)
export const defaultLayer = layer.pipe(
Layer.provide(Bus.layer),
Layer.provide(Account.defaultLayer),
Layer.provide(Config.defaultLayer),
Layer.provide(FetchHttpClient.layer),
Layer.provide(Provider.defaultLayer),
Layer.provide(Session.defaultLayer),
)
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function init() {
return runPromise((svc) => svc.init())
}
export async function url() {
return runPromise((svc) => svc.url())
}
export async function request(): Promise<Req> {
return runPromise((svc) => svc.request())
}
export async function create(sessionID: SessionID) {
return runPromise((svc) => svc.create(sessionID))
}
export async function remove(sessionID: SessionID) {
if (disabled) return
log.info("removing share", { sessionID })
const share = get(sessionID)
if (!share) return
const req = await request()
const response = await fetch(`${req.baseUrl}${req.api.remove(share.id)}`, {
method: "DELETE",
headers: { ...req.headers, "Content-Type": "application/json" },
body: JSON.stringify({
secret: share.secret,
}),
})
if (!response.ok) {
const message = await response.text().catch(() => response.statusText)
throw new Error(`Failed to remove share (${response.status}): ${message || response.statusText}`)
}
Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
}
async function fullSync(sessionID: SessionID) {
log.info("full sync", { sessionID })
const session = await Session.get(sessionID)
const diffs = await Session.diff(sessionID)
const messages = await Array.fromAsync(MessageV2.stream(sessionID))
const models = await Promise.all(
Array.from(
new Map(
messages
.filter((m) => m.info.role === "user")
.map((m) => (m.info as SDK.UserMessage).model)
.map((m) => [`${m.providerID}/${m.modelID}`, m] as const),
).values(),
).map((m) => Provider.getModel(ProviderID.make(m.providerID), ModelID.make(m.modelID)).then((item) => item)),
)
await sync(sessionID, [
{
type: "session",
data: session,
},
...messages.map((x) => ({
type: "message" as const,
data: x.info,
})),
...messages.flatMap((x) => x.parts.map((y) => ({ type: "part" as const, data: y }))),
{
type: "session_diff",
data: diffs,
},
{
type: "model",
data: models,
},
])
return runPromise((svc) => svc.remove(sessionID))
}
}

View File

@@ -545,10 +545,6 @@ export namespace Snapshot {
return runPromise((svc) => svc.init())
}
export async function cleanup() {
return runPromise((svc) => svc.cleanup())
}
export async function track() {
return runPromise((svc) => svc.track())
}

View File

@@ -2,7 +2,7 @@ Executes a given bash command in a persistent shell session with optional timeou
Be aware: OS: ${os}, Shell: ${shell}
All commands run in ${directory} by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID using `cd <directory> && <command>` patterns - use `workdir` instead.
All commands run in the current working directory by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID using `cd <directory> && <command>` patterns - use `workdir` instead.
IMPORTANT: This tool is for terminal operations like git, npm, docker, etc. DO NOT use it for file operations (reading, writing, editing, searching, finding files) - use the specialized tools for this instead.

View File

@@ -206,10 +206,6 @@ export namespace ToolRegistry {
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function register(tool: Tool.Info) {
return runPromise((svc) => svc.register(tool))
}
export async function ids() {
return runPromise((svc) => svc.ids())
}

View File

@@ -55,7 +55,7 @@ export namespace Tool {
return {
id,
init: async (initCtx) => {
const toolInfo = init instanceof Function ? await init(initCtx) : init
const toolInfo = init instanceof Function ? await init(initCtx) : { ...init }
const execute = toolInfo.execute
toolInfo.execute = async (args, ctx) => {
try {

View File

@@ -144,7 +144,11 @@ export namespace Process {
throw new RunFailedError(cmd, out.code, out.stdout, out.stderr)
}
// Duplicated in `packages/sdk/js/src/process.ts` because the SDK cannot import
// `opencode` without creating a cycle. Keep both copies in sync.
export async function stop(proc: ChildProcess) {
if (proc.exitCode !== null || proc.signalCode !== null) return
if (process.platform !== "win32" || !proc.pid) {
proc.kill()
return

View File

@@ -211,7 +211,7 @@ function liveRuntime(layer: Layer.Layer<LLM.Service>, provider = ProviderTest.fa
Layer.provide(Session.defaultLayer),
Layer.provide(Snapshot.defaultLayer),
Layer.provide(layer),
Layer.provide(Permission.layer),
Layer.provide(Permission.defaultLayer),
Layer.provide(Agent.defaultLayer),
Layer.provide(Plugin.defaultLayer),
Layer.provide(status),

View File

@@ -149,7 +149,7 @@ const deps = Layer.mergeAll(
Session.defaultLayer,
Snapshot.defaultLayer,
AgentSvc.defaultLayer,
Permission.layer,
Permission.defaultLayer,
Plugin.defaultLayer,
Config.defaultLayer,
LLM.defaultLayer,

View File

@@ -150,7 +150,7 @@ function makeHttp() {
LLM.defaultLayer,
AgentSvc.defaultLayer,
Command.defaultLayer,
Permission.layer,
Permission.defaultLayer,
Plugin.defaultLayer,
Config.defaultLayer,
ProviderSvc.defaultLayer,

View File

@@ -114,7 +114,7 @@ function makeHttp() {
LLM.defaultLayer,
AgentSvc.defaultLayer,
Command.defaultLayer,
Permission.layer,
Permission.defaultLayer,
Plugin.defaultLayer,
Config.defaultLayer,
ProviderSvc.defaultLayer,

View File

@@ -1,76 +1,333 @@
import { test, expect, mock } from "bun:test"
import { ShareNext } from "../../src/share/share-next"
import { AccessToken, Account, AccountID, OrgID } from "../../src/account"
import { NodeFileSystem } from "@effect/platform-node"
import { beforeEach, describe, expect } from "bun:test"
import { Effect, Exit, Layer, Option } from "effect"
import { HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http"
import { AccessToken, AccountID, OrgID, RefreshToken } from "../../src/account"
import { Account } from "../../src/account"
import { AccountRepo } from "../../src/account/repo"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { Bus } from "../../src/bus"
import { Config } from "../../src/config/config"
import { Provider } from "../../src/provider/provider"
import { Session } from "../../src/session"
import type { SessionID } from "../../src/session/schema"
import { ShareNext } from "../../src/share/share-next"
import { SessionShareTable } from "../../src/share/share.sql"
import { Database, eq } from "../../src/storage/db"
import { provideTmpdirInstance } from "../fixture/fixture"
import { resetDatabase } from "../fixture/db"
import { testEffect } from "../lib/effect"
test("ShareNext.request uses legacy share API without active org account", async () => {
const originalActive = Account.active
const originalConfigGet = Config.get
const env = Layer.mergeAll(
Session.defaultLayer,
AccountRepo.layer,
NodeFileSystem.layer,
CrossSpawnSpawner.defaultLayer,
)
const it = testEffect(env)
Account.active = mock(async () => undefined)
Config.get = mock(async () => ({ enterprise: { url: "https://legacy-share.example.com" } }))
// Test helper: wrap `body` as a JSON HTTP response for the fake client.
const json = (req: Parameters<typeof HttpClientResponse.fromWeb>[0], body: unknown, status = 200) => {
  const payload = new Response(JSON.stringify(body), {
    status,
    headers: { "content-type": "application/json" },
  })
  return HttpClientResponse.fromWeb(req, payload)
}
try {
const req = await ShareNext.request()
const none = HttpClient.make(() => Effect.die("unexpected http call"))
expect(req.api.create).toBe("/api/share")
expect(req.api.sync("shr_123")).toBe("/api/share/shr_123/sync")
expect(req.api.remove("shr_123")).toBe("/api/share/shr_123")
expect(req.api.data("shr_123")).toBe("/api/share/shr_123/data")
expect(req.baseUrl).toBe("https://legacy-share.example.com")
expect(req.headers).toEqual({})
} finally {
Account.active = originalActive
Config.get = originalConfigGet
}
function live(client: HttpClient.HttpClient) {
const http = Layer.succeed(HttpClient.HttpClient, client)
return ShareNext.layer.pipe(
Layer.provide(Bus.layer),
Layer.provide(Account.layer.pipe(Layer.provide(AccountRepo.layer), Layer.provide(http))),
Layer.provide(Config.defaultLayer),
Layer.provide(http),
Layer.provide(Provider.defaultLayer),
Layer.provide(Session.defaultLayer),
)
}
function wired(client: HttpClient.HttpClient) {
const http = Layer.succeed(HttpClient.HttpClient, client)
return Layer.mergeAll(
Bus.layer,
ShareNext.layer,
Session.layer,
AccountRepo.layer,
NodeFileSystem.layer,
CrossSpawnSpawner.defaultLayer,
).pipe(
Layer.provide(Bus.layer),
Layer.provide(Account.layer.pipe(Layer.provide(AccountRepo.layer), Layer.provide(http))),
Layer.provide(Config.defaultLayer),
Layer.provide(http),
Layer.provide(Provider.defaultLayer),
)
}
const share = (id: SessionID) =>
Database.use((db) => db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, id)).get())
const seed = (url: string, org?: string) =>
AccountRepo.use((repo) =>
repo.persistAccount({
id: AccountID.make("account-1"),
email: "user@example.com",
url,
accessToken: AccessToken.make("st_test_token"),
refreshToken: RefreshToken.make("rt_test_token"),
expiry: Date.now() + 10 * 60_000,
orgID: org ? Option.some(OrgID.make(org)) : Option.none(),
}),
)
beforeEach(async () => {
await resetDatabase()
})
test("ShareNext.request uses org share API with auth headers when account is active", async () => {
const originalActive = Account.active
const originalToken = Account.token
describe("ShareNext", () => {
it.live("request uses legacy share API without active org account", () =>
provideTmpdirInstance(
() =>
ShareNext.Service.use((svc) =>
Effect.gen(function* () {
const req = yield* svc.request()
Account.active = mock(async () => ({
id: AccountID.make("account-1"),
email: "user@example.com",
url: "https://control.example.com",
active_org_id: OrgID.make("org-1"),
}))
Account.token = mock(async () => AccessToken.make("st_test_token"))
expect(req.api.create).toBe("/api/share")
expect(req.api.sync("shr_123")).toBe("/api/share/shr_123/sync")
expect(req.api.remove("shr_123")).toBe("/api/share/shr_123")
expect(req.api.data("shr_123")).toBe("/api/share/shr_123/data")
expect(req.baseUrl).toBe("https://legacy-share.example.com")
expect(req.headers).toEqual({})
}),
).pipe(Effect.provide(live(none))),
{ config: { enterprise: { url: "https://legacy-share.example.com" } } },
),
)
try {
const req = await ShareNext.request()
it.live("request uses default URL when no enterprise config", () =>
provideTmpdirInstance(() =>
ShareNext.Service.use((svc) =>
Effect.gen(function* () {
const req = yield* svc.request()
expect(req.api.create).toBe("/api/shares")
expect(req.api.sync("shr_123")).toBe("/api/shares/shr_123/sync")
expect(req.api.remove("shr_123")).toBe("/api/shares/shr_123")
expect(req.api.data("shr_123")).toBe("/api/shares/shr_123/data")
expect(req.baseUrl).toBe("https://control.example.com")
expect(req.headers).toEqual({
authorization: "Bearer st_test_token",
"x-org-id": "org-1",
})
} finally {
Account.active = originalActive
Account.token = originalToken
}
})
test("ShareNext.request fails when org account has no token", async () => {
const originalActive = Account.active
const originalToken = Account.token
Account.active = mock(async () => ({
id: AccountID.make("account-1"),
email: "user@example.com",
url: "https://control.example.com",
active_org_id: OrgID.make("org-1"),
}))
Account.token = mock(async () => undefined)
try {
await expect(ShareNext.request()).rejects.toThrow("No active account token available for sharing")
} finally {
Account.active = originalActive
Account.token = originalToken
}
expect(req.baseUrl).toBe("https://opncd.ai")
expect(req.api.create).toBe("/api/share")
expect(req.headers).toEqual({})
}),
).pipe(Effect.provide(live(none))),
),
)
it.live("request uses org share API with auth headers when account is active", () =>
provideTmpdirInstance(() =>
Effect.gen(function* () {
yield* seed("https://control.example.com", "org-1")
const req = yield* ShareNext.Service.use((svc) => svc.request()).pipe(Effect.provide(live(none)))
expect(req.api.create).toBe("/api/shares")
expect(req.api.sync("shr_123")).toBe("/api/shares/shr_123/sync")
expect(req.api.remove("shr_123")).toBe("/api/shares/shr_123")
expect(req.api.data("shr_123")).toBe("/api/shares/shr_123/data")
expect(req.baseUrl).toBe("https://control.example.com")
expect(req.headers).toEqual({
authorization: "Bearer st_test_token",
"x-org-id": "org-1",
})
}),
),
)
it.live("create posts share, persists it, and returns the result", () =>
provideTmpdirInstance(
() =>
Effect.gen(function* () {
const session = yield* Session.Service.use((svc) => svc.create({ title: "test" }))
const seen: HttpClientRequest.HttpClientRequest[] = []
const client = HttpClient.make((req) => {
seen.push(req)
if (req.url.endsWith("/api/share")) {
return Effect.succeed(
json(req, {
id: "shr_abc",
url: "https://legacy-share.example.com/share/abc",
secret: "sec_123",
}),
)
}
return Effect.succeed(json(req, { ok: true }))
})
const result = yield* ShareNext.Service.use((svc) => svc.create(session.id)).pipe(
Effect.provide(live(client)),
)
expect(result.id).toBe("shr_abc")
expect(result.url).toBe("https://legacy-share.example.com/share/abc")
expect(result.secret).toBe("sec_123")
const row = share(session.id)
expect(row?.id).toBe("shr_abc")
expect(row?.url).toBe("https://legacy-share.example.com/share/abc")
expect(row?.secret).toBe("sec_123")
expect(seen).toHaveLength(1)
expect(seen[0].method).toBe("POST")
expect(seen[0].url).toBe("https://legacy-share.example.com/api/share")
}),
{ config: { enterprise: { url: "https://legacy-share.example.com" } } },
),
)
it.live("remove deletes the persisted share and calls the delete endpoint", () =>
provideTmpdirInstance(
() =>
Effect.gen(function* () {
const session = yield* Session.Service.use((svc) => svc.create({ title: "test" }))
const seen: HttpClientRequest.HttpClientRequest[] = []
const client = HttpClient.make((req) => {
seen.push(req)
if (req.method === "POST") {
return Effect.succeed(
json(req, {
id: "shr_abc",
url: "https://legacy-share.example.com/share/abc",
secret: "sec_123",
}),
)
}
return Effect.succeed(HttpClientResponse.fromWeb(req, new Response(null, { status: 200 })))
})
yield* Effect.gen(function* () {
yield* ShareNext.Service.use((svc) => svc.create(session.id))
yield* ShareNext.Service.use((svc) => svc.remove(session.id))
}).pipe(Effect.provide(live(client)))
expect(share(session.id)).toBeUndefined()
expect(seen.map((req) => [req.method, req.url])).toEqual([
["POST", "https://legacy-share.example.com/api/share"],
["DELETE", "https://legacy-share.example.com/api/share/shr_abc"],
])
}),
{ config: { enterprise: { url: "https://legacy-share.example.com" } } },
),
)
it.live("create fails on a non-ok response and does not persist a share", () =>
provideTmpdirInstance(() =>
Effect.gen(function* () {
const session = yield* Session.Service.use((svc) => svc.create({ title: "test" }))
const client = HttpClient.make((req) => Effect.succeed(json(req, { error: "bad" }, 500)))
const exit = yield* ShareNext.Service.use((svc) => Effect.exit(svc.create(session.id))).pipe(
Effect.provide(live(client)),
)
expect(Exit.isFailure(exit)).toBe(true)
expect(share(session.id)).toBeUndefined()
}),
),
)
it.live("ShareNext coalesces rapid diff events into one delayed sync with latest data", () =>
provideTmpdirInstance(
() => {
const seen: Array<{ url: string; body: string }> = []
const client = HttpClient.make((req) => {
if (req.url.endsWith("/sync") && req.body._tag === "Uint8Array") {
seen.push({ url: req.url, body: new TextDecoder().decode(req.body.body) })
}
return Effect.succeed(json(req, { ok: true }))
})
return Effect.gen(function* () {
const bus = yield* Bus.Service
const share = yield* ShareNext.Service
const session = yield* Session.Service
const info = yield* session.create({ title: "first" })
yield* share.init()
yield* Effect.sleep(50)
yield* Effect.sync(() =>
Database.use((db) =>
db
.insert(SessionShareTable)
.values({
session_id: info.id,
id: "shr_abc",
url: "https://legacy-share.example.com/share/abc",
secret: "sec_123",
})
.run(),
),
)
yield* bus.publish(Session.Event.Diff, {
sessionID: info.id,
diff: [
{
file: "a.ts",
before: "one",
after: "two",
additions: 1,
deletions: 1,
status: "modified",
},
],
})
yield* bus.publish(Session.Event.Diff, {
sessionID: info.id,
diff: [
{
file: "b.ts",
before: "old",
after: "new",
additions: 2,
deletions: 0,
status: "modified",
},
],
})
yield* Effect.sleep(1_250)
expect(seen).toHaveLength(1)
expect(seen[0].url).toBe("https://legacy-share.example.com/api/share/shr_abc/sync")
const body = JSON.parse(seen[0].body) as {
secret: string
data: Array<{
type: string
data: Array<{
file: string
before: string
after: string
additions: number
deletions: number
status?: string
}>
}>
}
expect(body.secret).toBe("sec_123")
expect(body.data).toHaveLength(1)
expect(body.data[0].type).toBe("session_diff")
expect(body.data[0].data).toEqual([
{
file: "b.ts",
before: "old",
after: "new",
additions: 2,
deletions: 0,
status: "modified",
},
])
}).pipe(Effect.provide(wired(client)))
},
{ config: { enterprise: { url: "https://legacy-share.example.com" } } },
),
)
})

View File

@@ -0,0 +1,111 @@
import { describe, test, expect } from "bun:test"
import z from "zod"
import { Tool } from "../../src/tool/tool"
const params = z.object({ input: z.string() })
const defaultArgs = { input: "test" }
// Build a minimal object-form tool definition for Tool.define().
// `id` is accepted for call-site symmetry but unused by the stub itself;
// `executeFn` lets a test observe how many times execute actually runs.
function makeTool(id: string, executeFn?: () => void) {
  const execute = async () => {
    executeFn?.()
    return { title: "test", output: "ok", metadata: {} }
  }
  return {
    description: "test tool",
    parameters: params,
    execute,
  }
}
// Regression suite for the Tool.define() wrapper-accumulation fix: object-defined
// tools must be shallow-copied per init() so the validation/execute wrapper is not
// layered onto the same object repeatedly across agentic steps.
describe("Tool.define", () => {
test("object-defined tool does not mutate the original init object", async () => {
const original = makeTool("test")
const originalExecute = original.execute
const tool = Tool.define("test-tool", original)
await tool.init()
await tool.init()
await tool.init()
// The original object's execute should never be overwritten
expect(original.execute).toBe(originalExecute)
})
test("object-defined tool does not accumulate wrapper layers across init() calls", async () => {
let executeCalls = 0
const tool = Tool.define(
"test-tool",
makeTool("test", () => executeCalls++),
)
// Call init() many times to simulate many agentic steps
for (let i = 0; i < 100; i++) {
await tool.init()
}
// Resolve the tool and call execute
const resolved = await tool.init()
executeCalls = 0
// Capture the stack trace inside execute to measure wrapper depth
let stackInsideExecute = ""
const origExec = resolved.execute
resolved.execute = async (args: any, ctx: any) => {
const result = await origExec.call(resolved, args, ctx)
const err = new Error()
stackInsideExecute = err.stack || ""
return result
}
// The inner execute must run exactly once per call, regardless of how many
// times init() was invoked beforehand.
await resolved.execute(defaultArgs, {} as any)
expect(executeCalls).toBe(1)
// Count how many times tool.ts appears in the stack.
// With the fix: 1 wrapper layer (from the most recent init()).
// Without the fix: 101 wrapper layers from accumulated closures.
const toolTsFrames = stackInsideExecute.split("\n").filter((l) => l.includes("tool.ts")).length
expect(toolTsFrames).toBeLessThan(5)
})
test("function-defined tool returns fresh objects and is unaffected", async () => {
const tool = Tool.define("test-fn-tool", () => Promise.resolve(makeTool("test")))
const first = await tool.init()
const second = await tool.init()
// Function-defined tools return distinct objects each time
expect(first).not.toBe(second)
})
test("object-defined tool returns distinct objects per init() call", async () => {
const tool = Tool.define("test-copy", makeTool("test"))
const first = await tool.init()
const second = await tool.init()
// Each init() should return a separate object so wrappers don't accumulate
expect(first).not.toBe(second)
})
test("validation still works after many init() calls", async () => {
const tool = Tool.define("test-validation", {
description: "validation test",
parameters: z.object({ count: z.number().int().positive() }),
async execute(args) {
return { title: "test", output: String(args.count), metadata: {} }
},
})
for (let i = 0; i < 100; i++) {
await tool.init()
}
// The per-init copy must still carry the schema-validation wrapper:
// valid input passes through, invalid input is rejected.
const resolved = await tool.init()
const result = await resolved.execute({ count: 42 }, {} as any)
expect(result.output).toBe("42")
await expect(resolved.execute({ count: -1 }, {} as any)).rejects.toThrow("invalid arguments")
})
})

View File

@@ -23,9 +23,12 @@
"devDependencies": {
"@hey-api/openapi-ts": "0.90.10",
"@tsconfig/node22": "catalog:",
"@types/cross-spawn": "catalog:",
"@types/node": "catalog:",
"typescript": "catalog:",
"@typescript/native-preview": "catalog:"
"@typescript/native-preview": "catalog:",
"typescript": "catalog:"
},
"dependencies": {}
"dependencies": {
"cross-spawn": "catalog:"
}
}

View File

@@ -0,0 +1,31 @@
import { type ChildProcess, spawnSync } from "node:child_process"
// Duplicated from `packages/opencode/src/util/process.ts` because the SDK cannot
// import `opencode` without creating a cycle (`opencode` depends on `@opencode-ai/sdk`).
export function stop(proc: ChildProcess) {
if (proc.exitCode !== null || proc.signalCode !== null) return
if (process.platform === "win32" && proc.pid) {
const out = spawnSync("taskkill", ["/pid", String(proc.pid), "/T", "/F"], { windowsHide: true })
if (!out.error && out.status === 0) return
}
proc.kill()
}
/**
 * Tie a child process's lifetime to an AbortSignal.
 *
 * When `signal` aborts, the process is stopped (via `stop`) and `onAbort` is
 * invoked. Listeners are detached automatically once the process exits or
 * errors, so nothing leaks on the signal.
 *
 * Returns a cleanup function that detaches all listeners without killing the
 * process; with no `signal` it is a no-op.
 */
export function bindAbort(proc: ChildProcess, signal?: AbortSignal, onAbort?: () => void) {
  if (!signal) return () => {}
  const sig = signal
  function cleanup() {
    sig.removeEventListener("abort", onSignalAbort)
    proc.off("exit", cleanup)
    proc.off("error", cleanup)
  }
  function onSignalAbort() {
    cleanup()
    stop(proc)
    onAbort?.()
  }
  sig.addEventListener("abort", onSignalAbort, { once: true })
  proc.on("exit", cleanup)
  proc.on("error", cleanup)
  // "abort" listeners added after the signal already fired never run, so
  // handle an already-aborted signal synchronously here.
  if (sig.aborted) onSignalAbort()
  return cleanup
}

View File

@@ -1,5 +1,6 @@
import { spawn } from "node:child_process"
import launch from "cross-spawn"
import { type Config } from "./gen/types.gen.js"
import { stop, bindAbort } from "./process.js"
export type ServerOptions = {
hostname?: string
@@ -31,29 +32,38 @@ export async function createOpencodeServer(options?: ServerOptions) {
const args = [`serve`, `--hostname=${options.hostname}`, `--port=${options.port}`]
if (options.config?.logLevel) args.push(`--log-level=${options.config.logLevel}`)
const proc = spawn(`opencode`, args, {
signal: options.signal,
const proc = launch(`opencode`, args, {
env: {
...process.env,
OPENCODE_CONFIG_CONTENT: JSON.stringify(options.config ?? {}),
},
})
let clear = () => {}
const url = await new Promise<string>((resolve, reject) => {
const id = setTimeout(() => {
clear()
stop(proc)
reject(new Error(`Timeout waiting for server to start after ${options.timeout}ms`))
}, options.timeout)
let output = ""
let resolved = false
proc.stdout?.on("data", (chunk) => {
if (resolved) return
output += chunk.toString()
const lines = output.split("\n")
for (const line of lines) {
if (line.startsWith("opencode server listening")) {
const match = line.match(/on\s+(https?:\/\/[^\s]+)/)
if (!match) {
throw new Error(`Failed to parse server url from output: ${line}`)
clear()
stop(proc)
clearTimeout(id)
reject(new Error(`Failed to parse server url from output: ${line}`))
return
}
clearTimeout(id)
resolved = true
resolve(match[1]!)
return
}
@@ -74,18 +84,17 @@ export async function createOpencodeServer(options?: ServerOptions) {
clearTimeout(id)
reject(error)
})
if (options.signal) {
options.signal.addEventListener("abort", () => {
clearTimeout(id)
reject(new Error("Aborted"))
})
}
clear = bindAbort(proc, options.signal, () => {
clearTimeout(id)
reject(options.signal?.reason)
})
})
return {
url,
close() {
proc.kill()
clear()
stop(proc)
},
}
}
@@ -106,8 +115,7 @@ export function createOpencodeTui(options?: TuiOptions) {
args.push(`--agent=${options.agent}`)
}
const proc = spawn(`opencode`, args, {
signal: options?.signal,
const proc = launch(`opencode`, args, {
stdio: "inherit",
env: {
...process.env,
@@ -115,9 +123,12 @@ export function createOpencodeTui(options?: TuiOptions) {
},
})
const clear = bindAbort(proc, options?.signal)
return {
close() {
proc.kill()
clear()
stop(proc)
},
}
}

View File

@@ -1,5 +1,6 @@
import { spawn } from "node:child_process"
import launch from "cross-spawn"
import { type Config } from "./gen/types.gen.js"
import { stop, bindAbort } from "../process.js"
export type ServerOptions = {
hostname?: string
@@ -31,29 +32,38 @@ export async function createOpencodeServer(options?: ServerOptions) {
const args = [`serve`, `--hostname=${options.hostname}`, `--port=${options.port}`]
if (options.config?.logLevel) args.push(`--log-level=${options.config.logLevel}`)
const proc = spawn(`opencode`, args, {
signal: options.signal,
const proc = launch(`opencode`, args, {
env: {
...process.env,
OPENCODE_CONFIG_CONTENT: JSON.stringify(options.config ?? {}),
},
})
let clear = () => {}
const url = await new Promise<string>((resolve, reject) => {
const id = setTimeout(() => {
clear()
stop(proc)
reject(new Error(`Timeout waiting for server to start after ${options.timeout}ms`))
}, options.timeout)
let output = ""
let resolved = false
proc.stdout?.on("data", (chunk) => {
if (resolved) return
output += chunk.toString()
const lines = output.split("\n")
for (const line of lines) {
if (line.startsWith("opencode server listening")) {
const match = line.match(/on\s+(https?:\/\/[^\s]+)/)
if (!match) {
throw new Error(`Failed to parse server url from output: ${line}`)
clear()
stop(proc)
clearTimeout(id)
reject(new Error(`Failed to parse server url from output: ${line}`))
return
}
clearTimeout(id)
resolved = true
resolve(match[1]!)
return
}
@@ -74,18 +84,17 @@ export async function createOpencodeServer(options?: ServerOptions) {
clearTimeout(id)
reject(error)
})
if (options.signal) {
options.signal.addEventListener("abort", () => {
clearTimeout(id)
reject(new Error("Aborted"))
})
}
clear = bindAbort(proc, options.signal, () => {
clearTimeout(id)
reject(options.signal?.reason)
})
})
return {
url,
close() {
proc.kill()
clear()
stop(proc)
},
}
}
@@ -106,8 +115,7 @@ export function createOpencodeTui(options?: TuiOptions) {
args.push(`--agent=${options.agent}`)
}
const proc = spawn(`opencode`, args, {
signal: options?.signal,
const proc = launch(`opencode`, args, {
stdio: "inherit",
env: {
...process.env,
@@ -115,9 +123,12 @@ export function createOpencodeTui(options?: TuiOptions) {
},
})
const clear = bindAbort(proc, options?.signal)
return {
close() {
proc.kill()
clear()
stop(proc)
},
}
}