Compare commits

..

3 Commits

Author SHA1 Message Date
Kit Langton
221aa8c19e test(opencode): clear otlp url in preload 2026-04-07 16:50:55 -04:00
Kit Langton
544d012515 fix(opencode): make leto export opt-in by url 2026-04-07 16:49:45 -04:00
Kit Langton
ba41b6928f wip: wire local leto observability 2026-04-06 11:00:42 -04:00
48 changed files with 290 additions and 451 deletions

View File

@@ -371,7 +371,6 @@
"jsonc-parser": "3.3.1",
"mime-types": "3.0.2",
"minimatch": "10.0.3",
"npm-package-arg": "13.0.2",
"open": "10.1.2",
"opencode-gitlab-auth": "2.0.1",
"opencode-poe-auth": "0.0.1",
@@ -413,7 +412,6 @@
"@types/bun": "catalog:",
"@types/cross-spawn": "catalog:",
"@types/mime-types": "3.0.1",
"@types/npm-package-arg": "6.1.4",
"@types/npmcli__arborist": "6.3.3",
"@types/semver": "^7.5.8",
"@types/turndown": "5.0.5",

View File

@@ -1,8 +1,8 @@
{
"nodeModules": {
"x86_64-linux": "sha256-LRhPPrOKCGUSCEWTpAxPdWKTKVNkg82WrvD25cP3jts=",
"aarch64-linux": "sha256-sbNxkil47n+B7v6ds5EYFybLytXUyRlu0Cpka0ZmDx4=",
"aarch64-darwin": "sha256-5+99gtpIHGygMW3VBAexNhmaORgI8LCxPk/Gf1fW/ds=",
"x86_64-darwin": "sha256-LqnvZGGnQaRxIoowOr5gf6lFgDhbgQhVPiAcRTtU6fE="
"x86_64-linux": "sha256-0jwPCu2Lod433GPQLHN8eEkhfpPviDFfkFJmuvkRdlE=",
"aarch64-linux": "sha256-Qi0IkGkaIBKZsPLTO8kaTbCVL0cEfVOm/Y/6VUVI9TY=",
"aarch64-darwin": "sha256-1eZBBLgYVkjg5RYN/etR1Mb5UjU3VelElBB5ug5hQdc=",
"x86_64-darwin": "sha256-jdXgA+kZb/foFHR40UiPif6rsA2GDVCCVHnJR3jBUGI="
}
}

View File

@@ -54,7 +54,6 @@
"@types/bun": "catalog:",
"@types/cross-spawn": "catalog:",
"@types/mime-types": "3.0.1",
"@types/npm-package-arg": "6.1.4",
"@types/npmcli__arborist": "6.3.3",
"@types/semver": "^7.5.8",
"@types/turndown": "5.0.5",
@@ -136,7 +135,6 @@
"jsonc-parser": "3.3.1",
"mime-types": "3.0.2",
"minimatch": "10.0.3",
"npm-package-arg": "13.0.2",
"open": "10.1.2",
"opencode-gitlab-auth": "2.0.1",
"opencode-poe-auth": "0.0.1",

View File

@@ -78,7 +78,7 @@ export namespace Agent {
const provider = yield* Provider.Service
const state = yield* InstanceState.make<State>(
Effect.fn("Agent.state")(function* (ctx) {
Effect.fnUntraced(function* (ctx) {
const cfg = yield* config.get()
const skillDirs = yield* skill.dirs()
const whitelistedDirs = [Truncate.GLOB, ...skillDirs.map((dir) => path.join(dir, "*"))]

View File

@@ -47,7 +47,7 @@ export namespace Bus {
Service,
Effect.gen(function* () {
const state = yield* InstanceState.make<State>(
Effect.fn("Bus.state")(function* (ctx) {
Effect.fnUntraced(function* (ctx) {
const wildcard = yield* PubSub.unbounded<Payload>()
const typed = new Map<string, PubSub.PubSub<Payload>>()

View File

@@ -125,16 +125,14 @@ import type { EventSource } from "./context/sdk"
import { DialogVariant } from "./component/dialog-variant"
function rendererConfig(_config: TuiConfig.Info): CliRendererConfig {
const mouseEnabled = !Flag.OPENCODE_DISABLE_MOUSE && (_config.mouse ?? true)
return {
externalOutputMode: "passthrough",
targetFps: 60,
gatherStats: false,
exitOnCtrlC: false,
useKittyKeyboard: { events: process.platform === "win32" },
autoFocus: false,
openConsoleOnError: false,
useMouse: mouseEnabled,
consoleOptions: {
keyBindings: [{ name: "y", ctrl: true, action: "copy-selection" }],
onCopySelection: (text) => {

View File

@@ -841,20 +841,8 @@ export function Prompt(props: PromptProps) {
return !!current
})
const suggestion = createMemo(() => {
if (!props.sessionID) return
if (store.mode !== "normal") return
if (store.prompt.input) return
const current = status()
if (current.type !== "idle") return
const value = current.suggestion?.trim()
if (!value) return
return value
})
const placeholderText = createMemo(() => {
if (props.showPlaceholder === false) return undefined
if (suggestion()) return suggestion()
if (store.mode === "shell") {
if (!shell().length) return undefined
const example = shell()[store.placeholder % shell().length]
@@ -945,16 +933,6 @@ export function Prompt(props: PromptProps) {
e.preventDefault()
return
}
if (!store.prompt.input && e.name === "right" && !e.ctrl && !e.meta && !e.shift && !e.super) {
const value = suggestion()
if (value) {
input.setText(value)
setStore("prompt", "input", value)
input.gotoBufferEnd()
e.preventDefault()
return
}
}
// Check clipboard for images before terminal-handled paste runs.
// This helps terminals that forward Ctrl+V to the app; Windows
// Terminal 1.25+ usually handles Ctrl+V before this path.

View File

@@ -233,7 +233,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
}
case "session.status": {
setStore("session_status", event.properties.sessionID, reconcile(event.properties.status))
setStore("session_status", event.properties.sessionID, event.properties.status)
break
}

View File

@@ -79,7 +79,7 @@ export namespace Command {
const mcp = yield* MCP.Service
const skill = yield* Skill.Service
const init = Effect.fn("Command.state")(function* (ctx) {
const init = Effect.fnUntraced(function* (ctx) {
const cfg = yield* config.get()
const commands: Record<string, Info> = {}

View File

@@ -1475,7 +1475,7 @@ export namespace Config {
})
const state = yield* InstanceState.make<State>(
Effect.fn("Config.state")(function* (ctx) {
Effect.fnUntraced(function* (ctx) {
return yield* loadInstanceState(ctx)
}),
)

View File

@@ -22,7 +22,6 @@ export const TuiOptions = z.object({
.enum(["auto", "stacked"])
.optional()
.describe("Control diff rendering style: 'auto' adapts to terminal width, 'stacked' always shows single column"),
mouse: z.boolean().optional().describe("Enable or disable mouse capture (default: true)"),
})
export const TuiInfo = z

View File

@@ -0,0 +1,26 @@
import { Layer } from "effect"
import { FetchHttpClient } from "effect/unstable/http"
import { Otlp } from "effect/unstable/observability"
import { Flag } from "@/flag/flag"
import { CHANNEL, VERSION } from "@/installation/meta"
export namespace Observability {
const base = Flag.OPENCODE_OTLP_BASE_URL?.trim() || undefined
export const enabled = !!base
export const layer = !base
? Layer.empty
: Otlp.layerJson({
baseUrl: base,
loggerMergeWithExisting: false,
resource: {
serviceName: "opencode",
serviceVersion: VERSION,
attributes: {
"deployment.environment.name": CHANNEL === "local" ? "local" : CHANNEL,
"opencode.client": Flag.OPENCODE_CLIENT,
},
},
}).pipe(Layer.provide(FetchHttpClient.layer))
}

View File

@@ -3,6 +3,7 @@ import * as ServiceMap from "effect/ServiceMap"
import { Instance } from "@/project/instance"
import { Context } from "@/util/context"
import { InstanceRef } from "./instance-ref"
import { Observability } from "./observability"
export const memoMap = Layer.makeMemoMapUnsafe()
@@ -18,7 +19,7 @@ function attach<A, E, R>(effect: Effect.Effect<A, E, R>): Effect.Effect<A, E, R>
export function makeRuntime<I, S, E>(service: ServiceMap.Service<I, S>, layer: Layer.Layer<I, E>) {
let rt: ManagedRuntime.ManagedRuntime<I, E> | undefined
const getRuntime = () => (rt ??= ManagedRuntime.make(layer, { memoMap }))
const getRuntime = () => (rt ??= ManagedRuntime.make(Layer.merge(layer, Observability.layer), { memoMap }))
return {
runSync: <A, Err>(fn: (svc: S) => Effect.Effect<A, Err, I>) => getRuntime().runSync(attach(service.use(fn))),

View File

@@ -346,11 +346,11 @@ export namespace File {
const appFs = yield* AppFileSystem.Service
const state = yield* InstanceState.make<State>(
Effect.fn("File.state")(() =>
Effect.succeed({
Effect.fnUntraced(function* () {
return {
cache: { files: [], dirs: [] } as Entry,
}),
),
}
}),
)
const scan = Effect.fn("File.scan")(function* () {

View File

@@ -54,12 +54,12 @@ export namespace FileTime {
}
})
const state = yield* InstanceState.make<State>(
Effect.fn("FileTime.state")(() =>
Effect.succeed({
Effect.fnUntraced(function* () {
return {
reads: new Map<SessionID, Map<string, Stamp>>(),
locks: new Map<string, Semaphore.Semaphore>(),
}),
),
}
}),
)
const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) {

View File

@@ -73,7 +73,7 @@ export namespace FileWatcher {
const config = yield* Config.Service
const state = yield* InstanceState.make(
Effect.fn("FileWatcher.state")(
Effect.fnUntraced(
function* () {
if (yield* Flag.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER) return

View File

@@ -31,7 +31,6 @@ export namespace Flag {
export const OPENCODE_ENABLE_EXPERIMENTAL_MODELS = truthy("OPENCODE_ENABLE_EXPERIMENTAL_MODELS")
export const OPENCODE_DISABLE_AUTOCOMPACT = truthy("OPENCODE_DISABLE_AUTOCOMPACT")
export const OPENCODE_DISABLE_MODELS_FETCH = truthy("OPENCODE_DISABLE_MODELS_FETCH")
export const OPENCODE_DISABLE_MOUSE = truthy("OPENCODE_DISABLE_MOUSE")
export const OPENCODE_DISABLE_CLAUDE_CODE = truthy("OPENCODE_DISABLE_CLAUDE_CODE")
export const OPENCODE_DISABLE_CLAUDE_CODE_PROMPT =
OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_PROMPT")
@@ -45,6 +44,7 @@ export namespace Flag {
export const OPENCODE_SERVER_PASSWORD = process.env["OPENCODE_SERVER_PASSWORD"]
export const OPENCODE_SERVER_USERNAME = process.env["OPENCODE_SERVER_USERNAME"]
export const OPENCODE_ENABLE_QUESTION_TOOL = truthy("OPENCODE_ENABLE_QUESTION_TOOL")
export const OPENCODE_OTLP_BASE_URL = process.env["OPENCODE_OTLP_BASE_URL"]
// Experimental
export const OPENCODE_EXPERIMENTAL = truthy("OPENCODE_EXPERIMENTAL")
@@ -73,7 +73,6 @@ export namespace Flag {
export const OPENCODE_EXPERIMENTAL_PLAN_MODE = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE")
export const OPENCODE_EXPERIMENTAL_WORKSPACES = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES")
export const OPENCODE_EXPERIMENTAL_MARKDOWN = !falsy("OPENCODE_EXPERIMENTAL_MARKDOWN")
export const OPENCODE_EXPERIMENTAL_NEXT_PROMPT = truthy("OPENCODE_EXPERIMENTAL_NEXT_PROMPT")
export const OPENCODE_MODELS_URL = process.env["OPENCODE_MODELS_URL"]
export const OPENCODE_MODELS_PATH = process.env["OPENCODE_MODELS_PATH"]
export const OPENCODE_DISABLE_EMBEDDED_WEB_UI = truthy("OPENCODE_DISABLE_EMBEDDED_WEB_UI")

View File

@@ -40,7 +40,7 @@ export namespace Format {
const spawner = yield* ChildProcessSpawner.ChildProcessSpawner
const state = yield* InstanceState.make(
Effect.fn("Format.state")(function* (_ctx) {
Effect.fnUntraced(function* (_ctx) {
const commands: Record<string, string[] | false> = {}
const formatters: Record<string, Formatter.Info> = {}
@@ -84,57 +84,106 @@ export namespace Format {
return cmd !== false
}
async function getFormatter(ext: string) {
const matching = Object.values(formatters).filter((item) => item.extensions.includes(ext))
const checks = await Promise.all(
matching.map(async (item) => {
log.info("checking", { name: item.name, ext })
const cmd = await getCommand(item)
if (cmd) {
log.info("enabled", { name: item.name, ext })
}
return {
item,
cmd,
}
}),
)
return checks.filter((x) => x.cmd).map((x) => ({ item: x.item, cmd: x.cmd! }))
function check(item: Formatter.Info, ext: string) {
return Effect.gen(function* () {
yield* Effect.annotateCurrentSpan({
ext,
formatter: item.name,
})
log.info("checking", { name: item.name, ext })
const cmd = yield* Effect.promise(() => getCommand(item))
if (cmd) {
log.info("enabled", { name: item.name, ext })
}
yield* Effect.annotateCurrentSpan({ enabled: !!cmd })
return {
item,
cmd,
}
}).pipe(Effect.withSpan("Format.checkFormatter"))
}
function formatFile(filepath: string) {
function resolve(ext: string) {
return Effect.gen(function* () {
const matching = Object.values(formatters).filter((item) => item.extensions.includes(ext))
const checks = yield* Effect.all(matching.map((item) => check(item, ext)))
const enabled = checks.filter((item) => item.cmd).map((item) => ({ item: item.item, cmd: item.cmd! }))
yield* Effect.annotateCurrentSpan({
ext,
matched_formatters: matching.map((item) => item.name).join(",") || "none",
enabled_formatters: enabled.map((item) => item.item.name).join(",") || "none",
})
return {
matching,
enabled,
}
}).pipe(Effect.withSpan("Format.resolveFormatters"))
}
function spawn(item: Formatter.Info, command: string[], filepath: string) {
return Effect.gen(function* () {
const dir = yield* InstanceState.directory
yield* Effect.annotateCurrentSpan({
file: filepath,
formatter: item.name,
command: command.join(" "),
})
return yield* spawner.spawn(
ChildProcess.make(command[0]!, command.slice(1), {
cwd: dir,
env: item.environment,
extendEnv: true,
}),
)
}).pipe(Effect.withSpan("Format.spawnFormatter"))
}
function wait(
handle: ChildProcessSpawner.ChildProcessHandle,
item: Formatter.Info,
command: string[],
filepath: string,
) {
return Effect.gen(function* () {
yield* Effect.annotateCurrentSpan({
file: filepath,
formatter: item.name,
command: command.join(" "),
})
return yield* handle.exitCode
}).pipe(Effect.withSpan("Format.waitFormatter"))
}
function formatFile(filepath: string): Effect.Effect<void, never, never> {
return Effect.gen(function* () {
log.info("formatting", { file: filepath })
const ext = path.extname(filepath)
yield* Effect.annotateCurrentSpan({ file: filepath, ext })
const fmt = yield* resolve(ext)
yield* Effect.annotateCurrentSpan({
matched_formatters: fmt.matching.map((item) => item.name).join(",") || "none",
enabled_formatters: fmt.enabled.map((item) => item.item.name).join(",") || "none",
})
for (const { item, cmd } of yield* Effect.promise(() => getFormatter(ext))) {
for (const { item, cmd } of fmt.enabled) {
if (cmd === false) continue
log.info("running", { command: cmd })
const replaced = cmd.map((x) => x.replace("$FILE", filepath))
const dir = yield* InstanceState.directory
const code = yield* spawner
.spawn(
ChildProcess.make(replaced[0]!, replaced.slice(1), {
cwd: dir,
env: item.environment,
extendEnv: true,
const code = yield* spawn(item, replaced, filepath).pipe(
Effect.flatMap((handle) => wait(handle, item, replaced, filepath)),
Effect.scoped,
Effect.catch(() =>
Effect.sync(() => {
log.error("failed to format file", {
error: "spawn failed",
command: replaced,
...item.environment,
file: filepath,
})
return ChildProcessSpawner.ExitCode(1)
}),
)
.pipe(
Effect.flatMap((handle) => handle.exitCode),
Effect.scoped,
Effect.catch(() =>
Effect.sync(() => {
log.error("failed to format file", {
error: "spawn failed",
command: cmd,
...item.environment,
file: filepath,
})
return ChildProcessSpawner.ExitCode(1)
}),
),
)
),
)
if (code !== 0) {
log.error("failed", {
command: cmd,

View File

@@ -164,7 +164,7 @@ export namespace LSP {
const config = yield* Config.Service
const state = yield* InstanceState.make<State>(
Effect.fn("LSP.state")(function* () {
Effect.fnUntraced(function* () {
const cfg = yield* config.get()
const servers: Record<string, LSPServer.Info> = {}

View File

@@ -478,7 +478,7 @@ export namespace MCP {
}
const state = yield* InstanceState.make<State>(
Effect.fn("MCP.state")(function* () {
Effect.fnUntraced(function* () {
const cfg = yield* cfgSvc.get()
const config = cfg.mcp ?? {}
const s: State = {

View File

@@ -11,7 +11,6 @@ import { Arborist } from "@npmcli/arborist"
export namespace Npm {
const log = Log.create({ service: "npm" })
const illegal = process.platform === "win32" ? new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined
export const InstallFailedError = NamedError.create(
"NpmInstallFailedError",
@@ -20,13 +19,8 @@ export namespace Npm {
}),
)
export function sanitize(pkg: string) {
if (!illegal) return pkg
return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? "_" : char)).join("")
}
function directory(pkg: string) {
return path.join(Global.Path.cache, "packages", sanitize(pkg))
return path.join(Global.Path.cache, "packages", pkg)
}
function resolveEntryPoint(name: string, dir: string) {

View File

@@ -142,7 +142,7 @@ export namespace Permission {
Effect.gen(function* () {
const bus = yield* Bus.Service
const state = yield* InstanceState.make<State>(
Effect.fn("Permission.state")(function* (ctx) {
Effect.fnUntraced(function* (ctx) {
const row = Database.use((db) =>
db.select().from(PermissionTable).where(eq(PermissionTable.project_id, ctx.project.id)).get(),
)

View File

@@ -98,7 +98,7 @@ export namespace Plugin {
const config = yield* Config.Service
const state = yield* InstanceState.make<State>(
Effect.fn("Plugin.state")(function* (ctx) {
Effect.fnUntraced(function* (ctx) {
const hooks: Hooks[] = []
const { Server } = yield* Effect.promise(() => import("../server/server"))

View File

@@ -1,6 +1,5 @@
import path from "path"
import { fileURLToPath, pathToFileURL } from "url"
import npa from "npm-package-arg"
import semver from "semver"
import { Npm } from "@/npm"
import { Filesystem } from "@/util/filesystem"
@@ -13,24 +12,11 @@ export function isDeprecatedPlugin(spec: string) {
return DEPRECATED_PLUGIN_PACKAGES.some((pkg) => spec.includes(pkg))
}
function parse(spec: string) {
try {
return npa(spec)
} catch {}
}
export function parsePluginSpecifier(spec: string) {
const hit = parse(spec)
if (hit?.type === "alias" && !hit.name) {
const sub = (hit as npa.AliasResult).subSpec
if (sub?.name) {
const version = !sub.rawSpec || sub.rawSpec === "*" ? "latest" : sub.rawSpec
return { pkg: sub.name, version }
}
}
if (!hit?.name) return { pkg: spec, version: "" }
if (hit.raw === hit.name) return { pkg: hit.name, version: "latest" }
return { pkg: hit.name, version: hit.rawSpec }
const lastAt = spec.lastIndexOf("@")
const pkg = lastAt > 0 ? spec.substring(0, lastAt) : spec
const version = lastAt > 0 ? spec.substring(lastAt + 1) : "latest"
return { pkg, version }
}
export type PluginSource = "file" | "npm"
@@ -204,11 +190,9 @@ export async function checkPluginCompatibility(target: string, opencodeVersion:
}
}
export async function resolvePluginTarget(spec: string) {
export async function resolvePluginTarget(spec: string, parsed = parsePluginSpecifier(spec)) {
if (isPathPluginSpec(spec)) return resolvePathPluginTarget(spec)
const hit = parse(spec)
const pkg = hit?.name && hit.raw === hit.name ? `${hit.name}@latest` : spec
const result = await Npm.add(pkg)
const result = await Npm.add(parsed.pkg + "@" + parsed.version)
return result.directory
}

View File

@@ -147,39 +147,37 @@ export namespace Vcs {
const bus = yield* Bus.Service
const state = yield* InstanceState.make<State>(
Effect.fn("Vcs.state")((ctx) =>
Effect.gen(function* () {
if (ctx.project.vcs !== "git") {
return { current: undefined, root: undefined }
}
Effect.fnUntraced(function* (ctx) {
if (ctx.project.vcs !== "git") {
return { current: undefined, root: undefined }
}
const get = Effect.fnUntraced(function* () {
return yield* git.branch(ctx.directory)
})
const [current, root] = yield* Effect.all([git.branch(ctx.directory), git.defaultBranch(ctx.directory)], {
concurrency: 2,
})
const value = { current, root }
log.info("initialized", { branch: value.current, default_branch: value.root?.name })
const get = Effect.fnUntraced(function* () {
return yield* git.branch(ctx.directory)
})
const [current, root] = yield* Effect.all([git.branch(ctx.directory), git.defaultBranch(ctx.directory)], {
concurrency: 2,
})
const value = { current, root }
log.info("initialized", { branch: value.current, default_branch: value.root?.name })
yield* bus.subscribe(FileWatcher.Event.Updated).pipe(
Stream.filter((evt) => evt.properties.file.endsWith("HEAD")),
Stream.runForEach((_evt) =>
Effect.gen(function* () {
const next = yield* get()
if (next !== value.current) {
log.info("branch changed", { from: value.current, to: next })
value.current = next
yield* bus.publish(Event.BranchUpdated, { branch: next })
}
}),
),
Effect.forkScoped,
)
yield* bus.subscribe(FileWatcher.Event.Updated).pipe(
Stream.filter((evt) => evt.properties.file.endsWith("HEAD")),
Stream.runForEach((_evt) =>
Effect.gen(function* () {
const next = yield* get()
if (next !== value.current) {
log.info("branch changed", { from: value.current, to: next })
value.current = next
yield* bus.publish(Event.BranchUpdated, { branch: next })
}
}),
),
Effect.forkScoped,
)
return value
}),
),
return value
}),
)
return Service.of({

View File

@@ -117,7 +117,7 @@ export namespace ProviderAuth {
const auth = yield* Auth.Service
const plugin = yield* Plugin.Service
const state = yield* InstanceState.make<State>(
Effect.fn("ProviderAuth.state")(function* () {
Effect.fnUntraced(function* () {
const plugins = yield* plugin.list()
return {
hooks: Record.fromEntries(

View File

@@ -133,7 +133,7 @@ export namespace Pty {
}
const state = yield* InstanceState.make<State>(
Effect.fn("Pty.state")(function* (ctx) {
Effect.fnUntraced(function* (ctx) {
const state = {
dir: ctx.directory,
sessions: new Map<PtyID, Active>(),

View File

@@ -111,7 +111,7 @@ export namespace Question {
Effect.gen(function* () {
const bus = yield* Bus.Service
const state = yield* InstanceState.make<State>(
Effect.fn("Question.state")(function* () {
Effect.fnUntraced(function* () {
const state = {
pending: new Map<QuestionID, PendingEntry>(),
}

View File

@@ -138,7 +138,7 @@ export namespace SessionCompaction {
}
})
const processCompaction = Effect.fn("SessionCompaction.process")(function* (input: {
const process = Effect.fn("SessionCompaction.process")(function* (input: {
parentID: MessageID
messages: MessageV2.WithParts[]
sessionID: SessionID
@@ -374,7 +374,7 @@ When constructing the summary, try to stick to this template:
return Service.of({
isOverflow,
prune,
process: processCompaction,
process,
create,
})
}),

View File

@@ -75,12 +75,12 @@ export namespace Instruction {
const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient))
const state = yield* InstanceState.make(
Effect.fn("Instruction.state")(() =>
Effect.succeed({
Effect.fnUntraced(function* () {
return {
// Track which instruction files have already been attached for a given assistant message.
claims: new Map<MessageID, Set<string>>(),
}),
),
}
}),
)
const relative = Effect.fnUntraced(function* (instruction: string) {

View File

@@ -415,9 +415,20 @@ export namespace SessionProcessor {
const halt = Effect.fn("SessionProcessor.halt")(function* (e: unknown) {
log.error("process", { error: e, stack: e instanceof Error ? e.stack : undefined })
yield* Effect.logError("session processor failed", {
agent: ctx.assistantMessage.agent,
modelID: ctx.model.id,
providerID: ctx.model.providerID,
sessionID: ctx.sessionID,
})
const error = parse(e)
if (MessageV2.ContextOverflowError.isInstance(error)) {
ctx.needsCompaction = true
yield* Effect.logWarning("session processor requested compaction", {
modelID: ctx.model.id,
providerID: ctx.model.providerID,
sessionID: ctx.sessionID,
})
yield* bus.publish(Session.Event.Error, { sessionID: ctx.sessionID, error })
return
}
@@ -446,6 +457,18 @@ export namespace SessionProcessor {
log.info("process")
ctx.needsCompaction = false
ctx.shouldBreak = (yield* config.get()).experimental?.continue_loop_on_deny !== true
yield* Effect.annotateCurrentSpan({
agent: streamInput.agent.name,
modelID: streamInput.model.id,
providerID: streamInput.model.providerID,
sessionID: ctx.sessionID,
})
yield* Effect.logInfo("session processor started", {
agent: streamInput.agent.name,
modelID: streamInput.model.id,
providerID: streamInput.model.providerID,
sessionID: ctx.sessionID,
})
return yield* Effect.gen(function* () {
yield* Effect.gen(function* () {
@@ -459,6 +482,7 @@ export namespace SessionProcessor {
Stream.runDrain,
)
}).pipe(
Effect.withSpan("SessionProcessor.stream"),
Effect.onInterrupt(() => Effect.sync(() => void (aborted = true))),
Effect.catchCauseIf(
(cause) => !Cause.hasInterruptsOnly(cause),
@@ -483,6 +507,12 @@ export namespace SessionProcessor {
if (aborted && !ctx.assistantMessage.error) {
yield* abort()
}
yield* Effect.logInfo("session processor finished", {
aborted,
blocked: ctx.blocked,
compact: ctx.needsCompaction,
sessionID: ctx.sessionID,
})
if (ctx.needsCompaction) return "compact"
if (ctx.blocked || ctx.assistantMessage.error || aborted) return "stop"
return "continue"

View File

@@ -20,7 +20,6 @@ import { Plugin } from "../plugin"
import PROMPT_PLAN from "../session/prompt/plan.txt"
import BUILD_SWITCH from "../session/prompt/build-switch.txt"
import MAX_STEPS from "../session/prompt/max-steps.txt"
import PROMPT_SUGGEST_NEXT from "../session/prompt/suggest-next.txt"
import { ToolRegistry } from "../tool/registry"
import { Runner } from "@/effect/runner"
import { MCP } from "../mcp"
@@ -104,7 +103,7 @@ export namespace SessionPrompt {
const instruction = yield* Instruction.Service
const state = yield* InstanceState.make(
Effect.fn("SessionPrompt.state")(function* () {
Effect.fnUntraced(function* () {
const runners = new Map<string, Runner<MessageV2.WithParts>>()
yield* Effect.addFinalizer(
Effect.fnUntraced(function* () {
@@ -250,80 +249,6 @@ export namespace SessionPrompt {
)
})
const suggest = Effect.fn("SessionPrompt.suggest")(function* (input: {
session: Session.Info
sessionID: SessionID
message: MessageV2.WithParts
}) {
if (input.session.parentID) return
const message = input.message.info
if (message.role !== "assistant") return
if (message.error) return
if (!message.finish) return
if (["tool-calls", "unknown"].includes(message.finish)) return
if ((yield* status.get(input.sessionID)).type !== "idle") return
// Use the same model for prompt-cache hit on the conversation prefix
const model = yield* Effect.promise(async () =>
Provider.getModel(message.providerID, message.modelID).catch(() => undefined),
)
if (!model) return
const ag = yield* agents.get(message.agent ?? "code")
if (!ag) return
// Full message history so the cached KV from the main conversation is reused
const msgs = yield* MessageV2.filterCompactedEffect(input.sessionID)
const real = (item: MessageV2.WithParts) =>
item.info.role === "user" && !item.parts.every((part) => "synthetic" in part && part.synthetic)
const parent = msgs.find((item) => item.info.id === message.parentID)
const user = parent && real(parent) ? parent.info : msgs.findLast((item) => real(item))?.info
if (!user || user.role !== "user") return
// Rebuild system prompt identical to the main loop for cache hit
const skills = yield* Effect.promise(() => SystemPrompt.skills(ag))
const env = yield* Effect.promise(() => SystemPrompt.environment(model))
const instructions = yield* instruction.system().pipe(Effect.orDie)
const modelMsgs = yield* Effect.promise(() => MessageV2.toModelMessages(msgs, model))
const system = [...env, ...(skills ? [skills] : []), ...instructions]
const text = yield* Effect.promise(async (signal) => {
const result = await LLM.stream({
agent: ag,
user,
system,
small: false,
tools: {},
model,
abort: signal,
sessionID: input.sessionID,
retries: 1,
toolChoice: "none",
// Append suggestion instruction after the full conversation
messages: [...modelMsgs, { role: "user" as const, content: PROMPT_SUGGEST_NEXT }],
})
return result.text
})
const line = text
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
.split("\n")
.map((item) => item.trim())
.find((item) => item.length > 0)
?.replace(/^["'`]+|["'`]+$/g, "")
if (!line) return
const tag = line
.toUpperCase()
.replace(/[\s-]+/g, "_")
.replace(/[^A-Z_]/g, "")
if (tag === "NO_SUGGESTION") return
const suggestion = line.length > 110 ? line.slice(0, 107) + "..." : line
if ((yield* status.get(input.sessionID)).type !== "idle") return
yield* status.suggest(input.sessionID, suggestion)
})
const insertReminders = Effect.fn("SessionPrompt.insertReminders")(function* (input: {
messages: MessageV2.WithParts[]
agent: Agent.Info
@@ -1394,13 +1319,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
}
if (input.noReply === true) return message
const result = yield* loop({ sessionID: input.sessionID })
yield* suggest({
session,
sessionID: input.sessionID,
message: result,
}).pipe(Effect.ignore, Effect.forkIn(scope))
return result
return yield* loop({ sessionID: input.sessionID })
},
)
@@ -1421,12 +1340,15 @@ NOTE: At any point in time through this workflow you should feel free to ask the
let structured: unknown | undefined
let step = 0
const session = yield* sessions.get(sessionID)
yield* Effect.annotateCurrentSpan({ sessionID })
while (true) {
yield* status.set(sessionID, { type: "busy" })
log.info("loop", { step, sessionID })
let msgs = yield* MessageV2.filterCompactedEffect(sessionID)
let msgs = yield* MessageV2.filterCompactedEffect(sessionID).pipe(
Effect.withSpan("SessionPrompt.loadMessages"),
)
let lastUser: MessageV2.User | undefined
let lastAssistant: MessageV2.Assistant | undefined
@@ -1479,13 +1401,20 @@ NOTE: At any point in time through this workflow you should feel free to ask the
}
if (task?.type === "compaction") {
const result = yield* compaction.process({
messages: msgs,
parentID: lastUser.id,
sessionID,
yield* Effect.logWarning("session compaction task", {
auto: task.auto,
overflow: task.overflow,
sessionID,
})
const result = yield* compaction
.process({
messages: msgs,
parentID: lastUser.id,
sessionID,
auto: task.auto,
overflow: task.overflow,
})
.pipe(Effect.withSpan("SessionPrompt.compaction"))
if (result === "stop") break
continue
}
@@ -1495,6 +1424,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
lastFinished.summary !== true &&
(yield* compaction.isOverflow({ tokens: lastFinished.tokens, model }))
) {
yield* Effect.logWarning("session overflow detected", { modelID: model.id, sessionID, step })
yield* compaction.create({ sessionID, agent: lastUser.agent, model: lastUser.model, auto: true })
continue
}
@@ -1510,6 +1440,13 @@ NOTE: At any point in time through this workflow you should feel free to ask the
const maxSteps = agent.steps ?? Infinity
const isLastStep = step >= maxSteps
msgs = yield* insertReminders({ messages: msgs, agent, session })
yield* Effect.logInfo("session turn", {
agent: agent.name,
modelID: model.id,
providerID: model.providerID,
sessionID,
step,
})
const msg: MessageV2.Assistant = {
id: MessageID.ascending(),
@@ -1584,7 +1521,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
Effect.promise(() => SystemPrompt.environment(model)),
instruction.system().pipe(Effect.orDie),
Effect.promise(() => MessageV2.toModelMessages(msgs, model)),
])
]).pipe(Effect.withSpan("SessionPrompt.buildInput"))
const system = [...env, ...(skills ? [skills] : []), ...instructions]
const format = lastUser.format ?? { type: "text" as const }
if (format.type === "json_schema") system.push(STRUCTURED_OUTPUT_SYSTEM_PROMPT)

View File

@@ -1,21 +0,0 @@
You are generating a suggested next user message for the current conversation.
Goal:
- Suggest a useful next step that keeps momentum.
Rules:
- Output exactly one line, 110 characters max. Be concise.
- Write as the user speaking to the assistant (for example: "Can you...", "Help me...", "Let's...").
- Match the user's tone and language; keep it natural and human.
- Prefer a concrete action over a broad question.
- If the conversation is vague or small-talk, steer toward a practical starter request.
- If there is no meaningful or appropriate next step to suggest, output exactly: NO_SUGGESTION
- Avoid corporate or robotic phrasing.
- Avoid asking multiple discovery questions in one sentence.
- Do not include quotes, labels, markdown, or explanations.
Examples:
- Greeting context -> "Can you scan this repo and suggest the best first task to tackle?"
- Bug-fix context -> "Can you reproduce this bug and propose the smallest safe fix?"
- Feature context -> "Let's implement this incrementally; start with the MVP version first."
- Conversation is complete -> "NO_SUGGESTION"

View File

@@ -11,7 +11,6 @@ export namespace SessionStatus {
.union([
z.object({
type: z.literal("idle"),
suggestion: z.string().optional(),
}),
z.object({
type: z.literal("retry"),
@@ -49,7 +48,6 @@ export namespace SessionStatus {
readonly get: (sessionID: SessionID) => Effect.Effect<Info>
readonly list: () => Effect.Effect<Map<SessionID, Info>>
readonly set: (sessionID: SessionID, status: Info) => Effect.Effect<void>
readonly suggest: (sessionID: SessionID, suggestion: string) => Effect.Effect<void>
}
export class Service extends ServiceMap.Service<Service, Interface>()("@opencode/SessionStatus") {}
@@ -60,7 +58,9 @@ export namespace SessionStatus {
const bus = yield* Bus.Service
const state = yield* InstanceState.make(
Effect.fn("SessionStatus.state")(() => Effect.succeed(new Map<SessionID, Info>())),
Effect.fnUntraced(function* () {
return new Map<SessionID, Info>()
}),
)
const get = Effect.fn("SessionStatus.get")(function* (sessionID: SessionID) {
@@ -83,17 +83,7 @@ export namespace SessionStatus {
data.set(sessionID, status)
})
const suggest = Effect.fn("SessionStatus.suggest")(function* (sessionID: SessionID, suggestion: string) {
const data = yield* InstanceState.get(state)
const current = data.get(sessionID)
if (current && current.type !== "idle") return
const status: Info = { type: "idle", suggestion }
// only publish Status so the TUI sees the suggestion;
// skip Event.Idle to avoid spurious plugin notifications
yield* bus.publish(Event.Status, { sessionID, status })
})
return Service.of({ get, list, set, suggest })
return Service.of({ get, list, set })
}),
)
@@ -111,8 +101,4 @@ export namespace SessionStatus {
export async function set(sessionID: SessionID, status: Info) {
return runPromise((svc) => svc.set(sessionID, status))
}
export async function suggest(sessionID: SessionID, suggestion: string) {
return runPromise((svc) => svc.suggest(sessionID, suggestion))
}
}

View File

@@ -99,8 +99,8 @@ export namespace SessionSummary {
if (part.type === "step-finish" && part.snapshot) to = part.snapshot
}
}
if (from && to) return yield* snapshot.diffFull(from, to)
return []
if (!from || !to || from === to) return []
return yield* snapshot.diffFull(from, to)
})
const summarize = Effect.fn("SessionSummary.summarize")(function* (input: {

View File

@@ -144,7 +144,7 @@ export namespace ShareNext {
}
const state: InstanceState<State> = yield* InstanceState.make<State>(
Effect.fn("ShareNext.state")(function* (_ctx) {
Effect.fnUntraced(function* (_ctx) {
const cache: State = { queue: new Map(), scope: yield* Scope.make() }
yield* Effect.addFinalizer(() =>

View File

@@ -197,7 +197,7 @@ export namespace Skill {
const bus = yield* Bus.Service
const fsys = yield* AppFileSystem.Service
const state = yield* InstanceState.make(
Effect.fn("Skill.state")(function* (ctx) {
Effect.fnUntraced(function* (ctx) {
const s: State = { skills: {}, dirs: new Set() }
yield* loadSkills(s, config, discovery, bus, fsys, ctx.directory, ctx.worktree)
return s

View File

@@ -1,4 +1,3 @@
import { NodeFileSystem, NodePath } from "@effect/platform-node"
import { Cause, Duration, Effect, Layer, Schedule, Semaphore, ServiceMap, Stream } from "effect"
import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"
import path from "path"
@@ -82,7 +81,7 @@ export namespace Snapshot {
}
const state = yield* InstanceState.make<State>(
Effect.fn("Snapshot.state")(function* (ctx) {
Effect.fnUntraced(function* (ctx) {
const state = {
directory: ctx.directory,
worktree: ctx.worktree,
@@ -150,7 +149,7 @@ export namespace Snapshot {
yield* fs.writeFileString(target, text ? `${text}\n` : "").pipe(Effect.orDie)
})
const add = Effect.fnUntraced(function* () {
const add = Effect.fn("Snapshot.add")(function* () {
yield* sync()
const [diff, other] = yield* Effect.all(
[
@@ -203,7 +202,7 @@ export namespace Snapshot {
}
})
const cleanup = Effect.fnUntraced(function* () {
const cleanup = Effect.fn("Snapshot.cleanup")(function* () {
return yield* locked(
Effect.gen(function* () {
if (!(yield* enabled())) return
@@ -221,7 +220,7 @@ export namespace Snapshot {
)
})
const track = Effect.fnUntraced(function* () {
const track = Effect.fn("Snapshot.track")(function* () {
return yield* locked(
Effect.gen(function* () {
if (!(yield* enabled())) return
@@ -238,7 +237,9 @@ export namespace Snapshot {
log.info("initialized")
}
yield* add()
const result = yield* git(args(["write-tree"]), { cwd: state.directory })
const result = yield* git(args(["write-tree"]), { cwd: state.directory }).pipe(
Effect.withSpan("Snapshot.writeTree"),
)
const hash = result.text.trim()
log.info("tracking", { hash, cwd: state.directory, git: state.gitdir })
return hash
@@ -246,7 +247,7 @@ export namespace Snapshot {
)
})
const patch = Effect.fnUntraced(function* (hash: string) {
const patch = Effect.fn("Snapshot.patch")(function* (hash: string) {
return yield* locked(
Effect.gen(function* () {
yield* add()
@@ -273,7 +274,7 @@ export namespace Snapshot {
)
})
const restore = Effect.fnUntraced(function* (snapshot: string) {
const restore = Effect.fn("Snapshot.restore")(function* (snapshot: string) {
return yield* locked(
Effect.gen(function* () {
log.info("restore", { commit: snapshot })
@@ -299,7 +300,7 @@ export namespace Snapshot {
)
})
const revert = Effect.fnUntraced(function* (patches: Snapshot.Patch[]) {
const revert = Effect.fn("Snapshot.revert")(function* (patches: Snapshot.Patch[]) {
return yield* locked(
Effect.gen(function* () {
const ops: { hash: string; file: string; rel: string }[] = []
@@ -414,7 +415,7 @@ export namespace Snapshot {
)
})
const diff = Effect.fnUntraced(function* (hash: string) {
const diff = Effect.fn("Snapshot.diff")(function* (hash: string) {
return yield* locked(
Effect.gen(function* () {
yield* add()
@@ -434,7 +435,7 @@ export namespace Snapshot {
)
})
const diffFull = Effect.fnUntraced(function* (from: string, to: string) {
const diffFull = Effect.fn("Snapshot.diffFull")(function* (from: string, to: string) {
return yield* locked(
Effect.gen(function* () {
type Row = {
@@ -451,7 +452,7 @@ export namespace Snapshot {
ref: string
}
const show = Effect.fnUntraced(function* (row: Row) {
const show = Effect.fn("Snapshot.show")(function* (row: Row) {
if (row.binary) return ["", ""]
if (row.status === "added") {
return [
@@ -478,7 +479,7 @@ export namespace Snapshot {
)
})
const load = Effect.fnUntraced(
const load = Effect.fn("Snapshot.load")(
function* (rows: Row[]) {
const refs = rows.flatMap((row) => {
if (row.binary) return []
@@ -583,7 +584,7 @@ export namespace Snapshot {
const statuses = yield* git(
[...quote, ...args(["diff", "--no-ext-diff", "--name-status", "--no-renames", from, to, "--", "."])],
{ cwd: state.directory },
)
).pipe(Effect.withSpan("Snapshot.diffStatus"))
for (const line of statuses.text.trim().split("\n")) {
if (!line) continue
@@ -597,7 +598,7 @@ export namespace Snapshot {
{
cwd: state.directory,
},
)
).pipe(Effect.withSpan("Snapshot.diffNumstat"))
const rows = numstat.text
.trim()
@@ -660,30 +661,14 @@ export namespace Snapshot {
)
return Service.of({
init: Effect.fn("Snapshot.init")(function* () {
yield* InstanceState.get(state)
}),
cleanup: Effect.fn("Snapshot.cleanup")(function* () {
return yield* InstanceState.useEffect(state, (s) => s.cleanup())
}),
track: Effect.fn("Snapshot.track")(function* () {
return yield* InstanceState.useEffect(state, (s) => s.track())
}),
patch: Effect.fn("Snapshot.patch")(function* (hash: string) {
return yield* InstanceState.useEffect(state, (s) => s.patch(hash))
}),
restore: Effect.fn("Snapshot.restore")(function* (snapshot: string) {
return yield* InstanceState.useEffect(state, (s) => s.restore(snapshot))
}),
revert: Effect.fn("Snapshot.revert")(function* (patches: Snapshot.Patch[]) {
return yield* InstanceState.useEffect(state, (s) => s.revert(patches))
}),
diff: Effect.fn("Snapshot.diff")(function* (hash: string) {
return yield* InstanceState.useEffect(state, (s) => s.diff(hash))
}),
diffFull: Effect.fn("Snapshot.diffFull")(function* (from: string, to: string) {
return yield* InstanceState.useEffect(state, (s) => s.diffFull(from, to))
}),
init: () => InstanceState.get(state).pipe(Effect.asVoid),
cleanup: () => InstanceState.useEffect(state, (s) => s.cleanup()),
track: () => InstanceState.useEffect(state, (s) => s.track()),
patch: (hash: string) => InstanceState.useEffect(state, (s) => s.patch(hash)),
restore: (snapshot: string) => InstanceState.useEffect(state, (s) => s.restore(snapshot)),
revert: (patches: Snapshot.Patch[]) => InstanceState.useEffect(state, (s) => s.revert(patches)),
diff: (hash: string) => InstanceState.useEffect(state, (s) => s.diff(hash)),
diffFull: (from: string, to: string) => InstanceState.useEffect(state, (s) => s.diffFull(from, to)),
})
}),
)

View File

@@ -185,7 +185,7 @@ export const ReadTool = Tool.defineEffect(
)
}
let output = [`<path>${filepath}</path>`, `<type>file</type>`, "<content>" + "\n"].join("\n")
let output = [`<path>${filepath}</path>`, `<type>file</type>`, "<content>"].join("\n")
output += file.raw.map((line, i) => `${i + file.offset}: ${line}`).join("\n")
const last = file.offset + file.raw.length - 1

View File

@@ -82,7 +82,7 @@ export namespace ToolRegistry {
Effect.isEffect(tool) ? tool : Effect.succeed(tool)
const state = yield* InstanceState.make<State>(
Effect.fn("ToolRegistry.state")(function* (ctx) {
Effect.fnUntraced(function* (ctx) {
const custom: Tool.Info[] = []
function fromPlugin(id: string, def: ToolDefinition): Tool.Info {

View File

@@ -1,18 +0,0 @@
import { describe, expect, test } from "bun:test"
import { Npm } from "../src/npm"

const win = process.platform === "win32"

describe("Npm.sanitize", () => {
  // Specs without characters that are unsafe on the host platform pass through untouched.
  test("keeps normal scoped package specs unchanged", () => {
    const unchanged = ["@opencode/acme", "@opencode/acme@1.0.0", "prettier"]
    for (const spec of unchanged) {
      expect(Npm.sanitize(spec)).toBe(spec)
    }
  })

  // On Windows the "://" in a git URL is rewritten; elsewhere the spec is returned as-is.
  test("handles git https specs", () => {
    const spec = "acme@git+https://github.com/opencode/acme.git"
    const expected = win ? "acme@git+https_//github.com/opencode/acme.git" : spec
    expect(Npm.sanitize(spec)).toBe(expected)
  })
})

View File

@@ -1,88 +0,0 @@
import { describe, expect, test } from "bun:test"
import { parsePluginSpecifier } from "../../src/plugin/shared"

describe("parsePluginSpecifier", () => {
  // Table-driven cases: [test title, input specifier, expected pkg, expected version].
  const cases: ReadonlyArray<readonly [string, string, string, string]> = [
    ["parses standard npm package without version", "acme", "acme", "latest"],
    ["parses standard npm package with version", "acme@1.0.0", "acme", "1.0.0"],
    ["parses scoped npm package without version", "@opencode/acme", "@opencode/acme", "latest"],
    ["parses scoped npm package with version", "@opencode/acme@1.0.0", "@opencode/acme", "1.0.0"],
    [
      "parses package with git+https url",
      "acme@git+https://github.com/opencode/acme.git",
      "acme",
      "git+https://github.com/opencode/acme.git",
    ],
    [
      "parses scoped package with git+https url",
      "@opencode/acme@git+https://github.com/opencode/acme.git",
      "@opencode/acme",
      "git+https://github.com/opencode/acme.git",
    ],
    [
      "parses package with git+ssh url containing another @",
      "acme@git+ssh://git@github.com/opencode/acme.git",
      "acme",
      "git+ssh://git@github.com/opencode/acme.git",
    ],
    [
      "parses scoped package with git+ssh url containing another @",
      "@opencode/acme@git+ssh://git@github.com/opencode/acme.git",
      "@opencode/acme",
      "git+ssh://git@github.com/opencode/acme.git",
    ],
    [
      "parses unaliased git+ssh url",
      "git+ssh://git@github.com/opencode/acme.git",
      "git+ssh://git@github.com/opencode/acme.git",
      "",
    ],
    [
      "parses npm alias using the alias name",
      "acme@npm:@opencode/acme@1.0.0",
      "acme",
      "npm:@opencode/acme@1.0.0",
    ],
    [
      "parses bare npm protocol specifier using the target package",
      "npm:@opencode/acme@1.0.0",
      "@opencode/acme",
      "1.0.0",
    ],
    ["parses unversioned npm protocol specifier", "npm:@opencode/acme", "@opencode/acme", "latest"],
  ]

  for (const [title, input, pkg, version] of cases) {
    test(title, () => {
      expect(parsePluginSpecifier(input)).toEqual({ pkg, version })
    })
  }
})

View File

@@ -33,6 +33,7 @@ process.env["XDG_DATA_HOME"] = path.join(dir, "share")
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache")
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config")
process.env["XDG_STATE_HOME"] = path.join(dir, "state")
delete process.env["OPENCODE_OTLP_BASE_URL"]
process.env["OPENCODE_MODELS_PATH"] = path.join(import.meta.dir, "tool", "fixtures", "models-api.json")
// Set test home directory to isolate tests from user's actual home directory

View File

@@ -126,7 +126,6 @@ export type EventPermissionReplied = {
export type SessionStatus =
| {
type: "idle"
suggestion?: string
}
| {
type: "retry"

View File

@@ -573,7 +573,6 @@ OpenCode can be configured using environment variables.
| `OPENCODE_DISABLE_CLAUDE_CODE_PROMPT` | boolean | Disable reading `~/.claude/CLAUDE.md` |
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Disable loading `.claude/skills` |
| `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Disable fetching models from remote sources |
| `OPENCODE_DISABLE_MOUSE` | boolean | Disable mouse capture in the TUI |
| `OPENCODE_FAKE_VCS` | string | Fake VCS provider for testing purposes |
| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Disable file time checking for optimization |
| `OPENCODE_CLIENT` | string | Client identifier (defaults to `cli`) |

View File

@@ -272,8 +272,7 @@ Use a dedicated `tui.json` (or `tui.jsonc`) file for TUI-specific settings.
"scroll_acceleration": {
"enabled": true
},
"diff_style": "auto",
"mouse": true
"diff_style": "auto"
}
```
@@ -281,6 +280,8 @@ Use `OPENCODE_TUI_CONFIG` to point to a custom TUI config file.
Legacy `theme`, `keybinds`, and `tui` keys in `opencode.json` are deprecated and automatically migrated when possible.
[Learn more about TUI configuration here](/docs/tui#configure).
---
### Server

View File

@@ -368,8 +368,7 @@ You can customize TUI behavior through `tui.json` (or `tui.jsonc`).
"scroll_acceleration": {
"enabled": true
},
"diff_style": "auto",
"mouse": true
"diff_style": "auto"
}
```
@@ -382,7 +381,6 @@ This is separate from `opencode.json`, which configures server/runtime behavior.
- `scroll_acceleration.enabled` - Enable macOS-style scroll acceleration for smooth, natural scrolling. When enabled, scroll speed increases with rapid scrolling gestures and stays precise for slower movements. **This setting takes precedence over `scroll_speed` and overrides it when enabled.**
- `scroll_speed` - Controls how fast the TUI scrolls when using scroll commands (minimum: `0.001`, supports decimal values). Defaults to `3`. **Note: This is ignored if `scroll_acceleration.enabled` is set to `true`.**
- `diff_style` - Controls diff rendering. `"auto"` adapts to terminal width, `"stacked"` always shows a single-column layout.
- `mouse` - Enable or disable mouse capture in the TUI (default: `true`). When disabled, the terminal's native mouse selection/scrolling behavior is preserved.
Use `OPENCODE_TUI_CONFIG` to load a custom TUI config path.

View File

@@ -94,6 +94,8 @@ You can also access our models through the following API endpoints.
| GLM 5 | glm-5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Kimi K2.5 | kimi-k2.5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Big Pickle | big-pickle | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| MiMo V2 Pro Free | mimo-v2-pro-free | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| MiMo V2 Omni Free | mimo-v2-omni-free | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Qwen3.6 Plus Free | qwen3.6-plus-free | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Nemotron 3 Super Free | nemotron-3-super-free | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
@@ -120,6 +122,8 @@ We support a pay-as-you-go model. Below are the prices **per 1M tokens**.
| Model | Input | Output | Cached Read | Cached Write |
| --------------------------------- | ------ | ------- | ----------- | ------------ |
| Big Pickle | Free | Free | Free | - |
| MiMo V2 Pro Free | Free | Free | Free | - |
| MiMo V2 Omni Free | Free | Free | Free | - |
| Qwen3.6 Plus Free | Free | Free | Free | - |
| Nemotron 3 Super Free | Free | Free | Free | - |
| MiniMax M2.5 Free | Free | Free | Free | - |
@@ -165,6 +169,8 @@ Credit card fees are passed along at cost (4.4% + $0.30 per transaction); we don
The free models:
- MiniMax M2.5 Free is available on OpenCode for a limited time. The team is using this time to collect feedback and improve the model.
- MiMo V2 Pro Free is available on OpenCode for a limited time. The team is using this time to collect feedback and improve the model.
- MiMo V2 Omni Free is available on OpenCode for a limited time. The team is using this time to collect feedback and improve the model.
- Qwen3.6 Plus Free is available on OpenCode for a limited time. The team is using this time to collect feedback and improve the model.
- Nemotron 3 Super Free is available on OpenCode for a limited time. The team is using this time to collect feedback and improve the model.
- Big Pickle is a stealth model that's free on OpenCode for a limited time. The team is using this time to collect feedback and improve the model.
@@ -212,6 +218,8 @@ All our models are hosted in the US. Our providers follow a zero-retention polic
- Big Pickle: During its free period, collected data may be used to improve the model.
- MiniMax M2.5 Free: During its free period, collected data may be used to improve the model.
- MiMo V2 Pro Free: During its free period, collected data may be used to improve the model.
- MiMo V2 Omni Free: During its free period, collected data may be used to improve the model.
- Qwen3.6 Plus Free: During its free period, collected data may be used to improve the model.
- Nemotron 3 Super Free: During its free period, collected data may be used to improve the model.
- OpenAI APIs: Requests are retained for 30 days in accordance with [OpenAI's Data Policies](https://platform.openai.com/docs/guides/your-data).