Compare commits


1 Commit

Author SHA1 Message Date
Ryan Vogel
2d7e9575d2 fix(attach): default working directory to invoker's cwd
When running `opencode attach` without `--dir`, the TUI session
previously used the server's process.cwd() as its working directory.
This changes the default to send the attaching client's cwd so the
session operates in the directory where the attach command was invoked.
2026-02-16 08:30:49 -05:00
96 changed files with 695 additions and 1489 deletions
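To make the new default concrete, here is a minimal TypeScript sketch of the resolution logic described in the commit message. It is illustrative only, not the actual opencode source: `AttachFlags`, `resolveAttachDirectory`, and the commented `sdk.session.attach` call site are hypothetical names standing in for whatever the CLI really uses.

// Sketch: resolve the working directory for `opencode attach`.
// If the user passed --dir, honor it; otherwise fall back to the
// invoking client's process.cwd() instead of the server's process.cwd().
// `AttachFlags` and the commented SDK call are illustrative, not the real API.
import path from "node:path"

interface AttachFlags {
  dir?: string
}

export function resolveAttachDirectory(flags: AttachFlags): string {
  // Before this change, omitting --dir left the directory up to the server,
  // which used its own process.cwd(). Sending the client's cwd means the
  // session operates where the attach command was invoked.
  return path.resolve(flags.dir ?? process.cwd())
}

// Hypothetical call site: the resolved directory travels with the attach request.
// await sdk.session.attach({ sessionID, directory: resolveAttachDirectory(flags) })

The key point is that the default is computed on the attaching client and sent along with the request, rather than being filled in server-side.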

View File

@@ -359,7 +359,6 @@ opencode serve --hostname 0.0.0.0 --port 4096
opencode serve [--port <number>] [--hostname <string>] [--cors <origin>]
opencode session [command]
opencode session list
opencode session delete <sessionID>
opencode stats
opencode uninstall
opencode upgrade
@@ -599,7 +598,6 @@ OPENCODE_EXPERIMENTAL_MARKDOWN
OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX
OPENCODE_EXPERIMENTAL_OXFMT
OPENCODE_EXPERIMENTAL_PLAN_MODE
OPENCODE_ENABLE_QUESTION_TOOL
OPENCODE_FAKE_VCS
OPENCODE_GIT_BASH_PATH
OPENCODE_MODEL

View File

@@ -1,5 +1,8 @@
{
"$schema": "https://opencode.ai/config.json",
// "enterprise": {
// "url": "https://enterprise.dev.opencode.ai",
// },
"provider": {
"opencode": {
"options": {},

View File

@@ -23,7 +23,7 @@
},
"packages/app": {
"name": "@opencode-ai/app",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -73,7 +73,7 @@
},
"packages/console/app": {
"name": "@opencode-ai/console-app",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@cloudflare/vite-plugin": "1.15.2",
"@ibm/plex": "6.4.1",
@@ -107,7 +107,7 @@
},
"packages/console/core": {
"name": "@opencode-ai/console-core",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@aws-sdk/client-sts": "3.782.0",
"@jsx-email/render": "1.1.1",
@@ -134,7 +134,7 @@
},
"packages/console/function": {
"name": "@opencode-ai/console-function",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@ai-sdk/anthropic": "2.0.0",
"@ai-sdk/openai": "2.0.2",
@@ -158,7 +158,7 @@
},
"packages/console/mail": {
"name": "@opencode-ai/console-mail",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",
@@ -182,7 +182,7 @@
},
"packages/desktop": {
"name": "@opencode-ai/desktop",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@opencode-ai/app": "workspace:*",
"@opencode-ai/ui": "workspace:*",
@@ -215,7 +215,7 @@
},
"packages/enterprise": {
"name": "@opencode-ai/enterprise",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@opencode-ai/ui": "workspace:*",
"@opencode-ai/util": "workspace:*",
@@ -244,7 +244,7 @@
},
"packages/function": {
"name": "@opencode-ai/function",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "catalog:",
@@ -260,7 +260,7 @@
},
"packages/opencode": {
"name": "opencode",
"version": "1.2.6",
"version": "1.2.4",
"bin": {
"opencode": "./bin/opencode",
},
@@ -288,8 +288,8 @@
"@ai-sdk/vercel": "1.0.33",
"@ai-sdk/xai": "2.0.51",
"@clack/prompts": "1.0.0-alpha.1",
"@gitlab/gitlab-ai-provider": "3.5.1",
"@gitlab/opencode-gitlab-auth": "1.3.3",
"@gitlab/gitlab-ai-provider": "3.5.0",
"@gitlab/opencode-gitlab-auth": "1.3.2",
"@hono/standard-validator": "0.1.5",
"@hono/zod-validator": "catalog:",
"@modelcontextprotocol/sdk": "1.25.2",
@@ -369,7 +369,7 @@
},
"packages/plugin": {
"name": "@opencode-ai/plugin",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"zod": "catalog:",
@@ -389,7 +389,7 @@
},
"packages/sdk/js": {
"name": "@opencode-ai/sdk",
"version": "1.2.6",
"version": "1.2.4",
"devDependencies": {
"@hey-api/openapi-ts": "0.90.10",
"@tsconfig/node22": "catalog:",
@@ -400,7 +400,7 @@
},
"packages/slack": {
"name": "@opencode-ai/slack",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"@slack/bolt": "^3.17.1",
@@ -413,7 +413,7 @@
},
"packages/ui": {
"name": "@opencode-ai/ui",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -455,7 +455,7 @@
},
"packages/util": {
"name": "@opencode-ai/util",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"zod": "catalog:",
},
@@ -466,7 +466,7 @@
},
"packages/web": {
"name": "@opencode-ai/web",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@astrojs/cloudflare": "12.6.3",
"@astrojs/markdown-remark": "6.3.1",
@@ -989,9 +989,9 @@
"@fontsource/inter": ["@fontsource/inter@5.2.8", "", {}, "sha512-P6r5WnJoKiNVV+zvW2xM13gNdFhAEpQ9dQJHt3naLvfg+LkF2ldgSLiF4T41lf1SQCM9QmkqPTn4TH568IRagg=="],
"@gitlab/gitlab-ai-provider": ["@gitlab/gitlab-ai-provider@3.5.1", "", { "dependencies": { "@anthropic-ai/sdk": "^0.71.0", "@anycable/core": "^0.9.2", "graphql-request": "^6.1.0", "isomorphic-ws": "^5.0.0", "openai": "^6.16.0", "socket.io-client": "^4.8.1", "vscode-jsonrpc": "^8.2.1", "zod": "^3.25.76" }, "peerDependencies": { "@ai-sdk/provider": ">=2.0.0", "@ai-sdk/provider-utils": ">=3.0.0" } }, "sha512-I8+EGdUeKmGJSjAdFobHtqpxM9Fm00w0j7NJbtln/D/XQ1SKEGoZIuqJko4v0pV2mkhGUIs7qezljH/2kbXovA=="],
"@gitlab/gitlab-ai-provider": ["@gitlab/gitlab-ai-provider@3.5.0", "", { "dependencies": { "@anthropic-ai/sdk": "^0.71.0", "@anycable/core": "^0.9.2", "graphql-request": "^6.1.0", "isomorphic-ws": "^5.0.0", "openai": "^6.16.0", "socket.io-client": "^4.8.1", "vscode-jsonrpc": "^8.2.1", "zod": "^3.25.76" }, "peerDependencies": { "@ai-sdk/provider": ">=2.0.0", "@ai-sdk/provider-utils": ">=3.0.0" } }, "sha512-OoAwCz4fOci3h/2l+PRHMclclh3IaFq8w1es2wvBJ8ca7vtglKsBYT7dvmYpsXlu7pg9mopbjcexvmVCQEUTAQ=="],
"@gitlab/opencode-gitlab-auth": ["@gitlab/opencode-gitlab-auth@1.3.3", "", { "dependencies": { "@fastify/rate-limit": "^10.2.0", "@opencode-ai/plugin": "*", "fastify": "^5.2.0", "open": "^10.0.0" } }, "sha512-FT+KsCmAJjtqWr1YAq0MywGgL9kaLQ4apmsoowAXrPqHtoYf2i/nY10/A+L06kNj22EATeEDRpbB1NWXMto/SA=="],
"@gitlab/opencode-gitlab-auth": ["@gitlab/opencode-gitlab-auth@1.3.2", "", { "dependencies": { "@fastify/rate-limit": "^10.2.0", "@opencode-ai/plugin": "*", "fastify": "^5.2.0", "open": "^10.0.0" } }, "sha512-pvGrC+aDVLY8bRCC/fZaG/Qihvt2r4by5xbTo5JTSz9O7yIcR6xG2d9Wkuu4bcXFz674z2C+i5bUk+J/RSdBpg=="],
"@graphql-typed-document-node/core": ["@graphql-typed-document-node/core@3.2.0", "", { "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ=="],

View File

@@ -1,8 +1,8 @@
{
"nodeModules": {
"x86_64-linux": "sha256-C3WIEER2XgzO85wk2sp3BzQ6dknW026zslD8nKZjo2U=",
"aarch64-linux": "sha256-+tTJHZMZ/+8fAjI/1fUTuca8J2MZfB+5vhBoZ7jgqcE=",
"aarch64-darwin": "sha256-vS82puFGBBToxyIBa8Zi0KLKdJYr64T6HZL2rL32mH8=",
"x86_64-darwin": "sha256-Tr8JMTCxV6WVt3dXV7iq3PNCm2Cn+RXAbU9+o7pKKV0="
"x86_64-linux": "sha256-5pgd2xuvIIkTbIOGIdK5MIXo6O9qRpvk1RKQZ1e1R+8=",
"aarch64-linux": "sha256-FZiHwihM4b82ipQ9XfW08X+sd5CvZhx/+pU/8X1zsns=",
"aarch64-darwin": "sha256-iZv0w1NthV53pY5uvuf3JlI14GeKmCu7WHwGSRdEQeM=",
"x86_64-darwin": "sha256-c3Zm3P1goFPgg3vNAZPMFOhHX/gyTmsCN/PKbGO/v0E="
}
}

View File

@@ -87,8 +87,6 @@
"url": "https://github.com/anomalyco/opencode"
},
"license": "MIT",
"randomField": "hello-world-12345",
"anotherRandomField": "xyz-abc-789",
"prettier": {
"semi": false,
"printWidth": 120

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/app",
"version": "1.2.6",
"version": "1.2.4",
"description": "",
"type": "module",
"exports": {

View File

@@ -23,6 +23,7 @@ import { useSync } from "@/context/sync"
import { useTerminal, type LocalPTY } from "@/context/terminal"
import { useLayout } from "@/context/layout"
import { checksum, base64Encode } from "@opencode-ai/util/encode"
import { findLast } from "@opencode-ai/util/array"
import { useDialog } from "@opencode-ai/ui/context/dialog"
import { DialogSelectFile } from "@/components/dialog-select-file"
import FileTree from "@/components/file-tree"
@@ -34,6 +35,7 @@ import { useSDK } from "@/context/sdk"
import { usePrompt } from "@/context/prompt"
import { useComments } from "@/context/comments"
import { ConstrainDragYAxis, getDraggableId } from "@/utils/solid-dnd"
import { usePermission } from "@/context/permission"
import { showToast } from "@opencode-ai/ui/toast"
import { SessionHeader, SessionContextTab, SortableTab, FileVisual, NewSessionView } from "@/components/session"
import { navMark, navParams } from "@/utils/perf"
@@ -99,6 +101,7 @@ export default function Page() {
const sdk = useSDK()
const prompt = usePrompt()
const comments = useComments()
const permission = usePermission()
const permRequest = createMemo(() => {
const sessionID = params.id
@@ -269,7 +272,6 @@ export default function Page() {
if (!path) return
file.load(path)
openReviewPanel()
tabs().setActive(next)
}
createEffect(() => {
@@ -767,6 +769,11 @@ export default function Page() {
return lines.slice(0, 2).join("\n")
}
const addSelectionToContext = (path: string, selection: FileSelection) => {
const preview = selectionPreview(path, selection)
prompt.context.add({ type: "file", path, selection, preview })
}
const addCommentToContext = (input: {
file: string
selection: SelectedLineRange
@@ -905,11 +912,31 @@ export default function Page() {
const focusInput = () => inputRef?.focus()
useSessionCommands({
command,
dialog,
file,
language,
local,
permission,
prompt,
sdk,
sync,
terminal,
layout,
params,
navigate,
tabs,
view,
info,
status,
userMessages,
visibleUserMessages,
activeMessage,
showAllFiles,
navigateMessageByOffset,
setExpanded: (id, fn) => setStore("expanded", id, fn),
setActiveMessage,
addSelectionToContext,
focusInput,
})

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from "bun:test"
import { createOpenReviewFile, focusTerminalById, getTabReorderIndex } from "./helpers"
import { combineCommandSections, createOpenReviewFile, focusTerminalById, getTabReorderIndex } from "./helpers"
describe("createOpenReviewFile", () => {
test("opens and loads selected review file", () => {
@@ -46,6 +46,20 @@ describe("focusTerminalById", () => {
})
})
describe("combineCommandSections", () => {
test("keeps section order stable", () => {
const result = combineCommandSections([
[{ id: "a", title: "A" }],
[
{ id: "b", title: "B" },
{ id: "c", title: "C" },
],
])
expect(result.map((item) => item.id)).toEqual(["a", "b", "c"])
})
})
describe("getTabReorderIndex", () => {
test("returns target index for valid drag reorder", () => {
expect(getTabReorderIndex(["a", "b", "c"], "a", "c")).toBe(2)

View File

@@ -1,3 +1,4 @@
import type { CommandOption } from "@/context/command"
import { batch } from "solid-js"
export const focusTerminalById = (id: string) => {
@@ -35,6 +36,10 @@ export const createOpenReviewFile = (input: {
}
}
export const combineCommandSections = (sections: readonly (readonly CommandOption[])[]) => {
return sections.flatMap((section) => section)
}
export const getTabReorderIndex = (tabs: readonly string[], from: string, to: string) => {
const fromIndex = tabs.indexOf(from)
const toIndex = tabs.indexOf(to)

View File

@@ -19,14 +19,35 @@ import { showToast } from "@opencode-ai/ui/toast"
import { findLast } from "@opencode-ai/util/array"
import { extractPromptFromParts } from "@/utils/prompt"
import { UserMessage } from "@opencode-ai/sdk/v2"
import { combineCommandSections } from "@/pages/session/helpers"
import { canAddSelectionContext } from "@/pages/session/session-command-helpers"
export type SessionCommandContext = {
command: ReturnType<typeof useCommand>
dialog: ReturnType<typeof useDialog>
file: ReturnType<typeof useFile>
language: ReturnType<typeof useLanguage>
local: ReturnType<typeof useLocal>
permission: ReturnType<typeof usePermission>
prompt: ReturnType<typeof usePrompt>
sdk: ReturnType<typeof useSDK>
sync: ReturnType<typeof useSync>
terminal: ReturnType<typeof useTerminal>
layout: ReturnType<typeof useLayout>
params: ReturnType<typeof useParams>
navigate: ReturnType<typeof useNavigate>
tabs: () => ReturnType<ReturnType<typeof useLayout>["tabs"]>
view: () => ReturnType<ReturnType<typeof useLayout>["view"]>
info: () => { revert?: { messageID?: string }; share?: { url?: string } } | undefined
status: () => { type: string }
userMessages: () => UserMessage[]
visibleUserMessages: () => UserMessage[]
activeMessage: () => UserMessage | undefined
showAllFiles: () => void
navigateMessageByOffset: (offset: number) => void
setExpanded: (id: string, fn: (open: boolean | undefined) => boolean) => void
setActiveMessage: (message: UserMessage | undefined) => void
addSelectionToContext: (path: string, selection: FileSelection) => void
focusInput: () => void
}
@@ -37,88 +58,45 @@ const withCategory = (category: string) => {
})
}
export const useSessionCommands = (args: SessionCommandContext) => {
const command = useCommand()
const dialog = useDialog()
const file = useFile()
const language = useLanguage()
const local = useLocal()
const permission = usePermission()
const prompt = usePrompt()
const sdk = useSDK()
const sync = useSync()
const terminal = useTerminal()
const layout = useLayout()
const params = useParams()
const navigate = useNavigate()
const sessionKey = createMemo(() => `${params.dir}${params.id ? "/" + params.id : ""}`)
const tabs = createMemo(() => layout.tabs(sessionKey))
const view = createMemo(() => layout.view(sessionKey))
const info = createMemo(() => (params.id ? sync.session.get(params.id) : undefined))
const idle = { type: "idle" as const }
const status = createMemo(() => sync.data.session_status[params.id ?? ""] ?? idle)
const messages = createMemo(() => (params.id ? (sync.data.message[params.id] ?? []) : []))
const userMessages = createMemo(() => messages().filter((m) => m.role === "user") as UserMessage[])
const visibleUserMessages = createMemo(() => {
const revert = info()?.revert?.messageID
if (!revert) return userMessages()
return userMessages().filter((m) => m.id < revert)
})
const selectionPreview = (path: string, selection: FileSelection) => {
const content = file.get(path)?.content?.content
if (!content) return undefined
const start = Math.max(1, Math.min(selection.startLine, selection.endLine))
const end = Math.max(selection.startLine, selection.endLine)
const lines = content.split("\n").slice(start - 1, end)
if (lines.length === 0) return undefined
return lines.slice(0, 2).join("\n")
}
const addSelectionToContext = (path: string, selection: FileSelection) => {
const preview = selectionPreview(path, selection)
prompt.context.add({ type: "file", path, selection, preview })
}
const sessionCommand = withCategory(language.t("command.category.session"))
const fileCommand = withCategory(language.t("command.category.file"))
const contextCommand = withCategory(language.t("command.category.context"))
const viewCommand = withCategory(language.t("command.category.view"))
const terminalCommand = withCategory(language.t("command.category.terminal"))
const modelCommand = withCategory(language.t("command.category.model"))
const mcpCommand = withCategory(language.t("command.category.mcp"))
const agentCommand = withCategory(language.t("command.category.agent"))
const permissionsCommand = withCategory(language.t("command.category.permissions"))
export const useSessionCommands = (input: SessionCommandContext) => {
const sessionCommand = withCategory(input.language.t("command.category.session"))
const fileCommand = withCategory(input.language.t("command.category.file"))
const contextCommand = withCategory(input.language.t("command.category.context"))
const viewCommand = withCategory(input.language.t("command.category.view"))
const terminalCommand = withCategory(input.language.t("command.category.terminal"))
const modelCommand = withCategory(input.language.t("command.category.model"))
const mcpCommand = withCategory(input.language.t("command.category.mcp"))
const agentCommand = withCategory(input.language.t("command.category.agent"))
const permissionsCommand = withCategory(input.language.t("command.category.permissions"))
const sessionCommands = createMemo(() => [
sessionCommand({
id: "session.new",
title: language.t("command.session.new"),
title: input.language.t("command.session.new"),
keybind: "mod+shift+s",
slash: "new",
onSelect: () => navigate(`/${params.dir}/session`),
onSelect: () => input.navigate(`/${input.params.dir}/session`),
}),
])
const fileCommands = createMemo(() => [
fileCommand({
id: "file.open",
title: language.t("command.file.open"),
description: language.t("palette.search.placeholder"),
title: input.language.t("command.file.open"),
description: input.language.t("palette.search.placeholder"),
keybind: "mod+p",
slash: "open",
onSelect: () => dialog.show(() => <DialogSelectFile onOpenFile={args.showAllFiles} />),
onSelect: () => input.dialog.show(() => <DialogSelectFile onOpenFile={input.showAllFiles} />),
}),
fileCommand({
id: "tab.close",
title: language.t("command.tab.close"),
title: input.language.t("command.tab.close"),
keybind: "mod+w",
disabled: !tabs().active(),
disabled: !input.tabs().active(),
onSelect: () => {
const active = tabs().active()
const active = input.tabs().active()
if (!active) return
tabs().close(active)
input.tabs().close(active)
},
}),
])
@@ -126,30 +104,30 @@ export const useSessionCommands = (args: SessionCommandContext) => {
const contextCommands = createMemo(() => [
contextCommand({
id: "context.addSelection",
title: language.t("command.context.addSelection"),
description: language.t("command.context.addSelection.description"),
title: input.language.t("command.context.addSelection"),
description: input.language.t("command.context.addSelection.description"),
keybind: "mod+shift+l",
disabled: !canAddSelectionContext({
active: tabs().active(),
pathFromTab: file.pathFromTab,
selectedLines: file.selectedLines,
active: input.tabs().active(),
pathFromTab: input.file.pathFromTab,
selectedLines: input.file.selectedLines,
}),
onSelect: () => {
const active = tabs().active()
const active = input.tabs().active()
if (!active) return
const path = file.pathFromTab(active)
const path = input.file.pathFromTab(active)
if (!path) return
const range = file.selectedLines(path) as SelectedLineRange | null | undefined
const range = input.file.selectedLines(path) as SelectedLineRange | null | undefined
if (!range) {
showToast({
title: language.t("toast.context.noLineSelection.title"),
description: language.t("toast.context.noLineSelection.description"),
title: input.language.t("toast.context.noLineSelection.title"),
description: input.language.t("toast.context.noLineSelection.description"),
})
return
}
addSelectionToContext(path, selectionFromLines(range))
input.addSelectionToContext(path, selectionFromLines(range))
},
}),
])
@@ -157,50 +135,50 @@ export const useSessionCommands = (args: SessionCommandContext) => {
const viewCommands = createMemo(() => [
viewCommand({
id: "terminal.toggle",
title: language.t("command.terminal.toggle"),
title: input.language.t("command.terminal.toggle"),
keybind: "ctrl+`",
slash: "terminal",
onSelect: () => view().terminal.toggle(),
onSelect: () => input.view().terminal.toggle(),
}),
viewCommand({
id: "review.toggle",
title: language.t("command.review.toggle"),
title: input.language.t("command.review.toggle"),
keybind: "mod+shift+r",
onSelect: () => view().reviewPanel.toggle(),
onSelect: () => input.view().reviewPanel.toggle(),
}),
viewCommand({
id: "fileTree.toggle",
title: language.t("command.fileTree.toggle"),
title: input.language.t("command.fileTree.toggle"),
keybind: "mod+\\",
onSelect: () => layout.fileTree.toggle(),
onSelect: () => input.layout.fileTree.toggle(),
}),
viewCommand({
id: "input.focus",
title: language.t("command.input.focus"),
title: input.language.t("command.input.focus"),
keybind: "ctrl+l",
onSelect: () => args.focusInput(),
onSelect: () => input.focusInput(),
}),
terminalCommand({
id: "terminal.new",
title: language.t("command.terminal.new"),
description: language.t("command.terminal.new.description"),
title: input.language.t("command.terminal.new"),
description: input.language.t("command.terminal.new.description"),
keybind: "ctrl+alt+t",
onSelect: () => {
if (terminal.all().length > 0) terminal.new()
view().terminal.open()
if (input.terminal.all().length > 0) input.terminal.new()
input.view().terminal.open()
},
}),
viewCommand({
id: "steps.toggle",
title: language.t("command.steps.toggle"),
description: language.t("command.steps.toggle.description"),
title: input.language.t("command.steps.toggle"),
description: input.language.t("command.steps.toggle.description"),
keybind: "mod+e",
slash: "steps",
disabled: !params.id,
disabled: !input.params.id,
onSelect: () => {
const msg = args.activeMessage()
const msg = input.activeMessage()
if (!msg) return
args.setExpanded(msg.id, (open: boolean | undefined) => !open)
input.setExpanded(msg.id, (open: boolean | undefined) => !open)
},
}),
])
@@ -208,61 +186,61 @@ export const useSessionCommands = (args: SessionCommandContext) => {
const messageCommands = createMemo(() => [
sessionCommand({
id: "message.previous",
title: language.t("command.message.previous"),
description: language.t("command.message.previous.description"),
title: input.language.t("command.message.previous"),
description: input.language.t("command.message.previous.description"),
keybind: "mod+arrowup",
disabled: !params.id,
onSelect: () => args.navigateMessageByOffset(-1),
disabled: !input.params.id,
onSelect: () => input.navigateMessageByOffset(-1),
}),
sessionCommand({
id: "message.next",
title: language.t("command.message.next"),
description: language.t("command.message.next.description"),
title: input.language.t("command.message.next"),
description: input.language.t("command.message.next.description"),
keybind: "mod+arrowdown",
disabled: !params.id,
onSelect: () => args.navigateMessageByOffset(1),
disabled: !input.params.id,
onSelect: () => input.navigateMessageByOffset(1),
}),
])
const agentCommands = createMemo(() => [
modelCommand({
id: "model.choose",
title: language.t("command.model.choose"),
description: language.t("command.model.choose.description"),
title: input.language.t("command.model.choose"),
description: input.language.t("command.model.choose.description"),
keybind: "mod+'",
slash: "model",
onSelect: () => dialog.show(() => <DialogSelectModel />),
onSelect: () => input.dialog.show(() => <DialogSelectModel />),
}),
mcpCommand({
id: "mcp.toggle",
title: language.t("command.mcp.toggle"),
description: language.t("command.mcp.toggle.description"),
title: input.language.t("command.mcp.toggle"),
description: input.language.t("command.mcp.toggle.description"),
keybind: "mod+;",
slash: "mcp",
onSelect: () => dialog.show(() => <DialogSelectMcp />),
onSelect: () => input.dialog.show(() => <DialogSelectMcp />),
}),
agentCommand({
id: "agent.cycle",
title: language.t("command.agent.cycle"),
description: language.t("command.agent.cycle.description"),
title: input.language.t("command.agent.cycle"),
description: input.language.t("command.agent.cycle.description"),
keybind: "mod+.",
slash: "agent",
onSelect: () => local.agent.move(1),
onSelect: () => input.local.agent.move(1),
}),
agentCommand({
id: "agent.cycle.reverse",
title: language.t("command.agent.cycle.reverse"),
description: language.t("command.agent.cycle.reverse.description"),
title: input.language.t("command.agent.cycle.reverse"),
description: input.language.t("command.agent.cycle.reverse.description"),
keybind: "shift+mod+.",
onSelect: () => local.agent.move(-1),
onSelect: () => input.local.agent.move(-1),
}),
modelCommand({
id: "model.variant.cycle",
title: language.t("command.model.variant.cycle"),
description: language.t("command.model.variant.cycle.description"),
title: input.language.t("command.model.variant.cycle"),
description: input.language.t("command.model.variant.cycle.description"),
keybind: "shift+mod+d",
onSelect: () => {
local.model.variant.cycle()
input.local.model.variant.cycle()
},
}),
])
@@ -271,22 +249,22 @@ export const useSessionCommands = (args: SessionCommandContext) => {
permissionsCommand({
id: "permissions.autoaccept",
title:
params.id && permission.isAutoAccepting(params.id, sdk.directory)
? language.t("command.permissions.autoaccept.disable")
: language.t("command.permissions.autoaccept.enable"),
input.params.id && input.permission.isAutoAccepting(input.params.id, input.sdk.directory)
? input.language.t("command.permissions.autoaccept.disable")
: input.language.t("command.permissions.autoaccept.enable"),
keybind: "mod+shift+a",
disabled: !params.id || !permission.permissionsEnabled(),
disabled: !input.params.id || !input.permission.permissionsEnabled(),
onSelect: () => {
const sessionID = params.id
const sessionID = input.params.id
if (!sessionID) return
permission.toggleAutoAccept(sessionID, sdk.directory)
input.permission.toggleAutoAccept(sessionID, input.sdk.directory)
showToast({
title: permission.isAutoAccepting(sessionID, sdk.directory)
? language.t("toast.permissions.autoaccept.on.title")
: language.t("toast.permissions.autoaccept.off.title"),
description: permission.isAutoAccepting(sessionID, sdk.directory)
? language.t("toast.permissions.autoaccept.on.description")
: language.t("toast.permissions.autoaccept.off.description"),
title: input.permission.isAutoAccepting(sessionID, input.sdk.directory)
? input.language.t("toast.permissions.autoaccept.on.title")
: input.language.t("toast.permissions.autoaccept.off.title"),
description: input.permission.isAutoAccepting(sessionID, input.sdk.directory)
? input.language.t("toast.permissions.autoaccept.on.description")
: input.language.t("toast.permissions.autoaccept.off.description"),
})
},
}),
@@ -295,71 +273,71 @@ export const useSessionCommands = (args: SessionCommandContext) => {
const sessionActionCommands = createMemo(() => [
sessionCommand({
id: "session.undo",
title: language.t("command.session.undo"),
description: language.t("command.session.undo.description"),
title: input.language.t("command.session.undo"),
description: input.language.t("command.session.undo.description"),
slash: "undo",
disabled: !params.id || visibleUserMessages().length === 0,
disabled: !input.params.id || input.visibleUserMessages().length === 0,
onSelect: async () => {
const sessionID = params.id
const sessionID = input.params.id
if (!sessionID) return
if (status()?.type !== "idle") {
await sdk.client.session.abort({ sessionID }).catch(() => {})
if (input.status()?.type !== "idle") {
await input.sdk.client.session.abort({ sessionID }).catch(() => {})
}
const revert = info()?.revert?.messageID
const message = findLast(userMessages(), (x) => !revert || x.id < revert)
const revert = input.info()?.revert?.messageID
const message = findLast(input.userMessages(), (x) => !revert || x.id < revert)
if (!message) return
await sdk.client.session.revert({ sessionID, messageID: message.id })
const parts = sync.data.part[message.id]
await input.sdk.client.session.revert({ sessionID, messageID: message.id })
const parts = input.sync.data.part[message.id]
if (parts) {
const restored = extractPromptFromParts(parts, { directory: sdk.directory })
prompt.set(restored)
const restored = extractPromptFromParts(parts, { directory: input.sdk.directory })
input.prompt.set(restored)
}
const priorMessage = findLast(userMessages(), (x) => x.id < message.id)
args.setActiveMessage(priorMessage)
const priorMessage = findLast(input.userMessages(), (x) => x.id < message.id)
input.setActiveMessage(priorMessage)
},
}),
sessionCommand({
id: "session.redo",
title: language.t("command.session.redo"),
description: language.t("command.session.redo.description"),
title: input.language.t("command.session.redo"),
description: input.language.t("command.session.redo.description"),
slash: "redo",
disabled: !params.id || !info()?.revert?.messageID,
disabled: !input.params.id || !input.info()?.revert?.messageID,
onSelect: async () => {
const sessionID = params.id
const sessionID = input.params.id
if (!sessionID) return
const revertMessageID = info()?.revert?.messageID
const revertMessageID = input.info()?.revert?.messageID
if (!revertMessageID) return
const nextMessage = userMessages().find((x) => x.id > revertMessageID)
const nextMessage = input.userMessages().find((x) => x.id > revertMessageID)
if (!nextMessage) {
await sdk.client.session.unrevert({ sessionID })
prompt.reset()
const lastMsg = findLast(userMessages(), (x) => x.id >= revertMessageID)
args.setActiveMessage(lastMsg)
await input.sdk.client.session.unrevert({ sessionID })
input.prompt.reset()
const lastMsg = findLast(input.userMessages(), (x) => x.id >= revertMessageID)
input.setActiveMessage(lastMsg)
return
}
await sdk.client.session.revert({ sessionID, messageID: nextMessage.id })
const priorMsg = findLast(userMessages(), (x) => x.id < nextMessage.id)
args.setActiveMessage(priorMsg)
await input.sdk.client.session.revert({ sessionID, messageID: nextMessage.id })
const priorMsg = findLast(input.userMessages(), (x) => x.id < nextMessage.id)
input.setActiveMessage(priorMsg)
},
}),
sessionCommand({
id: "session.compact",
title: language.t("command.session.compact"),
description: language.t("command.session.compact.description"),
title: input.language.t("command.session.compact"),
description: input.language.t("command.session.compact.description"),
slash: "compact",
disabled: !params.id || visibleUserMessages().length === 0,
disabled: !input.params.id || input.visibleUserMessages().length === 0,
onSelect: async () => {
const sessionID = params.id
const sessionID = input.params.id
if (!sessionID) return
const model = local.model.current()
const model = input.local.model.current()
if (!model) {
showToast({
title: language.t("toast.model.none.title"),
description: language.t("toast.model.none.description"),
title: input.language.t("toast.model.none.title"),
description: input.language.t("toast.model.none.description"),
})
return
}
await sdk.client.session.summarize({
await input.sdk.client.session.summarize({
sessionID,
modelID: model.id,
providerID: model.provider.id,
@@ -368,27 +346,29 @@ export const useSessionCommands = (args: SessionCommandContext) => {
}),
sessionCommand({
id: "session.fork",
title: language.t("command.session.fork"),
description: language.t("command.session.fork.description"),
title: input.language.t("command.session.fork"),
description: input.language.t("command.session.fork.description"),
slash: "fork",
disabled: !params.id || visibleUserMessages().length === 0,
onSelect: () => dialog.show(() => <DialogFork />),
disabled: !input.params.id || input.visibleUserMessages().length === 0,
onSelect: () => input.dialog.show(() => <DialogFork />),
}),
])
const shareCommands = createMemo(() => {
if (sync.data.config.share === "disabled") return []
if (input.sync.data.config.share === "disabled") return []
return [
sessionCommand({
id: "session.share",
title: info()?.share?.url ? language.t("session.share.copy.copyLink") : language.t("command.session.share"),
description: info()?.share?.url
? language.t("toast.session.share.success.description")
: language.t("command.session.share.description"),
title: input.info()?.share?.url
? input.language.t("session.share.copy.copyLink")
: input.language.t("command.session.share"),
description: input.info()?.share?.url
? input.language.t("toast.session.share.success.description")
: input.language.t("command.session.share.description"),
slash: "share",
disabled: !params.id,
disabled: !input.params.id,
onSelect: async () => {
if (!params.id) return
if (!input.params.id) return
const write = (value: string) => {
const body = typeof document === "undefined" ? undefined : document.body
@@ -418,7 +398,7 @@ export const useSessionCommands = (args: SessionCommandContext) => {
const ok = await write(url)
if (!ok) {
showToast({
title: language.t("toast.session.share.copyFailed.title"),
title: input.language.t("toast.session.share.copyFailed.title"),
variant: "error",
})
return
@@ -426,27 +406,27 @@ export const useSessionCommands = (args: SessionCommandContext) => {
showToast({
title: existing
? language.t("session.share.copy.copied")
: language.t("toast.session.share.success.title"),
description: language.t("toast.session.share.success.description"),
? input.language.t("session.share.copy.copied")
: input.language.t("toast.session.share.success.title"),
description: input.language.t("toast.session.share.success.description"),
variant: "success",
})
}
const existing = info()?.share?.url
const existing = input.info()?.share?.url
if (existing) {
await copy(existing, true)
return
}
const url = await sdk.client.session
.share({ sessionID: params.id })
const url = await input.sdk.client.session
.share({ sessionID: input.params.id })
.then((res) => res.data?.share?.url)
.catch(() => undefined)
if (!url) {
showToast({
title: language.t("toast.session.share.failed.title"),
description: language.t("toast.session.share.failed.description"),
title: input.language.t("toast.session.share.failed.title"),
description: input.language.t("toast.session.share.failed.description"),
variant: "error",
})
return
@@ -457,25 +437,25 @@ export const useSessionCommands = (args: SessionCommandContext) => {
}),
sessionCommand({
id: "session.unshare",
title: language.t("command.session.unshare"),
description: language.t("command.session.unshare.description"),
title: input.language.t("command.session.unshare"),
description: input.language.t("command.session.unshare.description"),
slash: "unshare",
disabled: !params.id || !info()?.share?.url,
disabled: !input.params.id || !input.info()?.share?.url,
onSelect: async () => {
if (!params.id) return
await sdk.client.session
.unshare({ sessionID: params.id })
if (!input.params.id) return
await input.sdk.client.session
.unshare({ sessionID: input.params.id })
.then(() =>
showToast({
title: language.t("toast.session.unshare.success.title"),
description: language.t("toast.session.unshare.success.description"),
title: input.language.t("toast.session.unshare.success.title"),
description: input.language.t("toast.session.unshare.success.description"),
variant: "success",
}),
)
.catch(() =>
showToast({
title: language.t("toast.session.unshare.failed.title"),
description: language.t("toast.session.unshare.failed.description"),
title: input.language.t("toast.session.unshare.failed.title"),
description: input.language.t("toast.session.unshare.failed.description"),
variant: "error",
}),
)
@@ -484,8 +464,8 @@ export const useSessionCommands = (args: SessionCommandContext) => {
]
})
command.register("session", () =>
[
input.command.register("session", () =>
combineCommandSections([
sessionCommands(),
fileCommands(),
contextCommands(),
@@ -495,6 +475,6 @@ export const useSessionCommands = (args: SessionCommandContext) => {
permissionCommands(),
sessionActionCommands(),
shareCommands(),
].flatMap((section) => section),
]),
)
}

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-app",
"version": "1.2.6",
"version": "1.2.4",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -174,6 +174,21 @@ body {
}
}
input:-webkit-autofill,
input:-webkit-autofill:hover,
input:-webkit-autofill:focus,
input:-webkit-autofill:active {
transition: background-color 5000000s ease-in-out 0s;
}
input:-webkit-autofill {
-webkit-text-fill-color: var(--color-text-strong) !important;
}
input:-moz-autofill {
-moz-text-fill-color: var(--color-text-strong) !important;
}
[data-component="container"] {
max-width: 67.5rem;
margin: 0 auto;
@@ -1234,19 +1249,4 @@ body {
text-decoration: underline;
}
}
input:-webkit-autofill,
input:-webkit-autofill:hover,
input:-webkit-autofill:focus,
input:-webkit-autofill:active {
transition: background-color 5000000s ease-in-out 0s;
}
input:-webkit-autofill {
-webkit-text-fill-color: var(--color-text-strong) !important;
}
input:-moz-autofill {
-moz-text-fill-color: var(--color-text-strong) !important;
}
}

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/console-core",
"version": "1.2.6",
"version": "1.2.4",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-function",
"version": "1.2.6",
"version": "1.2.4",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-mail",
"version": "1.2.6",
"version": "1.2.4",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",

View File

@@ -1,7 +1,7 @@
{
"name": "@opencode-ai/desktop",
"private": true,
"version": "1.2.6",
"version": "1.2.4",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -2343,9 +2343,9 @@ dependencies = [
[[package]]
name = "libc"
version = "0.2.180"
version = "0.2.177"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc"
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
[[package]]
name = "libloading"
@@ -2663,18 +2663,6 @@ dependencies = [
"memoffset",
]
[[package]]
name = "nix"
version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "225e7cfe711e0ba79a68baeddb2982723e4235247aefce1482f2f16c27865b66"
dependencies = [
"bitflags 2.10.0",
"cfg-if",
"cfg_aliases",
"libc",
]
[[package]]
name = "nodrop"
version = "0.1.14"
@@ -3105,7 +3093,6 @@ dependencies = [
"listeners",
"objc2 0.6.3",
"objc2-web-kit",
"process-wrap",
"reqwest 0.12.24",
"semver",
"serde",
@@ -3136,6 +3123,7 @@ dependencies = [
"tracing-subscriber",
"uuid",
"webkit2gtk",
"windows 0.61.3",
]
[[package]]
@@ -3650,20 +3638,6 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "process-wrap"
version = "9.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccd9713fe2c91c3c85ac388b31b89de339365d2c995146e630b5e0da9d06526a"
dependencies = [
"futures",
"indexmap 2.12.1",
"nix 0.31.1",
"tokio",
"tracing",
"windows 0.62.2",
]
[[package]]
name = "psl-types"
version = "2.0.11"
@@ -6486,23 +6460,11 @@ version = "0.61.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
dependencies = [
"windows-collections 0.2.0",
"windows-collections",
"windows-core 0.61.2",
"windows-future 0.2.1",
"windows-future",
"windows-link 0.1.3",
"windows-numerics 0.2.0",
]
[[package]]
name = "windows"
version = "0.62.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "527fadee13e0c05939a6a05d5bd6eec6cd2e3dbd648b9f8e447c6518133d8580"
dependencies = [
"windows-collections 0.3.2",
"windows-core 0.62.2",
"windows-future 0.3.2",
"windows-numerics 0.3.1",
"windows-numerics",
]
[[package]]
@@ -6514,15 +6476,6 @@ dependencies = [
"windows-core 0.61.2",
]
[[package]]
name = "windows-collections"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b2d95af1a8a14a3c7367e1ed4fc9c20e0a26e79551b1454d72583c97cc6610"
dependencies = [
"windows-core 0.62.2",
]
[[package]]
name = "windows-core"
version = "0.51.1"
@@ -6566,18 +6519,7 @@ checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
dependencies = [
"windows-core 0.61.2",
"windows-link 0.1.3",
"windows-threading 0.1.0",
]
[[package]]
name = "windows-future"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1d6f90251fe18a279739e78025bd6ddc52a7e22f921070ccdc67dde84c605cb"
dependencies = [
"windows-core 0.62.2",
"windows-link 0.2.1",
"windows-threading 0.2.1",
"windows-threading",
]
[[package]]
@@ -6624,16 +6566,6 @@ dependencies = [
"windows-link 0.1.3",
]
[[package]]
name = "windows-numerics"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e2e40844ac143cdb44aead537bbf727de9b044e107a0f1220392177d15b0f26"
dependencies = [
"windows-core 0.62.2",
"windows-link 0.2.1",
]
[[package]]
name = "windows-registry"
version = "0.5.3"
@@ -6809,15 +6741,6 @@ dependencies = [
"windows-link 0.1.3",
]
[[package]]
name = "windows-threading"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3949bd5b99cafdf1c7ca86b43ca564028dfe27d66958f2470940f73d86d75b37"
dependencies = [
"windows-link 0.2.1",
]
[[package]]
name = "windows-version"
version = "0.1.7"

View File

@@ -34,7 +34,7 @@ tauri-plugin-single-instance = { version = "2", features = ["deep-link"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.48.0", features = ["process"] }
tokio = "1.48.0"
listeners = "0.3"
tauri-plugin-os = "2"
futures = "0.3.31"
@@ -52,7 +52,6 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tracing-appender = "0.2"
chrono = "0.4"
tokio-stream = { version = "0.1.18", features = ["sync"] }
process-wrap = { version = "9.0.3", features = ["tokio1"] }
[target.'cfg(target_os = "linux")'.dependencies]
gtk = "0.18.2"
@@ -63,6 +62,14 @@ objc2 = "0.6"
objc2-web-kit = "0.3"
[target.'cfg(windows)'.dependencies]
windows = { version = "0.61", features = [
"Win32_Foundation",
"Win32_System_JobObjects",
"Win32_System_Threading",
"Win32_Security"
] }
[patch.crates-io]
specta = { git = "https://github.com/specta-rs/specta", rev = "591a5f3ddc78348abf4cbb541d599d65306d92b9" }
specta-typescript = { git = "https://github.com/specta-rs/specta", rev = "591a5f3ddc78348abf4cbb541d599d65306d92b9" }

View File

@@ -1,19 +1,12 @@
use futures::{FutureExt, Stream, StreamExt, future};
use process_wrap::tokio::CommandWrap;
#[cfg(unix)]
use process_wrap::tokio::ProcessGroup;
#[cfg(windows)]
use process_wrap::tokio::{JobObject, KillOnDrop};
#[cfg(unix)]
use std::os::unix::process::ExitStatusExt;
use std::{process::Stdio, time::Duration};
use tauri::{AppHandle, Manager, path::BaseDirectory};
use tauri_plugin_shell::{
ShellExt,
process::{CommandChild, CommandEvent, TerminatedPayload},
};
use tauri_plugin_store::StoreExt;
use tauri_specta::Event;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tokio::sync::{mpsc, oneshot};
use tokio_stream::wrappers::ReceiverStream;
use tokio::sync::oneshot;
use tracing::Instrument;
use crate::constants::{SETTINGS_STORE, WSL_ENABLED_KEY};
@@ -32,33 +25,6 @@ pub struct Config {
pub server: Option<ServerConfig>,
}
#[derive(Clone, Debug)]
pub enum CommandEvent {
Stdout(Vec<u8>),
Stderr(Vec<u8>),
Error(String),
Terminated(TerminatedPayload),
}
#[derive(Clone, Copy, Debug)]
pub struct TerminatedPayload {
pub code: Option<i32>,
pub signal: Option<i32>,
}
#[derive(Clone, Debug)]
pub struct CommandChild {
kill: mpsc::Sender<()>,
}
impl CommandChild {
pub fn kill(&self) -> std::io::Result<()> {
self.kill
.try_send(())
.map_err(|e| std::io::Error::other(e.to_string()))
}
}
pub async fn get_config(app: &AppHandle) -> Option<Config> {
let (events, _) = spawn_command(app, "debug config", &[]).ok()?;
@@ -224,7 +190,7 @@ pub fn spawn_command(
app: &tauri::AppHandle,
args: &str,
extra_env: &[(&str, String)],
) -> Result<(impl Stream<Item = CommandEvent> + 'static, CommandChild), std::io::Error> {
) -> Result<(impl Stream<Item = CommandEvent> + 'static, CommandChild), tauri_plugin_shell::Error> {
let state_dir = app
.path()
.resolve("", BaseDirectory::AppLocalData)
@@ -251,7 +217,7 @@ pub fn spawn_command(
.map(|(key, value)| (key.to_string(), value.clone())),
);
let mut cmd = if cfg!(windows) {
let cmd = if cfg!(windows) {
if is_wsl_enabled(app) {
tracing::info!("WSL is enabled, spawning CLI server in WSL");
let version = app.package_info().version.to_string();
@@ -283,16 +249,18 @@ pub fn spawn_command(
script.push(format!("{} exec \"$BIN\" {}", env_prefix.join(" "), args));
let mut cmd = Command::new("wsl");
cmd.args(["-e", "bash", "-lc", &script.join("\n")]);
cmd
app.shell()
.command("wsl")
.args(["-e", "bash", "-lc", &script.join("\n")])
} else {
let sidecar = get_sidecar_path(app);
let mut cmd = Command::new(sidecar);
cmd.args(args.split_whitespace());
let mut cmd = app
.shell()
.sidecar("opencode-cli")
.unwrap()
.args(args.split_whitespace());
for (key, value) in envs {
cmd.env(key, value);
cmd = cmd.env(key, value);
}
cmd
@@ -301,111 +269,26 @@ pub fn spawn_command(
let sidecar = get_sidecar_path(app);
let shell = get_user_shell();
let line = if shell.ends_with("/nu") {
let cmd = if shell.ends_with("/nu") {
format!("^\"{}\" {}", sidecar.display(), args)
} else {
format!("\"{}\" {}", sidecar.display(), args)
};
let mut cmd = Command::new(shell);
cmd.args(["-il", "-c", &line]);
let mut cmd = app.shell().command(&shell).args(["-il", "-c", &cmd]);
for (key, value) in envs {
cmd.env(key, value);
cmd = cmd.env(key, value);
}
cmd
};
cmd.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped());
let mut wrap = CommandWrap::from(cmd);
#[cfg(unix)]
{
wrap.wrap(ProcessGroup::leader());
}
#[cfg(windows)]
{
wrap.wrap(JobObject).wrap(KillOnDrop);
}
let mut child = wrap.spawn()?;
let stdout = child.stdout().take();
let stderr = child.stderr().take();
let (tx, rx) = mpsc::channel(256);
let (kill_tx, mut kill_rx) = mpsc::channel(1);
if let Some(stdout) = stdout {
let tx = tx.clone();
tokio::spawn(async move {
let mut lines = BufReader::new(stdout).lines();
while let Ok(Some(line)) = lines.next_line().await {
let _ = tx.send(CommandEvent::Stdout(line.into_bytes())).await;
}
});
}
if let Some(stderr) = stderr {
let tx = tx.clone();
tokio::spawn(async move {
let mut lines = BufReader::new(stderr).lines();
while let Ok(Some(line)) = lines.next_line().await {
let _ = tx.send(CommandEvent::Stderr(line.into_bytes())).await;
}
});
}
tokio::spawn(async move {
let status = loop {
match child.try_wait() {
Ok(Some(status)) => break Ok(status),
Ok(None) => {}
Err(err) => break Err(err),
}
tokio::select! {
_ = kill_rx.recv() => {
let _ = child.start_kill();
}
_ = tokio::time::sleep(Duration::from_millis(100)) => {}
}
};
match status {
Ok(status) => {
let payload = TerminatedPayload {
code: status.code(),
signal: signal_from_status(status),
};
let _ = tx.send(CommandEvent::Terminated(payload)).await;
}
Err(err) => {
let _ = tx.send(CommandEvent::Error(err.to_string())).await;
}
}
});
let event_stream = ReceiverStream::new(rx);
let (rx, child) = cmd.spawn()?;
let event_stream = tokio_stream::wrappers::ReceiverStream::new(rx);
let event_stream = sqlite_migration::logs_middleware(app.clone(), event_stream);
Ok((event_stream, CommandChild { kill: kill_tx }))
}
fn signal_from_status(status: std::process::ExitStatus) -> Option<i32> {
#[cfg(unix)]
{
return status.signal();
}
#[cfg(not(unix))]
{
let _ = status;
None
}
Ok((event_stream, child))
}
pub fn serve(
@@ -457,6 +340,7 @@ pub fn serve(
let _ = tx.send(payload);
}
}
_ => {}
}
future::ready(())

View File

@@ -0,0 +1,145 @@
//! Windows Job Object for reliable child process cleanup.
//!
//! This module provides a wrapper around Windows Job Objects with the
//! `JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE` flag set. When the job object handle
//! is closed (including when the parent process exits or crashes), Windows
//! automatically terminates all processes assigned to the job.
//!
//! This is more reliable than manual cleanup because it works even if:
//! - The parent process crashes
//! - The parent is killed via Task Manager
//! - The RunEvent::Exit handler fails to run
use std::io::{Error, Result};
#[cfg(windows)]
use std::sync::Mutex;
use windows::Win32::Foundation::{CloseHandle, HANDLE};
use windows::Win32::System::JobObjects::{
AssignProcessToJobObject, CreateJobObjectW, JobObjectExtendedLimitInformation,
SetInformationJobObject, JOBOBJECT_EXTENDED_LIMIT_INFORMATION,
JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE,
};
use windows::Win32::System::Threading::{OpenProcess, PROCESS_SET_QUOTA, PROCESS_TERMINATE};
/// A Windows Job Object configured to kill all assigned processes when closed.
///
/// When this struct is dropped or when the owning process exits (even abnormally),
/// Windows will automatically terminate all processes that have been assigned to it.
pub struct JobObject(HANDLE);
// SAFETY: HANDLE is just a pointer-sized value, and Windows job objects
// can be safely accessed from multiple threads.
unsafe impl Send for JobObject {}
unsafe impl Sync for JobObject {}
impl JobObject {
/// Creates a new anonymous job object with `JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE` set.
///
/// When the last handle to this job is closed (including on process exit),
/// Windows will terminate all processes assigned to the job.
pub fn new() -> Result<Self> {
unsafe {
// Create an anonymous job object
let job = CreateJobObjectW(None, None).map_err(|e| Error::other(e.message()))?;
// Configure the job to kill all processes when the handle is closed
let mut info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION::default();
info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
SetInformationJobObject(
job,
JobObjectExtendedLimitInformation,
&info as *const _ as *const std::ffi::c_void,
std::mem::size_of::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>() as u32,
)
.map_err(|e| Error::other(e.message()))?;
Ok(Self(job))
}
}
/// Assigns a process to this job object by its process ID.
///
/// Once assigned, the process will be terminated when this job object is dropped
/// or when the owning process exits.
///
/// # Arguments
/// * `pid` - The process ID of the process to assign
pub fn assign_pid(&self, pid: u32) -> Result<()> {
unsafe {
// Open a handle to the process with the minimum required permissions
// PROCESS_SET_QUOTA and PROCESS_TERMINATE are required by AssignProcessToJobObject
let process = OpenProcess(PROCESS_SET_QUOTA | PROCESS_TERMINATE, false, pid)
.map_err(|e| Error::other(e.message()))?;
// Assign the process to the job
let result = AssignProcessToJobObject(self.0, process);
// Close our handle to the process - the job object maintains its own reference
let _ = CloseHandle(process);
result.map_err(|e| Error::other(e.message()))
}
}
}
impl Drop for JobObject {
fn drop(&mut self) {
unsafe {
// When this handle is closed and it's the last handle to the job,
// Windows will terminate all processes in the job due to KILL_ON_JOB_CLOSE
let _ = CloseHandle(self.0);
}
}
}
/// Holds the Windows Job Object that ensures child processes are killed when the app exits.
/// On Windows, when the job object handle is closed (including on crash), all assigned
/// processes are automatically terminated by the OS.
#[cfg(windows)]
pub struct JobObjectState {
job: Mutex<Option<JobObject>>,
error: Mutex<Option<String>>,
}
#[cfg(windows)]
impl JobObjectState {
pub fn new() -> Self {
match JobObject::new() {
Ok(job) => Self {
job: Mutex::new(Some(job)),
error: Mutex::new(None),
},
Err(e) => {
tracing::error!("Failed to create job object: {e}");
Self {
job: Mutex::new(None),
error: Mutex::new(Some(format!("Failed to create job object: {e}"))),
}
}
}
}
pub fn assign_pid(&self, pid: u32) {
if let Some(job) = self.job.lock().unwrap().as_ref() {
if let Err(e) = job.assign_pid(pid) {
tracing::error!(pid, "Failed to assign process to job object: {e}");
*self.error.lock().unwrap() =
Some(format!("Failed to assign process to job object: {e}"));
} else {
tracing::info!(pid, "Assigned process to job object for automatic cleanup");
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_job_object_creation() {
let job = JobObject::new();
assert!(job.is_ok(), "Failed to create job object: {:?}", job.err());
}
}

View File

@@ -1,20 +1,21 @@
mod cli;
mod constants;
#[cfg(windows)]
mod job_object;
#[cfg(target_os = "linux")]
pub mod linux_display;
#[cfg(target_os = "linux")]
pub mod linux_windowing;
mod logging;
mod markdown;
mod server;
mod window_customizer;
mod windows;
use crate::cli::CommandChild;
use futures::{
FutureExt, TryFutureExt,
future::{self, Shared},
};
#[cfg(windows)]
use job_object::*;
use std::{
env,
net::TcpListener,
@@ -26,6 +27,7 @@ use std::{
use tauri::{AppHandle, Listener, Manager, RunEvent, State, ipc::Channel};
#[cfg(any(target_os = "linux", all(debug_assertions, windows)))]
use tauri_plugin_deep_link::DeepLinkExt;
use tauri_plugin_shell::process::CommandChild;
use tauri_specta::Event;
use tokio::{
sync::{oneshot, watch},
@@ -629,6 +631,12 @@ async fn initialize(app: AppHandle) {
tracing::info!("CLI health check OK");
#[cfg(windows)]
{
let job_state = app.state::<JobObjectState>();
job_state.assign_pid(child.pid());
}
app.state::<ServerState>().set_child(Some(child));
Ok(ServerReadyData { url, password })
@@ -702,6 +710,9 @@ fn setup_app(app: &tauri::AppHandle, init_rx: watch::Receiver<InitStep>) {
#[cfg(any(target_os = "linux", all(debug_assertions, windows)))]
app.deep_link().register_all().ok();
#[cfg(windows)]
app.manage(JobObjectState::new());
app.manage(InitState { current: init_rx });
}

View File

@@ -1,475 +0,0 @@
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Backend {
Auto,
Wayland,
X11,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BackendDecision {
pub backend: Backend,
pub note: String,
}
#[derive(Debug, Clone, Default)]
pub struct SessionEnv {
pub wayland_display: bool,
pub xdg_session_type: Option<String>,
pub display: bool,
pub xdg_current_desktop: Option<String>,
pub xdg_session_desktop: Option<String>,
pub desktop_session: Option<String>,
pub oc_allow_wayland: Option<String>,
pub oc_force_x11: Option<String>,
pub oc_force_wayland: Option<String>,
pub oc_linux_decorations: Option<String>,
pub oc_force_decorations: Option<String>,
pub oc_no_decorations: Option<String>,
pub i3_sock: bool,
}
impl SessionEnv {
pub fn capture() -> Self {
Self {
wayland_display: std::env::var_os("WAYLAND_DISPLAY").is_some(),
xdg_session_type: std::env::var("XDG_SESSION_TYPE").ok(),
display: std::env::var_os("DISPLAY").is_some(),
xdg_current_desktop: std::env::var("XDG_CURRENT_DESKTOP").ok(),
xdg_session_desktop: std::env::var("XDG_SESSION_DESKTOP").ok(),
desktop_session: std::env::var("DESKTOP_SESSION").ok(),
oc_allow_wayland: std::env::var("OC_ALLOW_WAYLAND").ok(),
oc_force_x11: std::env::var("OC_FORCE_X11").ok(),
oc_force_wayland: std::env::var("OC_FORCE_WAYLAND").ok(),
oc_linux_decorations: std::env::var("OC_LINUX_DECORATIONS").ok(),
oc_force_decorations: std::env::var("OC_FORCE_DECORATIONS").ok(),
oc_no_decorations: std::env::var("OC_NO_DECORATIONS").ok(),
i3_sock: std::env::var_os("I3SOCK").is_some(),
}
}
}
pub fn select_backend(env: &SessionEnv, prefer_wayland: bool) -> Option<BackendDecision> {
if is_truthy(env.oc_force_x11.as_deref()) {
return Some(BackendDecision {
backend: Backend::X11,
note: "Forcing X11 due to OC_FORCE_X11=1".into(),
});
}
if is_truthy(env.oc_force_wayland.as_deref()) {
return Some(BackendDecision {
backend: Backend::Wayland,
note: "Forcing native Wayland due to OC_FORCE_WAYLAND=1".into(),
});
}
if !is_wayland_session(env) {
return None;
}
if prefer_wayland {
return Some(BackendDecision {
backend: Backend::Wayland,
note: "Wayland session detected; forcing native Wayland from settings".into(),
});
}
if is_truthy(env.oc_allow_wayland.as_deref()) {
return Some(BackendDecision {
backend: Backend::Wayland,
note: "Wayland session detected; forcing native Wayland due to OC_ALLOW_WAYLAND=1"
.into(),
});
}
Some(BackendDecision {
backend: Backend::Auto,
note: "Wayland session detected; using native Wayland first with X11 fallback (auto backend). Set OC_FORCE_X11=1 to force X11."
.into(),
})
}
pub fn use_decorations(env: &SessionEnv) -> bool {
if let Some(mode) = decoration_override(env.oc_linux_decorations.as_deref()) {
return match mode {
DecorationOverride::Native => true,
DecorationOverride::None => false,
DecorationOverride::Auto => default_use_decorations(env),
};
}
if is_truthy(env.oc_force_decorations.as_deref()) {
return true;
}
if is_truthy(env.oc_no_decorations.as_deref()) {
return false;
}
default_use_decorations(env)
}
fn default_use_decorations(env: &SessionEnv) -> bool {
if is_known_tiling_session(env) {
return false;
}
if !is_wayland_session(env) {
return true;
}
is_full_desktop_session(env)
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum DecorationOverride {
Auto,
Native,
None,
}
fn decoration_override(value: Option<&str>) -> Option<DecorationOverride> {
let value = value?.trim().to_ascii_lowercase();
if matches!(value.as_str(), "auto") {
return Some(DecorationOverride::Auto);
}
if matches!(
value.as_str(),
"native" | "server" | "de" | "wayland" | "on" | "true" | "1"
) {
return Some(DecorationOverride::Native);
}
if matches!(
value.as_str(),
"none" | "off" | "false" | "0" | "client" | "csd"
) {
return Some(DecorationOverride::None);
}
None
}
fn is_truthy(value: Option<&str>) -> bool {
matches!(
value.map(|v| v.trim().to_ascii_lowercase()),
Some(v) if matches!(v.as_str(), "1" | "true" | "yes" | "on")
)
}
fn is_wayland_session(env: &SessionEnv) -> bool {
env.wayland_display
|| matches!(
env.xdg_session_type.as_deref(),
Some(value) if value.eq_ignore_ascii_case("wayland")
)
}
fn is_full_desktop_session(env: &SessionEnv) -> bool {
desktop_tokens(env).any(|value| {
matches!(
value.as_str(),
"gnome"
| "kde"
| "plasma"
| "xfce"
| "xfce4"
| "x-cinnamon"
| "cinnamon"
| "mate"
| "lxqt"
| "budgie"
| "pantheon"
| "deepin"
| "unity"
| "cosmic"
)
})
}
fn is_known_tiling_session(env: &SessionEnv) -> bool {
if env.i3_sock {
return true;
}
desktop_tokens(env).any(|value| {
matches!(
value.as_str(),
"niri"
| "sway"
| "swayfx"
| "hyprland"
| "river"
| "i3"
| "i3wm"
| "bspwm"
| "dwm"
| "qtile"
| "xmonad"
| "leftwm"
| "dwl"
| "awesome"
| "herbstluftwm"
| "spectrwm"
| "worm"
| "i3-gnome"
)
})
}
fn desktop_tokens<'a>(env: &'a SessionEnv) -> impl Iterator<Item = String> + 'a {
[
env.xdg_current_desktop.as_deref(),
env.xdg_session_desktop.as_deref(),
env.desktop_session.as_deref(),
]
.into_iter()
.flatten()
.flat_map(|desktop| desktop.split(':'))
.map(|value| value.trim().to_ascii_lowercase())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn prefers_wayland_first_on_wayland_session() {
let env = SessionEnv {
wayland_display: true,
display: true,
..Default::default()
};
let decision = select_backend(&env, false).expect("missing decision");
assert_eq!(decision.backend, Backend::Auto);
}
#[test]
fn force_x11_override_wins() {
let env = SessionEnv {
wayland_display: true,
display: true,
oc_force_x11: Some("1".into()),
oc_allow_wayland: Some("1".into()),
oc_force_wayland: Some("1".into()),
..Default::default()
};
let decision = select_backend(&env, true).expect("missing decision");
assert_eq!(decision.backend, Backend::X11);
}
#[test]
fn prefer_wayland_forces_wayland_backend() {
let env = SessionEnv {
wayland_display: true,
display: true,
..Default::default()
};
let decision = select_backend(&env, true).expect("missing decision");
assert_eq!(decision.backend, Backend::Wayland);
}
#[test]
fn force_wayland_override_works_outside_wayland_session() {
let env = SessionEnv {
display: true,
oc_force_wayland: Some("1".into()),
..Default::default()
};
let decision = select_backend(&env, false).expect("missing decision");
assert_eq!(decision.backend, Backend::Wayland);
}
#[test]
fn allow_wayland_forces_wayland_backend() {
let env = SessionEnv {
wayland_display: true,
display: true,
oc_allow_wayland: Some("1".into()),
..Default::default()
};
let decision = select_backend(&env, false).expect("missing decision");
assert_eq!(decision.backend, Backend::Wayland);
}
#[test]
fn xdg_session_type_wayland_is_detected() {
let env = SessionEnv {
xdg_session_type: Some("wayland".into()),
..Default::default()
};
let decision = select_backend(&env, false).expect("missing decision");
assert_eq!(decision.backend, Backend::Auto);
}
#[test]
fn returns_none_when_not_wayland_and_no_overrides() {
let env = SessionEnv {
display: true,
xdg_current_desktop: Some("GNOME".into()),
..Default::default()
};
assert!(select_backend(&env, false).is_none());
}
#[test]
fn prefer_wayland_setting_does_not_override_x11_session() {
let env = SessionEnv {
display: true,
xdg_current_desktop: Some("GNOME".into()),
..Default::default()
};
assert!(select_backend(&env, true).is_none());
}
#[test]
fn disables_decorations_on_niri() {
let env = SessionEnv {
xdg_current_desktop: Some("niri".into()),
wayland_display: true,
..Default::default()
};
assert!(!use_decorations(&env));
}
#[test]
fn keeps_decorations_on_gnome() {
let env = SessionEnv {
xdg_current_desktop: Some("GNOME".into()),
wayland_display: true,
..Default::default()
};
assert!(use_decorations(&env));
}
#[test]
fn disables_decorations_when_session_desktop_is_tiling() {
let env = SessionEnv {
xdg_session_desktop: Some("Hyprland".into()),
wayland_display: true,
..Default::default()
};
assert!(!use_decorations(&env));
}
#[test]
fn disables_decorations_for_unknown_wayland_session() {
let env = SessionEnv {
xdg_current_desktop: Some("labwc".into()),
wayland_display: true,
..Default::default()
};
assert!(!use_decorations(&env));
}
#[test]
fn disables_decorations_for_dwm_on_x11() {
let env = SessionEnv {
xdg_current_desktop: Some("dwm".into()),
display: true,
..Default::default()
};
assert!(!use_decorations(&env));
}
#[test]
fn disables_decorations_for_i3_on_x11() {
let env = SessionEnv {
xdg_current_desktop: Some("i3".into()),
display: true,
..Default::default()
};
assert!(!use_decorations(&env));
}
#[test]
fn disables_decorations_for_i3sock_without_xdg_tokens() {
let env = SessionEnv {
display: true,
i3_sock: true,
..Default::default()
};
assert!(!use_decorations(&env));
}
#[test]
fn keeps_decorations_for_gnome_on_x11() {
let env = SessionEnv {
xdg_current_desktop: Some("GNOME".into()),
display: true,
..Default::default()
};
assert!(use_decorations(&env));
}
#[test]
fn no_decorations_override_wins() {
let env = SessionEnv {
xdg_current_desktop: Some("GNOME".into()),
oc_no_decorations: Some("1".into()),
..Default::default()
};
assert!(!use_decorations(&env));
}
#[test]
fn linux_decorations_native_override_wins() {
let env = SessionEnv {
xdg_current_desktop: Some("niri".into()),
wayland_display: true,
oc_linux_decorations: Some("native".into()),
..Default::default()
};
assert!(use_decorations(&env));
}
#[test]
fn linux_decorations_none_override_wins() {
let env = SessionEnv {
xdg_current_desktop: Some("GNOME".into()),
wayland_display: true,
oc_linux_decorations: Some("none".into()),
..Default::default()
};
assert!(!use_decorations(&env));
}
#[test]
fn linux_decorations_auto_uses_default_policy() {
let env = SessionEnv {
xdg_current_desktop: Some("sway".into()),
wayland_display: true,
oc_linux_decorations: Some("auto".into()),
..Default::default()
};
assert!(!use_decorations(&env));
}
#[test]
fn linux_decorations_override_beats_legacy_overrides() {
let env = SessionEnv {
xdg_current_desktop: Some("GNOME".into()),
wayland_display: true,
oc_linux_decorations: Some("none".into()),
oc_force_decorations: Some("1".into()),
..Default::default()
};
assert!(!use_decorations(&env));
}
}
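A minimal sketch of how a launcher might consume this module at startup. The `describe_windowing` helper and its logging are assumptions for illustration only; they are not part of the module above or of the Tauri code changed below.

```rust
// Hypothetical helper, not part of the module above: capture the session once,
// log the backend decision (if any), and report whether to request decorations.
fn describe_windowing(prefer_wayland: bool) -> (Option<Backend>, bool) {
    let env = SessionEnv::capture();
    let backend = select_backend(&env, prefer_wayland).map(|decision| {
        eprintln!("{}", decision.note); // surface the reasoning in logs
        decision.backend
    });
    let decorations = use_decorations(&env);
    (backend, decorations)
}
```

Because the module only returns decisions and never mutates environment variables itself, a caller like this stays trivial to unit test, which is what the `#[cfg(test)]` module above relies on.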

View File

@@ -36,7 +36,11 @@ pub fn init(log_dir: &Path) -> WorkerGuard {
tracing_subscriber::registry()
.with(filter)
.with(fmt::layer().with_writer(std::io::stderr))
.with(fmt::layer().with_writer(non_blocking).with_ansi(false))
.with(
fmt::layer()
.with_writer(non_blocking)
.with_ansi(false),
)
.init();
guard
@@ -51,7 +55,10 @@ pub fn tail() -> String {
return String::new();
};
let lines: Vec<String> = BufReader::new(file).lines().map_while(Result::ok).collect();
let lines: Vec<String> = BufReader::new(file)
.lines()
.map_while(Result::ok)
.collect();
let start = lines.len().saturating_sub(TAIL_LINES);
lines[start..].join("\n")

View File

@@ -4,7 +4,6 @@
// borrowed from https://github.com/skyline69/balatro-mod-manager
#[cfg(target_os = "linux")]
fn configure_display_backend() -> Option<String> {
use opencode_lib::linux_windowing::{Backend, SessionEnv, select_backend};
use std::env;
let set_env_if_absent = |key: &str, value: &str| {
@@ -15,28 +14,45 @@ fn configure_display_backend() -> Option<String> {
}
};
let session = SessionEnv::capture();
let prefer_wayland = opencode_lib::linux_display::read_wayland().unwrap_or(false);
let decision = select_backend(&session, prefer_wayland)?;
match decision.backend {
Backend::X11 => {
set_env_if_absent("WINIT_UNIX_BACKEND", "x11");
set_env_if_absent("GDK_BACKEND", "x11");
set_env_if_absent("WEBKIT_DISABLE_DMABUF_RENDERER", "1");
}
Backend::Wayland => {
set_env_if_absent("WINIT_UNIX_BACKEND", "wayland");
set_env_if_absent("GDK_BACKEND", "wayland");
set_env_if_absent("WEBKIT_DISABLE_DMABUF_RENDERER", "1");
}
Backend::Auto => {
set_env_if_absent("GDK_BACKEND", "wayland,x11");
set_env_if_absent("WEBKIT_DISABLE_DMABUF_RENDERER", "1");
}
let on_wayland = env::var_os("WAYLAND_DISPLAY").is_some()
|| matches!(
env::var("XDG_SESSION_TYPE"),
Ok(v) if v.eq_ignore_ascii_case("wayland")
);
if !on_wayland {
return None;
}
Some(decision.note)
let prefer_wayland = opencode_lib::linux_display::read_wayland().unwrap_or(false);
let allow_wayland = prefer_wayland
|| matches!(
env::var("OC_ALLOW_WAYLAND"),
Ok(v) if matches!(v.to_ascii_lowercase().as_str(), "1" | "true" | "yes")
);
if allow_wayland {
if prefer_wayland {
return Some("Wayland session detected; using native Wayland from settings".into());
}
return Some("Wayland session detected; respecting OC_ALLOW_WAYLAND=1".into());
}
// Prefer XWayland when available to avoid Wayland protocol errors seen during startup.
if env::var_os("DISPLAY").is_some() {
set_env_if_absent("WINIT_UNIX_BACKEND", "x11");
set_env_if_absent("GDK_BACKEND", "x11");
set_env_if_absent("WEBKIT_DISABLE_DMABUF_RENDERER", "1");
return Some(
"Wayland session detected; forcing X11 backend to avoid compositor protocol errors. \
Set OC_ALLOW_WAYLAND=1 to keep native Wayland."
.into(),
);
}
set_env_if_absent("WEBKIT_DISABLE_DMABUF_RENDERER", "1");
Some(
"Wayland session detected without X11; leaving Wayland enabled (set WINIT_UNIX_BACKEND/GDK_BACKEND manually if needed)."
.into(),
)
}
fn main() {

View File

@@ -2,12 +2,12 @@ use std::time::{Duration, Instant};
use tauri::AppHandle;
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogResult};
use tauri_plugin_shell::process::CommandChild;
use tauri_plugin_store::StoreExt;
use tokio::task::JoinHandle;
use crate::{
cli,
cli::CommandChild,
constants::{DEFAULT_SERVER_URL_KEY, SETTINGS_STORE, WSL_ENABLED_KEY},
};

View File

@@ -7,22 +7,6 @@ use tauri::{AppHandle, Manager, Runtime, WebviewUrl, WebviewWindow, WebviewWindo
use tauri_plugin_window_state::AppHandleExt;
use tokio::sync::mpsc;
#[cfg(target_os = "linux")]
use std::sync::OnceLock;
#[cfg(target_os = "linux")]
fn use_decorations() -> bool {
static DECORATIONS: OnceLock<bool> = OnceLock::new();
*DECORATIONS.get_or_init(|| {
crate::linux_windowing::use_decorations(&crate::linux_windowing::SessionEnv::capture())
})
}
#[cfg(not(target_os = "linux"))]
fn use_decorations() -> bool {
true
}
pub struct MainWindow(WebviewWindow);
impl Deref for MainWindow {
@@ -47,13 +31,13 @@ impl MainWindow {
.ok()
.map(|v| v.enabled)
.unwrap_or(false);
let decorations = use_decorations();
let window_builder = base_window_config(
WebviewWindowBuilder::new(app, Self::LABEL, WebviewUrl::App("/".into())),
app,
decorations,
)
.title("OpenCode")
.decorations(true)
.disable_drag_drop_handler()
.zoom_hotkeys_enabled(false)
.visible(true)
@@ -129,12 +113,9 @@ impl LoadingWindow {
pub const LABEL: &str = "loading";
pub fn create(app: &AppHandle) -> Result<Self, tauri::Error> {
let decorations = use_decorations();
let window_builder = base_window_config(
WebviewWindowBuilder::new(app, Self::LABEL, tauri::WebviewUrl::App("/loading".into())),
app,
decorations,
)
.center()
.resizable(false)
@@ -148,9 +129,8 @@ impl LoadingWindow {
fn base_window_config<'a, R: Runtime, M: Manager<R>>(
window_builder: WebviewWindowBuilder<'a, R, M>,
_app: &AppHandle,
decorations: bool,
) -> WebviewWindowBuilder<'a, R, M> {
let window_builder = window_builder.decorations(decorations);
let window_builder = window_builder.decorations(true);
#[cfg(windows)]
let window_builder = window_builder

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/enterprise",
"version": "1.2.6",
"version": "1.2.4",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -1,7 +1,7 @@
id = "opencode"
name = "OpenCode"
description = "The open source coding agent."
version = "1.2.6"
version = "1.2.4"
schema_version = 1
authors = ["Anomaly"]
repository = "https://github.com/anomalyco/opencode"
@@ -11,26 +11,26 @@ name = "OpenCode"
icon = "./icons/opencode.svg"
[agent_servers.opencode.targets.darwin-aarch64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.6/opencode-darwin-arm64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.4/opencode-darwin-arm64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.darwin-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.6/opencode-darwin-x64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.4/opencode-darwin-x64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-aarch64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.6/opencode-linux-arm64.tar.gz"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.4/opencode-linux-arm64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.6/opencode-linux-x64.tar.gz"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.4/opencode-linux-x64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.windows-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.6/opencode-windows-x64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.4/opencode-windows-x64.zip"
cmd = "./opencode.exe"
args = ["acp"]

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/function",
"version": "1.2.6",
"version": "1.2.4",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View File

@@ -1,6 +1,6 @@
{
"$schema": "https://json.schemastore.org/package.json",
"version": "1.2.6",
"version": "1.2.4",
"name": "opencode",
"type": "module",
"license": "MIT",
@@ -74,8 +74,8 @@
"@ai-sdk/vercel": "1.0.33",
"@ai-sdk/xai": "2.0.51",
"@clack/prompts": "1.0.0-alpha.1",
"@gitlab/gitlab-ai-provider": "3.5.1",
"@gitlab/opencode-gitlab-auth": "1.3.3",
"@gitlab/gitlab-ai-provider": "3.5.0",
"@gitlab/opencode-gitlab-auth": "1.3.2",
"@hono/standard-validator": "0.1.5",
"@hono/zod-validator": "catalog:",
"@modelcontextprotocol/sdk": "1.25.2",

View File

@@ -44,16 +44,6 @@ opencode acp
opencode acp --cwd /path/to/project
```
### Question Tool Opt-In
ACP excludes `QuestionTool` by default.
```bash
OPENCODE_ENABLE_QUESTION_TOOL=1 opencode acp
```
Enable this only for ACP clients that support interactive question prompts.
### Programmatic
```typescript

View File

@@ -21,6 +21,7 @@ export class ACPSessionManager {
const session = await this.sdk.session
.create(
{
title: `ACP Session ${crypto.randomUUID()}`,
directory: cwd,
},
{ throwOnError: true },

View File

@@ -4,8 +4,6 @@ import { Database } from "../../storage/db"
import { Database as BunDatabase } from "bun:sqlite"
import { UI } from "../ui"
import { cmd } from "./cmd"
import { JsonMigration } from "../../storage/json-migration"
import { EOL } from "os"
const QueryCommand = cmd({
command: "$0 [query]",
@@ -60,59 +58,11 @@ const PathCommand = cmd({
},
})
const MigrateCommand = cmd({
command: "migrate",
describe: "migrate JSON data to SQLite (merges with existing data)",
handler: async () => {
const sqlite = new BunDatabase(Database.Path)
const tty = process.stderr.isTTY
const width = 36
const orange = "\x1b[38;5;214m"
const muted = "\x1b[0;2m"
const reset = "\x1b[0m"
let last = -1
if (tty) process.stderr.write("\x1b[?25l")
try {
const stats = await JsonMigration.run(sqlite, {
progress: (event) => {
const percent = Math.floor((event.current / event.total) * 100)
if (percent === last) return
last = percent
if (tty) {
const fill = Math.round((percent / 100) * width)
const bar = `${"■".repeat(fill)}${"・".repeat(width - fill)}`
process.stderr.write(
`\r${orange}${bar} ${percent.toString().padStart(3)}%${reset} ${muted}${event.current}/${event.total}${reset} `,
)
} else {
process.stderr.write(`sqlite-migration:${percent}${EOL}`)
}
},
})
if (tty) process.stderr.write("\n")
if (tty) process.stderr.write("\x1b[?25h")
else process.stderr.write(`sqlite-migration:done${EOL}`)
UI.println(
`Migration complete: ${stats.projects} projects, ${stats.sessions} sessions, ${stats.messages} messages`,
)
if (stats.errors.length > 0) {
UI.println(`${stats.errors.length} errors occurred during migration`)
}
} catch (err) {
if (tty) process.stderr.write("\x1b[?25h")
UI.error(`Migration failed: ${err instanceof Error ? err.message : String(err)}`)
process.exit(1)
} finally {
sqlite.close()
}
},
})
export const DbCommand = cmd({
command: "db",
describe: "database tools",
builder: (yargs: Argv) => {
return yargs.command(QueryCommand).command(PathCommand).command(MigrateCommand).demandCommand()
return yargs.command(QueryCommand).command(PathCommand).demandCommand()
},
handler: () => {},
})

View File

@@ -38,34 +38,10 @@ function pagerCmd(): string[] {
export const SessionCommand = cmd({
command: "session",
describe: "manage sessions",
builder: (yargs: Argv) => yargs.command(SessionListCommand).command(SessionDeleteCommand).demandCommand(),
builder: (yargs: Argv) => yargs.command(SessionListCommand).demandCommand(),
async handler() {},
})
export const SessionDeleteCommand = cmd({
command: "delete <sessionID>",
describe: "delete a session",
builder: (yargs: Argv) => {
return yargs.positional("sessionID", {
describe: "session ID to delete",
type: "string",
demandOption: true,
})
},
handler: async (args) => {
await bootstrap(process.cwd(), async () => {
try {
await Session.get(args.sessionID)
} catch {
UI.error(`Session not found: ${args.sessionID}`)
process.exit(1)
}
await Session.remove(args.sessionID)
UI.println(UI.Style.TEXT_SUCCESS_BOLD + `Session ${args.sessionID} deleted` + UI.Style.TEXT_NORMAL)
})
},
})
export const SessionListCommand = cmd({
command: "list",
describe: "list sessions",

View File

@@ -1,5 +1,4 @@
import { cmd } from "../cmd"
import { UI } from "@/cli/ui"
import { tui } from "./app"
import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
@@ -17,20 +16,11 @@ export const AttachCommand = cmd({
type: "string",
description: "directory to run in",
})
.option("continue", {
alias: ["c"],
describe: "continue the last session",
type: "boolean",
})
.option("session", {
alias: ["s"],
type: "string",
describe: "session id to continue",
})
.option("fork", {
type: "boolean",
describe: "fork the session when continuing (use with --continue or --session)",
})
.option("password", {
alias: ["p"],
type: "string",
@@ -41,14 +31,8 @@ export const AttachCommand = cmd({
try {
win32DisableProcessedInput()
if (args.fork && !args.continue && !args.session) {
UI.error("--fork requires --continue or --session")
process.exitCode = 1
return
}
const directory = (() => {
if (!args.dir) return undefined
if (!args.dir) return process.cwd()
try {
process.chdir(args.dir)
return process.cwd()
@@ -65,11 +49,7 @@ export const AttachCommand = cmd({
})()
await tui({
url: args.url,
args: {
continue: args.continue,
sessionID: args.session,
fork: args.fork,
},
args: { sessionID: args.session },
directory,
headers,
})

View File

@@ -247,8 +247,7 @@ export function Autocomplete(props: {
const width = props.anchor().width - 4
options.push(
...sortedFiles.map((item): AutocompleteOption => {
const baseDir = (sync.data.path.directory || process.cwd()).replace(/\/+$/, "")
const fullPath = `${baseDir}/${item}`
const fullPath = `${process.cwd()}/${item}`
const urlObj = pathToFileURL(fullPath)
let filename = item
if (lineRange && !item.endsWith("/")) {

View File

@@ -2,7 +2,7 @@ import { createMemo, createSignal, For } from "solid-js"
import { DEFAULT_THEMES, useTheme } from "@tui/context/theme"
const themeCount = Object.keys(DEFAULT_THEMES).length
const themeTip = `Use {highlight}/themes{/highlight} or {highlight}Ctrl+X T{/highlight} to switch between ${themeCount} built-in themes`
const themeTip = `Use {highlight}/theme{/highlight} or {highlight}Ctrl+X T{/highlight} to switch between ${themeCount} built-in themes`
type TipPart = { text: string; highlight: boolean }
@@ -126,7 +126,7 @@ const TIPS = [
"Use {highlight}{file:path}{/highlight} to include file contents in config values",
"Use {highlight}instructions{/highlight} in config to load additional rules files",
"Set agent {highlight}temperature{/highlight} from 0.0 (focused) to 1.0 (creative)",
"Configure {highlight}steps{/highlight} to limit agentic iterations per request",
"Configure {highlight}maxSteps{/highlight} to limit agentic iterations per request",
'Set {highlight}"tools": {"bash": false}{/highlight} to disable specific tools',
'Set {highlight}"mcp_*": false{/highlight} to disable all tools from an MCP server',
"Override global tool settings per agent configuration",
@@ -147,6 +147,7 @@ const TIPS = [
"Commit your project's {highlight}AGENTS.md{/highlight} file to Git for team sharing",
"Use {highlight}/review{/highlight} to review uncommitted changes, branches, or PRs",
"Run {highlight}/help{/highlight} or {highlight}Ctrl+X H{/highlight} to show the help dialog",
"Use {highlight}/details{/highlight} to toggle tool execution details visibility",
"Use {highlight}/rename{/highlight} to rename the current session",
"Press {highlight}Ctrl+Z{/highlight} to suspend the terminal and return to your shell",
]

View File

@@ -30,7 +30,6 @@ export namespace Flag {
export declare const OPENCODE_CLIENT: string
export const OPENCODE_SERVER_PASSWORD = process.env["OPENCODE_SERVER_PASSWORD"]
export const OPENCODE_SERVER_USERNAME = process.env["OPENCODE_SERVER_USERNAME"]
export const OPENCODE_ENABLE_QUESTION_TOOL = truthy("OPENCODE_ENABLE_QUESTION_TOOL")
// Experimental
export const OPENCODE_EXPERIMENTAL = truthy("OPENCODE_EXPERIMENTAL")

View File

@@ -364,21 +364,3 @@ export const ormolu: Info = {
return Bun.which("ormolu") !== null
},
}
export const cljfmt: Info = {
name: "cljfmt",
command: ["cljfmt", "fix", "--quiet", "$FILE"],
extensions: [".clj", ".cljs", ".cljc", ".edn"],
async enabled() {
return Bun.which("cljfmt") !== null
},
}
export const dfmt: Info = {
name: "dfmt",
command: ["dfmt", "-i", "$FILE"],
extensions: [".d"],
async enabled() {
return Bun.which("dfmt") !== null
},
}

View File

@@ -57,30 +57,6 @@ export namespace Provider {
return isGpt5OrLater(modelID) && !modelID.startsWith("gpt-5-mini")
}
function googleVertexVars(options: Record<string, any>) {
const project =
options["project"] ?? Env.get("GOOGLE_CLOUD_PROJECT") ?? Env.get("GCP_PROJECT") ?? Env.get("GCLOUD_PROJECT")
const location =
options["location"] ?? Env.get("GOOGLE_CLOUD_LOCATION") ?? Env.get("VERTEX_LOCATION") ?? "us-central1"
const endpoint = location === "global" ? "aiplatform.googleapis.com" : `${location}-aiplatform.googleapis.com`
return {
GOOGLE_VERTEX_PROJECT: project,
GOOGLE_VERTEX_LOCATION: location,
GOOGLE_VERTEX_ENDPOINT: endpoint,
}
}
function loadBaseURL(model: Model, options: Record<string, any>) {
const raw = options["baseURL"] ?? model.api.url
if (typeof raw !== "string") return raw
const vars = model.providerID === "google-vertex" ? googleVertexVars(options) : undefined
return raw.replace(/\$\{([^}]+)\}/g, (match, key) => {
const val = Env.get(String(key)) ?? vars?.[String(key) as keyof typeof vars]
return val ?? match
})
}
const BUNDLED_PROVIDERS: Record<string, (options: any) => SDK> = {
"@ai-sdk/amazon-bedrock": createAmazonBedrock,
"@ai-sdk/anthropic": createAnthropic,
@@ -377,16 +353,9 @@ export namespace Provider {
},
}
},
"google-vertex": async (provider) => {
const project =
provider.options?.project ??
Env.get("GOOGLE_CLOUD_PROJECT") ??
Env.get("GCP_PROJECT") ??
Env.get("GCLOUD_PROJECT")
const location =
provider.options?.location ?? Env.get("GOOGLE_CLOUD_LOCATION") ?? Env.get("VERTEX_LOCATION") ?? "us-central1"
"google-vertex": async () => {
const project = Env.get("GOOGLE_CLOUD_PROJECT") ?? Env.get("GCP_PROJECT") ?? Env.get("GCLOUD_PROJECT")
const location = Env.get("GOOGLE_CLOUD_LOCATION") ?? Env.get("VERTEX_LOCATION") ?? "us-east5"
const autoload = Boolean(project)
if (!autoload) return { autoload: false }
return {
@@ -394,18 +363,6 @@ export namespace Provider {
options: {
project,
location,
fetch: async (input: RequestInfo | URL, init?: RequestInit) => {
const { GoogleAuth } = await import(await BunProc.install("google-auth-library"))
const auth = new GoogleAuth()
const client = await auth.getApplicationDefault()
const credentials = await client.credential
const token = await credentials.getAccessToken()
const headers = new Headers(init?.headers)
headers.set("Authorization", `Bearer ${token.token}`)
return fetch(input, { ...init, headers })
},
},
async getModel(sdk: any, modelID: string) {
const id = String(modelID).trim()
@@ -1037,16 +994,11 @@ export namespace Provider {
const provider = s.providers[model.providerID]
const options = { ...provider.options }
if (model.providerID === "google-vertex" && !model.api.npm.includes("@ai-sdk/openai-compatible")) {
delete options.fetch
}
if (model.api.npm.includes("@ai-sdk/openai-compatible") && options["includeUsage"] !== false) {
options["includeUsage"] = true
}
const baseURL = loadBaseURL(model, options)
if (baseURL !== undefined) options["baseURL"] = baseURL
if (!options["baseURL"]) options["baseURL"] = model.api.url
if (options["apiKey"] === undefined && provider.key) options["apiKey"] = provider.key
if (model.headers)
options["headers"] = {

View File

@@ -298,8 +298,8 @@ export namespace ProviderTransform {
if (id.includes("glm-4.7")) return 1.0
if (id.includes("minimax-m2")) return 1.0
if (id.includes("kimi-k2")) {
// kimi-k2-thinking & kimi-k2.5 && kimi-k2p5 && kimi-k2-5
if (["thinking", "k2.", "k2p", "k2-5"].some((s) => id.includes(s))) {
// kimi-k2-thinking & kimi-k2.5 && kimi-k2p5
if (id.includes("thinking") || id.includes("k2.") || id.includes("k2p")) {
return 1.0
}
return 0.6
@@ -310,7 +310,7 @@ export namespace ProviderTransform {
export function topP(model: Provider.Model) {
const id = model.id.toLowerCase()
if (id.includes("qwen")) return 1
if (["minimax-m2", "gemini", "kimi-k2.5", "kimi-k2p5", "kimi-k2-5"].some((s) => id.includes(s))) {
if (id.includes("minimax-m2") || id.includes("kimi-k2.5") || id.includes("kimi-k2p5") || id.includes("gemini")) {
return 0.95
}
return undefined
@@ -319,7 +319,7 @@ export namespace ProviderTransform {
export function topK(model: Provider.Model) {
const id = model.id.toLowerCase()
if (id.includes("minimax-m2")) {
if (["m2.", "m25", "m21"].some((s) => id.includes(s))) return 40
if (id.includes("m2.1")) return 40
return 20
}
if (id.includes("gemini")) return 64
@@ -802,11 +802,6 @@ export namespace ProviderTransform {
}
return { reasoningEffort: "minimal" }
}
if (model.providerID === "venice") {
return { veniceParameters: { disableThinking: true } }
}
return {}
}

View File

@@ -445,12 +445,6 @@ export namespace SessionPrompt {
log.error("subtask execution failed", { error, agent: task.agent, description: task.description })
return undefined
})
const attachments = result?.attachments?.map((attachment) => ({
...attachment,
id: Identifier.ascending("part"),
sessionID,
messageID: assistantMessage.id,
}))
await Plugin.trigger(
"tool.execute.after",
{
@@ -473,7 +467,7 @@ export namespace SessionPrompt {
title: result.title,
metadata: result.metadata,
output: result.output,
attachments,
attachments: result.attachments,
time: {
...part.state.time,
end: Date.now(),
@@ -803,15 +797,6 @@ export namespace SessionPrompt {
},
)
const result = await item.execute(args, ctx)
const output = {
...result,
attachments: result.attachments?.map((attachment) => ({
...attachment,
id: Identifier.ascending("part"),
sessionID: ctx.sessionID,
messageID: input.processor.message.id,
})),
}
await Plugin.trigger(
"tool.execute.after",
{
@@ -820,9 +805,9 @@ export namespace SessionPrompt {
callID: ctx.callID,
args,
},
output,
result,
)
return output
return result
},
})
}
@@ -870,13 +855,16 @@ export namespace SessionPrompt {
)
const textParts: string[] = []
const attachments: Omit<MessageV2.FilePart, "id" | "sessionID" | "messageID">[] = []
const attachments: MessageV2.FilePart[] = []
for (const contentItem of result.content) {
if (contentItem.type === "text") {
textParts.push(contentItem.text)
} else if (contentItem.type === "image") {
attachments.push({
id: Identifier.ascending("part"),
sessionID: input.session.id,
messageID: input.processor.message.id,
type: "file",
mime: contentItem.mimeType,
url: `data:${contentItem.mimeType};base64,${contentItem.data}`,
@@ -888,6 +876,9 @@ export namespace SessionPrompt {
}
if (resource.blob) {
attachments.push({
id: Identifier.ascending("part"),
sessionID: input.session.id,
messageID: input.processor.message.id,
type: "file",
mime: resource.mimeType ?? "application/octet-stream",
url: `data:${resource.mimeType ?? "application/octet-stream"};base64,${resource.blob}`,
@@ -1166,7 +1157,6 @@ export namespace SessionPrompt {
pieces.push(
...result.attachments.map((attachment) => ({
...attachment,
id: Identifier.ascending("part"),
synthetic: true,
filename: attachment.filename ?? part.filename,
messageID: info.id,

View File

@@ -1,3 +1,5 @@
import { Provider } from "@/provider/provider"
import { fn } from "@/util/fn"
import z from "zod"
import { Session } from "."
@@ -6,10 +8,16 @@ import { MessageV2 } from "./message-v2"
import { Identifier } from "@/id/id"
import { Snapshot } from "@/snapshot"
import { Log } from "@/util/log"
import { Storage } from "@/storage/storage"
import { Bus } from "@/bus"
import { LLM } from "./llm"
import { Agent } from "@/agent/agent"
export namespace SessionSummary {
const log = Log.create({ service: "session.summary" })
function unquoteGitPath(input: string) {
if (!input.startsWith('"')) return input
if (!input.endsWith('"')) return input
@@ -109,6 +117,41 @@ export namespace SessionSummary {
diffs,
}
await Session.updateMessage(userMsg)
const textPart = msgWithParts.parts.find((p) => p.type === "text" && !p.synthetic) as MessageV2.TextPart
if (textPart && !userMsg.summary?.title) {
const agent = await Agent.get("title")
if (!agent) return
const stream = await LLM.stream({
agent,
user: userMsg,
tools: {},
model: agent.model
? await Provider.getModel(agent.model.providerID, agent.model.modelID)
: ((await Provider.getSmallModel(userMsg.model.providerID)) ??
(await Provider.getModel(userMsg.model.providerID, userMsg.model.modelID))),
small: true,
messages: [
{
role: "user" as const,
content: `
The following is the text to summarize:
<text>
${textPart?.text ?? ""}
</text>
`,
},
],
abort: new AbortController().signal,
sessionID: userMsg.sessionID,
system: [],
retries: 3,
})
const result = await stream.text
log.info("title", { title: result })
userMsg.summary.title = result
await Session.updateMessage(userMsg)
}
}
export const diff = fn(

View File

@@ -77,12 +77,6 @@ export const BatchTool = Tool.define("batch", async () => {
})
const result = await tool.execute(validatedParams, { ...ctx, callID: partID })
const attachments = result.attachments?.map((attachment) => ({
...attachment,
id: Identifier.ascending("part"),
sessionID: ctx.sessionID,
messageID: ctx.messageID,
}))
await Session.updatePart({
id: partID,
@@ -97,7 +91,7 @@ export const BatchTool = Tool.define("batch", async () => {
output: result.output,
title: result.title,
metadata: result.metadata,
attachments,
attachments: result.attachments,
time: {
start: callStartTime,
end: Date.now(),

View File

@@ -6,6 +6,7 @@ import { LSP } from "../lsp"
import { FileTime } from "../file/time"
import DESCRIPTION from "./read.txt"
import { Instance } from "../project/instance"
import { Identifier } from "../id/id"
import { assertExternalDirectory } from "./external-directory"
import { InstructionPrompt } from "../session/instruction"
@@ -126,6 +127,9 @@ export const ReadTool = Tool.define("read", {
},
attachments: [
{
id: Identifier.ascending("part"),
sessionID: ctx.sessionID,
messageID: ctx.messageID,
type: "file",
mime,
url: `data:${mime};base64,${Buffer.from(await file.bytes()).toString("base64")}`,

View File

@@ -94,11 +94,10 @@ export namespace ToolRegistry {
async function all(): Promise<Tool.Info[]> {
const custom = await state().then((x) => x.custom)
const config = await Config.get()
const question = ["app", "cli", "desktop"].includes(Flag.OPENCODE_CLIENT) || Flag.OPENCODE_ENABLE_QUESTION_TOOL
return [
InvalidTool,
...(question ? [QuestionTool] : []),
...(["app", "cli", "desktop"].includes(Flag.OPENCODE_CLIENT) ? [QuestionTool] : []),
BashTool,
ReadTool,
GlobTool,

View File

@@ -36,7 +36,7 @@ export namespace Tool {
title: string
metadata: M
output: string
attachments?: Omit<MessageV2.FilePart, "id" | "sessionID" | "messageID">[]
attachments?: MessageV2.FilePart[]
}>
formatValidationError?(error: z.ZodError): string
}>

View File

@@ -3,6 +3,7 @@ import { Tool } from "./tool"
import TurndownService from "turndown"
import DESCRIPTION from "./webfetch.txt"
import { abortAfterAny } from "../util/abort"
import { Identifier } from "../id/id"
const MAX_RESPONSE_SIZE = 5 * 1024 * 1024 // 5MB
const DEFAULT_TIMEOUT = 30 * 1000 // 30 seconds
@@ -102,6 +103,9 @@ export const WebFetchTool = Tool.define("webfetch", {
metadata: {},
attachments: [
{
id: Identifier.ascending("part"),
sessionID: ctx.sessionID,
messageID: ctx.messageID,
type: "file",
mime,
url: `data:${mime};base64,${base64Content}`,

View File

@@ -2127,94 +2127,3 @@ test("custom model with variants enabled and disabled", async () => {
},
})
})
test("Google Vertex: retains baseURL for custom proxy", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
provider: {
"vertex-proxy": {
name: "Vertex Proxy",
npm: "@ai-sdk/google-vertex",
api: "https://my-proxy.com/v1",
env: ["GOOGLE_APPLICATION_CREDENTIALS"], // Mock env var requirement
models: {
"gemini-pro": {
name: "Gemini Pro",
tool_call: true,
},
},
options: {
project: "test-project",
location: "us-central1",
baseURL: "https://my-proxy.com/v1", // Should be retained
},
},
},
}),
)
},
})
await Instance.provide({
directory: tmp.path,
init: async () => {
Env.set("GOOGLE_APPLICATION_CREDENTIALS", "test-creds")
},
fn: async () => {
const providers = await Provider.list()
expect(providers["vertex-proxy"]).toBeDefined()
expect(providers["vertex-proxy"].options.baseURL).toBe("https://my-proxy.com/v1")
},
})
})
test("Google Vertex: supports OpenAI compatible models", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
provider: {
"vertex-openai": {
name: "Vertex OpenAI",
npm: "@ai-sdk/google-vertex",
env: ["GOOGLE_APPLICATION_CREDENTIALS"],
models: {
"gpt-4": {
name: "GPT-4",
provider: {
npm: "@ai-sdk/openai-compatible",
api: "https://api.openai.com/v1",
},
},
},
options: {
project: "test-project",
location: "us-central1",
},
},
},
}),
)
},
})
await Instance.provide({
directory: tmp.path,
init: async () => {
Env.set("GOOGLE_APPLICATION_CREDENTIALS", "test-creds")
},
fn: async () => {
const providers = await Provider.list()
const model = providers["vertex-openai"].models["gpt-4"]
expect(model).toBeDefined()
expect(model.api.npm).toBe("@ai-sdk/openai-compatible")
},
})
})

View File

@@ -349,9 +349,6 @@ describe("tool.read truncation", () => {
expect(result.metadata.truncated).toBe(false)
expect(result.attachments).toBeDefined()
expect(result.attachments?.length).toBe(1)
expect(result.attachments?.[0]).not.toHaveProperty("id")
expect(result.attachments?.[0]).not.toHaveProperty("sessionID")
expect(result.attachments?.[0]).not.toHaveProperty("messageID")
},
})
})
@@ -366,9 +363,6 @@ describe("tool.read truncation", () => {
expect(result.attachments).toBeDefined()
expect(result.attachments?.length).toBe(1)
expect(result.attachments?.[0].type).toBe("file")
expect(result.attachments?.[0]).not.toHaveProperty("id")
expect(result.attachments?.[0]).not.toHaveProperty("sessionID")
expect(result.attachments?.[0]).not.toHaveProperty("messageID")
},
})
})

View File

@@ -46,9 +46,6 @@ describe("tool.webfetch", () => {
expect(result.attachments?.[0].type).toBe("file")
expect(result.attachments?.[0].mime).toBe("image/png")
expect(result.attachments?.[0].url.startsWith("data:image/png;base64,")).toBe(true)
expect(result.attachments?.[0]).not.toHaveProperty("id")
expect(result.attachments?.[0]).not.toHaveProperty("sessionID")
expect(result.attachments?.[0]).not.toHaveProperty("messageID")
},
})
},

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/plugin",
"version": "1.2.6",
"version": "1.2.4",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/sdk",
"version": "1.2.6",
"version": "1.2.4",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/slack",
"version": "1.2.6",
"version": "1.2.4",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/ui",
"version": "1.2.6",
"version": "1.2.4",
"type": "module",
"license": "MIT",
"exports": {

View File

@@ -560,12 +560,6 @@
overflow-y: auto;
}
.retry-error-link,
.error-card-link {
color: var(--text-strong);
text-decoration: underline;
}
[data-slot="session-turn-collapsible-content-inner"] {
width: 100%;
min-width: 0;

View File

@@ -436,11 +436,6 @@ export function SessionTurn(
if (s.type !== "retry") return
return s
})
const isRetryFreeUsageLimitError = createMemo(() => {
const r = retry()
if (!r) return false
return r.message.includes("Free usage exceeded")
})
const response = createMemo(() => lastTextPart()?.text)
const responsePartId = createMemo(() => lastTextPart()?.id)
@@ -696,22 +691,10 @@ export function SessionTurn(
{(() => {
const r = retry()
if (!r) return ""
const msg = isRetryFreeUsageLimitError()
? i18n.t("ui.sessionTurn.error.freeUsageExceeded")
: unwrap(r.message)
const msg = unwrap(r.message)
return msg.length > 60 ? msg.slice(0, 60) + "..." : msg
})()}
</span>
<Show when={isRetryFreeUsageLimitError()}>
<a
href="https://opencode.ai/zen"
target="_blank"
class="retry-error-link"
rel="noopener noreferrer"
>
{i18n.t("ui.sessionTurn.error.addCredits")}
</a>
</Show>
<span data-slot="session-turn-retry-seconds">
· {i18n.t("ui.sessionTurn.retry.retrying")}
{store.retrySeconds > 0

View File

@@ -28,8 +28,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "إعادة المحاولة",
"ui.sessionTurn.retry.inSeconds": "خلال {{seconds}} ثواني",
"ui.sessionTurn.error.freeUsageExceeded": "تم تجاوز حد الاستخدام المجاني",
"ui.sessionTurn.error.addCredits": "إضافة رصيد",
"ui.sessionTurn.status.delegating": "تفويض العمل",
"ui.sessionTurn.status.planning": "تخطيط الخطوات التالية",

View File

@@ -28,8 +28,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "tentando novamente",
"ui.sessionTurn.retry.inSeconds": "em {{seconds}}s",
"ui.sessionTurn.error.freeUsageExceeded": "Limite de uso gratuito excedido",
"ui.sessionTurn.error.addCredits": "Adicionar créditos",
"ui.sessionTurn.status.delegating": "Delegando trabalho",
"ui.sessionTurn.status.planning": "Planejando próximos passos",

View File

@@ -32,8 +32,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "ponovni pokušaj",
"ui.sessionTurn.retry.inSeconds": "za {{seconds}}s",
"ui.sessionTurn.error.freeUsageExceeded": "Besplatna upotreba premašena",
"ui.sessionTurn.error.addCredits": "Dodaj kredite",
"ui.sessionTurn.status.delegating": "Delegiranje posla",
"ui.sessionTurn.status.planning": "Planiranje sljedećih koraka",

View File

@@ -27,8 +27,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "prøver igen",
"ui.sessionTurn.retry.inSeconds": "om {{seconds}}s",
"ui.sessionTurn.error.freeUsageExceeded": "Gratis forbrug overskredet",
"ui.sessionTurn.error.addCredits": "Tilføj kreditter",
"ui.sessionTurn.status.delegating": "Delegerer arbejde",
"ui.sessionTurn.status.planning": "Planlægger næste trin",

View File

@@ -31,8 +31,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "erneuter Versuch",
"ui.sessionTurn.retry.inSeconds": "in {{seconds}}s",
"ui.sessionTurn.error.freeUsageExceeded": "Kostenloses Nutzungslimit überschritten",
"ui.sessionTurn.error.addCredits": "Guthaben aufladen",
"ui.sessionTurn.status.delegating": "Arbeit delegieren",
"ui.sessionTurn.status.planning": "Nächste Schritte planen",

View File

@@ -28,8 +28,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "retrying",
"ui.sessionTurn.retry.inSeconds": "in {{seconds}}s",
"ui.sessionTurn.error.freeUsageExceeded": "Free usage exceeded",
"ui.sessionTurn.error.addCredits": "Add credits",
"ui.sessionTurn.status.delegating": "Delegating work",
"ui.sessionTurn.status.planning": "Planning next steps",

View File

@@ -28,8 +28,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "reintentando",
"ui.sessionTurn.retry.inSeconds": "en {{seconds}}s",
"ui.sessionTurn.error.freeUsageExceeded": "Límite de uso gratuito excedido",
"ui.sessionTurn.error.addCredits": "Añadir créditos",
"ui.sessionTurn.status.delegating": "Delegando trabajo",
"ui.sessionTurn.status.planning": "Planificando siguientes pasos",

View File

@@ -28,8 +28,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "nouvelle tentative",
"ui.sessionTurn.retry.inSeconds": "dans {{seconds}}s",
"ui.sessionTurn.error.freeUsageExceeded": "Limite d'utilisation gratuite dépassée",
"ui.sessionTurn.error.addCredits": "Ajouter des crédits",
"ui.sessionTurn.status.delegating": "Délégation du travail",
"ui.sessionTurn.status.planning": "Planification des prochaines étapes",

View File

@@ -27,8 +27,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "再試行中",
"ui.sessionTurn.retry.inSeconds": "{{seconds}}秒後",
"ui.sessionTurn.error.freeUsageExceeded": "無料使用制限に達しました",
"ui.sessionTurn.error.addCredits": "クレジットを追加",
"ui.sessionTurn.status.delegating": "作業を委任中",
"ui.sessionTurn.status.planning": "次のステップを計画中",

View File

@@ -28,8 +28,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "재시도 중",
"ui.sessionTurn.retry.inSeconds": "{{seconds}}초 후",
"ui.sessionTurn.error.freeUsageExceeded": "무료 사용량 초과",
"ui.sessionTurn.error.addCredits": "크레딧 추가",
"ui.sessionTurn.status.delegating": "작업 위임 중",
"ui.sessionTurn.status.planning": "다음 단계 계획 중",

View File

@@ -31,8 +31,6 @@ export const dict: Record<Keys, string> = {
"ui.sessionTurn.retry.retrying": "Prøver igjen",
"ui.sessionTurn.retry.inSeconds": "om {{seconds}}s",
"ui.sessionTurn.error.freeUsageExceeded": "Gratis bruk overskredet",
"ui.sessionTurn.error.addCredits": "Legg til kreditt",
"ui.sessionTurn.status.delegating": "Delegerer arbeid",
"ui.sessionTurn.status.planning": "Planlegger neste trinn",

View File

@@ -27,8 +27,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "ponawianie",
"ui.sessionTurn.retry.inSeconds": "za {{seconds}}s",
"ui.sessionTurn.error.freeUsageExceeded": "Przekroczono limit darmowego użytkowania",
"ui.sessionTurn.error.addCredits": "Dodaj kredyty",
"ui.sessionTurn.status.delegating": "Delegowanie pracy",
"ui.sessionTurn.status.planning": "Planowanie kolejnych kroków",

View File

@@ -27,8 +27,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "повтор",
"ui.sessionTurn.retry.inSeconds": "через {{seconds}}с",
"ui.sessionTurn.error.freeUsageExceeded": "Лимит бесплатного использования превышен",
"ui.sessionTurn.error.addCredits": "Добавить кредиты",
"ui.sessionTurn.status.delegating": "Делегирование работы",
"ui.sessionTurn.status.planning": "Планирование следующих шагов",

View File

@@ -28,8 +28,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "กำลังลองใหม่",
"ui.sessionTurn.retry.inSeconds": "ใน {{seconds}}วิ",
"ui.sessionTurn.error.freeUsageExceeded": "เกินขีดจำกัดการใช้งานฟรี",
"ui.sessionTurn.error.addCredits": "เพิ่มเครดิต",
"ui.sessionTurn.status.delegating": "มอบหมายงาน",
"ui.sessionTurn.status.planning": "วางแผนขั้นตอนถัดไป",

View File

@@ -32,8 +32,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "重试中",
"ui.sessionTurn.retry.inSeconds": "{{seconds}} 秒后",
"ui.sessionTurn.error.freeUsageExceeded": "免费使用额度已用完",
"ui.sessionTurn.error.addCredits": "添加积分",
"ui.sessionTurn.status.delegating": "正在委派工作",
"ui.sessionTurn.status.planning": "正在规划下一步",

View File

@@ -32,8 +32,6 @@ export const dict = {
"ui.sessionTurn.retry.retrying": "重試中",
"ui.sessionTurn.retry.inSeconds": "{{seconds}} 秒後",
"ui.sessionTurn.error.freeUsageExceeded": "免費使用額度已用完",
"ui.sessionTurn.error.addCredits": "新增點數",
"ui.sessionTurn.status.delegating": "正在委派工作",
"ui.sessionTurn.status.planning": "正在規劃下一步",

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/util",
"version": "1.2.6",
"version": "1.2.4",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -2,7 +2,7 @@
"name": "@opencode-ai/web",
"type": "module",
"license": "MIT",
"version": "1.2.6",
"version": "1.2.4",
"scripts": {
"dev": "astro dev",
"dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev",

View File

@@ -29,7 +29,6 @@ description: يستخدم OpenCode مُنسِّقات خاصة بكل لغة.
| htmlbeautifier | .erb, .html.erb | يتوفر أمر `htmlbeautifier` |
| air | .R | يتوفر أمر `air` |
| dart | .dart | يتوفر أمر `dart` |
| dfmt | .d | يتوفر أمر `dfmt` |
| ocamlformat | .ml, .mli | يتوفر أمر `ocamlformat` وملف إعداد `.ocamlformat` |
| terraform | .tf, .tfvars | يتوفر أمر `terraform` |
| gleam | .gleam | يتوفر أمر `gleam` |

View File

@@ -27,7 +27,6 @@ OpenCode dolazi sa nekoliko ugrađenih formatera za popularne jezike i okvire. I
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` komanda dostupna |
| air | .R | `air` komanda dostupna |
| dart | .dart | `dart` komanda dostupna |
| dfmt | .d | `dfmt` komanda dostupna |
| ocamlformat | .ml, .mli | `ocamlformat` komanda dostupna i `.ocamlformat` konfiguracioni fajl |
| terraform | .tf, .tfvars | `terraform` komanda dostupna |
| gleam | .bleam | `gleam` komanda dostupna |

View File

@@ -29,7 +29,6 @@ OpenCode leveres med flere indbyggede formatere til populære sprog og rammer. N
| htmlbeautifier | .erb,.html.erb | `htmlbeautifier` kommando tilgængelig |
| luft | .R | `air` kommando tilgængelig |
| dart | .dart | `dart` kommando tilgængelig |
| dfmt | .d | `dfmt` kommando tilgængelig |
| ocamlformat | .ml,.mli | `ocamlformat` kommando tilgængelig og `.ocamlformat` config fil |
| terraform | .tf,.tfvars | `terraform` kommando tilgængelig |
| glimt | .glimt | `gleam` kommando tilgængelig |

View File

@@ -29,7 +29,6 @@ OpenCode verfügt über mehrere integrierte Formatierer für gängige Sprachen u
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier`-Befehl verfügbar |
| air | .R | `air`-Befehl verfügbar |
| dart | .dart | `dart`-Befehl verfügbar |
| dfmt | .d | `dfmt`-Befehl verfügbar |
| ocamlformat | .ml, .mli | `ocamlformat` Befehl verfügbar und `.ocamlformat` Konfigurationsdatei |
| terraform | .tf, .tfvars | `terraform`-Befehl verfügbar |
| gleam | .gleam | `gleam`-Befehl verfügbar |

View File

@@ -29,7 +29,6 @@ OpenCode viene con varios formateadores integrados para lenguajes y marcos popul
| htmlbeautifier | .erb, .html.erb | Comando `htmlbeautifier` disponible |
| air | .R | Comando `air` disponible |
| dart | .dart | Comando `dart` disponible |
| dfmt | .d | Comando `dfmt` disponible |
| ocamlformat | .ml, .mli | Comando `ocamlformat` disponible y archivo de configuración `.ocamlformat` |
| terraform | .tf, .tfvars | Comando `terraform` disponible |
| gleam | .gleam | Comando `gleam` disponible |

View File

@@ -13,32 +13,30 @@ OpenCode comes with several built-in formatters for popular languages and framew
| Formatter | Extensions | Requirements |
| -------------------- | -------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- |
| air | .R | `air` command available |
| biome | .js, .jsx, .ts, .tsx, .html, .css, .md, .json, .yaml, and [more](https://biomejs.dev/) | `biome.json(c)` config file |
| cargofmt | .rs | `cargo fmt` command available |
| clang-format | .c, .cpp, .h, .hpp, .ino, and [more](https://clang.llvm.org/docs/ClangFormat.html) | `.clang-format` config file |
| cljfmt | .clj, .cljs, .cljc, .edn | `cljfmt` command available |
| dart | .dart | `dart` command available |
| dfmt | .d | `dfmt` command available |
| gleam | .gleam | `gleam` command available |
| gofmt | .go | `gofmt` command available |
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` command available |
| ktlint | .kt, .kts | `ktlint` command available |
| mix | .ex, .exs, .eex, .heex, .leex, .neex, .sface | `mix` command available |
| nixfmt | .nix | `nixfmt` command available |
| ocamlformat | .ml, .mli | `ocamlformat` command available and `.ocamlformat` config file |
| ormolu | .hs | `ormolu` command available |
| oxfmt (Experimental) | .js, .jsx, .ts, .tsx | `oxfmt` dependency in `package.json` and an [experimental env variable flag](/docs/cli/#experimental) |
| pint | .php | `laravel/pint` dependency in `composer.json` |
| prettier | .js, .jsx, .ts, .tsx, .html, .css, .md, .json, .yaml, and [more](https://prettier.io/docs/en/index.html) | `prettier` dependency in `package.json` |
| rubocop | .rb, .rake, .gemspec, .ru | `rubocop` command available |
| biome | .js, .jsx, .ts, .tsx, .html, .css, .md, .json, .yaml, and [more](https://biomejs.dev/) | `biome.json(c)` config file |
| zig | .zig, .zon | `zig` command available |
| clang-format | .c, .cpp, .h, .hpp, .ino, and [more](https://clang.llvm.org/docs/ClangFormat.html) | `.clang-format` config file |
| ktlint | .kt, .kts | `ktlint` command available |
| ruff | .py, .pyi | `ruff` command available with config |
| rustfmt | .rs | `rustfmt` command available |
| shfmt | .sh, .bash | `shfmt` command available |
| standardrb | .rb, .rake, .gemspec, .ru | `standardrb` command available |
| terraform | .tf, .tfvars | `terraform` command available |
| cargofmt | .rs | `cargo fmt` command available |
| uv | .py, .pyi | `uv` command available |
| zig | .zig, .zon | `zig` command available |
| rubocop | .rb, .rake, .gemspec, .ru | `rubocop` command available |
| standardrb | .rb, .rake, .gemspec, .ru | `standardrb` command available |
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` command available |
| air | .R | `air` command available |
| dart | .dart | `dart` command available |
| ocamlformat | .ml, .mli | `ocamlformat` command available and `.ocamlformat` config file |
| terraform | .tf, .tfvars | `terraform` command available |
| gleam | .gleam | `gleam` command available |
| nixfmt | .nix | `nixfmt` command available |
| shfmt | .sh, .bash | `shfmt` command available |
| pint | .php | `laravel/pint` dependency in `composer.json` |
| oxfmt (Experimental) | .js, .jsx, .ts, .tsx | `oxfmt` dependency in `package.json` and an [experimental env variable flag](/docs/cli/#experimental) |
| ormolu | .hs | `ormolu` command available |
So if your project has `prettier` in your `package.json`, OpenCode will automatically use it.

View File

@@ -29,7 +29,6 @@ OpenCode est livré avec plusieurs formateurs intégrés pour les langages et fr
| htmlbeautifier | .erb, .html.erb | Commande `htmlbeautifier` disponible |
| air | .R | Commande `air` disponible |
| dart | .dart | Commande `dart` disponible |
| dfmt | .d | Commande `dfmt` disponible |
| ocamlformat | .ml, .mli | Commande `ocamlformat` disponible et fichier de configuration `.ocamlformat` |
| terraform | .tf, .tfvars | Commande `terraform` disponible |
| gleam | .gleam | Commande `gleam` disponible |

View File

@@ -29,7 +29,6 @@ OpenCode include diversi formattatori integrati per linguaggi e framework popola
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` command available |
| air | .R | `air` command available |
| dart | .dart | `dart` command available |
| dfmt | .d | `dfmt` command available |
| ocamlformat | .ml, .mli | `ocamlformat` command available and `.ocamlformat` config file |
| terraform | .tf, .tfvars | `terraform` command available |
| gleam | .gleam | `gleam` command available |

View File

@@ -29,7 +29,6 @@ OpenCode には、一般的な言語およびフレームワーク用のいく
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` command available |
| air | .R | `air` command available |
| dart | .dart | `dart` command available |
| dfmt | .d | `dfmt` command available |
| ocamlformat | .ml, .mli | `ocamlformat` command available and `.ocamlformat` config file |
| terraform | .tf, .tfvars | `terraform` command available |
| gleam | .gleam | `gleam` command available |

View File

@@ -28,7 +28,6 @@ opencode는 인기있는 언어 및 프레임 워크에 대한 몇 가지 내장
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` 명령 사용 가능 |
| Air | .R | `air` 명령 사용 가능 |
| Dart | 다트 | `dart` 명령 |
| dfmt | .d | `dfmt` 명령 사용 가능 |
| ocamlformat | .ml, .mli | `ocamlformat` 명령 사용 가능·`.ocamlformat` 설정 파일 |
| Terraform | .tf, .tfvars | `terraform` 명령 사용 가능 |
| gleam | .gleam | `gleam` 명령 사용 가능 |

View File

@@ -29,7 +29,6 @@ OpenCode kommer med flere innebygde formattere for populære språk og rammeverk
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` kommando tilgjengelig |
| air | .R | `air` kommando tilgjengelig |
| dart | .dart | `dart` kommando tilgjengelig |
| dfmt | .d | `dfmt` kommando tilgjengelig |
| ocamlformat | .ml, .mli | `ocamlformat` kommando tilgjengelig og `.ocamlformat` konfigurasjonsfil |
| terraform | .tf, .tfvars | `terraform` kommando tilgjengelig |
| gleam | .gleam | `gleam` kommando tilgjengelig |

View File

@@ -29,7 +29,6 @@ OpenCode zawiera kilka wbudowanych formaterów dla popularnych języków i frame
| htmlbeautifier | .erb, .html.erb | Dostępne polecenie `htmlbeautifier` |
| air | .R | Dostępne polecenie `air` |
| dart | .dart | Dostępne polecenie `dart` |
| dfmt | .d | Dostępne polecenie `dfmt` |
| ocamlformat | .ml, .mli | Dostępne polecenie `ocamlformat` i plik konfiguracyjny `.ocamlformat` |
| terraform | .tf, .tfvars | Dostępne polecenie `terraform` |
| gleam | .gleam | Dostępne polecenie `gleam` |

View File

@@ -29,7 +29,6 @@ O opencode vem com vários formatadores integrados para linguagens e frameworks
| htmlbeautifier | .erb, .html.erb | Comando `htmlbeautifier` disponível |
| air | .R | Comando `air` disponível |
| dart | .dart | Comando `dart` disponível |
| dfmt | .d | Comando `dfmt` disponível |
| ocamlformat | .ml, .mli | Comando `ocamlformat` disponível e arquivo de configuração `.ocamlformat` |
| terraform | .tf, .tfvars | Comando `terraform` disponível |
| gleam | .gleam | Comando `gleam` disponível |

View File

@@ -29,7 +29,6 @@ opencode поставляется с несколькими встроенным
| htmlbeautifier | .erb, .html.erb | Доступна команда `htmlbeautifier` |
| air | .R | Доступна команда `air` |
| dart | .dart | Доступна команда `dart` |
| dfmt | .d | Доступна команда `dfmt` |
| ocamlformat | .ml, .mli | Доступна команда `ocamlformat` и файл конфигурации `.ocamlformat`. |
| terraform | .tf, .tfvars | Доступна команда `terraform` |
| gleam | .gleam | Доступна команда `gleam` |

View File

@@ -1,29 +1,29 @@
---
title: Zen
description: Подобранный список моделей, предоставленный OpenCode.
description: Кураторский список моделей, предоставленный opencode.
---
import config from "../../../../config.mjs"
export const console = config.console
export const email = `mailto:${config.email}`
OpenCode Zen — это список протестированных и проверенных моделей, предоставленный командой OpenCode.
OpenCode Zen — это список протестированных и проверенных моделей, предоставленный командой opencode.
:::note
OpenCode Zen в настоящее время находится в стадии бета-тестирования.
:::
Zen работает как любой другой провайдер в OpenCode. Вы входите в OpenCode Zen и получаете
Zen работает как любой другой провайдер в opencode. Вы входите в OpenCode Zen и получаете
ваш ключ API. Это **совершенно необязательно**, и вам не обязательно использовать его для использования
OpenCode.
Открытый код.
---
## Предыстория
Существует большое количество моделей, но лишь некоторые из них
хорошо работают в качестве кодинг-агентов. Кроме того, большинство провайдеров
настроены совсем по-другому; так что вы получите совсем другую производительность и качество.
Существует большое количество моделей, но лишь некоторые из них.
эти модели хорошо работают в качестве агентов кодирования. Кроме того, большинство провайдеров
настроен совсем по-другому; так что вы получите совсем другую производительность и качество.
:::tip
Мы протестировали избранную группу моделей и поставщиков, которые хорошо работают с opencode.
@@ -36,9 +36,10 @@ OpenCode.
1. Мы протестировали избранную группу моделей и поговорили с их командами о том, как
лучше всего запустить их.
2. Затем мы поработали с несколькими поставщиками услуг, чтобы убедиться, что они обслуживаются правильно.
3. Наконец, мы сравнили комбинацию модель/провайдер и составили
список, который мы с удовольствием рекомендуем.
2. Затем мы поработали с несколькими поставщиками услуг, чтобы убедиться, что они обслуживаются.
правильно.
3. Наконец, мы сравнили комбинацию модель/провайдер и пришли к выводу, что
со списком, который мы с удовольствием рекомендуем.
OpenCode Zen — это шлюз искусственного интеллекта, который дает вам доступ к этим моделям.
@@ -46,10 +47,10 @@ OpenCode Zen — это шлюз искусственного интеллект
## Как это работает
OpenCode Zen работает так же, как и любой другой поставщик OpenCode.
OpenCode Zen работает так же, как и любой другой поставщик opencode.
1. Вы входите в систему **<a href={console}>OpenCode Zen</a>**, добавляете платежные
данные и копируете свой ключ API.
1. Вы входите в систему **<a href={console}>OpenCode Zen</a>**, добавляете свой платежный аккаунт.
подробности и скопируйте свой ключ API.
2. Вы запускаете команду `/connect` в TUI, выбираете OpenCode Zen и вставляете свой ключ API.
3. Запустите `/models` в TUI, чтобы просмотреть список рекомендуемых нами моделей.
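For scripting outside the TUI, the same catalog behind `/models` can also be read from the `https://opencode.ai/zen/v1/models` endpoint referenced further down this page. A minimal TypeScript sketch, assuming the endpoint accepts a Bearer token and returns JSON; the environment variable name here is only an example:

```ts
// Minimal sketch: list the Zen model catalog over HTTP.
// Assumptions (not stated in the docs above): the endpoint takes a Bearer
// token in the Authorization header and responds with JSON.
const apiKey = process.env.OPENCODE_ZEN_API_KEY // example variable name
if (!apiKey) throw new Error("set OPENCODE_ZEN_API_KEY first")

const res = await fetch("https://opencode.ai/zen/v1/models", {
  headers: { Authorization: `Bearer ${apiKey}` },
})
if (!res.ok) throw new Error(`models request failed: ${res.status}`)

// Print the raw catalog; the exact response shape is not documented here.
console.log(JSON.stringify(await res.json(), null, 2))
```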
@@ -81,10 +82,8 @@ OpenCode Zen работает так же, как и любой другой п
| Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Gemini 3 Pro | gemini-3-pro | `https://opencode.ai/zen/v1/models/gemini-3-pro` | `@ai-sdk/google` |
| Gemini 3 Flash | gemini-3-flash | `https://opencode.ai/zen/v1/models/gemini-3-flash` | `@ai-sdk/google` |
| MiniMax M2.5 | minimax-m2.5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| MiniMax M2.5 Free | minimax-m2.5-free | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| MiniMax M2.1 | minimax-m2.1 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| GLM 5 | glm-5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| MiniMax M2.1 Free | minimax-m2.1-free | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| GLM 4.7 | glm-4.7 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| GLM 4.7 Free | glm-4.7-free | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| GLM 4.6 | glm-4.6 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
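The last column names the AI SDK package that matches each endpoint. As a rough illustration, a TypeScript sketch for one of the `/chat/completions` models, assuming `createOpenAICompatible` takes the key via `apiKey` and that the base URL is the part of the endpoint before `/chat/completions`; the environment variable name is made up:

```ts
import { createOpenAICompatible } from "@ai-sdk/openai-compatible"
import { generateText } from "ai"

// Sketch only: point the OpenAI-compatible provider at the Zen base URL;
// the SDK appends /chat/completions, matching the endpoint in the table above.
const zen = createOpenAICompatible({
  name: "opencode-zen",
  baseURL: "https://opencode.ai/zen/v1",
  apiKey: process.env.OPENCODE_ZEN_API_KEY, // example variable name
})

const { text } = await generateText({
  model: zen("glm-4.6"), // any model routed through /chat/completions above
  prompt: "Say hello from Zen.",
})

console.log(text)
```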
@@ -118,14 +117,11 @@ https://opencode.ai/zen/v1/models
| Модель | Вход | Выход | Кэшированное чтение | Кэшированная запись |
| -------------------------------------- | --------- | --------- | ------------------- | ------------------- |
| Big Pickle | Бесплатно | Бесплатно | Бесплатно | - |
| MiniMax M2.5 Free | Бесплатно | Бесплатно | Бесплатно | - |
| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | - |
| MiniMax M2.1 Free | Бесплатно | Бесплатно | Бесплатно | - |
| MiniMax M2.1 | $0.30 | $1.20 | $0.10 | - |
| GLM 5 | $1.00 | $3.20 | $0.20 | - |
| GLM 4.7 | $0.60 | $2.20 | $0.10 | - |
| GLM 4.7 Free | Бесплатно | Бесплатно | Бесплатно | - |
| GLM 4.7 | $0.60 | $2.20 | $0.10 | - |
| GLM 4.6 | $0.60 | $2.20 | $0.10 | - |
| GLM 4.7 Free | Бесплатно | Бесплатно | Бесплатно | - |
| Kimi K2.5 Free | Бесплатно | Бесплатно | Бесплатно | - |
| Kimi K2.5 | $0.60 | $3.00 | $0.08 | - |
| Kimi K2 Thinking | $0.40 | $2.50 | - | - |
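The listed rates follow the usual per-million-token convention; the table itself does not restate the unit, so treat that as an assumption. A small worked example in TypeScript using the Kimi K2.5 rates from the table:

```ts
// Illustrative only: estimate a request's cost from the table above,
// assuming the listed prices are USD per 1,000,000 tokens.
const PER_MILLION = 1_000_000

function estimateCost(
  inputTokens: number,
  outputTokens: number,
  inputRate: number, // USD per 1M input tokens
  outputRate: number, // USD per 1M output tokens
): number {
  return (inputTokens / PER_MILLION) * inputRate + (outputTokens / PER_MILLION) * outputRate
}

// Kimi K2.5: $0.60 input, $3.00 output.
// 200k input + 20k output tokens => 0.2 * 0.60 + 0.02 * 3.00 = $0.18
console.log(estimateCost(200_000, 20_000, 0.6, 3.0).toFixed(2)) // "0.18"
```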
@@ -162,9 +158,10 @@ https://opencode.ai/zen/v1/models
Бесплатные модели:
- Kimi K2.5 Free доступен на OpenCode в течение ограниченного времени. Команда использует это время для сбора отзывов и улучшения модели.
- MiniMax M2.5 Free доступен на OpenCode в течение ограниченного времени. Команда использует это время для сбора отзывов и улучшения модели.
- Big Pickle — это стелс-модель, которая доступна бесплатно на OpenCode в течение ограниченного времени. Команда использует это время для сбора отзывов и улучшения модели.
- GLM 4.7 Free доступен на opencode в течение ограниченного времени. Команда использует это время для сбора отзывов и улучшения модели.
- Kimi K2.5 Free доступен на opencode в течение ограниченного времени. Команда использует это время для сбора отзывов и улучшения модели.
- MiniMax M2.1 Free доступен на opencode в течение ограниченного времени. Команда использует это время для сбора отзывов и улучшения модели.
- Big Pickle — это стелс-модель, которая доступна бесплатно на opencode в течение ограниченного времени. Команда использует это время для сбора отзывов и улучшения модели.
<a href={email}>Свяжитесь с нами</a>, если у вас есть вопросы.
@@ -174,7 +171,7 @@ https://opencode.ai/zen/v1/models
Если ваш баланс упадет ниже 5 долларов, Zen автоматически пополнит 20 долларов.
Вы можете изменить сумму автопополнения. Вы также можете полностью отключить автопополнение.
Вы можете изменить сумму автопополнения. Вы также можете полностью отключить автоматическую перезагрузку.
---
@@ -184,7 +181,7 @@ https://opencode.ai/zen/v1/models
член вашей команды.
Например, предположим, что вы установили ежемесячный лимит использования в размере 20 долларов США, Zen не будет использовать
более 20 долларов в месяц. Но если у вас включено автопополнение, Zen может
более 20 долларов в месяц. Но если у вас включена автоматическая перезагрузка, Дзен может оказаться
взимать с вас более 20 долларов США, если ваш баланс опускается ниже 5 долларов США.
---
@@ -194,8 +191,9 @@ https://opencode.ai/zen/v1/models
Все наши модели размещены в США. Наши поставщики придерживаются политики нулевого хранения и не используют ваши данные для обучения моделей, за следующими исключениями:
- Big Pickle: во время бесплатного периода собранные данные могут быть использованы для улучшения модели.
- GLM 4.7 Free: в течение бесплатного периода собранные данные могут использоваться для улучшения модели.
- Kimi K2.5 Free: в течение бесплатного периода собранные данные могут использоваться для улучшения модели.
- MiniMax M2.5 Free: в течение бесплатного периода собранные данные могут использоваться для улучшения модели.
- MiniMax M2.1 Free: в течение бесплатного периода собранные данные могут использоваться для улучшения модели.
- API OpenAI: запросы хранятся в течение 30 дней в соответствии с [Политикой данных OpenAI](https://platform.openai.com/docs/guides/your-data).
- API-интерфейсы Anthropic: запросы хранятся в течение 30 дней в соответствии с [Политикой данных Anthropic](https://docs.anthropic.com/en/docs/claude-code/data-usage).
@@ -203,15 +201,15 @@ https://opencode.ai/zen/v1/models
## Для команд
Zen также отлично подходит для команд. Вы можете приглашать товарищей по команде, назначать роли, выбирать
Дзен также отлично подходит для команд. Вы можете приглашать товарищей по команде, назначать роли, курировать
модели, которые использует ваша команда, и многое другое.
:::note
Рабочие пространства в настоящее время бесплатны для команд в рамках бета-тестирования.
:::
Управление вашим рабочим пространством в настоящее время бесплатно для команд в рамках бета-тестирования. Мы вскоре
поделимся более подробной информацией о ценах.
Управление вашим рабочим пространством в настоящее время бесплатно для команд в рамках бета-тестирования. Мы будем
скоро поделимся более подробной информацией о ценах.
---
@@ -235,7 +233,7 @@ Zen также отлично подходит для команд. Вы мож
---
### Использование собственных API-ключей
### Принесите свой ключ
Вы можете использовать свои собственные ключи API OpenAI или Anthropic, сохраняя при этом доступ к другим моделям в Zen.
@@ -250,7 +248,7 @@ Zen также отлично подходит для команд. Вы мож
Мы создали OpenCode Zen, чтобы:
1. **Сравнить** лучшие модели/поставщики кодинг-агентов.
2. Получить доступ к вариантам **наивысшего качества**, не снижая производительность и не обращаясь к более дешевым поставщикам.
3. Передавать **снижение цен**, продавая по себестоимости; поэтому единственная наценка предназначена для покрытия наших комиссий за обработку.
4. Исключить **привязку**, позволяя использовать его с любым другим кодинг-агентом. И всегда позволяя вам использовать любого другого провайдера с OpenCode.
1. **Сравните** лучшие модели/поставщики агентов кодирования.
2. Получите доступ к вариантам **самого высокого качества**, не снижая производительность и не обращаясь к более дешевым поставщикам.
3. Не допускайте **падения цен**, продавая по себестоимости; поэтому единственная надбавка предназначена для покрытия наших сборов за обработку.
4. Не допускайте **привязки**, позволяя использовать его с любым другим агентом кодирования. И всегда позволяйте вам использовать любого другого провайдера с opencode.

View File

@@ -29,7 +29,6 @@ OpenCode มาพร้อมกับฟอร์แมตเตอร์ใ
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` คำสั่งใช้ได้ |
| air | .r | `air` คำสั่งใช้ได้ |
| dart | .dart | `dart` คำสั่งใช้ได้ |
| dfmt | .d | `dfmt` คำสั่งใช้ได้ |
| ocamlformat | .ml, .mli | มีคำสั่ง `ocamlformat` และไฟล์ปรับแต่ง `.ocamlformat` |
| terraform | .tf, .tfvars | `terraform` คำสั่งใช้ได้ |
| gleam | .gleam | `gleam` คำสั่งใช้ได้ |

View File

@@ -29,7 +29,6 @@ opencode, popüler diller ve çerçeveler için çeşitli yerleşik biçimlendir
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` komutu mevcut |
| air | .R | `air` komutu mevcut |
| dart | .dart | `dart` komutu mevcut |
| dfmt | .d | `dfmt` komutu mevcut |
| ocamlformat | .ml, .mli | `ocamlformat` komutu mevcut ve `.ocamlformat` yapılandırma dosyası |
| terraform | .tf, .tfvars | `terraform` komutu mevcut |
| gleam | .gleam | `gleam` komutu mevcut |

View File

@@ -29,7 +29,6 @@ opencode 附带了多个适用于流行语言和框架的内置格式化程序
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` command available |
| air | .R | `air` command available |
| dart | .dart | `dart` command available |
| dfmt | .d | `dfmt` command available |
| ocamlformat | .ml, .mli | `ocamlformat` command available and `.ocamlformat` config file |
| terraform | .tf, .tfvars | `terraform` command available |
| gleam | .gleam | `gleam` command available |

View File

@@ -116,39 +116,39 @@ https://opencode.ai/zen/v1/models
| 模型 | 输入 | 输出 | 缓存读取 | 缓存写入 |
| ---------------------------------- | ---------- | ---------- | ---------- | ---------- |
| Big Pickle | 免费 | 免费 | 免费 | - |
| MiniMax M2.1 Free | 免费 | 免费 | 免费 | - |
| MiniMax M2.1 | 0.30 美元 | 1.20 美元 | 0.10 美元 | - |
| GLM 4.7 Free | 免费 | 免费 | 免费 | - |
| 大泡菜 | 免费 | 免费 | 免费 | - |
| MiniMax M2.1 免费 | 免费 | 免费 | 免费 | - |
| 迷你最大M2.1 | 0.30 美元 | 1.20 美元 | 0.10 美元 | - |
| GLM 4.7 免费 | 免费 | 免费 | 免费 | - |
| GLM 4.7 | 0.60 美元 | 2.20 美元 | 0.10 美元 | - |
| GLM 4.6 | 0.60 美元 | 2.20 美元 | 0.10 美元 | - |
| Kimi K2.5 Free | 免费 | 免费 | 免费 | - |
| Kimi K2.5 | 0.60 美元 | 3.00 美元 | 0.08 美元 | - |
| Kimi K2 Thinking | 0.40 美元 | 2.50 美元 | - | - |
| Kimi K2 | 0.40 美元 | 2.50 美元 | - | - |
| Qwen3 Coder 480B | 0.45 美元 | 1.50 美元 | - | - |
| Claude Sonnet 4.5(≤ 200K Tokens | 3.00 美元 | 15.00 美元 | 0.30 美元 | 3.75 美元 |
| Claude Sonnet 4.5> 200K Tokens | 6.00 美元 | 22.50 美元 | 0.60 美元 | 7.50 美元 |
| Claude Sonnet 4≤ 200K Tokens | 3.00 美元 | 15.00 美元 | 0.30 美元 | 3.75 美元 |
| Kimi K2.5 免费 | 免费 | 免费 | 免费 | - |
| 作为K2.5 | 0.60 美元 | $3.00 | 0.08 美元 | - |
| Kimi K2 思考 | 0.40 美元 | 2.50 美元 | - | - |
| 作为K2 | 0.40 美元 | 2.50 美元 | - | - |
| Qwen3 编码器 480B | 0.45 美元 | 1.50 美元 | - | - |
| Claude Sonnet 4.5(≤ 200K Tokens | $3.00 | 15.00 美元 | 0.30 美元 | 3.75 美元 |
| 克劳德十四行诗 4.5> 200K 代币) | 6.00 美元 | 22.50 美元 | 0.60 美元 | 7.50 美元 |
| Claude Sonnet 4≤ 200K Tokens | $3.00 | 15.00 美元 | 0.30 美元 | 3.75 美元 |
| Claude Sonnet 4> 200K Tokens | 6.00 美元 | 22.50 美元 | 0.60 美元 | 7.50 美元 |
| Claude Haiku 4.5 | 1.00 美元 | 5.00 美元 | 0.10 美元 | 1.25 美元 |
| Claude Haiku 3.5 | 0.80 美元 | 4.00 美元 | 0.08 美元 | 1.00 美元 |
| Claude Opus 4.6(≤ 200K Tokens | 5.00 美元 | 25.00 美元 | 0.50 美元 | 6.25 美元 |
| Claude 俳句 4.5 | 1.00 美元 | 5.00 美元 | 0.10 美元 | 1.25 美元 |
| Claude 俳句 3.5 | 0.80 美元 | 4.00 美元 | 0.08 美元 | 1.00 美元 |
| 克劳德作品4.6(≤ 200K 代币) | 5.00 美元 | 25.00 美元 | 0.50 美元 | 6.25 美元 |
| Claude Opus 4.6> 200K Tokens | 10.00 美元 | 37.50 美元 | 1.00 美元 | 12.50 美元 |
| Claude Opus 4.5 | 5.00 美元 | 25.00 美元 | 0.50 美元 | 6.25 美元 |
| Claude Opus 4.1 | 15.00 美元 | 75.00 美元 | 1.50 美元 | 18.75 美元 |
| Gemini 3 Pro≤20万 Tokens | 2.00 美元 | 12.00 美元 | 0.20 美元 | - |
| Gemini 3 Pro>20万 Tokens | 4.00 美元 | 18.00 美元 | 0.40 美元 | - |
| Gemini 3 Flash | 0.50 美元 | 3.00 美元 | 0.05 美元 | - |
| Claude 工作 4.5 | 5.00 美元 | 25.00 美元 | 0.50 美元 | 6.25 美元 |
| Claude 工作 4.1 | 15.00 美元 | 75.00 美元 | 1.50 美元 | 18.75 美元 |
| Gemini 3 Pro≤20万代币) | 2.00 美元 | 12.00 美元 | 0.20 美元 | - |
| Gemini 3 Pro>20万代币) | 4.00 美元 | 18.00 美元 | 0.40 美元 | - |
| 双子座 3 闪光 | 0.50 美元 | $3.00 | 0.05 美元 | - |
| GPT 5.2 | 1.75 美元 | 14.00 美元 | 0.175 美元 | - |
| GPT 5.2 Codex | 1.75 美元 | 14.00 美元 | 0.175 美元 | - |
| GPT 5.2 法典 | 1.75 美元 | 14.00 美元 | 0.175 美元 | - |
| GPT 5.1 | 1.07 美元 | 8.50 美元 | 0.107 美元 | - |
| GPT 5.1 Codex | 1.07 美元 | 8.50 美元 | 0.107 美元 | - |
| GPT 5.1 Codex Max | 1.25 美元 | 10.00 美元 | 0.125 美元 | - |
| GPT 5.1 Codex Mini | 0.25 美元 | 2.00 美元 | 0.025 美元 | - |
| GPT 5.1 法典 | 1.07 美元 | 8.50 美元 | 0.107 美元 | - |
| GPT 5.1 法典最大 | 1.25 美元 | 10.00 美元 | 0.125 美元 | - |
| GPT 5.1 迷你版 | 0.25 美元 | 2.00 美元 | 0.025 美元 | - |
| GPT 5 | 1.07 美元 | 8.50 美元 | 0.107 美元 | - |
| GPT 5 Codex | 1.07 美元 | 8.50 美元 | 0.107 美元 | - |
| GPT 5 Nano | 免费 | 免费 | 免费 | - |
| GPT 5 法典 | 1.07 美元 | 8.50 美元 | 0.107 美元 | - |
| GPT 5 奈米 | 免费 | 免费 | 免费 | - |
您可能会在您的使用历史记录中注意到*Claude Haiku 3.5*。这是一个[低成本模型](/docs/config/#models),用于生成会话标题。
@@ -216,8 +216,8 @@ Zen 也非常适合团队使用。您可以邀请您可以邀请队友,分配
您可以邀请团队成员到您的工作区并分配角色:
- **管理员**管理模型、成员、API 密钥和计费/账单
- **成员**:仅管理自己的 API 密钥
- **管理员**管理模型、成员、API 密钥和设备
- **成员**:仅管理自己的API 密钥
管理员还可以为每个成员设置每月支出限额,以控制成本。

View File

@@ -29,7 +29,6 @@ opencode 附帶了多個適用於流行語言和框架的內建格式化程式
| htmlbeautifier | .erb, .html.erb | `htmlbeautifier` 指令可用 |
| air | .R | `air` 指令可用 |
| dart | .dart | `dart` 指令可用 |
| dfmt | .d | `dfmt` 指令可用 |
| ocamlformat | .ml, .mli | `ocamlformat` 指令可用,且存在 `.ocamlformat` 設定檔 |
| terraform | .tf, .tfvars | `terraform` 指令可用 |
| gleam | .gleam | `gleam` 指令可用 |

View File

@@ -2,7 +2,7 @@
"name": "opencode",
"displayName": "opencode",
"description": "opencode for VS Code",
"version": "1.2.6",
"version": "1.2.4",
"publisher": "sst-dev",
"repository": {
"type": "git",