Compare commits

..

10 Commits

Author SHA1 Message Date
Kit Langton
e61fe8e5f8 feat: unwrap file namespaces to flat exports + barrel 2026-04-15 23:49:51 -04:00
opencode-agent[bot]
225a769411 chore: generate 2026-04-16 03:42:25 +00:00
Kit Langton
0e20382396 fix: resolve circular sibling imports causing runtime ReferenceError (#22752) 2026-04-15 23:41:34 -04:00
Kit Langton
509bc11f81 feat: unwrap lsp namespaces to flat exports + barrel (#22748) 2026-04-15 23:30:52 -04:00
Kit Langton
f24207844f feat: unwrap storage namespaces to flat exports + barrel (#22747) 2026-04-15 23:30:49 -04:00
Kit Langton
1ca257e356 feat: unwrap config namespaces to flat exports + barrel (#22746) 2026-04-15 23:29:14 -04:00
Kit Langton
d4cfbd020d feat: unwrap effect namespaces to flat exports + barrel (#22745) 2026-04-15 23:29:12 -04:00
Kit Langton
581d5208ca feat: unwrap share namespaces to flat exports + barrel (#22744) 2026-04-15 23:28:46 -04:00
Kit Langton
a427a28fa9 feat: unwrap project namespaces to flat exports + barrel (#22743) 2026-04-15 23:28:46 -04:00
opencode-agent[bot]
0beaf04df5 chore: generate 2026-04-16 03:28:30 +00:00
139 changed files with 6130 additions and 6134 deletions

View File

@@ -2,7 +2,7 @@
import { z } from "zod"
import { Config } from "../src/config"
-import { TuiConfig } from "../src/config/tui"
+import { TuiConfig } from "../src/config"
function generate(schema: z.ZodType) {
const result = z.toJSONSchema(schema, {

View File

@@ -1,7 +1,7 @@
import { eq } from "drizzle-orm"
import { Effect, Layer, Option, Schema, Context } from "effect"
-import { Database } from "@/storage/db"
+import { Database } from "@/storage"
import { AccountStateTable, AccountTable } from "./account.sql"
import { AccessToken, AccountID, AccountRepoError, Info, OrgID, RefreshToken } from "./schema"
import { normalizeServerUrl } from "./url"

View File

@@ -1,11 +1,11 @@
import type { Argv } from "yargs"
import { spawn } from "child_process"
-import { Database } from "../../storage/db"
+import { Database } from "../../storage"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { Database as BunDatabase } from "bun:sqlite"
import { UI } from "../ui"
import { cmd } from "./cmd"
-import { JsonMigration } from "../../storage/json-migration"
+import { JsonMigration } from "../../storage"
import { EOL } from "os"
import { errorMessage } from "../../util/error"

View File

@@ -1,7 +1,7 @@
import { EOL } from "os"
import { AppRuntime } from "@/effect/app-runtime"
import { File } from "../../../file"
-import { Ripgrep } from "@/file/ripgrep"
+import { Ripgrep } from "@/file"
import { bootstrap } from "../../bootstrap"
import { cmd } from "../cmd"

View File

@@ -1,7 +1,7 @@
import { EOL } from "os"
import { Effect, Stream } from "effect"
import { AppRuntime } from "../../../effect/app-runtime"
-import { Ripgrep } from "../../../file/ripgrep"
+import { Ripgrep } from "../../../file"
import { Instance } from "../../../project/instance"
import { bootstrap } from "../../bootstrap"
import { cmd } from "../cmd"

View File

@@ -1,5 +1,5 @@
import { EOL } from "os"
-import { Project } from "../../../project/project"
+import { Project } from "../../../project"
import { Log } from "../../../util"
import { cmd } from "../cmd"

View File

@@ -21,7 +21,7 @@ import { cmd } from "./cmd"
import { ModelsDev } from "../../provider/models"
import { Instance } from "@/project/instance"
import { bootstrap } from "../bootstrap"
-import { SessionShare } from "@/share/session"
+import { SessionShare } from "@/share"
import { Session } from "../../session"
import type { SessionID } from "../../session/schema"
import { MessageID, PartID } from "../../session/schema"

View File

@@ -4,10 +4,10 @@ import { Session } from "../../session"
import { MessageV2 } from "../../session/message-v2"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
-import { Database } from "../../storage/db"
+import { Database } from "../../storage"
import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
import { Instance } from "../../project/instance"
-import { ShareNext } from "../../share/share-next"
+import { ShareNext } from "../../share"
import { EOL } from "os"
import { Filesystem } from "../../util"
import { AppRuntime } from "@/effect/app-runtime"

View File

@@ -1,7 +1,7 @@
import { intro, log, outro, spinner } from "@clack/prompts"
import type { Argv } from "yargs"
-import { ConfigPaths } from "../../config/paths"
+import { ConfigPaths } from "../../config"
import { Global } from "../../global"
import { installPlugin, patchPluginConfig, readPluginManifest } from "../../plugin/install"
import { resolvePluginTarget } from "../../plugin/shared"

View File

@@ -2,9 +2,9 @@ import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { Session } from "../../session"
import { bootstrap } from "../bootstrap"
-import { Database } from "../../storage/db"
+import { Database } from "../../storage"
import { SessionTable } from "../../session/session.sql"
-import { Project } from "../../project/project"
+import { Project } from "../../project"
import { Instance } from "../../project/instance"
import { AppRuntime } from "@/effect/app-runtime"

View File

@@ -57,7 +57,7 @@ import { ArgsProvider, useArgs, type Args } from "./context/args"
import open from "open"
import { PromptRefProvider, usePromptRef } from "./context/prompt"
import { TuiConfigProvider, useTuiConfig } from "./context/tui-config"
-import { TuiConfig } from "@/config/tui"
+import { TuiConfig } from "@/config"
import { createTuiApi, TuiPluginRuntime, type RouteMap } from "./plugin"
import { FormatError, FormatUnknownError } from "@/cli/error"

View File

@@ -2,7 +2,7 @@ import { cmd } from "../cmd"
import { UI } from "@/cli/ui"
import { tui } from "./app"
import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
-import { TuiConfig } from "@/config/tui"
+import { TuiConfig } from "@/config"
import { Instance } from "@/project/instance"
import { existsSync } from "fs"

View File

@@ -1,7 +1,7 @@
import { createMemo } from "solid-js"
import { Keybind } from "@/util"
import { pipe, mapValues } from "remeda"
-import type { TuiConfig } from "@/config/tui"
+import type { TuiConfig } from "@/config"
import type { ParsedKey, Renderable } from "@opentui/core"
import { createStore } from "solid-js/store"
import { useKeyboard, useRenderer } from "@opentui/solid"

View File

@@ -1,4 +1,4 @@
-import { TuiConfig } from "@/config/tui"
+import { TuiConfig } from "@/config"
import { createSimpleContext } from "./helper"
export const { use: useTuiConfig, provider: TuiConfigProvider } = createSimpleContext({

View File

@@ -8,7 +8,7 @@ import type { useSDK } from "@tui/context/sdk"
import type { useSync } from "@tui/context/sync"
import type { useTheme } from "@tui/context/theme"
import { Dialog as DialogUI, type useDialog } from "@tui/ui/dialog"
-import type { TuiConfig } from "@/config/tui"
+import type { TuiConfig } from "@/config"
import { createPluginKeybind } from "../context/plugin-keybinds"
import type { useKV } from "../context/kv"
import { DialogAlert } from "../ui/dialog-alert"

View File

@@ -14,7 +14,7 @@ import path from "path"
import { fileURLToPath } from "url"
import { Config } from "@/config"
-import { TuiConfig } from "@/config/tui"
+import { TuiConfig } from "@/config"
import { Log } from "@/util"
import { errorData, errorMessage } from "@/util/error"
import { isRecord } from "@/util/record"

View File

@@ -13,7 +13,7 @@ import { Filesystem } from "@/util"
import type { GlobalEvent } from "@opencode-ai/sdk/v2"
import type { EventSource } from "./context/sdk"
import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
-import { TuiConfig } from "@/config/tui"
+import { TuiConfig } from "@/config"
import { Instance } from "@/project/instance"
import { writeHeapSnapshot } from "v8"

View File

@@ -1,5 +1,5 @@
import { MacOSScrollAccel, type ScrollAcceleration } from "@opentui/core"
-import type { TuiConfig } from "@/config/tui"
+import type { TuiConfig } from "@/config"
export class CustomSpeedScroll implements ScrollAcceleration {
constructor(private speed: number) {}

View File

@@ -1,5 +1,5 @@
import { AccountServiceError, AccountTransportError } from "@/account"
-import { ConfigMarkdown } from "@/config/markdown"
+import { ConfigMarkdown } from "@/config"
import { errorFormat } from "@/util/error"
import { Config } from "../config"
import { MCP } from "../mcp"

View File

@@ -19,9 +19,9 @@ import {
printParseErrorCode,
} from "jsonc-parser"
import { Instance, type InstanceContext } from "../project/instance"
-import { LSPServer } from "../lsp/server"
+import * as LSPServer from "../lsp/server"
import { Installation } from "@/installation"
-import { ConfigMarkdown } from "./markdown"
+import * as ConfigMarkdown from "./markdown"
import { existsSync } from "fs"
import { Bus } from "@/bus"
import { GlobalBus } from "@/bus/global"
@@ -29,7 +29,7 @@ import { Event } from "../server/event"
import { Glob } from "@opencode-ai/shared/util/glob"
import { Account } from "@/account"
import { isRecord } from "@/util/record"
-import { ConfigPaths } from "./paths"
+import * as ConfigPaths from "./paths"
import type { ConsoleState } from "./console-state"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { InstanceState } from "@/effect"

View File

@@ -1 +1,4 @@
export * as Config from "./config"
export * as ConfigMarkdown from "./markdown"
export * as ConfigPaths from "./paths"
export * as TuiConfig from "./tui"

View File

@@ -3,97 +3,95 @@ import matter from "gray-matter"
import { z } from "zod"
import { Filesystem } from "../util"
export namespace ConfigMarkdown {
export const FILE_REGEX = /(?<![\w`])@(\.?[^\s`,.]*(?:\.[^\s`,.]+)*)/g
export const SHELL_REGEX = /!`([^`]+)`/g
export const FILE_REGEX = /(?<![\w`])@(\.?[^\s`,.]*(?:\.[^\s`,.]+)*)/g
export const SHELL_REGEX = /!`([^`]+)`/g
export function files(template: string) {
return Array.from(template.matchAll(FILE_REGEX))
}
export function shell(template: string) {
return Array.from(template.matchAll(SHELL_REGEX))
}
// other coding agents like claude code allow invalid yaml in their
// frontmatter, we need to fallback to a more permissive parser for those cases
export function fallbackSanitization(content: string): string {
const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---/)
if (!match) return content
const frontmatter = match[1]
const lines = frontmatter.split(/\r?\n/)
const result: string[] = []
for (const line of lines) {
// skip comments and empty lines
if (line.trim().startsWith("#") || line.trim() === "") {
result.push(line)
continue
}
// skip lines that are continuations (indented)
if (line.match(/^\s+/)) {
result.push(line)
continue
}
// match key: value pattern
const kvMatch = line.match(/^([a-zA-Z_][a-zA-Z0-9_]*)\s*:\s*(.*)$/)
if (!kvMatch) {
result.push(line)
continue
}
const key = kvMatch[1]
const value = kvMatch[2].trim()
// skip if value is empty, already quoted, or uses block scalar
if (value === "" || value === ">" || value === "|" || value.startsWith('"') || value.startsWith("'")) {
result.push(line)
continue
}
// if value contains a colon, convert to block scalar
if (value.includes(":")) {
result.push(`${key}: |-`)
result.push(` ${value}`)
continue
}
result.push(line)
}
const processed = result.join("\n")
return content.replace(frontmatter, () => processed)
}
export async function parse(filePath: string) {
const template = await Filesystem.readText(filePath)
try {
const md = matter(template)
return md
} catch {
try {
return matter(fallbackSanitization(template))
} catch (err) {
throw new FrontmatterError(
{
path: filePath,
message: `${filePath}: Failed to parse YAML frontmatter: ${err instanceof Error ? err.message : String(err)}`,
},
{ cause: err },
)
}
}
}
export const FrontmatterError = NamedError.create(
"ConfigFrontmatterError",
z.object({
path: z.string(),
message: z.string(),
}),
)
export function files(template: string) {
return Array.from(template.matchAll(FILE_REGEX))
}
export function shell(template: string) {
return Array.from(template.matchAll(SHELL_REGEX))
}
// other coding agents like claude code allow invalid yaml in their
// frontmatter, we need to fallback to a more permissive parser for those cases
export function fallbackSanitization(content: string): string {
const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---/)
if (!match) return content
const frontmatter = match[1]
const lines = frontmatter.split(/\r?\n/)
const result: string[] = []
for (const line of lines) {
// skip comments and empty lines
if (line.trim().startsWith("#") || line.trim() === "") {
result.push(line)
continue
}
// skip lines that are continuations (indented)
if (line.match(/^\s+/)) {
result.push(line)
continue
}
// match key: value pattern
const kvMatch = line.match(/^([a-zA-Z_][a-zA-Z0-9_]*)\s*:\s*(.*)$/)
if (!kvMatch) {
result.push(line)
continue
}
const key = kvMatch[1]
const value = kvMatch[2].trim()
// skip if value is empty, already quoted, or uses block scalar
if (value === "" || value === ">" || value === "|" || value.startsWith('"') || value.startsWith("'")) {
result.push(line)
continue
}
// if value contains a colon, convert to block scalar
if (value.includes(":")) {
result.push(`${key}: |-`)
result.push(` ${value}`)
continue
}
result.push(line)
}
const processed = result.join("\n")
return content.replace(frontmatter, () => processed)
}
export async function parse(filePath: string) {
const template = await Filesystem.readText(filePath)
try {
const md = matter(template)
return md
} catch {
try {
return matter(fallbackSanitization(template))
} catch (err) {
throw new FrontmatterError(
{
path: filePath,
message: `${filePath}: Failed to parse YAML frontmatter: ${err instanceof Error ? err.message : String(err)}`,
},
{ cause: err },
)
}
}
}
export const FrontmatterError = NamedError.create(
"ConfigFrontmatterError",
z.object({
path: z.string(),
message: z.string(),
}),
)

View File

@@ -7,161 +7,159 @@ import { Filesystem } from "@/util"
import { Flag } from "@/flag/flag"
import { Global } from "@/global"
export namespace ConfigPaths {
export async function projectFiles(name: string, directory: string, worktree: string) {
return Filesystem.findUp([`${name}.json`, `${name}.jsonc`], directory, worktree, { rootFirst: true })
}
export async function directories(directory: string, worktree: string) {
return [
Global.Path.config,
...(!Flag.OPENCODE_DISABLE_PROJECT_CONFIG
? await Array.fromAsync(
Filesystem.up({
targets: [".opencode"],
start: directory,
stop: worktree,
}),
)
: []),
...(await Array.fromAsync(
Filesystem.up({
targets: [".opencode"],
start: Global.Path.home,
stop: Global.Path.home,
}),
)),
...(Flag.OPENCODE_CONFIG_DIR ? [Flag.OPENCODE_CONFIG_DIR] : []),
]
}
export function fileInDirectory(dir: string, name: string) {
return [path.join(dir, `${name}.json`), path.join(dir, `${name}.jsonc`)]
}
export const JsonError = NamedError.create(
"ConfigJsonError",
z.object({
path: z.string(),
message: z.string().optional(),
}),
)
export const InvalidError = NamedError.create(
"ConfigInvalidError",
z.object({
path: z.string(),
issues: z.custom<z.core.$ZodIssue[]>().optional(),
message: z.string().optional(),
}),
)
/** Read a config file, returning undefined for missing files and throwing JsonError for other failures. */
export async function readFile(filepath: string) {
return Filesystem.readText(filepath).catch((err: NodeJS.ErrnoException) => {
if (err.code === "ENOENT") return
throw new JsonError({ path: filepath }, { cause: err })
})
}
type ParseSource = string | { source: string; dir: string }
function source(input: ParseSource) {
return typeof input === "string" ? input : input.source
}
function dir(input: ParseSource) {
return typeof input === "string" ? path.dirname(input) : input.dir
}
/** Apply {env:VAR} and {file:path} substitutions to config text. */
async function substitute(text: string, input: ParseSource, missing: "error" | "empty" = "error") {
text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
return process.env[varName] || ""
})
const fileMatches = Array.from(text.matchAll(/\{file:[^}]+\}/g))
if (!fileMatches.length) return text
const configDir = dir(input)
const configSource = source(input)
let out = ""
let cursor = 0
for (const match of fileMatches) {
const token = match[0]
const index = match.index!
out += text.slice(cursor, index)
const lineStart = text.lastIndexOf("\n", index - 1) + 1
const prefix = text.slice(lineStart, index).trimStart()
if (prefix.startsWith("//")) {
out += token
cursor = index + token.length
continue
}
let filePath = token.replace(/^\{file:/, "").replace(/\}$/, "")
if (filePath.startsWith("~/")) {
filePath = path.join(os.homedir(), filePath.slice(2))
}
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
const fileContent = (
await Filesystem.readText(resolvedPath).catch((error: NodeJS.ErrnoException) => {
if (missing === "empty") return ""
const errMsg = `bad file reference: "${token}"`
if (error.code === "ENOENT") {
throw new InvalidError(
{
path: configSource,
message: errMsg + ` ${resolvedPath} does not exist`,
},
{ cause: error },
)
}
throw new InvalidError({ path: configSource, message: errMsg }, { cause: error })
})
).trim()
out += JSON.stringify(fileContent).slice(1, -1)
cursor = index + token.length
}
out += text.slice(cursor)
return out
}
/** Substitute and parse JSONC text, throwing JsonError on syntax errors. */
export async function parseText(text: string, input: ParseSource, missing: "error" | "empty" = "error") {
const configSource = source(input)
text = await substitute(text, input, missing)
const errors: JsoncParseError[] = []
const data = parseJsonc(text, errors, { allowTrailingComma: true })
if (errors.length) {
const lines = text.split("\n")
const errorDetails = errors
.map((e) => {
const beforeOffset = text.substring(0, e.offset).split("\n")
const line = beforeOffset.length
const column = beforeOffset[beforeOffset.length - 1].length + 1
const problemLine = lines[line - 1]
const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}`
if (!problemLine) return error
return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^`
})
.join("\n")
throw new JsonError({
path: configSource,
message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${errorDetails}\n--- End ---`,
})
}
return data
}
export async function projectFiles(name: string, directory: string, worktree: string) {
return Filesystem.findUp([`${name}.json`, `${name}.jsonc`], directory, worktree, { rootFirst: true })
}
export async function directories(directory: string, worktree: string) {
return [
Global.Path.config,
...(!Flag.OPENCODE_DISABLE_PROJECT_CONFIG
? await Array.fromAsync(
Filesystem.up({
targets: [".opencode"],
start: directory,
stop: worktree,
}),
)
: []),
...(await Array.fromAsync(
Filesystem.up({
targets: [".opencode"],
start: Global.Path.home,
stop: Global.Path.home,
}),
)),
...(Flag.OPENCODE_CONFIG_DIR ? [Flag.OPENCODE_CONFIG_DIR] : []),
]
}
export function fileInDirectory(dir: string, name: string) {
return [path.join(dir, `${name}.json`), path.join(dir, `${name}.jsonc`)]
}
export const JsonError = NamedError.create(
"ConfigJsonError",
z.object({
path: z.string(),
message: z.string().optional(),
}),
)
export const InvalidError = NamedError.create(
"ConfigInvalidError",
z.object({
path: z.string(),
issues: z.custom<z.core.$ZodIssue[]>().optional(),
message: z.string().optional(),
}),
)
/** Read a config file, returning undefined for missing files and throwing JsonError for other failures. */
export async function readFile(filepath: string) {
return Filesystem.readText(filepath).catch((err: NodeJS.ErrnoException) => {
if (err.code === "ENOENT") return
throw new JsonError({ path: filepath }, { cause: err })
})
}
type ParseSource = string | { source: string; dir: string }
function source(input: ParseSource) {
return typeof input === "string" ? input : input.source
}
function dir(input: ParseSource) {
return typeof input === "string" ? path.dirname(input) : input.dir
}
/** Apply {env:VAR} and {file:path} substitutions to config text. */
async function substitute(text: string, input: ParseSource, missing: "error" | "empty" = "error") {
text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
return process.env[varName] || ""
})
const fileMatches = Array.from(text.matchAll(/\{file:[^}]+\}/g))
if (!fileMatches.length) return text
const configDir = dir(input)
const configSource = source(input)
let out = ""
let cursor = 0
for (const match of fileMatches) {
const token = match[0]
const index = match.index!
out += text.slice(cursor, index)
const lineStart = text.lastIndexOf("\n", index - 1) + 1
const prefix = text.slice(lineStart, index).trimStart()
if (prefix.startsWith("//")) {
out += token
cursor = index + token.length
continue
}
let filePath = token.replace(/^\{file:/, "").replace(/\}$/, "")
if (filePath.startsWith("~/")) {
filePath = path.join(os.homedir(), filePath.slice(2))
}
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
const fileContent = (
await Filesystem.readText(resolvedPath).catch((error: NodeJS.ErrnoException) => {
if (missing === "empty") return ""
const errMsg = `bad file reference: "${token}"`
if (error.code === "ENOENT") {
throw new InvalidError(
{
path: configSource,
message: errMsg + ` ${resolvedPath} does not exist`,
},
{ cause: error },
)
}
throw new InvalidError({ path: configSource, message: errMsg }, { cause: error })
})
).trim()
out += JSON.stringify(fileContent).slice(1, -1)
cursor = index + token.length
}
out += text.slice(cursor)
return out
}
/** Substitute and parse JSONC text, throwing JsonError on syntax errors. */
export async function parseText(text: string, input: ParseSource, missing: "error" | "empty" = "error") {
const configSource = source(input)
text = await substitute(text, input, missing)
const errors: JsoncParseError[] = []
const data = parseJsonc(text, errors, { allowTrailingComma: true })
if (errors.length) {
const lines = text.split("\n")
const errorDetails = errors
.map((e) => {
const beforeOffset = text.substring(0, e.offset).split("\n")
const line = beforeOffset.length
const column = beforeOffset[beforeOffset.length - 1].length + 1
const problemLine = lines[line - 1]
const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}`
if (!problemLine) return error
return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^`
})
.join("\n")
throw new JsonError({
path: configSource,
message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${errorDetails}\n--- End ---`,
})
}
return data
}

View File

@@ -2,7 +2,7 @@ import path from "path"
import { type ParseError as JsoncParseError, applyEdits, modify, parse as parseJsonc } from "jsonc-parser"
import { unique } from "remeda"
import z from "zod"
-import { ConfigPaths } from "./paths"
+import * as ConfigPaths from "./paths"
import { TuiInfo, TuiOptions } from "./tui-schema"
import { Instance } from "@/project/instance"
import { Flag } from "@/flag/flag"

View File

@@ -1,5 +1,5 @@
import z from "zod"
-import { Config } from "."
+import * as Config from "./config"
const KeybindOverride = z
.object(

View File

@@ -2,8 +2,8 @@ import { existsSync } from "fs"
import z from "zod"
import { mergeDeep, unique } from "remeda"
import { Context, Effect, Fiber, Layer } from "effect"
-import { Config } from "."
-import { ConfigPaths } from "./paths"
+import * as Config from "./config"
+import * as ConfigPaths from "./paths"
import { migrateTuiConfig } from "./tui-migrate"
import { TuiInfo } from "./tui-schema"
import { Flag } from "@/flag/flag"
@@ -14,201 +14,199 @@ import { InstanceState } from "@/effect"
import { makeRuntime } from "@/effect/run-service"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
export namespace TuiConfig {
const log = Log.create({ service: "tui.config" })
const log = Log.create({ service: "tui.config" })
export const Info = TuiInfo
export const Info = TuiInfo
type Acc = {
result: Info
}
type Acc = {
result: Info
}
type State = {
config: Info
deps: Array<Fiber.Fiber<void, AppFileSystem.Error>>
}
type State = {
config: Info
deps: Array<Fiber.Fiber<void, AppFileSystem.Error>>
}
export type Info = z.output<typeof Info> & {
// Internal resolved plugin list used by runtime loading.
plugin_origins?: Config.PluginOrigin[]
}
export type Info = z.output<typeof Info> & {
// Internal resolved plugin list used by runtime loading.
plugin_origins?: Config.PluginOrigin[]
}
export interface Interface {
readonly get: () => Effect.Effect<Info>
readonly waitForDependencies: () => Effect.Effect<void, AppFileSystem.Error>
}
export interface Interface {
readonly get: () => Effect.Effect<Info>
readonly waitForDependencies: () => Effect.Effect<void, AppFileSystem.Error>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/TuiConfig") {}
export class Service extends Context.Service<Service, Interface>()("@opencode/TuiConfig") {}
function pluginScope(file: string, ctx: { directory: string; worktree: string }): Config.PluginScope {
if (AppFileSystem.contains(ctx.directory, file)) return "local"
if (ctx.worktree !== "/" && AppFileSystem.contains(ctx.worktree, file)) return "local"
return "global"
}
function pluginScope(file: string, ctx: { directory: string; worktree: string }): Config.PluginScope {
if (AppFileSystem.contains(ctx.directory, file)) return "local"
if (ctx.worktree !== "/" && AppFileSystem.contains(ctx.worktree, file)) return "local"
return "global"
}
function customPath() {
return Flag.OPENCODE_TUI_CONFIG
}
function customPath() {
return Flag.OPENCODE_TUI_CONFIG
}
function normalize(raw: Record<string, unknown>) {
const data = { ...raw }
if (!("tui" in data)) return data
if (!isRecord(data.tui)) {
delete data.tui
return data
}
const tui = data.tui
function normalize(raw: Record<string, unknown>) {
const data = { ...raw }
if (!("tui" in data)) return data
if (!isRecord(data.tui)) {
delete data.tui
return {
...tui,
...data,
}
}
async function mergeFile(acc: Acc, file: string, ctx: { directory: string; worktree: string }) {
const data = await loadFile(file)
acc.result = mergeDeep(acc.result, data)
if (!data.plugin?.length) return
const scope = pluginScope(file, ctx)
const plugins = Config.deduplicatePluginOrigins([
...(acc.result.plugin_origins ?? []),
...data.plugin.map((spec) => ({ spec, scope, source: file })),
])
acc.result.plugin = plugins.map((item) => item.spec)
acc.result.plugin_origins = plugins
}
async function loadState(ctx: { directory: string; worktree: string }) {
let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG
? []
: await ConfigPaths.projectFiles("tui", ctx.directory, ctx.worktree)
const directories = await ConfigPaths.directories(ctx.directory, ctx.worktree)
const custom = customPath()
const managed = Config.managedConfigDir()
await migrateTuiConfig({ directories, custom, managed })
// Re-compute after migration since migrateTuiConfig may have created new tui.json files
projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG
? []
: await ConfigPaths.projectFiles("tui", ctx.directory, ctx.worktree)
const acc: Acc = {
result: {},
}
for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) {
await mergeFile(acc, file, ctx)
}
if (custom) {
await mergeFile(acc, custom, ctx)
log.debug("loaded custom tui config", { path: custom })
}
for (const file of projectFiles) {
await mergeFile(acc, file, ctx)
}
const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR)
for (const dir of dirs) {
if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue
for (const file of ConfigPaths.fileInDirectory(dir, "tui")) {
await mergeFile(acc, file, ctx)
}
}
if (existsSync(managed)) {
for (const file of ConfigPaths.fileInDirectory(managed, "tui")) {
await mergeFile(acc, file, ctx)
}
}
const keybinds = { ...acc.result.keybinds }
if (process.platform === "win32") {
// Native Windows terminals do not support POSIX suspend, so prefer prompt undo.
keybinds.terminal_suspend = "none"
keybinds.input_undo ??= unique(["ctrl+z", ...Config.Keybinds.shape.input_undo.parse(undefined).split(",")]).join(
",",
)
}
acc.result.keybinds = Config.Keybinds.parse(keybinds)
return {
config: acc.result,
dirs: acc.result.plugin?.length ? dirs : [],
}
}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const cfg = yield* Config.Service
const state = yield* InstanceState.make<State>(
Effect.fn("TuiConfig.state")(function* (ctx) {
const data = yield* Effect.promise(() => loadState(ctx))
const deps = yield* Effect.forEach(data.dirs, (dir) => cfg.installDependencies(dir).pipe(Effect.forkScoped), {
concurrency: "unbounded",
})
return { config: data.config, deps }
}),
)
const get = Effect.fn("TuiConfig.get")(() => InstanceState.use(state, (s) => s.config))
const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() =>
InstanceState.useEffect(state, (s) =>
Effect.forEach(s.deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.asVoid),
),
)
return Service.of({ get, waitForDependencies })
}),
)
export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer))
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function get() {
return runPromise((svc) => svc.get())
}
export async function waitForDependencies() {
await runPromise((svc) => svc.waitForDependencies())
}
async function loadFile(filepath: string): Promise<Info> {
const text = await ConfigPaths.readFile(filepath)
if (!text) return {}
return load(text, filepath).catch((error) => {
log.warn("failed to load tui config", { path: filepath, error })
return {}
})
}
async function load(text: string, configFilepath: string): Promise<Info> {
const raw = await ConfigPaths.parseText(text, configFilepath, "empty")
if (!isRecord(raw)) return {}
// Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json
// (mirroring the old opencode.json shape) still get their settings applied.
const normalized = normalize(raw)
const parsed = Info.safeParse(normalized)
if (!parsed.success) {
log.warn("invalid tui config", { path: configFilepath, issues: parsed.error.issues })
return {}
}
const data = parsed.data
if (data.plugin) {
for (let i = 0; i < data.plugin.length; i++) {
data.plugin[i] = await Config.resolvePluginSpec(data.plugin[i], configFilepath)
}
}
return data
}
const tui = data.tui
delete data.tui
return {
...tui,
...data,
}
}
async function mergeFile(acc: Acc, file: string, ctx: { directory: string; worktree: string }) {
const data = await loadFile(file)
acc.result = mergeDeep(acc.result, data)
if (!data.plugin?.length) return
const scope = pluginScope(file, ctx)
const plugins = Config.deduplicatePluginOrigins([
...(acc.result.plugin_origins ?? []),
...data.plugin.map((spec) => ({ spec, scope, source: file })),
])
acc.result.plugin = plugins.map((item) => item.spec)
acc.result.plugin_origins = plugins
}
async function loadState(ctx: { directory: string; worktree: string }) {
let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG
? []
: await ConfigPaths.projectFiles("tui", ctx.directory, ctx.worktree)
const directories = await ConfigPaths.directories(ctx.directory, ctx.worktree)
const custom = customPath()
const managed = Config.managedConfigDir()
await migrateTuiConfig({ directories, custom, managed })
// Re-compute after migration since migrateTuiConfig may have created new tui.json files
projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG
? []
: await ConfigPaths.projectFiles("tui", ctx.directory, ctx.worktree)
const acc: Acc = {
result: {},
}
for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) {
await mergeFile(acc, file, ctx)
}
if (custom) {
await mergeFile(acc, custom, ctx)
log.debug("loaded custom tui config", { path: custom })
}
for (const file of projectFiles) {
await mergeFile(acc, file, ctx)
}
const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR)
for (const dir of dirs) {
if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue
for (const file of ConfigPaths.fileInDirectory(dir, "tui")) {
await mergeFile(acc, file, ctx)
}
}
if (existsSync(managed)) {
for (const file of ConfigPaths.fileInDirectory(managed, "tui")) {
await mergeFile(acc, file, ctx)
}
}
const keybinds = { ...acc.result.keybinds }
if (process.platform === "win32") {
// Native Windows terminals do not support POSIX suspend, so prefer prompt undo.
keybinds.terminal_suspend = "none"
keybinds.input_undo ??= unique(["ctrl+z", ...Config.Keybinds.shape.input_undo.parse(undefined).split(",")]).join(
",",
)
}
acc.result.keybinds = Config.Keybinds.parse(keybinds)
return {
config: acc.result,
dirs: acc.result.plugin?.length ? dirs : [],
}
}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const cfg = yield* Config.Service
const state = yield* InstanceState.make<State>(
Effect.fn("TuiConfig.state")(function* (ctx) {
const data = yield* Effect.promise(() => loadState(ctx))
const deps = yield* Effect.forEach(data.dirs, (dir) => cfg.installDependencies(dir).pipe(Effect.forkScoped), {
concurrency: "unbounded",
})
return { config: data.config, deps }
}),
)
const get = Effect.fn("TuiConfig.get")(() => InstanceState.use(state, (s) => s.config))
const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() =>
InstanceState.useEffect(state, (s) =>
Effect.forEach(s.deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.asVoid),
),
)
return Service.of({ get, waitForDependencies })
}),
)
export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer))
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function get() {
return runPromise((svc) => svc.get())
}
export async function waitForDependencies() {
await runPromise((svc) => svc.waitForDependencies())
}
async function loadFile(filepath: string): Promise<Info> {
const text = await ConfigPaths.readFile(filepath)
if (!text) return {}
return load(text, filepath).catch((error) => {
log.warn("failed to load tui config", { path: filepath, error })
return {}
})
}
async function load(text: string, configFilepath: string): Promise<Info> {
const raw = await ConfigPaths.parseText(text, configFilepath, "empty")
if (!isRecord(raw)) return {}
// Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json
// (mirroring the old opencode.json shape) still get their settings applied.
const normalized = normalize(raw)
const parsed = Info.safeParse(normalized)
if (!parsed.success) {
log.warn("invalid tui config", { path: configFilepath, issues: parsed.error.issues })
return {}
}
const data = parsed.data
if (data.plugin) {
for (let i = 0; i < data.plugin.length; i++) {
data.plugin[i] = await Config.resolvePluginSpec(data.plugin[i], configFilepath)
}
}
return data
}

View File

@@ -1,8 +1,8 @@
import z from "zod"
import { setTimeout as sleep } from "node:timers/promises"
import { fn } from "@/util/fn"
import { Database, asc, eq, inArray } from "@/storage/db"
import { Project } from "@/project/project"
import { Database, asc, eq, inArray } from "@/storage"
import { Project } from "@/project"
import { BusEvent } from "@/bus/bus-event"
import { GlobalBus } from "@/bus/global"
import { SyncEvent } from "@/sync"
@@ -114,7 +114,7 @@ export namespace Workspace {
await adaptor.create(config)
void startSync(info)
startSync(info)
await waitEvent({
timeout: TIMEOUT,
@@ -294,7 +294,7 @@ export namespace Workspace {
)
const spaces = rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id))
for (const space of spaces) void startSync(space)
for (const space of spaces) startSync(space)
return spaces
}
@@ -307,7 +307,7 @@ export namespace Workspace {
export const get = fn(WorkspaceID.zod, async (id) => {
const space = lookup(id)
if (!space) return
void startSync(space)
startSync(space)
return space
})

View File

@@ -1,6 +1,6 @@
import { Layer, ManagedRuntime } from "effect"
import { attach, memoMap } from "./run-service"
import { Observability } from "./observability"
import * as Observability from "./observability"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { Bus } from "@/bus"
@@ -8,11 +8,11 @@ import { Auth } from "@/auth"
import { Account } from "@/account"
import { Config } from "@/config"
import { Git } from "@/git"
import { Ripgrep } from "@/file/ripgrep"
import { FileTime } from "@/file/time"
import { Ripgrep } from "@/file"
import { FileTime } from "@/file"
import { File } from "@/file"
import { FileWatcher } from "@/file/watcher"
import { Storage } from "@/storage/storage"
import { FileWatcher } from "@/file"
import { Storage } from "@/storage"
import { Snapshot } from "@/snapshot"
import { Plugin } from "@/plugin"
import { Provider } from "@/provider"
@@ -40,13 +40,13 @@ import { Command } from "@/command"
import { Truncate } from "@/tool/truncate"
import { ToolRegistry } from "@/tool/registry"
import { Format } from "@/format"
import { Project } from "@/project/project"
import { Vcs } from "@/project/vcs"
import { Project } from "@/project"
import { Vcs } from "@/project"
import { Worktree } from "@/worktree"
import { Pty } from "@/pty"
import { Installation } from "@/installation"
import { ShareNext } from "@/share/share-next"
import { SessionShare } from "@/share/session"
import { ShareNext } from "@/share"
import { SessionShare } from "@/share"
export const AppLayer = Layer.mergeAll(
AppFileSystem.defaultLayer,

View File

@@ -3,14 +3,14 @@ import { memoMap } from "./run-service"
import { Plugin } from "@/plugin"
import { LSP } from "@/lsp"
import { FileWatcher } from "@/file/watcher"
import { FileWatcher } from "@/file"
import { Format } from "@/format"
import { ShareNext } from "@/share/share-next"
import { ShareNext } from "@/share"
import { File } from "@/file"
import { Vcs } from "@/project/vcs"
import { Vcs } from "@/project"
import { Snapshot } from "@/snapshot"
import { Bus } from "@/bus"
import { Observability } from "./observability"
import * as Observability from "./observability"
export const BootstrapLayer = Layer.mergeAll(
Plugin.defaultLayer,

View File

@@ -1,2 +1,5 @@
export * as InstanceState from "./instance-state"
export * as EffectBridge from "./bridge"
export * as Runner from "./runner"
export * as Observability from "./observability"
export * as EffectLogger from "./logger"

View File

@@ -1,5 +1,5 @@
import { Effect, Fiber, ScopedCache, Scope, Context } from "effect"
import { EffectLogger } from "@/effect/logger"
import * as EffectLogger from "./logger"
import { Instance, type InstanceContext } from "@/project/instance"
import { LocalContext } from "@/util"
import { InstanceRef, WorkspaceRef } from "./instance-ref"

View File

@@ -1,67 +1,65 @@
import { Cause, Effect, Logger, References } from "effect"
import { Log } from "@/util"
export namespace EffectLogger {
type Fields = Record<string, unknown>
type Fields = Record<string, unknown>
export interface Handle {
readonly debug: (msg?: unknown, extra?: Fields) => Effect.Effect<void>
readonly info: (msg?: unknown, extra?: Fields) => Effect.Effect<void>
readonly warn: (msg?: unknown, extra?: Fields) => Effect.Effect<void>
readonly error: (msg?: unknown, extra?: Fields) => Effect.Effect<void>
readonly with: (extra: Fields) => Handle
}
const clean = (input?: Fields): Fields =>
Object.fromEntries(Object.entries(input ?? {}).filter((entry) => entry[1] !== undefined && entry[1] !== null))
const text = (input: unknown): string => {
if (Array.isArray(input)) return input.map((item) => String(item)).join(" ")
return input === undefined ? "" : String(input)
}
const call = (run: (msg?: unknown) => Effect.Effect<void>, base: Fields, msg?: unknown, extra?: Fields) => {
const ann = clean({ ...base, ...extra })
const fx = run(msg)
return Object.keys(ann).length ? Effect.annotateLogs(fx, ann) : fx
}
export const logger = Logger.make((opts) => {
const extra = clean(opts.fiber.getRef(References.CurrentLogAnnotations))
const now = opts.date.getTime()
for (const [key, start] of opts.fiber.getRef(References.CurrentLogSpans)) {
extra[`logSpan.${key}`] = `${now - start}ms`
}
if (opts.cause.reasons.length > 0) {
extra.cause = Cause.pretty(opts.cause)
}
const svc = typeof extra.service === "string" ? extra.service : undefined
if (svc) delete extra.service
const log = svc ? Log.create({ service: svc }) : Log.Default
const msg = text(opts.message)
switch (opts.logLevel) {
case "Trace":
case "Debug":
return log.debug(msg, extra)
case "Warn":
return log.warn(msg, extra)
case "Error":
case "Fatal":
return log.error(msg, extra)
default:
return log.info(msg, extra)
}
})
export const layer = Logger.layer([logger], { mergeWithExisting: false })
export const create = (base: Fields = {}): Handle => ({
debug: (msg, extra) => call((item) => Effect.logDebug(item), base, msg, extra),
info: (msg, extra) => call((item) => Effect.logInfo(item), base, msg, extra),
warn: (msg, extra) => call((item) => Effect.logWarning(item), base, msg, extra),
error: (msg, extra) => call((item) => Effect.logError(item), base, msg, extra),
with: (extra) => create({ ...base, ...extra }),
})
export interface Handle {
readonly debug: (msg?: unknown, extra?: Fields) => Effect.Effect<void>
readonly info: (msg?: unknown, extra?: Fields) => Effect.Effect<void>
readonly warn: (msg?: unknown, extra?: Fields) => Effect.Effect<void>
readonly error: (msg?: unknown, extra?: Fields) => Effect.Effect<void>
readonly with: (extra: Fields) => Handle
}
const clean = (input?: Fields): Fields =>
Object.fromEntries(Object.entries(input ?? {}).filter((entry) => entry[1] !== undefined && entry[1] !== null))
const text = (input: unknown): string => {
if (Array.isArray(input)) return input.map((item) => String(item)).join(" ")
return input === undefined ? "" : String(input)
}
const call = (run: (msg?: unknown) => Effect.Effect<void>, base: Fields, msg?: unknown, extra?: Fields) => {
const ann = clean({ ...base, ...extra })
const fx = run(msg)
return Object.keys(ann).length ? Effect.annotateLogs(fx, ann) : fx
}
export const logger = Logger.make((opts) => {
const extra = clean(opts.fiber.getRef(References.CurrentLogAnnotations))
const now = opts.date.getTime()
for (const [key, start] of opts.fiber.getRef(References.CurrentLogSpans)) {
extra[`logSpan.${key}`] = `${now - start}ms`
}
if (opts.cause.reasons.length > 0) {
extra.cause = Cause.pretty(opts.cause)
}
const svc = typeof extra.service === "string" ? extra.service : undefined
if (svc) delete extra.service
const log = svc ? Log.create({ service: svc }) : Log.Default
const msg = text(opts.message)
switch (opts.logLevel) {
case "Trace":
case "Debug":
return log.debug(msg, extra)
case "Warn":
return log.warn(msg, extra)
case "Error":
case "Fatal":
return log.error(msg, extra)
default:
return log.info(msg, extra)
}
})
export const layer = Logger.layer([logger], { mergeWithExisting: false })
export const create = (base: Fields = {}): Handle => ({
debug: (msg, extra) => call((item) => Effect.logDebug(item), base, msg, extra),
info: (msg, extra) => call((item) => Effect.logInfo(item), base, msg, extra),
warn: (msg, extra) => call((item) => Effect.logWarning(item), base, msg, extra),
error: (msg, extra) => call((item) => Effect.logError(item), base, msg, extra),
with: (extra) => create({ ...base, ...extra }),
})

View File

@@ -1,80 +1,78 @@
import { Effect, Layer, Logger } from "effect"
import { FetchHttpClient } from "effect/unstable/http"
import { OtlpLogger, OtlpSerialization } from "effect/unstable/observability"
import { EffectLogger } from "@/effect/logger"
import * as EffectLogger from "./logger"
import { Flag } from "@/flag/flag"
import { CHANNEL, VERSION } from "@/installation/meta"
export namespace Observability {
const base = Flag.OTEL_EXPORTER_OTLP_ENDPOINT
export const enabled = !!base
const base = Flag.OTEL_EXPORTER_OTLP_ENDPOINT
export const enabled = !!base
const headers = Flag.OTEL_EXPORTER_OTLP_HEADERS
? Flag.OTEL_EXPORTER_OTLP_HEADERS.split(",").reduce(
(acc, x) => {
const [key, ...value] = x.split("=")
acc[key] = value.join("=")
return acc
},
{} as Record<string, string>,
)
: undefined
const headers = Flag.OTEL_EXPORTER_OTLP_HEADERS
? Flag.OTEL_EXPORTER_OTLP_HEADERS.split(",").reduce(
(acc, x) => {
const [key, ...value] = x.split("=")
acc[key] = value.join("=")
return acc
},
{} as Record<string, string>,
)
: undefined
const resource = {
serviceName: "opencode",
serviceVersion: VERSION,
attributes: {
"deployment.environment.name": CHANNEL === "local" ? "local" : CHANNEL,
"opencode.client": Flag.OPENCODE_CLIENT,
},
}
const resource = {
serviceName: "opencode",
serviceVersion: VERSION,
attributes: {
"deployment.environment.name": CHANNEL === "local" ? "local" : CHANNEL,
"opencode.client": Flag.OPENCODE_CLIENT,
},
}
const logs = Logger.layer(
[
EffectLogger.logger,
OtlpLogger.make({
url: `${base}/v1/logs`,
resource,
const logs = Logger.layer(
[
EffectLogger.logger,
OtlpLogger.make({
url: `${base}/v1/logs`,
resource,
headers,
}),
],
{ mergeWithExisting: false },
).pipe(Layer.provide(OtlpSerialization.layerJson), Layer.provide(FetchHttpClient.layer))
const traces = async () => {
const NodeSdk = await import("@effect/opentelemetry/NodeSdk")
const OTLP = await import("@opentelemetry/exporter-trace-otlp-http")
const SdkBase = await import("@opentelemetry/sdk-trace-base")
// @effect/opentelemetry creates a NodeTracerProvider but never calls
// register(), so the global @opentelemetry/api context manager stays
// as the no-op default. Non-Effect code (like the AI SDK) that calls
// tracer.startActiveSpan() relies on context.active() to find the
// parent span — without a real context manager every span starts a
// new trace. Registering AsyncLocalStorageContextManager fixes this.
const { AsyncLocalStorageContextManager } = await import("@opentelemetry/context-async-hooks")
const { context } = await import("@opentelemetry/api")
const mgr = new AsyncLocalStorageContextManager()
mgr.enable()
context.setGlobalContextManager(mgr)
return NodeSdk.layer(() => ({
resource,
spanProcessor: new SdkBase.BatchSpanProcessor(
new OTLP.OTLPTraceExporter({
url: `${base}/v1/traces`,
headers,
}),
],
{ mergeWithExisting: false },
).pipe(Layer.provide(OtlpSerialization.layerJson), Layer.provide(FetchHttpClient.layer))
const traces = async () => {
const NodeSdk = await import("@effect/opentelemetry/NodeSdk")
const OTLP = await import("@opentelemetry/exporter-trace-otlp-http")
const SdkBase = await import("@opentelemetry/sdk-trace-base")
// @effect/opentelemetry creates a NodeTracerProvider but never calls
// register(), so the global @opentelemetry/api context manager stays
// as the no-op default. Non-Effect code (like the AI SDK) that calls
// tracer.startActiveSpan() relies on context.active() to find the
// parent span — without a real context manager every span starts a
// new trace. Registering AsyncLocalStorageContextManager fixes this.
const { AsyncLocalStorageContextManager } = await import("@opentelemetry/context-async-hooks")
const { context } = await import("@opentelemetry/api")
const mgr = new AsyncLocalStorageContextManager()
mgr.enable()
context.setGlobalContextManager(mgr)
return NodeSdk.layer(() => ({
resource,
spanProcessor: new SdkBase.BatchSpanProcessor(
new OTLP.OTLPTraceExporter({
url: `${base}/v1/traces`,
headers,
}),
),
}))
}
export const layer = !base
? EffectLogger.layer
: Layer.unwrap(
Effect.gen(function* () {
const trace = yield* Effect.promise(traces)
return Layer.mergeAll(trace, logs)
}),
)
),
}))
}
export const layer = !base
? EffectLogger.layer
: Layer.unwrap(
Effect.gen(function* () {
const trace = yield* Effect.promise(traces)
return Layer.mergeAll(trace, logs)
}),
)

View File

@@ -3,7 +3,7 @@ import * as Context from "effect/Context"
import { Instance } from "@/project/instance"
import { LocalContext } from "@/util"
import { InstanceRef, WorkspaceRef } from "./instance-ref"
import { Observability } from "./observability"
import * as Observability from "./observability"
import { WorkspaceContext } from "@/control-plane/workspace-context"
import type { InstanceContext } from "@/project/instance"

View File

@@ -1,208 +1,206 @@
import { Cause, Deferred, Effect, Exit, Fiber, Schema, Scope, SynchronizedRef } from "effect"
export interface Runner<A, E = never> {
readonly state: Runner.State<A, E>
readonly state: State<A, E>
readonly busy: boolean
readonly ensureRunning: (work: Effect.Effect<A, E>) => Effect.Effect<A, E>
readonly startShell: (work: Effect.Effect<A, E>) => Effect.Effect<A, E>
readonly cancel: Effect.Effect<void>
}
export namespace Runner {
export class Cancelled extends Schema.TaggedErrorClass<Cancelled>()("RunnerCancelled", {}) {}
export class Cancelled extends Schema.TaggedErrorClass<Cancelled>()("RunnerCancelled", {}) {}
interface RunHandle<A, E> {
id: number
done: Deferred.Deferred<A, E | Cancelled>
fiber: Fiber.Fiber<A, E>
interface RunHandle<A, E> {
id: number
done: Deferred.Deferred<A, E | Cancelled>
fiber: Fiber.Fiber<A, E>
}
interface ShellHandle<A, E> {
id: number
fiber: Fiber.Fiber<A, E>
}
interface PendingHandle<A, E> {
id: number
done: Deferred.Deferred<A, E | Cancelled>
work: Effect.Effect<A, E>
}
export type State<A, E> =
| { readonly _tag: "Idle" }
| { readonly _tag: "Running"; readonly run: RunHandle<A, E> }
| { readonly _tag: "Shell"; readonly shell: ShellHandle<A, E> }
| { readonly _tag: "ShellThenRun"; readonly shell: ShellHandle<A, E>; readonly run: PendingHandle<A, E> }
export const make = <A, E = never>(
scope: Scope.Scope,
opts?: {
onIdle?: Effect.Effect<void>
onBusy?: Effect.Effect<void>
onInterrupt?: Effect.Effect<A, E>
busy?: () => never
},
): Runner<A, E> => {
const ref = SynchronizedRef.makeUnsafe<State<A, E>>({ _tag: "Idle" })
const idle = opts?.onIdle ?? Effect.void
const busy = opts?.onBusy ?? Effect.void
const onInterrupt = opts?.onInterrupt
let ids = 0
const state = () => SynchronizedRef.getUnsafe(ref)
const next = () => {
ids += 1
return ids
}
interface ShellHandle<A, E> {
id: number
fiber: Fiber.Fiber<A, E>
}
const complete = (done: Deferred.Deferred<A, E | Cancelled>, exit: Exit.Exit<A, E>) =>
Exit.isFailure(exit) && Cause.hasInterruptsOnly(exit.cause)
? Deferred.fail(done, new Cancelled()).pipe(Effect.asVoid)
: Deferred.done(done, exit).pipe(Effect.asVoid)
interface PendingHandle<A, E> {
id: number
done: Deferred.Deferred<A, E | Cancelled>
work: Effect.Effect<A, E>
}
const idleIfCurrent = () =>
SynchronizedRef.modify(ref, (st) => [st._tag === "Idle" ? idle : Effect.void, st] as const).pipe(Effect.flatten)
export type State<A, E> =
| { readonly _tag: "Idle" }
| { readonly _tag: "Running"; readonly run: RunHandle<A, E> }
| { readonly _tag: "Shell"; readonly shell: ShellHandle<A, E> }
| { readonly _tag: "ShellThenRun"; readonly shell: ShellHandle<A, E>; readonly run: PendingHandle<A, E> }
const finishRun = (id: number, done: Deferred.Deferred<A, E | Cancelled>, exit: Exit.Exit<A, E>) =>
SynchronizedRef.modify(
ref,
(st) =>
[
Effect.gen(function* () {
if (st._tag === "Running" && st.run.id === id) yield* idle
yield* complete(done, exit)
}),
st._tag === "Running" && st.run.id === id ? ({ _tag: "Idle" } as const) : st,
] as const,
).pipe(Effect.flatten)
export const make = <A, E = never>(
scope: Scope.Scope,
opts?: {
onIdle?: Effect.Effect<void>
onBusy?: Effect.Effect<void>
onInterrupt?: Effect.Effect<A, E>
busy?: () => never
},
): Runner<A, E> => {
const ref = SynchronizedRef.makeUnsafe<State<A, E>>({ _tag: "Idle" })
const idle = opts?.onIdle ?? Effect.void
const busy = opts?.onBusy ?? Effect.void
const onInterrupt = opts?.onInterrupt
let ids = 0
const state = () => SynchronizedRef.getUnsafe(ref)
const next = () => {
ids += 1
return ids
}
const complete = (done: Deferred.Deferred<A, E | Cancelled>, exit: Exit.Exit<A, E>) =>
Exit.isFailure(exit) && Cause.hasInterruptsOnly(exit.cause)
? Deferred.fail(done, new Cancelled()).pipe(Effect.asVoid)
: Deferred.done(done, exit).pipe(Effect.asVoid)
const idleIfCurrent = () =>
SynchronizedRef.modify(ref, (st) => [st._tag === "Idle" ? idle : Effect.void, st] as const).pipe(Effect.flatten)
const finishRun = (id: number, done: Deferred.Deferred<A, E | Cancelled>, exit: Exit.Exit<A, E>) =>
SynchronizedRef.modify(
ref,
(st) =>
[
Effect.gen(function* () {
if (st._tag === "Running" && st.run.id === id) yield* idle
yield* complete(done, exit)
}),
st._tag === "Running" && st.run.id === id ? ({ _tag: "Idle" } as const) : st,
] as const,
).pipe(Effect.flatten)
const startRun = (work: Effect.Effect<A, E>, done: Deferred.Deferred<A, E | Cancelled>) =>
Effect.gen(function* () {
const id = next()
const fiber = yield* work.pipe(
Effect.onExit((exit) => finishRun(id, done, exit)),
Effect.forkIn(scope),
)
return { id, done, fiber } satisfies RunHandle<A, E>
})
const finishShell = (id: number) =>
SynchronizedRef.modifyEffect(
ref,
Effect.fnUntraced(function* (st) {
if (st._tag === "Shell" && st.shell.id === id) return [idle, { _tag: "Idle" }] as const
if (st._tag === "ShellThenRun" && st.shell.id === id) {
const run = yield* startRun(st.run.work, st.run.done)
return [Effect.void, { _tag: "Running", run }] as const
}
return [Effect.void, st] as const
}),
).pipe(Effect.flatten)
const stopShell = (shell: ShellHandle<A, E>) => Fiber.interrupt(shell.fiber)
const ensureRunning = (work: Effect.Effect<A, E>) =>
SynchronizedRef.modifyEffect(
ref,
Effect.fnUntraced(function* (st) {
switch (st._tag) {
case "Running":
case "ShellThenRun":
return [Deferred.await(st.run.done), st] as const
case "Shell": {
const run = {
id: next(),
done: yield* Deferred.make<A, E | Cancelled>(),
work,
} satisfies PendingHandle<A, E>
return [Deferred.await(run.done), { _tag: "ShellThenRun", shell: st.shell, run }] as const
}
case "Idle": {
const done = yield* Deferred.make<A, E | Cancelled>()
const run = yield* startRun(work, done)
return [Deferred.await(done), { _tag: "Running", run }] as const
}
}
}),
).pipe(
Effect.flatten,
Effect.catch(
(e): Effect.Effect<A, E> => (e instanceof Cancelled ? (onInterrupt ?? Effect.die(e)) : Effect.fail(e as E)),
),
const startRun = (work: Effect.Effect<A, E>, done: Deferred.Deferred<A, E | Cancelled>) =>
Effect.gen(function* () {
const id = next()
const fiber = yield* work.pipe(
Effect.onExit((exit) => finishRun(id, done, exit)),
Effect.forkIn(scope),
)
return { id, done, fiber } satisfies RunHandle<A, E>
})
const startShell = (work: Effect.Effect<A, E>) =>
SynchronizedRef.modifyEffect(
ref,
Effect.fnUntraced(function* (st) {
if (st._tag !== "Idle") {
return [
Effect.sync(() => {
if (opts?.busy) opts.busy()
throw new Error("Runner is busy")
}),
st,
] as const
const finishShell = (id: number) =>
SynchronizedRef.modifyEffect(
ref,
Effect.fnUntraced(function* (st) {
if (st._tag === "Shell" && st.shell.id === id) return [idle, { _tag: "Idle" }] as const
if (st._tag === "ShellThenRun" && st.shell.id === id) {
const run = yield* startRun(st.run.work, st.run.done)
return [Effect.void, { _tag: "Running", run }] as const
}
return [Effect.void, st] as const
}),
).pipe(Effect.flatten)
const stopShell = (shell: ShellHandle<A, E>) => Fiber.interrupt(shell.fiber)
const ensureRunning = (work: Effect.Effect<A, E>) =>
SynchronizedRef.modifyEffect(
ref,
Effect.fnUntraced(function* (st) {
switch (st._tag) {
case "Running":
case "ShellThenRun":
return [Deferred.await(st.run.done), st] as const
case "Shell": {
const run = {
id: next(),
done: yield* Deferred.make<A, E | Cancelled>(),
work,
} satisfies PendingHandle<A, E>
return [Deferred.await(run.done), { _tag: "ShellThenRun", shell: st.shell, run }] as const
}
yield* busy
const id = next()
const fiber = yield* work.pipe(Effect.ensuring(finishShell(id)), Effect.forkChild)
const shell = { id, fiber } satisfies ShellHandle<A, E>
return [
Effect.gen(function* () {
const exit = yield* Fiber.await(fiber)
if (Exit.isSuccess(exit)) return exit.value
if (Cause.hasInterruptsOnly(exit.cause) && onInterrupt) return yield* onInterrupt
return yield* Effect.failCause(exit.cause)
}),
{ _tag: "Shell", shell },
] as const
}),
).pipe(Effect.flatten)
case "Idle": {
const done = yield* Deferred.make<A, E | Cancelled>()
const run = yield* startRun(work, done)
return [Deferred.await(done), { _tag: "Running", run }] as const
}
}
}),
).pipe(
Effect.flatten,
Effect.catch(
(e): Effect.Effect<A, E> => (e instanceof Cancelled ? (onInterrupt ?? Effect.die(e)) : Effect.fail(e as E)),
),
)
const cancel = SynchronizedRef.modify(ref, (st) => {
switch (st._tag) {
case "Idle":
return [Effect.void, st] as const
case "Running":
const startShell = (work: Effect.Effect<A, E>) =>
SynchronizedRef.modifyEffect(
ref,
Effect.fnUntraced(function* (st) {
if (st._tag !== "Idle") {
return [
Effect.gen(function* () {
yield* Fiber.interrupt(st.run.fiber)
yield* Deferred.await(st.run.done).pipe(Effect.exit, Effect.asVoid)
yield* idleIfCurrent()
Effect.sync(() => {
if (opts?.busy) opts.busy()
throw new Error("Runner is busy")
}),
{ _tag: "Idle" } as const,
st,
] as const
case "Shell":
return [
Effect.gen(function* () {
yield* stopShell(st.shell)
yield* idleIfCurrent()
}),
{ _tag: "Idle" } as const,
] as const
case "ShellThenRun":
return [
Effect.gen(function* () {
yield* Deferred.fail(st.run.done, new Cancelled()).pipe(Effect.asVoid)
yield* stopShell(st.shell)
yield* idleIfCurrent()
}),
{ _tag: "Idle" } as const,
] as const
}
}).pipe(Effect.flatten)
}
yield* busy
const id = next()
const fiber = yield* work.pipe(Effect.ensuring(finishShell(id)), Effect.forkChild)
const shell = { id, fiber } satisfies ShellHandle<A, E>
return [
Effect.gen(function* () {
const exit = yield* Fiber.await(fiber)
if (Exit.isSuccess(exit)) return exit.value
if (Cause.hasInterruptsOnly(exit.cause) && onInterrupt) return yield* onInterrupt
return yield* Effect.failCause(exit.cause)
}),
{ _tag: "Shell", shell },
] as const
}),
).pipe(Effect.flatten)
return {
get state() {
return state()
},
get busy() {
return state()._tag !== "Idle"
},
ensureRunning,
startShell,
cancel,
const cancel = SynchronizedRef.modify(ref, (st) => {
switch (st._tag) {
case "Idle":
return [Effect.void, st] as const
case "Running":
return [
Effect.gen(function* () {
yield* Fiber.interrupt(st.run.fiber)
yield* Deferred.await(st.run.done).pipe(Effect.exit, Effect.asVoid)
yield* idleIfCurrent()
}),
{ _tag: "Idle" } as const,
] as const
case "Shell":
return [
Effect.gen(function* () {
yield* stopShell(st.shell)
yield* idleIfCurrent()
}),
{ _tag: "Idle" } as const,
] as const
case "ShellThenRun":
return [
Effect.gen(function* () {
yield* Deferred.fail(st.run.done, new Cancelled()).pipe(Effect.asVoid)
yield* stopShell(st.shell)
yield* idleIfCurrent()
}),
{ _tag: "Idle" } as const,
] as const
}
}).pipe(Effect.flatten)
return {
get state() {
return state()
},
get busy() {
return state()._tag !== "Idle"
},
ensureRunning,
startShell,
cancel,
}
}

View File

@@ -13,8 +13,8 @@ import z from "zod"
import { Global } from "../global"
import { Instance } from "../project/instance"
import { Log } from "../util"
import { Protected } from "./protected"
import { Ripgrep } from "./ripgrep"
import * as Protected from "./protected"
import * as Ripgrep from "./ripgrep"
export const Info = z
.object({

View File

@@ -1,81 +1,79 @@
import { Glob } from "@opencode-ai/shared/util/glob"
export namespace FileIgnore {
const FOLDERS = new Set([
"node_modules",
"bower_components",
".pnpm-store",
"vendor",
".npm",
"dist",
"build",
"out",
".next",
"target",
"bin",
"obj",
".git",
".svn",
".hg",
".vscode",
".idea",
".turbo",
".output",
"desktop",
".sst",
".cache",
".webkit-cache",
"__pycache__",
".pytest_cache",
"mypy_cache",
".history",
".gradle",
])
const FOLDERS = new Set([
"node_modules",
"bower_components",
".pnpm-store",
"vendor",
".npm",
"dist",
"build",
"out",
".next",
"target",
"bin",
"obj",
".git",
".svn",
".hg",
".vscode",
".idea",
".turbo",
".output",
"desktop",
".sst",
".cache",
".webkit-cache",
"__pycache__",
".pytest_cache",
"mypy_cache",
".history",
".gradle",
])
const FILES = [
"**/*.swp",
"**/*.swo",
const FILES = [
"**/*.swp",
"**/*.swo",
"**/*.pyc",
"**/*.pyc",
// OS
"**/.DS_Store",
"**/Thumbs.db",
// OS
"**/.DS_Store",
"**/Thumbs.db",
// Logs & temp
"**/logs/**",
"**/tmp/**",
"**/temp/**",
"**/*.log",
// Logs & temp
"**/logs/**",
"**/tmp/**",
"**/temp/**",
"**/*.log",
// Coverage/test outputs
"**/coverage/**",
"**/.nyc_output/**",
]
// Coverage/test outputs
"**/coverage/**",
"**/.nyc_output/**",
]
export const PATTERNS = [...FILES, ...FOLDERS]
export const PATTERNS = [...FILES, ...FOLDERS]
export function match(
filepath: string,
opts?: {
extra?: string[]
whitelist?: string[]
},
) {
for (const pattern of opts?.whitelist || []) {
if (Glob.match(pattern, filepath)) return false
}
const parts = filepath.split(/[/\\]/)
for (let i = 0; i < parts.length; i++) {
if (FOLDERS.has(parts[i])) return true
}
const extra = opts?.extra || []
for (const pattern of [...FILES, ...extra]) {
if (Glob.match(pattern, filepath)) return true
}
return false
export function match(
filepath: string,
opts?: {
extra?: string[]
whitelist?: string[]
},
) {
for (const pattern of opts?.whitelist || []) {
if (Glob.match(pattern, filepath)) return false
}
const parts = filepath.split(/[/\\]/)
for (let i = 0; i < parts.length; i++) {
if (FOLDERS.has(parts[i])) return true
}
const extra = opts?.extra || []
for (const pattern of [...FILES, ...extra]) {
if (Glob.match(pattern, filepath)) return true
}
return false
}

View File

@@ -1 +1,6 @@
export * as File from "./file"
export * as Protected from "./protected"
export * as FileIgnore from "./ignore"
export * as FileWatcher from "./watcher"
export * as FileTime from "./time"
export * as Ripgrep from "./ripgrep"

View File

@@ -37,23 +37,21 @@ const DARWIN_ROOT = ["/.DocumentRevisions-V100", "/.Spotlight-V100", "/.Trashes"
const WIN32_HOME = ["AppData", "Downloads", "Desktop", "Documents", "Pictures", "Music", "Videos", "OneDrive"]
export namespace Protected {
/** Directory basenames to skip when scanning the home directory. */
export function names(): ReadonlySet<string> {
if (process.platform === "darwin") return new Set(DARWIN_HOME)
if (process.platform === "win32") return new Set(WIN32_HOME)
return new Set()
}
/** Absolute paths that should never be watched, stated, or scanned. */
export function paths(): string[] {
if (process.platform === "darwin")
return [
...DARWIN_HOME.map((n) => path.join(home, n)),
...DARWIN_LIBRARY.map((n) => path.join(home, "Library", n)),
...DARWIN_ROOT,
]
if (process.platform === "win32") return WIN32_HOME.map((n) => path.join(home, n))
return []
}
/** Directory basenames to skip when scanning the home directory. */
export function names(): ReadonlySet<string> {
if (process.platform === "darwin") return new Set(DARWIN_HOME)
if (process.platform === "win32") return new Set(WIN32_HOME)
return new Set()
}
/** Absolute paths that should never be watched, stated, or scanned. */
export function paths(): string[] {
if (process.platform === "darwin")
return [
...DARWIN_HOME.map((n) => path.join(home, n)),
...DARWIN_LIBRARY.map((n) => path.join(home, "Library", n)),
...DARWIN_ROOT,
]
if (process.platform === "win32") return WIN32_HOME.map((n) => path.join(home, n))
return []
}

File diff suppressed because it is too large Load Diff

View File

@@ -5,109 +5,107 @@ import { Flag } from "@/flag/flag"
import type { SessionID } from "@/session/schema"
import { Log } from "../util"
export namespace FileTime {
const log = Log.create({ service: "file.time" })
const log = Log.create({ service: "file.time" })
export type Stamp = {
readonly read: Date
readonly mtime: number | undefined
readonly size: number | undefined
}
const session = (reads: Map<SessionID, Map<string, Stamp>>, sessionID: SessionID) => {
const value = reads.get(sessionID)
if (value) return value
const next = new Map<string, Stamp>()
reads.set(sessionID, next)
return next
}
interface State {
reads: Map<SessionID, Map<string, Stamp>>
locks: Map<string, Semaphore.Semaphore>
}
export interface Interface {
readonly read: (sessionID: SessionID, file: string) => Effect.Effect<void>
readonly get: (sessionID: SessionID, file: string) => Effect.Effect<Date | undefined>
readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect<void>
readonly withLock: <T>(filepath: string, fn: () => Effect.Effect<T>) => Effect.Effect<T>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/FileTime") {}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const fsys = yield* AppFileSystem.Service
const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK
const stamp = Effect.fnUntraced(function* (file: string) {
const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void))
return {
read: yield* DateTime.nowAsDate,
mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined,
size: info ? Number(info.size) : undefined,
}
})
const state = yield* InstanceState.make<State>(
Effect.fn("FileTime.state")(() =>
Effect.succeed({
reads: new Map<SessionID, Map<string, Stamp>>(),
locks: new Map<string, Semaphore.Semaphore>(),
}),
),
)
const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) {
filepath = AppFileSystem.normalizePath(filepath)
const locks = (yield* InstanceState.get(state)).locks
const lock = locks.get(filepath)
if (lock) return lock
const next = Semaphore.makeUnsafe(1)
locks.set(filepath, next)
return next
})
const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) {
file = AppFileSystem.normalizePath(file)
const reads = (yield* InstanceState.get(state)).reads
log.info("read", { sessionID, file })
session(reads, sessionID).set(file, yield* stamp(file))
})
const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) {
file = AppFileSystem.normalizePath(file)
const reads = (yield* InstanceState.get(state)).reads
return reads.get(sessionID)?.get(file)?.read
})
const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) {
if (disableCheck) return
filepath = AppFileSystem.normalizePath(filepath)
const reads = (yield* InstanceState.get(state)).reads
const time = reads.get(sessionID)?.get(filepath)
if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`)
const next = yield* stamp(filepath)
const changed = next.mtime !== time.mtime || next.size !== time.size
if (!changed) return
throw new Error(
`File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`,
)
})
const withLock = Effect.fn("FileTime.withLock")(function* <T>(filepath: string, fn: () => Effect.Effect<T>) {
return yield* fn().pipe((yield* getLock(filepath)).withPermits(1))
})
return Service.of({ read, get, assert, withLock })
}),
).pipe(Layer.orDie)
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
export type Stamp = {
readonly read: Date
readonly mtime: number | undefined
readonly size: number | undefined
}
const session = (reads: Map<SessionID, Map<string, Stamp>>, sessionID: SessionID) => {
const value = reads.get(sessionID)
if (value) return value
const next = new Map<string, Stamp>()
reads.set(sessionID, next)
return next
}
interface State {
reads: Map<SessionID, Map<string, Stamp>>
locks: Map<string, Semaphore.Semaphore>
}
export interface Interface {
readonly read: (sessionID: SessionID, file: string) => Effect.Effect<void>
readonly get: (sessionID: SessionID, file: string) => Effect.Effect<Date | undefined>
readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect<void>
readonly withLock: <T>(filepath: string, fn: () => Effect.Effect<T>) => Effect.Effect<T>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/FileTime") {}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const fsys = yield* AppFileSystem.Service
const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK
const stamp = Effect.fnUntraced(function* (file: string) {
const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void))
return {
read: yield* DateTime.nowAsDate,
mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined,
size: info ? Number(info.size) : undefined,
}
})
const state = yield* InstanceState.make<State>(
Effect.fn("FileTime.state")(() =>
Effect.succeed({
reads: new Map<SessionID, Map<string, Stamp>>(),
locks: new Map<string, Semaphore.Semaphore>(),
}),
),
)
const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) {
filepath = AppFileSystem.normalizePath(filepath)
const locks = (yield* InstanceState.get(state)).locks
const lock = locks.get(filepath)
if (lock) return lock
const next = Semaphore.makeUnsafe(1)
locks.set(filepath, next)
return next
})
const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) {
file = AppFileSystem.normalizePath(file)
const reads = (yield* InstanceState.get(state)).reads
log.info("read", { sessionID, file })
session(reads, sessionID).set(file, yield* stamp(file))
})
const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) {
file = AppFileSystem.normalizePath(file)
const reads = (yield* InstanceState.get(state)).reads
return reads.get(sessionID)?.get(file)?.read
})
const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) {
if (disableCheck) return
filepath = AppFileSystem.normalizePath(filepath)
const reads = (yield* InstanceState.get(state)).reads
const time = reads.get(sessionID)?.get(filepath)
if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`)
const next = yield* stamp(filepath)
const changed = next.mtime !== time.mtime || next.size !== time.size
if (!changed) return
throw new Error(
`File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`,
)
})
const withLock = Effect.fn("FileTime.withLock")(function* <T>(filepath: string, fn: () => Effect.Effect<T>) {
return yield* fn().pipe((yield* getLock(filepath)).withPermits(1))
})
return Service.of({ read, get, assert, withLock })
}),
).pipe(Layer.orDie)
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))

View File

@@ -13,151 +13,149 @@ import { Git } from "@/git"
import { Instance } from "@/project/instance"
import { lazy } from "@/util/lazy"
import { Config } from "../config"
import { FileIgnore } from "./ignore"
import { Protected } from "./protected"
import * as FileIgnore from "./ignore"
import * as Protected from "./protected"
import { Log } from "../util"
declare const OPENCODE_LIBC: string | undefined
export namespace FileWatcher {
const log = Log.create({ service: "file.watcher" })
const SUBSCRIBE_TIMEOUT_MS = 10_000
const log = Log.create({ service: "file.watcher" })
const SUBSCRIBE_TIMEOUT_MS = 10_000
export const Event = {
Updated: BusEvent.define(
"file.watcher.updated",
z.object({
file: z.string(),
event: z.union([z.literal("add"), z.literal("change"), z.literal("unlink")]),
}),
),
}
const watcher = lazy((): typeof import("@parcel/watcher") | undefined => {
try {
const binding = require(
`@parcel/watcher-${process.platform}-${process.arch}${process.platform === "linux" ? `-${OPENCODE_LIBC || "glibc"}` : ""}`,
)
return createWrapper(binding) as typeof import("@parcel/watcher")
} catch (error) {
log.error("failed to load watcher binding", { error })
return
}
})
function getBackend() {
if (process.platform === "win32") return "windows"
if (process.platform === "darwin") return "fs-events"
if (process.platform === "linux") return "inotify"
}
function protecteds(dir: string) {
return Protected.paths().filter((item) => {
const rel = path.relative(dir, item)
return rel !== "" && !rel.startsWith("..") && !path.isAbsolute(rel)
})
}
export const hasNativeBinding = () => !!watcher()
export interface Interface {
readonly init: () => Effect.Effect<void>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/FileWatcher") {}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const config = yield* Config.Service
const git = yield* Git.Service
const state = yield* InstanceState.make(
Effect.fn("FileWatcher.state")(
function* () {
if (yield* Flag.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER) return
log.info("init", { directory: Instance.directory })
const backend = getBackend()
if (!backend) {
log.error("watcher backend not supported", { directory: Instance.directory, platform: process.platform })
return
}
const w = watcher()
if (!w) return
log.info("watcher backend", { directory: Instance.directory, platform: process.platform, backend })
const subs: ParcelWatcher.AsyncSubscription[] = []
yield* Effect.addFinalizer(() =>
Effect.promise(() => Promise.allSettled(subs.map((sub) => sub.unsubscribe()))),
)
const cb: ParcelWatcher.SubscribeCallback = Instance.bind((err, evts) => {
if (err) return
for (const evt of evts) {
if (evt.type === "create") void Bus.publish(Event.Updated, { file: evt.path, event: "add" })
if (evt.type === "update") void Bus.publish(Event.Updated, { file: evt.path, event: "change" })
if (evt.type === "delete") void Bus.publish(Event.Updated, { file: evt.path, event: "unlink" })
}
})
const subscribe = (dir: string, ignore: string[]) => {
const pending = w.subscribe(dir, cb, { ignore, backend })
return Effect.gen(function* () {
const sub = yield* Effect.promise(() => pending)
subs.push(sub)
}).pipe(
Effect.timeout(SUBSCRIBE_TIMEOUT_MS),
Effect.catchCause((cause) => {
log.error("failed to subscribe", { dir, cause: Cause.pretty(cause) })
pending.then((s) => s.unsubscribe()).catch(() => {})
return Effect.void
}),
)
}
const cfg = yield* config.get()
const cfgIgnores = cfg.watcher?.ignore ?? []
if (yield* Flag.OPENCODE_EXPERIMENTAL_FILEWATCHER) {
yield* subscribe(Instance.directory, [
...FileIgnore.PATTERNS,
...cfgIgnores,
...protecteds(Instance.directory),
])
}
if (Instance.project.vcs === "git") {
const result = yield* git.run(["rev-parse", "--git-dir"], {
cwd: Instance.project.worktree,
})
const vcsDir =
result.exitCode === 0 ? path.resolve(Instance.project.worktree, result.text().trim()) : undefined
if (vcsDir && !cfgIgnores.includes(".git") && !cfgIgnores.includes(vcsDir)) {
const ignore = (yield* Effect.promise(() => readdir(vcsDir).catch(() => []))).filter(
(entry) => entry !== "HEAD",
)
yield* subscribe(vcsDir, ignore)
}
}
},
Effect.catchCause((cause) => {
log.error("failed to init watcher service", { cause: Cause.pretty(cause) })
return Effect.void
}),
),
)
return Service.of({
init: Effect.fn("FileWatcher.init")(function* () {
yield* InstanceState.get(state)
}),
})
export const Event = {
Updated: BusEvent.define(
"file.watcher.updated",
z.object({
file: z.string(),
event: z.union([z.literal("add"), z.literal("change"), z.literal("unlink")]),
}),
)
export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer), Layer.provide(Git.defaultLayer))
),
}
const watcher = lazy((): typeof import("@parcel/watcher") | undefined => {
try {
const binding = require(
`@parcel/watcher-${process.platform}-${process.arch}${process.platform === "linux" ? `-${OPENCODE_LIBC || "glibc"}` : ""}`,
)
return createWrapper(binding) as typeof import("@parcel/watcher")
} catch (error) {
log.error("failed to load watcher binding", { error })
return
}
})
function getBackend() {
if (process.platform === "win32") return "windows"
if (process.platform === "darwin") return "fs-events"
if (process.platform === "linux") return "inotify"
}
function protecteds(dir: string) {
return Protected.paths().filter((item) => {
const rel = path.relative(dir, item)
return rel !== "" && !rel.startsWith("..") && !path.isAbsolute(rel)
})
}
export const hasNativeBinding = () => !!watcher()
export interface Interface {
readonly init: () => Effect.Effect<void>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/FileWatcher") {}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const config = yield* Config.Service
const git = yield* Git.Service
const state = yield* InstanceState.make(
Effect.fn("FileWatcher.state")(
function* () {
if (yield* Flag.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER) return
log.info("init", { directory: Instance.directory })
const backend = getBackend()
if (!backend) {
log.error("watcher backend not supported", { directory: Instance.directory, platform: process.platform })
return
}
const w = watcher()
if (!w) return
log.info("watcher backend", { directory: Instance.directory, platform: process.platform, backend })
const subs: ParcelWatcher.AsyncSubscription[] = []
yield* Effect.addFinalizer(() =>
Effect.promise(() => Promise.allSettled(subs.map((sub) => sub.unsubscribe()))),
)
const cb: ParcelWatcher.SubscribeCallback = Instance.bind((err, evts) => {
if (err) return
for (const evt of evts) {
if (evt.type === "create") void Bus.publish(Event.Updated, { file: evt.path, event: "add" })
if (evt.type === "update") void Bus.publish(Event.Updated, { file: evt.path, event: "change" })
if (evt.type === "delete") void Bus.publish(Event.Updated, { file: evt.path, event: "unlink" })
}
})
const subscribe = (dir: string, ignore: string[]) => {
const pending = w.subscribe(dir, cb, { ignore, backend })
return Effect.gen(function* () {
const sub = yield* Effect.promise(() => pending)
subs.push(sub)
}).pipe(
Effect.timeout(SUBSCRIBE_TIMEOUT_MS),
Effect.catchCause((cause) => {
log.error("failed to subscribe", { dir, cause: Cause.pretty(cause) })
pending.then((s) => s.unsubscribe()).catch(() => {})
return Effect.void
}),
)
}
const cfg = yield* config.get()
const cfgIgnores = cfg.watcher?.ignore ?? []
if (yield* Flag.OPENCODE_EXPERIMENTAL_FILEWATCHER) {
yield* subscribe(Instance.directory, [
...FileIgnore.PATTERNS,
...cfgIgnores,
...protecteds(Instance.directory),
])
}
if (Instance.project.vcs === "git") {
const result = yield* git.run(["rev-parse", "--git-dir"], {
cwd: Instance.project.worktree,
})
const vcsDir =
result.exitCode === 0 ? path.resolve(Instance.project.worktree, result.text().trim()) : undefined
if (vcsDir && !cfgIgnores.includes(".git") && !cfgIgnores.includes(vcsDir)) {
const ignore = (yield* Effect.promise(() => readdir(vcsDir).catch(() => []))).filter(
(entry) => entry !== "HEAD",
)
yield* subscribe(vcsDir, ignore)
}
}
},
Effect.catchCause((cause) => {
log.error("failed to init watcher service", { cause: Cause.pretty(cause) })
return Effect.void
}),
),
)
return Service.of({
init: Effect.fn("FileWatcher.init")(function* () {
yield* InstanceState.get(state)
}),
})
}),
)
export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer), Layer.provide(Git.defaultLayer))

View File

@@ -31,8 +31,8 @@ import { SessionCommand } from "./cli/cmd/session"
import { DbCommand } from "./cli/cmd/db"
import path from "path"
import { Global } from "./global"
import { JsonMigration } from "./storage/json-migration"
import { Database } from "./storage/db"
import { JsonMigration } from "./storage"
import { Database } from "./storage"
import { errorMessage } from "./util/error"
import { PluginCommand } from "./cli/cmd/plug"
import { Heap } from "./cli/heap"

View File

@@ -8,7 +8,7 @@ import { Log } from "../util"
import { Process } from "../util"
import { LANGUAGE_EXTENSIONS } from "./language"
import z from "zod"
import type { LSPServer } from "./server"
import type * as LSPServer from "./server"
import { NamedError } from "@opencode-ai/shared/util/error"
import { withTimeout } from "../util/timeout"
import { Instance } from "../project/instance"
@@ -16,237 +16,235 @@ import { Filesystem } from "../util"
const DIAGNOSTICS_DEBOUNCE_MS = 150
export namespace LSPClient {
const log = Log.create({ service: "lsp.client" })
const log = Log.create({ service: "lsp.client" })
export type Info = NonNullable<Awaited<ReturnType<typeof create>>>
export type Info = NonNullable<Awaited<ReturnType<typeof create>>>
export type Diagnostic = VSCodeDiagnostic
export type Diagnostic = VSCodeDiagnostic
export const InitializeError = NamedError.create(
"LSPInitializeError",
export const InitializeError = NamedError.create(
"LSPInitializeError",
z.object({
serverID: z.string(),
}),
)
export const Event = {
Diagnostics: BusEvent.define(
"lsp.client.diagnostics",
z.object({
serverID: z.string(),
path: z.string(),
}),
),
}
export async function create(input: { serverID: string; server: LSPServer.Handle; root: string }) {
const l = log.clone().tag("serverID", input.serverID)
l.info("starting client")
const connection = createMessageConnection(
new StreamMessageReader(input.server.process.stdout as any),
new StreamMessageWriter(input.server.process.stdin as any),
)
export const Event = {
Diagnostics: BusEvent.define(
"lsp.client.diagnostics",
z.object({
serverID: z.string(),
path: z.string(),
}),
),
const diagnostics = new Map<string, Diagnostic[]>()
connection.onNotification("textDocument/publishDiagnostics", (params) => {
const filePath = Filesystem.normalizePath(fileURLToPath(params.uri))
l.info("textDocument/publishDiagnostics", {
path: filePath,
count: params.diagnostics.length,
})
const exists = diagnostics.has(filePath)
diagnostics.set(filePath, params.diagnostics)
if (!exists && input.serverID === "typescript") return
Bus.publish(Event.Diagnostics, { path: filePath, serverID: input.serverID })
})
connection.onRequest("window/workDoneProgress/create", (params) => {
l.info("window/workDoneProgress/create", params)
return null
})
connection.onRequest("workspace/configuration", async () => {
// Return server initialization options
return [input.server.initialization ?? {}]
})
connection.onRequest("client/registerCapability", async () => {})
connection.onRequest("client/unregisterCapability", async () => {})
connection.onRequest("workspace/workspaceFolders", async () => [
{
name: "workspace",
uri: pathToFileURL(input.root).href,
},
])
connection.listen()
l.info("sending initialize")
await withTimeout(
connection.sendRequest("initialize", {
rootUri: pathToFileURL(input.root).href,
processId: input.server.process.pid,
workspaceFolders: [
{
name: "workspace",
uri: pathToFileURL(input.root).href,
},
],
initializationOptions: {
...input.server.initialization,
},
capabilities: {
window: {
workDoneProgress: true,
},
workspace: {
configuration: true,
didChangeWatchedFiles: {
dynamicRegistration: true,
},
},
textDocument: {
synchronization: {
didOpen: true,
didChange: true,
},
publishDiagnostics: {
versionSupport: true,
},
},
},
}),
45_000,
).catch((err) => {
l.error("initialize error", { error: err })
throw new InitializeError(
{ serverID: input.serverID },
{
cause: err,
},
)
})
await connection.sendNotification("initialized", {})
if (input.server.initialization) {
await connection.sendNotification("workspace/didChangeConfiguration", {
settings: input.server.initialization,
})
}
export async function create(input: { serverID: string; server: LSPServer.Handle; root: string }) {
const l = log.clone().tag("serverID", input.serverID)
l.info("starting client")
const files: {
[path: string]: number
} = {}
const connection = createMessageConnection(
new StreamMessageReader(input.server.process.stdout as any),
new StreamMessageWriter(input.server.process.stdin as any),
)
const diagnostics = new Map<string, Diagnostic[]>()
connection.onNotification("textDocument/publishDiagnostics", (params) => {
const filePath = Filesystem.normalizePath(fileURLToPath(params.uri))
l.info("textDocument/publishDiagnostics", {
path: filePath,
count: params.diagnostics.length,
})
const exists = diagnostics.has(filePath)
diagnostics.set(filePath, params.diagnostics)
if (!exists && input.serverID === "typescript") return
void Bus.publish(Event.Diagnostics, { path: filePath, serverID: input.serverID })
})
connection.onRequest("window/workDoneProgress/create", (params) => {
l.info("window/workDoneProgress/create", params)
return null
})
connection.onRequest("workspace/configuration", async () => {
// Return server initialization options
return [input.server.initialization ?? {}]
})
connection.onRequest("client/registerCapability", async () => {})
connection.onRequest("client/unregisterCapability", async () => {})
connection.onRequest("workspace/workspaceFolders", async () => [
{
name: "workspace",
uri: pathToFileURL(input.root).href,
},
])
connection.listen()
l.info("sending initialize")
await withTimeout(
connection.sendRequest("initialize", {
rootUri: pathToFileURL(input.root).href,
processId: input.server.process.pid,
workspaceFolders: [
{
name: "workspace",
uri: pathToFileURL(input.root).href,
},
],
initializationOptions: {
...input.server.initialization,
},
capabilities: {
window: {
workDoneProgress: true,
},
workspace: {
configuration: true,
didChangeWatchedFiles: {
dynamicRegistration: true,
},
},
textDocument: {
synchronization: {
didOpen: true,
didChange: true,
},
publishDiagnostics: {
versionSupport: true,
},
},
},
}),
45_000,
).catch((err) => {
l.error("initialize error", { error: err })
throw new InitializeError(
{ serverID: input.serverID },
{
cause: err,
},
)
})
await connection.sendNotification("initialized", {})
if (input.server.initialization) {
await connection.sendNotification("workspace/didChangeConfiguration", {
settings: input.server.initialization,
})
}
const files: {
[path: string]: number
} = {}
const result = {
root: input.root,
get serverID() {
return input.serverID
},
get connection() {
return connection
},
notify: {
async open(input: { path: string }) {
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path)
const text = await Filesystem.readText(input.path)
const extension = path.extname(input.path)
const languageId = LANGUAGE_EXTENSIONS[extension] ?? "plaintext"
const version = files[input.path]
if (version !== undefined) {
log.info("workspace/didChangeWatchedFiles", input)
await connection.sendNotification("workspace/didChangeWatchedFiles", {
changes: [
{
uri: pathToFileURL(input.path).href,
type: 2, // Changed
},
],
})
const next = version + 1
files[input.path] = next
log.info("textDocument/didChange", {
path: input.path,
version: next,
})
await connection.sendNotification("textDocument/didChange", {
textDocument: {
uri: pathToFileURL(input.path).href,
version: next,
},
contentChanges: [{ text }],
})
return
}
const result = {
root: input.root,
get serverID() {
return input.serverID
},
get connection() {
return connection
},
notify: {
async open(input: { path: string }) {
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path)
const text = await Filesystem.readText(input.path)
const extension = path.extname(input.path)
const languageId = LANGUAGE_EXTENSIONS[extension] ?? "plaintext"
const version = files[input.path]
if (version !== undefined) {
log.info("workspace/didChangeWatchedFiles", input)
await connection.sendNotification("workspace/didChangeWatchedFiles", {
changes: [
{
uri: pathToFileURL(input.path).href,
type: 1, // Created
type: 2, // Changed
},
],
})
log.info("textDocument/didOpen", input)
diagnostics.delete(input.path)
await connection.sendNotification("textDocument/didOpen", {
const next = version + 1
files[input.path] = next
log.info("textDocument/didChange", {
path: input.path,
version: next,
})
await connection.sendNotification("textDocument/didChange", {
textDocument: {
uri: pathToFileURL(input.path).href,
languageId,
version: 0,
text,
version: next,
},
contentChanges: [{ text }],
})
files[input.path] = 0
return
},
}
log.info("workspace/didChangeWatchedFiles", input)
await connection.sendNotification("workspace/didChangeWatchedFiles", {
changes: [
{
uri: pathToFileURL(input.path).href,
type: 1, // Created
},
],
})
log.info("textDocument/didOpen", input)
diagnostics.delete(input.path)
await connection.sendNotification("textDocument/didOpen", {
textDocument: {
uri: pathToFileURL(input.path).href,
languageId,
version: 0,
text,
},
})
files[input.path] = 0
return
},
get diagnostics() {
return diagnostics
},
async waitForDiagnostics(input: { path: string }) {
const normalizedPath = Filesystem.normalizePath(
path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path),
)
log.info("waiting for diagnostics", { path: normalizedPath })
let unsub: () => void
let debounceTimer: ReturnType<typeof setTimeout> | undefined
return await withTimeout(
new Promise<void>((resolve) => {
unsub = Bus.subscribe(Event.Diagnostics, (event) => {
if (event.properties.path === normalizedPath && event.properties.serverID === result.serverID) {
// Debounce to allow LSP to send follow-up diagnostics (e.g., semantic after syntax)
if (debounceTimer) clearTimeout(debounceTimer)
debounceTimer = setTimeout(() => {
log.info("got diagnostics", { path: normalizedPath })
unsub?.()
resolve()
}, DIAGNOSTICS_DEBOUNCE_MS)
}
})
}),
3000,
)
.catch(() => {})
.finally(() => {
if (debounceTimer) clearTimeout(debounceTimer)
unsub?.()
},
get diagnostics() {
return diagnostics
},
async waitForDiagnostics(input: { path: string }) {
const normalizedPath = Filesystem.normalizePath(
path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path),
)
log.info("waiting for diagnostics", { path: normalizedPath })
let unsub: () => void
let debounceTimer: ReturnType<typeof setTimeout> | undefined
return await withTimeout(
new Promise<void>((resolve) => {
unsub = Bus.subscribe(Event.Diagnostics, (event) => {
if (event.properties.path === normalizedPath && event.properties.serverID === result.serverID) {
// Debounce to allow LSP to send follow-up diagnostics (e.g., semantic after syntax)
if (debounceTimer) clearTimeout(debounceTimer)
debounceTimer = setTimeout(() => {
log.info("got diagnostics", { path: normalizedPath })
unsub?.()
resolve()
}, DIAGNOSTICS_DEBOUNCE_MS)
}
})
},
async shutdown() {
l.info("shutting down")
connection.end()
connection.dispose()
await Process.stop(input.server.process)
l.info("shutdown")
},
}
l.info("initialized")
return result
}),
3000,
)
.catch(() => {})
.finally(() => {
if (debounceTimer) clearTimeout(debounceTimer)
unsub?.()
})
},
async shutdown() {
l.info("shutting down")
connection.end()
connection.dispose()
await Process.stop(input.server.process)
l.info("shutdown")
},
}
l.info("initialized")
return result
}

View File

@@ -1,537 +1,3 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import { Log } from "../util"
import { LSPClient } from "./client"
import path from "path"
import { pathToFileURL, fileURLToPath } from "url"
import { LSPServer } from "./server"
import z from "zod"
import { Config } from "../config"
import { Instance } from "../project/instance"
import { Flag } from "@/flag/flag"
import { Process } from "../util"
import { spawn as lspspawn } from "./launch"
import { Effect, Layer, Context } from "effect"
import { InstanceState } from "@/effect"
export namespace LSP {
const log = Log.create({ service: "lsp" })
export const Event = {
Updated: BusEvent.define("lsp.updated", z.object({})),
}
export const Range = z
.object({
start: z.object({
line: z.number(),
character: z.number(),
}),
end: z.object({
line: z.number(),
character: z.number(),
}),
})
.meta({
ref: "Range",
})
export type Range = z.infer<typeof Range>
export const Symbol = z
.object({
name: z.string(),
kind: z.number(),
location: z.object({
uri: z.string(),
range: Range,
}),
})
.meta({
ref: "Symbol",
})
export type Symbol = z.infer<typeof Symbol>
export const DocumentSymbol = z
.object({
name: z.string(),
detail: z.string().optional(),
kind: z.number(),
range: Range,
selectionRange: Range,
})
.meta({
ref: "DocumentSymbol",
})
export type DocumentSymbol = z.infer<typeof DocumentSymbol>
export const Status = z
.object({
id: z.string(),
name: z.string(),
root: z.string(),
status: z.union([z.literal("connected"), z.literal("error")]),
})
.meta({
ref: "LSPStatus",
})
export type Status = z.infer<typeof Status>
enum SymbolKind {
File = 1,
Module = 2,
Namespace = 3,
Package = 4,
Class = 5,
Method = 6,
Property = 7,
Field = 8,
Constructor = 9,
Enum = 10,
Interface = 11,
Function = 12,
Variable = 13,
Constant = 14,
String = 15,
Number = 16,
Boolean = 17,
Array = 18,
Object = 19,
Key = 20,
Null = 21,
EnumMember = 22,
Struct = 23,
Event = 24,
Operator = 25,
TypeParameter = 26,
}
const kinds = [
SymbolKind.Class,
SymbolKind.Function,
SymbolKind.Method,
SymbolKind.Interface,
SymbolKind.Variable,
SymbolKind.Constant,
SymbolKind.Struct,
SymbolKind.Enum,
]
const filterExperimentalServers = (servers: Record<string, LSPServer.Info>) => {
if (Flag.OPENCODE_EXPERIMENTAL_LSP_TY) {
if (servers["pyright"]) {
log.info("LSP server pyright is disabled because OPENCODE_EXPERIMENTAL_LSP_TY is enabled")
delete servers["pyright"]
}
} else {
if (servers["ty"]) {
delete servers["ty"]
}
}
}
type LocInput = { file: string; line: number; character: number }
interface State {
clients: LSPClient.Info[]
servers: Record<string, LSPServer.Info>
broken: Set<string>
spawning: Map<string, Promise<LSPClient.Info | undefined>>
}
export interface Interface {
readonly init: () => Effect.Effect<void>
readonly status: () => Effect.Effect<Status[]>
readonly hasClients: (file: string) => Effect.Effect<boolean>
readonly touchFile: (input: string, waitForDiagnostics?: boolean) => Effect.Effect<void>
readonly diagnostics: () => Effect.Effect<Record<string, LSPClient.Diagnostic[]>>
readonly hover: (input: LocInput) => Effect.Effect<any>
readonly definition: (input: LocInput) => Effect.Effect<any[]>
readonly references: (input: LocInput) => Effect.Effect<any[]>
readonly implementation: (input: LocInput) => Effect.Effect<any[]>
readonly documentSymbol: (uri: string) => Effect.Effect<(LSP.DocumentSymbol | LSP.Symbol)[]>
readonly workspaceSymbol: (query: string) => Effect.Effect<LSP.Symbol[]>
readonly prepareCallHierarchy: (input: LocInput) => Effect.Effect<any[]>
readonly incomingCalls: (input: LocInput) => Effect.Effect<any[]>
readonly outgoingCalls: (input: LocInput) => Effect.Effect<any[]>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/LSP") {}
// Live implementation of the LSP facade. Builds the enabled-server registry
// from config, lazily spawns one client per (root, server) pair as files are
// touched, and fans location-based queries out to every applicable client.
export const layer = Layer.effect(
  Service,
  Effect.gen(function* () {
    const config = yield* Config.Service
    // Per-instance lazy state: computed from config on first access; all
    // spawned clients are shut down when the instance scope closes.
    const state = yield* InstanceState.make<State>(
      Effect.fn("LSP.state")(function* () {
        const cfg = yield* config.get()
        const servers: Record<string, LSPServer.Info> = {}
        if (cfg.lsp === false) {
          log.info("all LSPs are disabled")
        } else {
          // Start from the built-in catalog, then apply user config overrides.
          for (const server of Object.values(LSPServer)) {
            servers[server.id] = server
          }
          filterExperimentalServers(servers)
          for (const [name, item] of Object.entries(cfg.lsp ?? {})) {
            const existing = servers[name]
            if (item.disabled) {
              log.info(`LSP server ${name} is disabled`)
              delete servers[name]
              continue
            }
            // User-configured server: spawn the configured command, falling
            // back to built-in root/extensions when not overridden.
            servers[name] = {
              ...existing,
              id: name,
              root: existing?.root ?? (async () => Instance.directory),
              extensions: item.extensions ?? existing?.extensions ?? [],
              spawn: async (root) => ({
                process: lspspawn(item.command[0], item.command.slice(1), {
                  cwd: root,
                  env: { ...process.env, ...item.env },
                }),
                initialization: item.initialization,
              }),
            }
          }
          log.info("enabled LSP servers", {
            serverIds: Object.values(servers)
              .map((server) => server.id)
              .join(", "),
          })
        }
        const s: State = {
          clients: [],
          servers,
          broken: new Set(),
          spawning: new Map(),
        }
        yield* Effect.addFinalizer(() =>
          Effect.promise(async () => {
            await Promise.all(s.clients.map((client) => client.shutdown()))
          }),
        )
        return s
      }),
    )
    // Resolve (and lazily spawn) every client applicable to `file`: extension
    // match -> server-specific root lookup -> dedupe against live clients and
    // in-flight spawns, skipping (root, server) pairs already marked broken.
    const getClients = Effect.fnUntraced(function* (file: string) {
      if (!Instance.containsPath(file)) return [] as LSPClient.Info[]
      const s = yield* InstanceState.get(state)
      return yield* Effect.promise(async () => {
        const extension = path.parse(file).ext || file
        const result: LSPClient.Info[] = []
        // Spawn + initialize one server; marks `key` broken on any failure so
        // the same (root, server) pair is never retried for this instance.
        async function schedule(server: LSPServer.Info, root: string, key: string) {
          const handle = await server
            .spawn(root)
            .then((value) => {
              if (!value) s.broken.add(key)
              return value
            })
            .catch((err) => {
              s.broken.add(key)
              log.error(`Failed to spawn LSP server ${server.id}`, { error: err })
              return undefined
            })
          if (!handle) return undefined
          log.info("spawned lsp server", { serverID: server.id, root })
          const client = await LSPClient.create({
            serverID: server.id,
            server: handle,
            root,
          }).catch(async (err) => {
            s.broken.add(key)
            await Process.stop(handle.process)
            log.error(`Failed to initialize LSP client ${server.id}`, { error: err })
            return undefined
          })
          if (!client) return undefined
          // Lost a race with a concurrent spawn for the same root: keep the
          // existing client and tear down the duplicate process.
          const existing = s.clients.find((x) => x.root === root && x.serverID === server.id)
          if (existing) {
            await Process.stop(handle.process)
            return existing
          }
          s.clients.push(client)
          return client
        }
        for (const server of Object.values(s.servers)) {
          if (server.extensions.length && !server.extensions.includes(extension)) continue
          const root = await server.root(file)
          if (!root) continue
          if (s.broken.has(root + server.id)) continue
          const match = s.clients.find((x) => x.root === root && x.serverID === server.id)
          if (match) {
            result.push(match)
            continue
          }
          // Join an in-flight spawn for the same key instead of double-spawning.
          const inflight = s.spawning.get(root + server.id)
          if (inflight) {
            const client = await inflight
            if (!client) continue
            result.push(client)
            continue
          }
          const task = schedule(server, root, root + server.id)
          s.spawning.set(root + server.id, task)
          // Intentionally not awaited: clears the in-flight entry once settled.
          void task.finally(() => {
            if (s.spawning.get(root + server.id) === task) {
              s.spawning.delete(root + server.id)
            }
          })
          const client = await task
          if (!client) continue
          result.push(client)
          // Fire-and-forget notification that the client set changed.
          void Bus.publish(Event.Updated, {})
        }
        return result
      })
    })
    // Run `fn` against every client applicable to `file`, in parallel.
    const run = Effect.fnUntraced(function* <T>(file: string, fn: (client: LSPClient.Info) => Promise<T>) {
      const clients = yield* getClients(file)
      return yield* Effect.promise(() => Promise.all(clients.map((x) => fn(x))))
    })
    // Run `fn` against every currently-connected client, in parallel.
    const runAll = Effect.fnUntraced(function* <T>(fn: (client: LSPClient.Info) => Promise<T>) {
      const s = yield* InstanceState.get(state)
      return yield* Effect.promise(() => Promise.all(s.clients.map((x) => fn(x))))
    })
    // Forces state construction (and therefore server-registry resolution).
    const init = Effect.fn("LSP.init")(function* () {
      yield* InstanceState.get(state)
    })
    const status = Effect.fn("LSP.status")(function* () {
      const s = yield* InstanceState.get(state)
      const result: Status[] = []
      for (const client of s.clients) {
        result.push({
          id: client.serverID,
          // NOTE(review): assumes the client's server is still registered in
          // s.servers — confirm servers are never removed after clients spawn.
          name: s.servers[client.serverID].id,
          root: path.relative(Instance.directory, client.root),
          status: "connected",
        })
      }
      return result
    })
    // True when at least one enabled server could serve `file`; does not spawn.
    const hasClients = Effect.fn("LSP.hasClients")(function* (file: string) {
      const s = yield* InstanceState.get(state)
      return yield* Effect.promise(async () => {
        const extension = path.parse(file).ext || file
        for (const server of Object.values(s.servers)) {
          if (server.extensions.length && !server.extensions.includes(extension)) continue
          const root = await server.root(file)
          if (!root) continue
          if (s.broken.has(root + server.id)) continue
          return true
        }
        return false
      })
    })
    // Opens `input` on all applicable clients; optionally waits for the
    // resulting diagnostics push before returning.
    const touchFile = Effect.fn("LSP.touchFile")(function* (input: string, waitForDiagnostics?: boolean) {
      log.info("touching file", { file: input })
      const clients = yield* getClients(input)
      yield* Effect.promise(() =>
        Promise.all(
          clients.map(async (client) => {
            // Register the waiter before opening so the notification isn't missed.
            const wait = waitForDiagnostics ? client.waitForDiagnostics({ path: input }) : Promise.resolve()
            await client.notify.open({ path: input })
            return wait
          }),
        ).catch((err) => {
          log.error("failed to touch file", { err, file: input })
        }),
      )
    })
    // Merges every client's diagnostics into one path-keyed map.
    const diagnostics = Effect.fn("LSP.diagnostics")(function* () {
      const results: Record<string, LSPClient.Diagnostic[]> = {}
      const all = yield* runAll(async (client) => client.diagnostics)
      for (const result of all) {
        for (const [p, diags] of result.entries()) {
          const arr = results[p] || []
          arr.push(...diags)
          results[p] = arr
        }
      }
      return results
    })
    const hover = Effect.fn("LSP.hover")(function* (input: LocInput) {
      return yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/hover", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => null),
      )
    })
    const definition = Effect.fn("LSP.definition")(function* (input: LocInput) {
      const results = yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/definition", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => null),
      )
      return results.flat().filter(Boolean)
    })
    const references = Effect.fn("LSP.references")(function* (input: LocInput) {
      const results = yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/references", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
            context: { includeDeclaration: true },
          })
          .catch(() => []),
      )
      return results.flat().filter(Boolean)
    })
    const implementation = Effect.fn("LSP.implementation")(function* (input: LocInput) {
      const results = yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/implementation", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => null),
      )
      return results.flat().filter(Boolean)
    })
    const documentSymbol = Effect.fn("LSP.documentSymbol")(function* (uri: string) {
      const file = fileURLToPath(uri)
      const results = yield* run(file, (client) =>
        client.connection.sendRequest("textDocument/documentSymbol", { textDocument: { uri } }).catch(() => []),
      )
      return (results.flat() as (LSP.DocumentSymbol | LSP.Symbol)[]).filter(Boolean)
    })
    // Queries all clients; keeps only definition-like symbol kinds and caps
    // each client's contribution at 10 results.
    const workspaceSymbol = Effect.fn("LSP.workspaceSymbol")(function* (query: string) {
      const results = yield* runAll((client) =>
        client.connection
          .sendRequest("workspace/symbol", { query })
          .then((result: any) => result.filter((x: LSP.Symbol) => kinds.includes(x.kind)))
          .then((result: any) => result.slice(0, 10))
          .catch(() => []),
      )
      return results.flat() as LSP.Symbol[]
    })
    const prepareCallHierarchy = Effect.fn("LSP.prepareCallHierarchy")(function* (input: LocInput) {
      const results = yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/prepareCallHierarchy", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => []),
      )
      return results.flat().filter(Boolean)
    })
    // Shared driver for incoming/outgoing calls: prepare the hierarchy item at
    // the location, then query `direction` with the first item (per LSP flow).
    const callHierarchyRequest = Effect.fnUntraced(function* (
      input: LocInput,
      direction: "callHierarchy/incomingCalls" | "callHierarchy/outgoingCalls",
    ) {
      const results = yield* run(input.file, async (client) => {
        const items = (await client.connection
          .sendRequest("textDocument/prepareCallHierarchy", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => [])) as any[]
        if (!items?.length) return []
        return client.connection.sendRequest(direction, { item: items[0] }).catch(() => [])
      })
      return results.flat().filter(Boolean)
    })
    const incomingCalls = Effect.fn("LSP.incomingCalls")(function* (input: LocInput) {
      return yield* callHierarchyRequest(input, "callHierarchy/incomingCalls")
    })
    const outgoingCalls = Effect.fn("LSP.outgoingCalls")(function* (input: LocInput) {
      return yield* callHierarchyRequest(input, "callHierarchy/outgoingCalls")
    })
    return Service.of({
      init,
      status,
      hasClients,
      touchFile,
      diagnostics,
      hover,
      definition,
      references,
      implementation,
      documentSymbol,
      workspaceSymbol,
      prepareCallHierarchy,
      incomingCalls,
      outgoingCalls,
    })
  }),
)
// `layer` with its Config dependency pre-wired.
export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer))
// Plain-text rendering of LSP diagnostics for inclusion in model/tool output.
export namespace Diagnostic {
  const MAX_PER_FILE = 20
  // Render one diagnostic as "SEVERITY [line:col] message" (1-based positions;
  // missing severity is treated as ERROR).
  export function pretty(diagnostic: LSPClient.Diagnostic) {
    const labels = ["ERROR", "WARN", "INFO", "HINT"]
    const severity = labels[(diagnostic.severity || 1) - 1]
    const { line, character } = diagnostic.range.start
    return `${severity} [${line + 1}:${character + 1}] ${diagnostic.message}`
  }
  // Render a <diagnostics> block for the error-severity issues of one file,
  // truncated to MAX_PER_FILE entries. Empty string when there are no errors.
  export function report(file: string, issues: LSPClient.Diagnostic[]) {
    const errors = issues.filter((item) => item.severity === 1)
    if (!errors.length) return ""
    const shown = errors.slice(0, MAX_PER_FILE).map(pretty).join("\n")
    const hidden = errors.length - MAX_PER_FILE
    const tail = hidden > 0 ? `\n... and ${hidden} more` : ""
    return `<diagnostics file="${file}">\n${shown}${tail}\n</diagnostics>`
  }
}
}
export * as LSP from "./lsp"
export * as LSPClient from "./client"
export * as LSPServer from "./server"

View File

@@ -0,0 +1,535 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import { Log } from "../util"
import * as LSPClient from "./client"
import path from "path"
import { pathToFileURL, fileURLToPath } from "url"
import * as LSPServer from "./server"
import z from "zod"
import { Config } from "../config"
import { Instance } from "../project/instance"
import { Flag } from "@/flag/flag"
import { Process } from "../util"
import { spawn as lspspawn } from "./launch"
import { Effect, Layer, Context } from "effect"
import { InstanceState } from "@/effect"
const log = Log.create({ service: "lsp" })
// Bus events emitted by this module; Updated is published after a new client connects.
export const Event = {
  Updated: BusEvent.define("lsp.updated", z.object({})),
}
// LSP text range (start/end as line+character). Positions are zero-based;
// Diagnostic.pretty converts to 1-based for display. JSON-schema ref: "Range".
export const Range = z
  .object({
    start: z.object({
      line: z.number(),
      character: z.number(),
    }),
    end: z.object({
      line: z.number(),
      character: z.number(),
    }),
  })
  .meta({
    ref: "Range",
  })
export type Range = z.infer<typeof Range>
// Workspace symbol as returned by "workspace/symbol": name, numeric
// SymbolKind, and its location (uri + range). JSON-schema ref: "Symbol".
export const Symbol = z
  .object({
    name: z.string(),
    kind: z.number(),
    location: z.object({
      uri: z.string(),
      range: Range,
    }),
  })
  .meta({
    ref: "Symbol",
  })
export type Symbol = z.infer<typeof Symbol>
// Per-document symbol as returned by "textDocument/documentSymbol": full range
// plus the selectionRange of the identifier. JSON-schema ref: "DocumentSymbol".
export const DocumentSymbol = z
  .object({
    name: z.string(),
    detail: z.string().optional(),
    kind: z.number(),
    range: Range,
    selectionRange: Range,
  })
  .meta({
    ref: "DocumentSymbol",
  })
export type DocumentSymbol = z.infer<typeof DocumentSymbol>
// Connection status of one spawned LSP client. `root` is relative to the
// instance directory. NOTE(review): only "connected" is produced by status()
// in this file; confirm where "error" is emitted. JSON-schema ref: "LSPStatus".
export const Status = z
  .object({
    id: z.string(),
    name: z.string(),
    root: z.string(),
    status: z.union([z.literal("connected"), z.literal("error")]),
  })
  .meta({
    ref: "LSPStatus",
  })
export type Status = z.infer<typeof Status>
// Numeric SymbolKind codes from the LSP specification; values are part of the
// wire protocol and must not be renumbered.
enum SymbolKind {
  File = 1,
  Module = 2,
  Namespace = 3,
  Package = 4,
  Class = 5,
  Method = 6,
  Property = 7,
  Field = 8,
  Constructor = 9,
  Enum = 10,
  Interface = 11,
  Function = 12,
  Variable = 13,
  Constant = 14,
  String = 15,
  Number = 16,
  Boolean = 17,
  Array = 18,
  Object = 19,
  Key = 20,
  Null = 21,
  EnumMember = 22,
  Struct = 23,
  Event = 24,
  Operator = 25,
  TypeParameter = 26,
}
// Symbol kinds surfaced by workspaceSymbol: definition-like symbols only
// (classes, functions, etc.); data/literal kinds are filtered out.
const kinds = [
  SymbolKind.Class,
  SymbolKind.Function,
  SymbolKind.Method,
  SymbolKind.Interface,
  SymbolKind.Variable,
  SymbolKind.Constant,
  SymbolKind.Struct,
  SymbolKind.Enum,
]
// "ty" and "pyright" are mutually exclusive Python servers: when the
// experimental OPENCODE_EXPERIMENTAL_LSP_TY flag is on, pyright is removed in
// favor of ty; otherwise ty is removed. Mutates `servers` in place.
const filterExperimentalServers = (servers: Record<string, LSPServer.Info>) => {
  if (!Flag.OPENCODE_EXPERIMENTAL_LSP_TY) {
    if (servers["ty"]) {
      delete servers["ty"]
    }
    return
  }
  if (servers["pyright"]) {
    log.info("LSP server pyright is disabled because OPENCODE_EXPERIMENTAL_LSP_TY is enabled")
    delete servers["pyright"]
  }
}
// Position input shared by all location-based LSP queries (zero-based line/character).
type LocInput = { file: string; line: number; character: number }
// Mutable per-instance state held behind InstanceState.
interface State {
  // Connected clients, at most one per (root, serverID) pair.
  clients: LSPClient.Info[]
  // Enabled server registry after config resolution, keyed by server id.
  servers: Record<string, LSPServer.Info>
  // Keys (root + serverID) that failed to spawn/initialize; never retried.
  broken: Set<string>
  // In-flight spawns keyed by (root + serverID), used to dedupe concurrent requests.
  spawning: Map<string, Promise<LSPClient.Info | undefined>>
}
// Public surface of the LSP facade. Location-based queries fan out to every
// client whose server matches the file and aggregate the responses.
export interface Interface {
  // Forces state construction (spawning happens lazily per file).
  readonly init: () => Effect.Effect<void>
  readonly status: () => Effect.Effect<Status[]>
  // True when at least one enabled server could serve this file; does not spawn.
  readonly hasClients: (file: string) => Effect.Effect<boolean>
  // Opens the file on all applicable clients; optionally waits for diagnostics.
  readonly touchFile: (input: string, waitForDiagnostics?: boolean) => Effect.Effect<void>
  // All known diagnostics merged across clients, keyed by file path.
  readonly diagnostics: () => Effect.Effect<Record<string, LSPClient.Diagnostic[]>>
  readonly hover: (input: LocInput) => Effect.Effect<any>
  readonly definition: (input: LocInput) => Effect.Effect<any[]>
  readonly references: (input: LocInput) => Effect.Effect<any[]>
  readonly implementation: (input: LocInput) => Effect.Effect<any[]>
  readonly documentSymbol: (uri: string) => Effect.Effect<(DocumentSymbol | Symbol)[]>
  readonly workspaceSymbol: (query: string) => Effect.Effect<Symbol[]>
  readonly prepareCallHierarchy: (input: LocInput) => Effect.Effect<any[]>
  readonly incomingCalls: (input: LocInput) => Effect.Effect<any[]>
  readonly outgoingCalls: (input: LocInput) => Effect.Effect<any[]>
}
// Effect service tag for the LSP facade; implemented by `layer` below.
export class Service extends Context.Service<Service, Interface>()("@opencode/LSP") {}
// Live implementation of the LSP facade. Builds the enabled-server registry
// from config, lazily spawns one client per (root, server) pair as files are
// touched, and fans location-based queries out to every applicable client.
//
// Fix: the fire-and-forget promises in getClients (`task.finally(...)` and
// `Bus.publish(...)`) are now explicitly discarded with `void`, marking them
// intentionally un-awaited (floating-promise hygiene, and consistent with the
// other copy of this module in the tree).
export const layer = Layer.effect(
  Service,
  Effect.gen(function* () {
    const config = yield* Config.Service
    // Per-instance lazy state: computed from config on first access; all
    // spawned clients are shut down when the instance scope closes.
    const state = yield* InstanceState.make<State>(
      Effect.fn("LSP.state")(function* () {
        const cfg = yield* config.get()
        const servers: Record<string, LSPServer.Info> = {}
        if (cfg.lsp === false) {
          log.info("all LSPs are disabled")
        } else {
          // Start from the built-in catalog, then apply user config overrides.
          for (const server of Object.values(LSPServer)) {
            servers[server.id] = server
          }
          filterExperimentalServers(servers)
          for (const [name, item] of Object.entries(cfg.lsp ?? {})) {
            const existing = servers[name]
            if (item.disabled) {
              log.info(`LSP server ${name} is disabled`)
              delete servers[name]
              continue
            }
            // User-configured server: spawn the configured command, falling
            // back to built-in root/extensions when not overridden.
            servers[name] = {
              ...existing,
              id: name,
              root: existing?.root ?? (async () => Instance.directory),
              extensions: item.extensions ?? existing?.extensions ?? [],
              spawn: async (root) => ({
                process: lspspawn(item.command[0], item.command.slice(1), {
                  cwd: root,
                  env: { ...process.env, ...item.env },
                }),
                initialization: item.initialization,
              }),
            }
          }
          log.info("enabled LSP servers", {
            serverIds: Object.values(servers)
              .map((server) => server.id)
              .join(", "),
          })
        }
        const s: State = {
          clients: [],
          servers,
          broken: new Set(),
          spawning: new Map(),
        }
        yield* Effect.addFinalizer(() =>
          Effect.promise(async () => {
            await Promise.all(s.clients.map((client) => client.shutdown()))
          }),
        )
        return s
      }),
    )
    // Resolve (and lazily spawn) every client applicable to `file`: extension
    // match -> server-specific root lookup -> dedupe against live clients and
    // in-flight spawns, skipping (root, server) pairs already marked broken.
    const getClients = Effect.fnUntraced(function* (file: string) {
      if (!Instance.containsPath(file)) return [] as LSPClient.Info[]
      const s = yield* InstanceState.get(state)
      return yield* Effect.promise(async () => {
        const extension = path.parse(file).ext || file
        const result: LSPClient.Info[] = []
        // Spawn + initialize one server; marks `key` broken on any failure so
        // the same (root, server) pair is never retried for this instance.
        async function schedule(server: LSPServer.Info, root: string, key: string) {
          const handle = await server
            .spawn(root)
            .then((value) => {
              if (!value) s.broken.add(key)
              return value
            })
            .catch((err) => {
              s.broken.add(key)
              log.error(`Failed to spawn LSP server ${server.id}`, { error: err })
              return undefined
            })
          if (!handle) return undefined
          log.info("spawned lsp server", { serverID: server.id, root })
          const client = await LSPClient.create({
            serverID: server.id,
            server: handle,
            root,
          }).catch(async (err) => {
            s.broken.add(key)
            await Process.stop(handle.process)
            log.error(`Failed to initialize LSP client ${server.id}`, { error: err })
            return undefined
          })
          if (!client) return undefined
          // Lost a race with a concurrent spawn for the same root: keep the
          // existing client and tear down the duplicate process.
          const existing = s.clients.find((x) => x.root === root && x.serverID === server.id)
          if (existing) {
            await Process.stop(handle.process)
            return existing
          }
          s.clients.push(client)
          return client
        }
        for (const server of Object.values(s.servers)) {
          if (server.extensions.length && !server.extensions.includes(extension)) continue
          const root = await server.root(file)
          if (!root) continue
          if (s.broken.has(root + server.id)) continue
          const match = s.clients.find((x) => x.root === root && x.serverID === server.id)
          if (match) {
            result.push(match)
            continue
          }
          // Join an in-flight spawn for the same key instead of double-spawning.
          const inflight = s.spawning.get(root + server.id)
          if (inflight) {
            const client = await inflight
            if (!client) continue
            result.push(client)
            continue
          }
          const task = schedule(server, root, root + server.id)
          s.spawning.set(root + server.id, task)
          // Intentionally not awaited: clears the in-flight entry once settled.
          // `schedule` never rejects (all failure paths resolve undefined).
          void task.finally(() => {
            if (s.spawning.get(root + server.id) === task) {
              s.spawning.delete(root + server.id)
            }
          })
          const client = await task
          if (!client) continue
          result.push(client)
          // Fire-and-forget notification that the client set changed.
          void Bus.publish(Event.Updated, {})
        }
        return result
      })
    })
    // Run `fn` against every client applicable to `file`, in parallel.
    const run = Effect.fnUntraced(function* <T>(file: string, fn: (client: LSPClient.Info) => Promise<T>) {
      const clients = yield* getClients(file)
      return yield* Effect.promise(() => Promise.all(clients.map((x) => fn(x))))
    })
    // Run `fn` against every currently-connected client, in parallel.
    const runAll = Effect.fnUntraced(function* <T>(fn: (client: LSPClient.Info) => Promise<T>) {
      const s = yield* InstanceState.get(state)
      return yield* Effect.promise(() => Promise.all(s.clients.map((x) => fn(x))))
    })
    // Forces state construction (and therefore server-registry resolution).
    const init = Effect.fn("LSP.init")(function* () {
      yield* InstanceState.get(state)
    })
    const status = Effect.fn("LSP.status")(function* () {
      const s = yield* InstanceState.get(state)
      const result: Status[] = []
      for (const client of s.clients) {
        result.push({
          id: client.serverID,
          // NOTE(review): assumes the client's server is still registered in
          // s.servers — confirm servers are never removed after clients spawn.
          name: s.servers[client.serverID].id,
          root: path.relative(Instance.directory, client.root),
          status: "connected",
        })
      }
      return result
    })
    // True when at least one enabled server could serve `file`; does not spawn.
    const hasClients = Effect.fn("LSP.hasClients")(function* (file: string) {
      const s = yield* InstanceState.get(state)
      return yield* Effect.promise(async () => {
        const extension = path.parse(file).ext || file
        for (const server of Object.values(s.servers)) {
          if (server.extensions.length && !server.extensions.includes(extension)) continue
          const root = await server.root(file)
          if (!root) continue
          if (s.broken.has(root + server.id)) continue
          return true
        }
        return false
      })
    })
    // Opens `input` on all applicable clients; optionally waits for the
    // resulting diagnostics push before returning.
    const touchFile = Effect.fn("LSP.touchFile")(function* (input: string, waitForDiagnostics?: boolean) {
      log.info("touching file", { file: input })
      const clients = yield* getClients(input)
      yield* Effect.promise(() =>
        Promise.all(
          clients.map(async (client) => {
            // Register the waiter before opening so the notification isn't missed.
            const wait = waitForDiagnostics ? client.waitForDiagnostics({ path: input }) : Promise.resolve()
            await client.notify.open({ path: input })
            return wait
          }),
        ).catch((err) => {
          log.error("failed to touch file", { err, file: input })
        }),
      )
    })
    // Merges every client's diagnostics into one path-keyed map.
    const diagnostics = Effect.fn("LSP.diagnostics")(function* () {
      const results: Record<string, LSPClient.Diagnostic[]> = {}
      const all = yield* runAll(async (client) => client.diagnostics)
      for (const result of all) {
        for (const [p, diags] of result.entries()) {
          const arr = results[p] || []
          arr.push(...diags)
          results[p] = arr
        }
      }
      return results
    })
    const hover = Effect.fn("LSP.hover")(function* (input: LocInput) {
      return yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/hover", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => null),
      )
    })
    const definition = Effect.fn("LSP.definition")(function* (input: LocInput) {
      const results = yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/definition", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => null),
      )
      return results.flat().filter(Boolean)
    })
    const references = Effect.fn("LSP.references")(function* (input: LocInput) {
      const results = yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/references", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
            context: { includeDeclaration: true },
          })
          .catch(() => []),
      )
      return results.flat().filter(Boolean)
    })
    const implementation = Effect.fn("LSP.implementation")(function* (input: LocInput) {
      const results = yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/implementation", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => null),
      )
      return results.flat().filter(Boolean)
    })
    const documentSymbol = Effect.fn("LSP.documentSymbol")(function* (uri: string) {
      const file = fileURLToPath(uri)
      const results = yield* run(file, (client) =>
        client.connection.sendRequest("textDocument/documentSymbol", { textDocument: { uri } }).catch(() => []),
      )
      return (results.flat() as (DocumentSymbol | Symbol)[]).filter(Boolean)
    })
    // Queries all clients; keeps only definition-like symbol kinds and caps
    // each client's contribution at 10 results.
    const workspaceSymbol = Effect.fn("LSP.workspaceSymbol")(function* (query: string) {
      const results = yield* runAll((client) =>
        client.connection
          .sendRequest("workspace/symbol", { query })
          .then((result: any) => result.filter((x: Symbol) => kinds.includes(x.kind)))
          .then((result: any) => result.slice(0, 10))
          .catch(() => []),
      )
      return results.flat() as Symbol[]
    })
    const prepareCallHierarchy = Effect.fn("LSP.prepareCallHierarchy")(function* (input: LocInput) {
      const results = yield* run(input.file, (client) =>
        client.connection
          .sendRequest("textDocument/prepareCallHierarchy", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => []),
      )
      return results.flat().filter(Boolean)
    })
    // Shared driver for incoming/outgoing calls: prepare the hierarchy item at
    // the location, then query `direction` with the first item (per LSP flow).
    const callHierarchyRequest = Effect.fnUntraced(function* (
      input: LocInput,
      direction: "callHierarchy/incomingCalls" | "callHierarchy/outgoingCalls",
    ) {
      const results = yield* run(input.file, async (client) => {
        const items = (await client.connection
          .sendRequest("textDocument/prepareCallHierarchy", {
            textDocument: { uri: pathToFileURL(input.file).href },
            position: { line: input.line, character: input.character },
          })
          .catch(() => [])) as any[]
        if (!items?.length) return []
        return client.connection.sendRequest(direction, { item: items[0] }).catch(() => [])
      })
      return results.flat().filter(Boolean)
    })
    const incomingCalls = Effect.fn("LSP.incomingCalls")(function* (input: LocInput) {
      return yield* callHierarchyRequest(input, "callHierarchy/incomingCalls")
    })
    const outgoingCalls = Effect.fn("LSP.outgoingCalls")(function* (input: LocInput) {
      return yield* callHierarchyRequest(input, "callHierarchy/outgoingCalls")
    })
    return Service.of({
      init,
      status,
      hasClients,
      touchFile,
      diagnostics,
      hover,
      definition,
      references,
      implementation,
      documentSymbol,
      workspaceSymbol,
      prepareCallHierarchy,
      incomingCalls,
      outgoingCalls,
    })
  }),
)
// `layer` with its Config dependency pre-wired.
export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer))
// Plain-text rendering of LSP diagnostics for inclusion in model/tool output.
export namespace Diagnostic {
  const MAX_PER_FILE = 20
  // Render one diagnostic as "SEVERITY [line:col] message" (1-based positions;
  // missing severity is treated as ERROR).
  export function pretty(diagnostic: LSPClient.Diagnostic) {
    const labels = ["ERROR", "WARN", "INFO", "HINT"]
    const severity = labels[(diagnostic.severity || 1) - 1]
    const { line, character } = diagnostic.range.start
    return `${severity} [${line + 1}:${character + 1}] ${diagnostic.message}`
  }
  // Render a <diagnostics> block for the error-severity issues of one file,
  // truncated to MAX_PER_FILE entries. Empty string when there are no errors.
  export function report(file: string, issues: LSPClient.Diagnostic[]) {
    const errors = issues.filter((item) => item.severity === 1)
    if (!errors.length) return ""
    const shown = errors.slice(0, MAX_PER_FILE).map(pretty).join("\n")
    const hidden = errors.length - MAX_PER_FILE
    const tail = hidden > 0 ? `\n... and ${hidden} more` : ""
    return `<diagnostics file="${file}">\n${shown}${tail}\n</diagnostics>`
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -2,5 +2,5 @@ export { Config } from "./config"
export { Server } from "./server/server"
export { bootstrap } from "./cli/bootstrap"
export { Log } from "./util"
export { Database } from "./storage/db"
export { JsonMigration } from "./storage/json-migration"
export { Database } from "./storage"
export { JsonMigration } from "./storage"

View File

@@ -5,7 +5,7 @@ import { InstanceState } from "@/effect"
import { ProjectID } from "@/project/schema"
import { MessageID, SessionID } from "@/session/schema"
import { PermissionTable } from "@/session/session.sql"
import { Database, eq } from "@/storage/db"
import { Database, eq } from "@/storage"
import { zod } from "@/util/effect-zod"
import { Log } from "@/util"
import { withStatics } from "@/util/schema"

View File

@@ -7,7 +7,7 @@ import {
printParseErrorCode,
} from "jsonc-parser"
import { ConfigPaths } from "@/config/paths"
import { ConfigPaths } from "@/config"
import { Global } from "@/global"
import { Filesystem } from "@/util"
import { Flock } from "@opencode-ai/shared/util/flock"

View File

@@ -3,14 +3,14 @@ import { Format } from "../format"
import { LSP } from "../lsp"
import { File } from "../file"
import { Snapshot } from "../snapshot"
import { Project } from "./project"
import { Vcs } from "./vcs"
import * as Project from "./project"
import * as Vcs from "./vcs"
import { Bus } from "../bus"
import { Command } from "../command"
import { Instance } from "./instance"
import { Log } from "@/util"
import { FileWatcher } from "@/file/watcher"
import { ShareNext } from "@/share/share-next"
import { FileWatcher } from "@/file"
import { ShareNext } from "@/share"
import * as Effect from "effect/Effect"
export const InstanceBootstrap = Effect.gen(function* () {

View File

@@ -0,0 +1,2 @@
export * as Vcs from "./vcs"
export * as Project from "./project"

View File

@@ -5,7 +5,7 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { iife } from "@/util/iife"
import { Log } from "@/util"
import { LocalContext } from "../util"
import { Project } from "./project"
import * as Project from "./project"
import { WorkspaceContext } from "@/control-plane/workspace-context"
export interface InstanceContext {

View File

@@ -1,5 +1,5 @@
import z from "zod"
import { and, Database, eq } from "../storage/db"
import { and, Database, eq } from "../storage"
import { ProjectTable } from "./project.sql"
import { SessionTable } from "../session/session.sql"
import { Log } from "../util"
@@ -14,474 +14,469 @@ import { NodePath } from "@effect/platform-node"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner"
export namespace Project {
const log = Log.create({ service: "project" })
const log = Log.create({ service: "project" })
export const Info = z
.object({
id: ProjectID.zod,
worktree: z.string(),
vcs: z.literal("git").optional(),
name: z.string().optional(),
icon: z
.object({
url: z.string().optional(),
override: z.string().optional(),
color: z.string().optional(),
})
.optional(),
commands: z
.object({
start: z.string().optional().describe("Startup script to run when creating a new workspace (worktree)"),
})
.optional(),
time: z.object({
created: z.number(),
updated: z.number(),
initialized: z.number().optional(),
}),
sandboxes: z.array(z.string()),
})
.meta({
ref: "Project",
})
export type Info = z.infer<typeof Info>
export const Event = {
Updated: BusEvent.define("project.updated", Info),
}
type Row = typeof ProjectTable.$inferSelect
export function fromRow(row: Row): Info {
const icon =
row.icon_url || row.icon_color
? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined }
: undefined
return {
id: row.id,
worktree: row.worktree,
vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined,
name: row.name ?? undefined,
icon,
time: {
created: row.time_created,
updated: row.time_updated,
initialized: row.time_initialized ?? undefined,
},
sandboxes: row.sandboxes,
commands: row.commands ?? undefined,
}
}
export const UpdateInput = z.object({
projectID: ProjectID.zod,
export const Info = z
.object({
id: ProjectID.zod,
worktree: z.string(),
vcs: z.literal("git").optional(),
name: z.string().optional(),
icon: Info.shape.icon.optional(),
commands: Info.shape.commands.optional(),
icon: z
.object({
url: z.string().optional(),
override: z.string().optional(),
color: z.string().optional(),
})
.optional(),
commands: z
.object({
start: z.string().optional().describe("Startup script to run when creating a new workspace (worktree)"),
})
.optional(),
time: z.object({
created: z.number(),
updated: z.number(),
initialized: z.number().optional(),
}),
sandboxes: z.array(z.string()),
})
export type UpdateInput = z.infer<typeof UpdateInput>
.meta({
ref: "Project",
})
export type Info = z.infer<typeof Info>
// ---------------------------------------------------------------------------
// Effect service
// ---------------------------------------------------------------------------
export const Event = {
Updated: BusEvent.define("project.updated", Info),
}
export interface Interface {
readonly fromDirectory: (directory: string) => Effect.Effect<{ project: Info; sandbox: string }>
readonly discover: (input: Info) => Effect.Effect<void>
readonly list: () => Effect.Effect<Info[]>
readonly get: (id: ProjectID) => Effect.Effect<Info | undefined>
readonly update: (input: UpdateInput) => Effect.Effect<Info>
readonly initGit: (input: { directory: string; project: Info }) => Effect.Effect<Info>
readonly setInitialized: (id: ProjectID) => Effect.Effect<void>
readonly sandboxes: (id: ProjectID) => Effect.Effect<string[]>
readonly addSandbox: (id: ProjectID, directory: string) => Effect.Effect<void>
readonly removeSandbox: (id: ProjectID, directory: string) => Effect.Effect<void>
type Row = typeof ProjectTable.$inferSelect
export function fromRow(row: Row): Info {
const icon =
row.icon_url || row.icon_color ? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined } : undefined
return {
id: row.id,
worktree: row.worktree,
vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined,
name: row.name ?? undefined,
icon,
time: {
created: row.time_created,
updated: row.time_updated,
initialized: row.time_initialized ?? undefined,
},
sandboxes: row.sandboxes,
commands: row.commands ?? undefined,
}
}
export class Service extends Context.Service<Service, Interface>()("@opencode/Project") {}
export const UpdateInput = z.object({
projectID: ProjectID.zod,
name: z.string().optional(),
icon: Info.shape.icon.optional(),
commands: Info.shape.commands.optional(),
})
export type UpdateInput = z.infer<typeof UpdateInput>
type GitResult = { code: number; text: string; stderr: string }
// ---------------------------------------------------------------------------
// Effect service
// ---------------------------------------------------------------------------
export const layer: Layer.Layer<
Service,
never,
AppFileSystem.Service | Path.Path | ChildProcessSpawner.ChildProcessSpawner
> = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const pathSvc = yield* Path.Path
const spawner = yield* ChildProcessSpawner.ChildProcessSpawner
export interface Interface {
readonly fromDirectory: (directory: string) => Effect.Effect<{ project: Info; sandbox: string }>
readonly discover: (input: Info) => Effect.Effect<void>
readonly list: () => Effect.Effect<Info[]>
readonly get: (id: ProjectID) => Effect.Effect<Info | undefined>
readonly update: (input: UpdateInput) => Effect.Effect<Info>
readonly initGit: (input: { directory: string; project: Info }) => Effect.Effect<Info>
readonly setInitialized: (id: ProjectID) => Effect.Effect<void>
readonly sandboxes: (id: ProjectID) => Effect.Effect<string[]>
readonly addSandbox: (id: ProjectID, directory: string) => Effect.Effect<void>
readonly removeSandbox: (id: ProjectID, directory: string) => Effect.Effect<void>
}
const git = Effect.fnUntraced(
function* (args: string[], opts?: { cwd?: string }) {
const handle = yield* spawner.spawn(
ChildProcess.make("git", args, { cwd: opts?.cwd, extendEnv: true, stdin: "ignore" }),
)
const [text, stderr] = yield* Effect.all(
[Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))],
{ concurrency: 2 },
)
const code = yield* handle.exitCode
return { code, text, stderr } satisfies GitResult
},
Effect.scoped,
Effect.catch(() => Effect.succeed({ code: 1, text: "", stderr: "" } satisfies GitResult)),
export class Service extends Context.Service<Service, Interface>()("@opencode/Project") {}
type GitResult = { code: number; text: string; stderr: string }
export const layer: Layer.Layer<
Service,
never,
AppFileSystem.Service | Path.Path | ChildProcessSpawner.ChildProcessSpawner
> = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const pathSvc = yield* Path.Path
const spawner = yield* ChildProcessSpawner.ChildProcessSpawner
const git = Effect.fnUntraced(
function* (args: string[], opts?: { cwd?: string }) {
const handle = yield* spawner.spawn(
ChildProcess.make("git", args, { cwd: opts?.cwd, extendEnv: true, stdin: "ignore" }),
)
const [text, stderr] = yield* Effect.all(
[Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))],
{ concurrency: 2 },
)
const code = yield* handle.exitCode
return { code, text, stderr } satisfies GitResult
},
Effect.scoped,
Effect.catch(() => Effect.succeed({ code: 1, text: "", stderr: "" } satisfies GitResult)),
)
const db = <T>(fn: (d: Parameters<typeof Database.use>[0] extends (trx: infer D) => any ? D : never) => T) =>
Effect.sync(() => Database.use(fn))
const emitUpdated = (data: Info) =>
Effect.sync(() =>
GlobalBus.emit("event", {
directory: "global",
project: data.id,
payload: { type: Event.Updated.type, properties: data },
}),
)
const db = <T>(fn: (d: Parameters<typeof Database.use>[0] extends (trx: infer D) => any ? D : never) => T) =>
Effect.sync(() => Database.use(fn))
const fakeVcs = Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS)
const emitUpdated = (data: Info) =>
Effect.sync(() =>
GlobalBus.emit("event", {
directory: "global",
project: data.id,
payload: { type: Event.Updated.type, properties: data },
}),
)
const resolveGitPath = (cwd: string, name: string) => {
if (!name) return cwd
name = name.replace(/[\r\n]+$/, "")
if (!name) return cwd
name = AppFileSystem.windowsPath(name)
if (pathSvc.isAbsolute(name)) return pathSvc.normalize(name)
return pathSvc.resolve(cwd, name)
}
const fakeVcs = Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS)
const scope = yield* Scope.Scope
const resolveGitPath = (cwd: string, name: string) => {
if (!name) return cwd
name = name.replace(/[\r\n]+$/, "")
if (!name) return cwd
name = AppFileSystem.windowsPath(name)
if (pathSvc.isAbsolute(name)) return pathSvc.normalize(name)
return pathSvc.resolve(cwd, name)
}
const readCachedProjectId = Effect.fnUntraced(function* (dir: string) {
return yield* fs.readFileString(pathSvc.join(dir, "opencode")).pipe(
Effect.map((x) => x.trim()),
Effect.map(ProjectID.make),
Effect.catch(() => Effect.void),
)
})
const scope = yield* Scope.Scope
const fromDirectory = Effect.fn("Project.fromDirectory")(function* (directory: string) {
log.info("fromDirectory", { directory })
const readCachedProjectId = Effect.fnUntraced(function* (dir: string) {
return yield* fs.readFileString(pathSvc.join(dir, "opencode")).pipe(
Effect.map((x) => x.trim()),
Effect.map(ProjectID.make),
Effect.catch(() => Effect.void),
)
// Phase 1: discover git info
type DiscoveryResult = { id: ProjectID; worktree: string; sandbox: string; vcs: Info["vcs"] }
const data: DiscoveryResult = yield* Effect.gen(function* () {
const dotgitMatches = yield* fs.up({ targets: [".git"], start: directory }).pipe(Effect.orDie)
const dotgit = dotgitMatches[0]
if (!dotgit) {
return {
id: ProjectID.global,
worktree: "/",
sandbox: "/",
vcs: fakeVcs,
}
}
let sandbox = pathSvc.dirname(dotgit)
const gitBinary = yield* Effect.sync(() => which("git"))
let id = yield* readCachedProjectId(dotgit)
if (!gitBinary) {
return {
id: id ?? ProjectID.global,
worktree: sandbox,
sandbox,
vcs: fakeVcs,
}
}
const commonDir = yield* git(["rev-parse", "--git-common-dir"], { cwd: sandbox })
if (commonDir.code !== 0) {
return {
id: id ?? ProjectID.global,
worktree: sandbox,
sandbox,
vcs: fakeVcs,
}
}
const worktree = (() => {
const common = resolveGitPath(sandbox, commonDir.text.trim())
return common === sandbox ? sandbox : pathSvc.dirname(common)
})()
if (id == null) {
id = yield* readCachedProjectId(pathSvc.join(worktree, ".git"))
}
if (!id) {
const revList = yield* git(["rev-list", "--max-parents=0", "HEAD"], { cwd: sandbox })
const roots = revList.text
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted()
id = roots[0] ? ProjectID.make(roots[0]) : undefined
if (id) {
yield* fs.writeFileString(pathSvc.join(worktree, ".git", "opencode"), id).pipe(Effect.ignore)
}
}
if (!id) {
return { id: ProjectID.global, worktree: sandbox, sandbox, vcs: "git" as const }
}
const topLevel = yield* git(["rev-parse", "--show-toplevel"], { cwd: sandbox })
if (topLevel.code !== 0) {
return {
id,
worktree: sandbox,
sandbox,
vcs: fakeVcs,
}
}
sandbox = resolveGitPath(sandbox, topLevel.text.trim())
return { id, sandbox, worktree, vcs: "git" as const }
})
const fromDirectory = Effect.fn("Project.fromDirectory")(function* (directory: string) {
log.info("fromDirectory", { directory })
// Phase 1: discover git info
type DiscoveryResult = { id: ProjectID; worktree: string; sandbox: string; vcs: Info["vcs"] }
const data: DiscoveryResult = yield* Effect.gen(function* () {
const dotgitMatches = yield* fs.up({ targets: [".git"], start: directory }).pipe(Effect.orDie)
const dotgit = dotgitMatches[0]
if (!dotgit) {
return {
id: ProjectID.global,
worktree: "/",
sandbox: "/",
vcs: fakeVcs,
}
// Phase 2: upsert
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, data.id)).get())
const existing = row
? fromRow(row)
: {
id: data.id,
worktree: data.worktree,
vcs: data.vcs,
sandboxes: [] as string[],
time: { created: Date.now(), updated: Date.now() },
}
let sandbox = pathSvc.dirname(dotgit)
const gitBinary = yield* Effect.sync(() => which("git"))
let id = yield* readCachedProjectId(dotgit)
if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) yield* discover(existing).pipe(Effect.ignore, Effect.forkIn(scope))
if (!gitBinary) {
return {
id: id ?? ProjectID.global,
worktree: sandbox,
sandbox,
vcs: fakeVcs,
}
}
const result: Info = {
...existing,
worktree: data.worktree,
vcs: data.vcs,
time: { ...existing.time, updated: Date.now() },
}
if (data.sandbox !== result.worktree && !result.sandboxes.includes(data.sandbox))
result.sandboxes.push(data.sandbox)
result.sandboxes = yield* Effect.forEach(
result.sandboxes,
(s) =>
fs.exists(s).pipe(
Effect.orDie,
Effect.map((exists) => (exists ? s : undefined)),
),
{ concurrency: "unbounded" },
).pipe(Effect.map((arr) => arr.filter((x): x is string => x !== undefined)))
const commonDir = yield* git(["rev-parse", "--git-common-dir"], { cwd: sandbox })
if (commonDir.code !== 0) {
return {
id: id ?? ProjectID.global,
worktree: sandbox,
sandbox,
vcs: fakeVcs,
}
}
const worktree = (() => {
const common = resolveGitPath(sandbox, commonDir.text.trim())
return common === sandbox ? sandbox : pathSvc.dirname(common)
})()
if (id == null) {
id = yield* readCachedProjectId(pathSvc.join(worktree, ".git"))
}
if (!id) {
const revList = yield* git(["rev-list", "--max-parents=0", "HEAD"], { cwd: sandbox })
const roots = revList.text
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted()
id = roots[0] ? ProjectID.make(roots[0]) : undefined
if (id) {
yield* fs.writeFileString(pathSvc.join(worktree, ".git", "opencode"), id).pipe(Effect.ignore)
}
}
if (!id) {
return { id: ProjectID.global, worktree: sandbox, sandbox, vcs: "git" as const }
}
const topLevel = yield* git(["rev-parse", "--show-toplevel"], { cwd: sandbox })
if (topLevel.code !== 0) {
return {
id,
worktree: sandbox,
sandbox,
vcs: fakeVcs,
}
}
sandbox = resolveGitPath(sandbox, topLevel.text.trim())
return { id, sandbox, worktree, vcs: "git" as const }
})
// Phase 2: upsert
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, data.id)).get())
const existing = row
? fromRow(row)
: {
id: data.id,
worktree: data.worktree,
vcs: data.vcs,
sandboxes: [] as string[],
time: { created: Date.now(), updated: Date.now() },
}
if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY)
yield* discover(existing).pipe(Effect.ignore, Effect.forkIn(scope))
const result: Info = {
...existing,
worktree: data.worktree,
vcs: data.vcs,
time: { ...existing.time, updated: Date.now() },
}
if (data.sandbox !== result.worktree && !result.sandboxes.includes(data.sandbox))
result.sandboxes.push(data.sandbox)
result.sandboxes = yield* Effect.forEach(
result.sandboxes,
(s) =>
fs.exists(s).pipe(
Effect.orDie,
Effect.map((exists) => (exists ? s : undefined)),
),
{ concurrency: "unbounded" },
).pipe(Effect.map((arr) => arr.filter((x): x is string => x !== undefined)))
yield* db((d) =>
d
.insert(ProjectTable)
.values({
id: result.id,
yield* db((d) =>
d
.insert(ProjectTable)
.values({
id: result.id,
worktree: result.worktree,
vcs: result.vcs ?? null,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_created: result.time.created,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
commands: result.commands,
})
.onConflictDoUpdate({
target: ProjectTable.id,
set: {
worktree: result.worktree,
vcs: result.vcs ?? null,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_created: result.time.created,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
commands: result.commands,
})
.onConflictDoUpdate({
target: ProjectTable.id,
set: {
worktree: result.worktree,
vcs: result.vcs ?? null,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
commands: result.commands,
},
})
},
})
.run(),
)
if (data.id !== ProjectID.global) {
yield* db((d) =>
d
.update(SessionTable)
.set({ project_id: data.id })
.where(and(eq(SessionTable.project_id, ProjectID.global), eq(SessionTable.directory, data.worktree)))
.run(),
)
}
if (data.id !== ProjectID.global) {
yield* db((d) =>
d
.update(SessionTable)
.set({ project_id: data.id })
.where(and(eq(SessionTable.project_id, ProjectID.global), eq(SessionTable.directory, data.worktree)))
.run(),
)
}
yield* emitUpdated(result)
return { project: result, sandbox: data.sandbox }
})
yield* emitUpdated(result)
return { project: result, sandbox: data.sandbox }
})
const discover = Effect.fn("Project.discover")(function* (input: Info) {
if (input.vcs !== "git") return
if (input.icon?.override) return
if (input.icon?.url) return
const discover = Effect.fn("Project.discover")(function* (input: Info) {
if (input.vcs !== "git") return
if (input.icon?.override) return
if (input.icon?.url) return
const matches = yield* fs
.glob("**/favicon.{ico,png,svg,jpg,jpeg,webp}", {
cwd: input.worktree,
absolute: true,
include: "file",
})
.pipe(Effect.orDie)
const shortest = matches.sort((a, b) => a.length - b.length)[0]
if (!shortest) return
const matches = yield* fs
.glob("**/favicon.{ico,png,svg,jpg,jpeg,webp}", {
cwd: input.worktree,
absolute: true,
include: "file",
const buffer = yield* fs.readFile(shortest).pipe(Effect.orDie)
const base64 = Buffer.from(buffer).toString("base64")
const mime = AppFileSystem.mimeType(shortest)
const url = `data:${mime};base64,${base64}`
yield* update({ projectID: input.id, icon: { url } })
})
const list = Effect.fn("Project.list")(function* () {
return yield* db((d) => d.select().from(ProjectTable).all().map(fromRow))
})
const get = Effect.fn("Project.get")(function* (id: ProjectID) {
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
return row ? fromRow(row) : undefined
})
const update = Effect.fn("Project.update")(function* (input: UpdateInput) {
const result = yield* db((d) =>
d
.update(ProjectTable)
.set({
name: input.name,
icon_url: input.icon?.url,
icon_color: input.icon?.color,
commands: input.commands,
time_updated: Date.now(),
})
.pipe(Effect.orDie)
const shortest = matches.sort((a, b) => a.length - b.length)[0]
if (!shortest) return
.where(eq(ProjectTable.id, input.projectID))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${input.projectID}`)
const data = fromRow(result)
yield* emitUpdated(data)
return data
})
const buffer = yield* fs.readFile(shortest).pipe(Effect.orDie)
const base64 = Buffer.from(buffer).toString("base64")
const mime = AppFileSystem.mimeType(shortest)
const url = `data:${mime};base64,${base64}`
yield* update({ projectID: input.id, icon: { url } })
})
const initGit = Effect.fn("Project.initGit")(function* (input: { directory: string; project: Info }) {
if (input.project.vcs === "git") return input.project
if (!(yield* Effect.sync(() => which("git")))) throw new Error("Git is not installed")
const result = yield* git(["init", "--quiet"], { cwd: input.directory })
if (result.code !== 0) {
throw new Error(result.stderr.trim() || result.text.trim() || "Failed to initialize git repository")
}
const { project } = yield* fromDirectory(input.directory)
return project
})
const list = Effect.fn("Project.list")(function* () {
return yield* db((d) => d.select().from(ProjectTable).all().map(fromRow))
})
const setInitialized = Effect.fn("Project.setInitialized")(function* (id: ProjectID) {
yield* db((d) =>
d.update(ProjectTable).set({ time_initialized: Date.now() }).where(eq(ProjectTable.id, id)).run(),
)
})
const get = Effect.fn("Project.get")(function* (id: ProjectID) {
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
return row ? fromRow(row) : undefined
})
const sandboxes = Effect.fn("Project.sandboxes")(function* (id: ProjectID) {
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return []
const data = fromRow(row)
return yield* Effect.forEach(
data.sandboxes,
(dir) =>
fs.isDir(dir).pipe(
Effect.orDie,
Effect.map((ok) => (ok ? dir : undefined)),
),
{ concurrency: "unbounded" },
).pipe(Effect.map((arr) => arr.filter((x): x is string => x !== undefined)))
})
const update = Effect.fn("Project.update")(function* (input: UpdateInput) {
const result = yield* db((d) =>
d
.update(ProjectTable)
.set({
name: input.name,
icon_url: input.icon?.url,
icon_color: input.icon?.color,
commands: input.commands,
time_updated: Date.now(),
})
.where(eq(ProjectTable.id, input.projectID))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${input.projectID}`)
const data = fromRow(result)
yield* emitUpdated(data)
return data
})
const addSandbox = Effect.fn("Project.addSandbox")(function* (id: ProjectID, directory: string) {
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sboxes = [...row.sandboxes]
if (!sboxes.includes(directory)) sboxes.push(directory)
const result = yield* db((d) =>
d
.update(ProjectTable)
.set({ sandboxes: sboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
yield* emitUpdated(fromRow(result))
})
const initGit = Effect.fn("Project.initGit")(function* (input: { directory: string; project: Info }) {
if (input.project.vcs === "git") return input.project
if (!(yield* Effect.sync(() => which("git")))) throw new Error("Git is not installed")
const result = yield* git(["init", "--quiet"], { cwd: input.directory })
if (result.code !== 0) {
throw new Error(result.stderr.trim() || result.text.trim() || "Failed to initialize git repository")
}
const { project } = yield* fromDirectory(input.directory)
return project
})
const removeSandbox = Effect.fn("Project.removeSandbox")(function* (id: ProjectID, directory: string) {
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sboxes = row.sandboxes.filter((s) => s !== directory)
const result = yield* db((d) =>
d
.update(ProjectTable)
.set({ sandboxes: sboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
yield* emitUpdated(fromRow(result))
})
const setInitialized = Effect.fn("Project.setInitialized")(function* (id: ProjectID) {
yield* db((d) =>
d.update(ProjectTable).set({ time_initialized: Date.now() }).where(eq(ProjectTable.id, id)).run(),
)
})
return Service.of({
fromDirectory,
discover,
list,
get,
update,
initGit,
setInitialized,
sandboxes,
addSandbox,
removeSandbox,
})
}),
)
const sandboxes = Effect.fn("Project.sandboxes")(function* (id: ProjectID) {
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return []
const data = fromRow(row)
return yield* Effect.forEach(
data.sandboxes,
(dir) =>
fs.isDir(dir).pipe(
Effect.orDie,
Effect.map((ok) => (ok ? dir : undefined)),
),
{ concurrency: "unbounded" },
).pipe(Effect.map((arr) => arr.filter((x): x is string => x !== undefined)))
})
export const defaultLayer = layer.pipe(
Layer.provide(CrossSpawnSpawner.defaultLayer),
Layer.provide(AppFileSystem.defaultLayer),
Layer.provide(NodePath.layer),
)
const addSandbox = Effect.fn("Project.addSandbox")(function* (id: ProjectID, directory: string) {
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sboxes = [...row.sandboxes]
if (!sboxes.includes(directory)) sboxes.push(directory)
const result = yield* db((d) =>
d
.update(ProjectTable)
.set({ sandboxes: sboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
yield* emitUpdated(fromRow(result))
})
const removeSandbox = Effect.fn("Project.removeSandbox")(function* (id: ProjectID, directory: string) {
const row = yield* db((d) => d.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) throw new Error(`Project not found: ${id}`)
const sboxes = row.sandboxes.filter((s) => s !== directory)
const result = yield* db((d) =>
d
.update(ProjectTable)
.set({ sandboxes: sboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
yield* emitUpdated(fromRow(result))
})
return Service.of({
fromDirectory,
discover,
list,
get,
update,
initGit,
setInitialized,
sandboxes,
addSandbox,
removeSandbox,
})
}),
export function list() {
return Database.use((db) =>
db
.select()
.from(ProjectTable)
.all()
.map((row) => fromRow(row)),
)
}
export function get(id: ProjectID): Info | undefined {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return undefined
return fromRow(row)
}
export function setInitialized(id: ProjectID) {
Database.use((db) =>
db.update(ProjectTable).set({ time_initialized: Date.now() }).where(eq(ProjectTable.id, id)).run(),
)
export const defaultLayer = layer.pipe(
Layer.provide(CrossSpawnSpawner.defaultLayer),
Layer.provide(AppFileSystem.defaultLayer),
Layer.provide(NodePath.layer),
)
export function list() {
return Database.use((db) =>
db
.select()
.from(ProjectTable)
.all()
.map((row) => fromRow(row)),
)
}
export function get(id: ProjectID): Info | undefined {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return undefined
return fromRow(row)
}
export function setInitialized(id: ProjectID) {
Database.use((db) =>
db.update(ProjectTable).set({ time_initialized: Date.now() }).where(eq(ProjectTable.id, id)).run(),
)
}
}

View File

@@ -5,229 +5,227 @@ import { Bus } from "@/bus"
import { BusEvent } from "@/bus/bus-event"
import { InstanceState } from "@/effect"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { FileWatcher } from "@/file/watcher"
import { FileWatcher } from "@/file"
import { Git } from "@/git"
import { Log } from "@/util"
import { Instance } from "./instance"
import z from "zod"
export namespace Vcs {
const log = Log.create({ service: "vcs" })
const log = Log.create({ service: "vcs" })
const count = (text: string) => {
if (!text) return 0
if (!text.endsWith("\n")) return text.split("\n").length
return text.slice(0, -1).split("\n").length
}
const work = Effect.fnUntraced(function* (fs: AppFileSystem.Interface, cwd: string, file: string) {
const full = path.join(cwd, file)
if (!(yield* fs.exists(full).pipe(Effect.orDie))) return ""
const buf = yield* fs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
if (Buffer.from(buf).includes(0)) return ""
return Buffer.from(buf).toString("utf8")
})
const nums = (list: Git.Stat[]) =>
new Map(list.map((item) => [item.file, { additions: item.additions, deletions: item.deletions }] as const))
const merge = (...lists: Git.Item[][]) => {
const out = new Map<string, Git.Item>()
lists.flat().forEach((item) => {
if (!out.has(item.file)) out.set(item.file, item)
})
return [...out.values()]
}
const files = Effect.fnUntraced(function* (
fs: AppFileSystem.Interface,
git: Git.Interface,
cwd: string,
ref: string | undefined,
list: Git.Item[],
map: Map<string, { additions: number; deletions: number }>,
) {
const base = ref ? yield* git.prefix(cwd) : ""
const patch = (file: string, before: string, after: string) =>
formatPatch(structuredPatch(file, file, before, after, "", "", { context: Number.MAX_SAFE_INTEGER }))
const next = yield* Effect.forEach(
list,
(item) =>
Effect.gen(function* () {
const before = item.status === "added" || !ref ? "" : yield* git.show(cwd, ref, item.file, base)
const after = item.status === "deleted" ? "" : yield* work(fs, cwd, item.file)
const stat = map.get(item.file)
return {
file: item.file,
patch: patch(item.file, before, after),
additions: stat?.additions ?? (item.status === "added" ? count(after) : 0),
deletions: stat?.deletions ?? (item.status === "deleted" ? count(before) : 0),
status: item.status,
} satisfies FileDiff
}),
{ concurrency: 8 },
)
return next.toSorted((a, b) => a.file.localeCompare(b.file))
})
const track = Effect.fnUntraced(function* (
fs: AppFileSystem.Interface,
git: Git.Interface,
cwd: string,
ref: string | undefined,
) {
if (!ref) return yield* files(fs, git, cwd, ref, yield* git.status(cwd), new Map())
const [list, stats] = yield* Effect.all([git.status(cwd), git.stats(cwd, ref)], { concurrency: 2 })
return yield* files(fs, git, cwd, ref, list, nums(stats))
})
const compare = Effect.fnUntraced(function* (
fs: AppFileSystem.Interface,
git: Git.Interface,
cwd: string,
ref: string,
) {
const [list, stats, extra] = yield* Effect.all([git.diff(cwd, ref), git.stats(cwd, ref), git.status(cwd)], {
concurrency: 3,
})
return yield* files(
fs,
git,
cwd,
ref,
merge(
list,
extra.filter((item) => item.code === "??"),
),
nums(stats),
)
})
export const Mode = z.enum(["git", "branch"])
export type Mode = z.infer<typeof Mode>
export const Event = {
BranchUpdated: BusEvent.define(
"vcs.branch.updated",
z.object({
branch: z.string().optional(),
}),
),
}
export const Info = z
.object({
branch: z.string().optional(),
default_branch: z.string().optional(),
})
.meta({
ref: "VcsInfo",
})
export type Info = z.infer<typeof Info>
export const FileDiff = z
.object({
file: z.string(),
patch: z.string(),
additions: z.number(),
deletions: z.number(),
status: z.enum(["added", "deleted", "modified"]).optional(),
})
.meta({
ref: "VcsFileDiff",
})
export type FileDiff = z.infer<typeof FileDiff>
export interface Interface {
readonly init: () => Effect.Effect<void>
readonly branch: () => Effect.Effect<string | undefined>
readonly defaultBranch: () => Effect.Effect<string | undefined>
readonly diff: (mode: Mode) => Effect.Effect<FileDiff[]>
}
interface State {
current: string | undefined
root: Git.Base | undefined
}
export class Service extends Context.Service<Service, Interface>()("@opencode/Vcs") {}
export const layer: Layer.Layer<Service, never, AppFileSystem.Service | Git.Service | Bus.Service> = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const git = yield* Git.Service
const bus = yield* Bus.Service
const state = yield* InstanceState.make<State>(
Effect.fn("Vcs.state")(function* (ctx) {
if (ctx.project.vcs !== "git") {
return { current: undefined, root: undefined }
}
const get = Effect.fnUntraced(function* () {
return yield* git.branch(ctx.directory)
})
const [current, root] = yield* Effect.all([git.branch(ctx.directory), git.defaultBranch(ctx.directory)], {
concurrency: 2,
})
const value = { current, root }
log.info("initialized", { branch: value.current, default_branch: value.root?.name })
yield* bus.subscribe(FileWatcher.Event.Updated).pipe(
Stream.filter((evt) => evt.properties.file.endsWith("HEAD")),
Stream.runForEach((_evt) =>
Effect.gen(function* () {
const next = yield* get()
if (next !== value.current) {
log.info("branch changed", { from: value.current, to: next })
value.current = next
yield* bus.publish(Event.BranchUpdated, { branch: next })
}
}),
),
Effect.forkScoped,
)
return value
}),
)
return Service.of({
init: Effect.fn("Vcs.init")(function* () {
yield* InstanceState.get(state)
}),
branch: Effect.fn("Vcs.branch")(function* () {
return yield* InstanceState.use(state, (x) => x.current)
}),
defaultBranch: Effect.fn("Vcs.defaultBranch")(function* () {
return yield* InstanceState.use(state, (x) => x.root?.name)
}),
diff: Effect.fn("Vcs.diff")(function* (mode: Mode) {
const value = yield* InstanceState.get(state)
if (Instance.project.vcs !== "git") return []
if (mode === "git") {
return yield* track(
fs,
git,
Instance.directory,
(yield* git.hasHead(Instance.directory)) ? "HEAD" : undefined,
)
}
if (!value.root) return []
if (value.current && value.current === value.root.name) return []
const ref = yield* git.mergeBase(Instance.directory, value.root.ref)
if (!ref) return []
return yield* compare(fs, git, Instance.directory, ref)
}),
})
}),
)
export const defaultLayer = layer.pipe(
Layer.provide(Git.defaultLayer),
Layer.provide(AppFileSystem.defaultLayer),
Layer.provide(Bus.layer),
)
const count = (text: string) => {
if (!text) return 0
if (!text.endsWith("\n")) return text.split("\n").length
return text.slice(0, -1).split("\n").length
}
const work = Effect.fnUntraced(function* (fs: AppFileSystem.Interface, cwd: string, file: string) {
const full = path.join(cwd, file)
if (!(yield* fs.exists(full).pipe(Effect.orDie))) return ""
const buf = yield* fs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
if (Buffer.from(buf).includes(0)) return ""
return Buffer.from(buf).toString("utf8")
})
const nums = (list: Git.Stat[]) =>
new Map(list.map((item) => [item.file, { additions: item.additions, deletions: item.deletions }] as const))
const merge = (...lists: Git.Item[][]) => {
const out = new Map<string, Git.Item>()
lists.flat().forEach((item) => {
if (!out.has(item.file)) out.set(item.file, item)
})
return [...out.values()]
}
const files = Effect.fnUntraced(function* (
fs: AppFileSystem.Interface,
git: Git.Interface,
cwd: string,
ref: string | undefined,
list: Git.Item[],
map: Map<string, { additions: number; deletions: number }>,
) {
const base = ref ? yield* git.prefix(cwd) : ""
const patch = (file: string, before: string, after: string) =>
formatPatch(structuredPatch(file, file, before, after, "", "", { context: Number.MAX_SAFE_INTEGER }))
const next = yield* Effect.forEach(
list,
(item) =>
Effect.gen(function* () {
const before = item.status === "added" || !ref ? "" : yield* git.show(cwd, ref, item.file, base)
const after = item.status === "deleted" ? "" : yield* work(fs, cwd, item.file)
const stat = map.get(item.file)
return {
file: item.file,
patch: patch(item.file, before, after),
additions: stat?.additions ?? (item.status === "added" ? count(after) : 0),
deletions: stat?.deletions ?? (item.status === "deleted" ? count(before) : 0),
status: item.status,
} satisfies FileDiff
}),
{ concurrency: 8 },
)
return next.toSorted((a, b) => a.file.localeCompare(b.file))
})
const track = Effect.fnUntraced(function* (
fs: AppFileSystem.Interface,
git: Git.Interface,
cwd: string,
ref: string | undefined,
) {
if (!ref) return yield* files(fs, git, cwd, ref, yield* git.status(cwd), new Map())
const [list, stats] = yield* Effect.all([git.status(cwd), git.stats(cwd, ref)], { concurrency: 2 })
return yield* files(fs, git, cwd, ref, list, nums(stats))
})
const compare = Effect.fnUntraced(function* (
fs: AppFileSystem.Interface,
git: Git.Interface,
cwd: string,
ref: string,
) {
const [list, stats, extra] = yield* Effect.all([git.diff(cwd, ref), git.stats(cwd, ref), git.status(cwd)], {
concurrency: 3,
})
return yield* files(
fs,
git,
cwd,
ref,
merge(
list,
extra.filter((item) => item.code === "??"),
),
nums(stats),
)
})
export const Mode = z.enum(["git", "branch"])
export type Mode = z.infer<typeof Mode>
export const Event = {
BranchUpdated: BusEvent.define(
"vcs.branch.updated",
z.object({
branch: z.string().optional(),
}),
),
}
export const Info = z
.object({
branch: z.string().optional(),
default_branch: z.string().optional(),
})
.meta({
ref: "VcsInfo",
})
export type Info = z.infer<typeof Info>
export const FileDiff = z
.object({
file: z.string(),
patch: z.string(),
additions: z.number(),
deletions: z.number(),
status: z.enum(["added", "deleted", "modified"]).optional(),
})
.meta({
ref: "VcsFileDiff",
})
export type FileDiff = z.infer<typeof FileDiff>
export interface Interface {
readonly init: () => Effect.Effect<void>
readonly branch: () => Effect.Effect<string | undefined>
readonly defaultBranch: () => Effect.Effect<string | undefined>
readonly diff: (mode: Mode) => Effect.Effect<FileDiff[]>
}
interface State {
current: string | undefined
root: Git.Base | undefined
}
export class Service extends Context.Service<Service, Interface>()("@opencode/Vcs") {}
export const layer: Layer.Layer<Service, never, AppFileSystem.Service | Git.Service | Bus.Service> = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const git = yield* Git.Service
const bus = yield* Bus.Service
const state = yield* InstanceState.make<State>(
Effect.fn("Vcs.state")(function* (ctx) {
if (ctx.project.vcs !== "git") {
return { current: undefined, root: undefined }
}
const get = Effect.fnUntraced(function* () {
return yield* git.branch(ctx.directory)
})
const [current, root] = yield* Effect.all([git.branch(ctx.directory), git.defaultBranch(ctx.directory)], {
concurrency: 2,
})
const value = { current, root }
log.info("initialized", { branch: value.current, default_branch: value.root?.name })
yield* bus.subscribe(FileWatcher.Event.Updated).pipe(
Stream.filter((evt) => evt.properties.file.endsWith("HEAD")),
Stream.runForEach((_evt) =>
Effect.gen(function* () {
const next = yield* get()
if (next !== value.current) {
log.info("branch changed", { from: value.current, to: next })
value.current = next
yield* bus.publish(Event.BranchUpdated, { branch: next })
}
}),
),
Effect.forkScoped,
)
return value
}),
)
return Service.of({
init: Effect.fn("Vcs.init")(function* () {
yield* InstanceState.get(state)
}),
branch: Effect.fn("Vcs.branch")(function* () {
return yield* InstanceState.use(state, (x) => x.current)
}),
defaultBranch: Effect.fn("Vcs.defaultBranch")(function* () {
return yield* InstanceState.use(state, (x) => x.root?.name)
}),
diff: Effect.fn("Vcs.diff")(function* (mode: Mode) {
const value = yield* InstanceState.get(state)
if (Instance.project.vcs !== "git") return []
if (mode === "git") {
return yield* track(
fs,
git,
Instance.directory,
(yield* git.hasHead(Instance.directory)) ? "HEAD" : undefined,
)
}
if (!value.root) return []
if (value.current && value.current === value.root.name) return []
const ref = yield* git.mergeBase(Instance.directory, value.root.ref)
if (!ref) return []
return yield* compare(fs, git, Instance.directory, ref)
}),
})
}),
)
export const defaultLayer = layer.pipe(
Layer.provide(Git.defaultLayer),
Layer.provide(AppFileSystem.defaultLayer),
Layer.provide(Bus.layer),
)

View File

@@ -2,7 +2,7 @@ import type { ModelMessage } from "ai"
import { mergeDeep, unique } from "remeda"
import type { JSONSchema7 } from "@ai-sdk/provider"
import type { JSONSchema } from "zod/v4/core"
import type { Provider } from "."
import type * as Provider from "./provider"
import type { ModelsDev } from "./models"
import { iife } from "@/util/iife"
import { Flag } from "@/flag/flag"

View File

@@ -1,6 +1,6 @@
import { resolver } from "hono-openapi"
import z from "zod"
import { NotFoundError } from "../storage/db"
import { NotFoundError } from "../storage"
export const ERRORS = {
400: {

View File

@@ -1,5 +1,5 @@
import type { MiddlewareHandler } from "hono"
import { Database, inArray } from "@/storage/db"
import { Database, inArray } from "@/storage"
import { EventSequenceTable } from "@/sync/event.sql"
import { Workspace } from "@/control-plane/workspace"
import type { WorkspaceID } from "@/control-plane/schema"

View File

@@ -5,7 +5,7 @@ import { ProviderID, ModelID } from "../../provider/schema"
import { ToolRegistry } from "../../tool/registry"
import { Worktree } from "../../worktree"
import { Instance } from "../../project/instance"
import { Project } from "../../project/project"
import { Project } from "../../project"
import { MCP } from "../../mcp"
import { Session } from "../../session"
import { Config } from "../../config"

View File

@@ -4,7 +4,7 @@ import { Effect } from "effect"
import z from "zod"
import { AppRuntime } from "../../effect/app-runtime"
import { File } from "../../file"
import { Ripgrep } from "../../file/ripgrep"
import { Ripgrep } from "../../file"
import { LSP } from "../../lsp"
import { Instance } from "../../project/instance"
import { lazy } from "../../util/lazy"

View File

@@ -3,7 +3,7 @@ import { HttpApiBuilder, HttpApiMiddleware, HttpApiSecurity } from "effect/unsta
import { HttpRouter, HttpServer, HttpServerRequest } from "effect/unstable/http"
import { AppRuntime } from "@/effect/app-runtime"
import { InstanceRef, WorkspaceRef } from "@/effect/instance-ref"
import { Observability } from "@/effect/observability"
import { Observability } from "@/effect"
import { memoMap } from "@/effect/run-service"
import { Flag } from "@/flag/flag"
import { InstanceBootstrap } from "@/project/bootstrap"

View File

@@ -6,7 +6,7 @@ import z from "zod"
import { Format } from "../../format"
import { TuiRoutes } from "./tui"
import { Instance } from "../../project/instance"
import { Vcs } from "../../project/vcs"
import { Vcs } from "../../project"
import { Agent } from "../../agent/agent"
import { Skill } from "../../skill"
import { Global } from "../../global"

View File

@@ -2,7 +2,7 @@ import { Hono } from "hono"
import { describeRoute, validator } from "hono-openapi"
import { resolver } from "hono-openapi"
import { Instance } from "../../project/instance"
import { Project } from "../../project/project"
import { Project } from "../../project"
import z from "zod"
import { ProjectID } from "../../project/schema"
import { errors } from "../error"

View File

@@ -6,7 +6,7 @@ import z from "zod"
import { AppRuntime } from "@/effect/app-runtime"
import { Pty } from "@/pty"
import { PtyID } from "@/pty/schema"
import { NotFoundError } from "../../storage/db"
import { NotFoundError } from "../../storage"
import { errors } from "../error"
export function PtyRoutes(upgradeWebSocket: UpgradeWebSocket) {

View File

@@ -9,7 +9,7 @@ import { SessionPrompt } from "../../session/prompt"
import { SessionRunState } from "@/session/run-state"
import { SessionCompaction } from "../../session/compaction"
import { SessionRevert } from "../../session/revert"
import { SessionShare } from "@/share/session"
import { SessionShare } from "@/share"
import { SessionStatus } from "@/session/status"
import { SessionSummary } from "@/session/summary"
import { Todo } from "../../session/todo"

View File

@@ -2,7 +2,7 @@ import z from "zod"
import { Hono } from "hono"
import { describeRoute, validator, resolver } from "hono-openapi"
import { SyncEvent } from "@/sync"
import { Database, asc, and, not, or, lte, eq } from "@/storage/db"
import { Database, asc, and, not, or, lte, eq } from "@/storage"
import { EventTable } from "@/sync/event.sql"
import { lazy } from "@/util/lazy"
import { Log } from "@/util"

View File

@@ -1,6 +1,6 @@
import { Provider } from "../provider"
import { NamedError } from "@opencode-ai/shared/util/error"
import { NotFoundError } from "../storage/db"
import { NotFoundError } from "../storage"
import { Session } from "../session"
import type { ContentfulStatusCode } from "hono/utils/http-status"
import type { ErrorHandler, MiddlewareHandler } from "hono"

View File

@@ -3,7 +3,7 @@ import sessionProjectors from "../session/projectors"
import { SyncEvent } from "@/sync"
import { Session } from "@/session"
import { SessionTable } from "@/session/session.sql"
import { Database, eq } from "@/storage/db"
import { Database, eq } from "@/storage"
export function initProjectors() {
SyncEvent.init({

View File

@@ -1,6 +1,6 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import { Session } from "."
import * as Session from "./session"
import { SessionID, MessageID, PartID } from "./schema"
import { Provider } from "../provider"
import { MessageV2 } from "./message-v2"
@@ -11,7 +11,7 @@ import { SessionProcessor } from "./processor"
import { Agent } from "@/agent/agent"
import { Plugin } from "@/plugin"
import { Config } from "@/config"
import { NotFoundError } from "@/storage/db"
import { NotFoundError } from "@/storage"
import { ModelID, ProviderID } from "@/provider/schema"
import { Effect, Layer, Context } from "effect"
import { InstanceState } from "@/effect"

View File

@@ -6,7 +6,7 @@ import { APICallError, convertToModelMessages, LoadAPIKeyError, type ModelMessag
import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot"
import { SyncEvent } from "../sync"
import { Database, NotFoundError, and, desc, eq, inArray, lt, or } from "@/storage/db"
import { Database, NotFoundError, and, desc, eq, inArray, lt, or } from "@/storage"
import { MessageTable, PartTable, SessionTable } from "./session.sql"
import { ProviderError } from "@/provider/error"
import { iife } from "@/util/iife"
@@ -15,7 +15,7 @@ import type { SystemError } from "bun"
import type { Provider } from "@/provider"
import { ModelID, ProviderID } from "@/provider/schema"
import { Effect } from "effect"
import { EffectLogger } from "@/effect/logger"
import { EffectLogger } from "@/effect"
/** Error shape thrown by Bun's fetch() when gzip/br decompression fails mid-stream */
interface FetchDecompressionError extends Error {

View File

@@ -6,7 +6,7 @@ import { Config } from "@/config"
import { Permission } from "@/permission"
import { Plugin } from "@/plugin"
import { Snapshot } from "@/snapshot"
import { Session } from "."
import * as Session from "./session"
import { LLM } from "./llm"
import { MessageV2 } from "./message-v2"
import { isOverflow } from "./overflow"

View File

@@ -1,6 +1,6 @@
import { NotFoundError, eq, and } from "../storage/db"
import { NotFoundError, eq, and } from "../storage"
import { SyncEvent } from "@/sync"
import { Session } from "."
import * as Session from "./session"
import { MessageV2 } from "./message-v2"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import { Log } from "../util"

View File

@@ -5,7 +5,7 @@ import { SessionID, MessageID, PartID } from "./schema"
import { MessageV2 } from "./message-v2"
import { Log } from "../util"
import { SessionRevert } from "./revert"
import { Session } from "."
import * as Session from "./session"
import { Agent } from "../agent/agent"
import { Provider } from "../provider"
import { ModelID, ProviderID } from "../provider/schema"
@@ -22,7 +22,7 @@ import MAX_STEPS from "../session/prompt/max-steps.txt"
import { ToolRegistry } from "../tool/registry"
import { MCP } from "../mcp"
import { LSP } from "../lsp"
import { FileTime } from "../file/time"
import { FileTime } from "../file"
import { Flag } from "../flag/flag"
import { ulid } from "ulid"
import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"
@@ -30,7 +30,7 @@ import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner"
import * as Stream from "effect/Stream"
import { Command } from "../command"
import { pathToFileURL, fileURLToPath } from "url"
import { ConfigMarkdown } from "../config/markdown"
import { ConfigMarkdown } from "../config"
import { SessionSummary } from "./summary"
import { NamedError } from "@opencode-ai/shared/util/error"
import { SessionProcessor } from "./processor"
@@ -44,7 +44,7 @@ import { Truncate } from "@/tool/truncate"
import { decodeDataUrl } from "@/util/data-url"
import { Process } from "@/util"
import { Cause, Effect, Exit, Layer, Option, Scope, Context } from "effect"
import { EffectLogger } from "@/effect/logger"
import { EffectLogger } from "@/effect"
import { InstanceState } from "@/effect"
import { TaskTool, type TaskPromptOps } from "@/tool/task"
import { SessionRunState } from "./run-state"

View File

@@ -2,10 +2,10 @@ import z from "zod"
import { Effect, Layer, Context } from "effect"
import { Bus } from "../bus"
import { Snapshot } from "../snapshot"
import { Storage } from "@/storage/storage"
import { Storage } from "@/storage"
import { SyncEvent } from "../sync"
import { Log } from "../util"
import { Session } from "."
import * as Session from "./session"
import { MessageV2 } from "./message-v2"
import { SessionID, MessageID, PartID } from "./schema"
import { SessionRunState } from "./run-state"

View File

@@ -1,7 +1,7 @@
import { InstanceState } from "@/effect"
import { Runner } from "@/effect/runner"
import { Runner } from "@/effect"
import { Effect, Layer, Scope, Context } from "effect"
import { Session } from "."
import * as Session from "./session"
import { MessageV2 } from "./message-v2"
import { SessionID } from "./schema"
import { SessionStatus } from "./status"
@@ -32,7 +32,7 @@ export namespace SessionRunState {
const state = yield* InstanceState.make(
Effect.fn("SessionRunState.state")(function* () {
const scope = yield* Scope.Scope
const runners = new Map<SessionID, Runner<MessageV2.WithParts>>()
const runners = new Map<SessionID, Runner.Runner<MessageV2.WithParts>>()
yield* Effect.addFinalizer(
Effect.fnUntraced(function* () {
yield* Effect.forEach(runners.values(), (runner) => runner.cancel, {

View File

@@ -8,12 +8,12 @@ import { type ProviderMetadata, type LanguageModelUsage } from "ai"
import { Flag } from "../flag/flag"
import { Installation } from "../installation"
import { Database, NotFoundError, eq, and, gte, isNull, desc, like, inArray, lt } from "../storage/db"
import { Database, NotFoundError, eq, and, gte, isNull, desc, like, inArray, lt } from "../storage"
import { SyncEvent } from "../sync"
import type { SQL } from "../storage/db"
import type { SQL } from "../storage"
import { PartTable, SessionTable } from "./session.sql"
import { ProjectTable } from "../project/project.sql"
import { Storage } from "@/storage/storage"
import { Storage } from "@/storage"
import { Log } from "../util"
import { updateSchema } from "../util/update-schema"
import { MessageV2 } from "./message-v2"

View File

@@ -2,8 +2,8 @@ import z from "zod"
import { Effect, Layer, Context } from "effect"
import { Bus } from "@/bus"
import { Snapshot } from "@/snapshot"
import { Storage } from "@/storage/storage"
import { Session } from "."
import { Storage } from "@/storage"
import * as Session from "./session"
import { MessageV2 } from "./message-v2"
import { SessionID, MessageID } from "./schema"

View File

@@ -3,7 +3,7 @@ import { Bus } from "@/bus"
import { SessionID } from "./schema"
import { Effect, Layer, Context } from "effect"
import z from "zod"
import { Database, eq, asc } from "../storage/db"
import { Database, eq, asc } from "../storage"
import { TodoTable } from "./session.sql"
export namespace Todo {

View File

@@ -0,0 +1,2 @@
export * as ShareNext from "./share-next"
export * as SessionShare from "./session"

View File

@@ -4,56 +4,54 @@ import { SyncEvent } from "@/sync"
import { Effect, Layer, Scope, Context } from "effect"
import { Config } from "../config"
import { Flag } from "../flag/flag"
import { ShareNext } from "./share-next"
import * as ShareNext from "./share-next"
export namespace SessionShare {
export interface Interface {
readonly create: (input?: Session.CreateInput) => Effect.Effect<Session.Info>
readonly share: (sessionID: SessionID) => Effect.Effect<{ url: string }, unknown>
readonly unshare: (sessionID: SessionID) => Effect.Effect<void, unknown>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/SessionShare") {}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const cfg = yield* Config.Service
const session = yield* Session.Service
const shareNext = yield* ShareNext.Service
const scope = yield* Scope.Scope
const share = Effect.fn("SessionShare.share")(function* (sessionID: SessionID) {
const conf = yield* cfg.get()
if (conf.share === "disabled") throw new Error("Sharing is disabled in configuration")
const result = yield* shareNext.create(sessionID)
yield* Effect.sync(() =>
SyncEvent.run(Session.Event.Updated, { sessionID, info: { share: { url: result.url } } }),
)
return result
})
const unshare = Effect.fn("SessionShare.unshare")(function* (sessionID: SessionID) {
yield* shareNext.remove(sessionID)
yield* Effect.sync(() => SyncEvent.run(Session.Event.Updated, { sessionID, info: { share: { url: null } } }))
})
const create = Effect.fn("SessionShare.create")(function* (input?: Session.CreateInput) {
const result = yield* session.create(input)
if (result.parentID) return result
const conf = yield* cfg.get()
if (!(Flag.OPENCODE_AUTO_SHARE || conf.share === "auto")) return result
yield* share(result.id).pipe(Effect.ignore, Effect.forkIn(scope))
return result
})
return Service.of({ create, share, unshare })
}),
)
export const defaultLayer = layer.pipe(
Layer.provide(ShareNext.defaultLayer),
Layer.provide(Session.defaultLayer),
Layer.provide(Config.defaultLayer),
)
export interface Interface {
readonly create: (input?: Session.CreateInput) => Effect.Effect<Session.Info>
readonly share: (sessionID: SessionID) => Effect.Effect<{ url: string }, unknown>
readonly unshare: (sessionID: SessionID) => Effect.Effect<void, unknown>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/SessionShare") {}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const cfg = yield* Config.Service
const session = yield* Session.Service
const shareNext = yield* ShareNext.Service
const scope = yield* Scope.Scope
const share = Effect.fn("SessionShare.share")(function* (sessionID: SessionID) {
const conf = yield* cfg.get()
if (conf.share === "disabled") throw new Error("Sharing is disabled in configuration")
const result = yield* shareNext.create(sessionID)
yield* Effect.sync(() =>
SyncEvent.run(Session.Event.Updated, { sessionID, info: { share: { url: result.url } } }),
)
return result
})
const unshare = Effect.fn("SessionShare.unshare")(function* (sessionID: SessionID) {
yield* shareNext.remove(sessionID)
yield* Effect.sync(() => SyncEvent.run(Session.Event.Updated, { sessionID, info: { share: { url: null } } }))
})
const create = Effect.fn("SessionShare.create")(function* (input?: Session.CreateInput) {
const result = yield* session.create(input)
if (result.parentID) return result
const conf = yield* cfg.get()
if (!(Flag.OPENCODE_AUTO_SHARE || conf.share === "auto")) return result
yield* share(result.id).pipe(Effect.ignore, Effect.forkIn(scope))
return result
})
return Service.of({ create, share, unshare })
}),
)
export const defaultLayer = layer.pipe(
Layer.provide(ShareNext.defaultLayer),
Layer.provide(Session.defaultLayer),
Layer.provide(Config.defaultLayer),
)

View File

@@ -9,342 +9,340 @@ import { ModelID, ProviderID } from "@/provider/schema"
import { Session } from "@/session"
import { MessageV2 } from "@/session/message-v2"
import type { SessionID } from "@/session/schema"
import { Database, eq } from "@/storage/db"
import { Database, eq } from "@/storage"
import { Config } from "@/config"
import { Log } from "@/util"
import { SessionShareTable } from "./share.sql"
export namespace ShareNext {
const log = Log.create({ service: "share-next" })
const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
const log = Log.create({ service: "share-next" })
const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
export type Api = {
create: string
sync: (shareID: string) => string
remove: (shareID: string) => string
data: (shareID: string) => string
}
export type Api = {
create: string
sync: (shareID: string) => string
remove: (shareID: string) => string
data: (shareID: string) => string
}
export type Req = {
headers: Record<string, string>
api: Api
baseUrl: string
}
export type Req = {
headers: Record<string, string>
api: Api
baseUrl: string
}
const ShareSchema = Schema.Struct({
id: Schema.String,
url: Schema.String,
secret: Schema.String,
})
export type Share = typeof ShareSchema.Type
const ShareSchema = Schema.Struct({
id: Schema.String,
url: Schema.String,
secret: Schema.String,
})
export type Share = typeof ShareSchema.Type
type State = {
queue: Map<string, { data: Map<string, Data> }>
scope: Scope.Closeable
}
type State = {
queue: Map<string, { data: Map<string, Data> }>
scope: Scope.Closeable
}
type Data =
| {
type: "session"
data: SDK.Session
}
| {
type: "message"
data: SDK.Message
}
| {
type: "part"
data: SDK.Part
}
| {
type: "session_diff"
data: SDK.SnapshotFileDiff[]
}
| {
type: "model"
data: SDK.Model[]
}
export interface Interface {
readonly init: () => Effect.Effect<void, unknown>
readonly url: () => Effect.Effect<string, unknown>
readonly request: () => Effect.Effect<Req, unknown>
readonly create: (sessionID: SessionID) => Effect.Effect<Share, unknown>
readonly remove: (sessionID: SessionID) => Effect.Effect<void, unknown>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/ShareNext") {}
const db = <T>(fn: (d: Parameters<typeof Database.use>[0] extends (trx: infer D) => any ? D : never) => T) =>
Effect.sync(() => Database.use(fn))
function api(resource: string): Api {
return {
create: `/api/${resource}`,
sync: (shareID) => `/api/${resource}/${shareID}/sync`,
remove: (shareID) => `/api/${resource}/${shareID}`,
data: (shareID) => `/api/${resource}/${shareID}/data`,
type Data =
| {
type: "session"
data: SDK.Session
}
}
const legacyApi = api("share")
const consoleApi = api("shares")
function key(item: Data) {
switch (item.type) {
case "session":
return "session"
case "message":
return `message/${item.data.id}`
case "part":
return `part/${item.data.messageID}/${item.data.id}`
case "session_diff":
return "session_diff"
case "model":
return "model"
| {
type: "message"
data: SDK.Message
}
| {
type: "part"
data: SDK.Part
}
| {
type: "session_diff"
data: SDK.SnapshotFileDiff[]
}
| {
type: "model"
data: SDK.Model[]
}
export interface Interface {
readonly init: () => Effect.Effect<void, unknown>
readonly url: () => Effect.Effect<string, unknown>
readonly request: () => Effect.Effect<Req, unknown>
readonly create: (sessionID: SessionID) => Effect.Effect<Share, unknown>
readonly remove: (sessionID: SessionID) => Effect.Effect<void, unknown>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/ShareNext") {}
const db = <T>(fn: (d: Parameters<typeof Database.use>[0] extends (trx: infer D) => any ? D : never) => T) =>
Effect.sync(() => Database.use(fn))
function api(resource: string): Api {
return {
create: `/api/${resource}`,
sync: (shareID) => `/api/${resource}/${shareID}/sync`,
remove: (shareID) => `/api/${resource}/${shareID}`,
data: (shareID) => `/api/${resource}/${shareID}/data`,
}
}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const account = yield* Account.Service
const bus = yield* Bus.Service
const cfg = yield* Config.Service
const http = yield* HttpClient.HttpClient
const httpOk = HttpClient.filterStatusOk(http)
const provider = yield* Provider.Service
const session = yield* Session.Service
const legacyApi = api("share")
const consoleApi = api("shares")
function sync(sessionID: SessionID, data: Data[]): Effect.Effect<void> {
return Effect.gen(function* () {
if (disabled) return
const s = yield* InstanceState.get(state)
const existing = s.queue.get(sessionID)
if (existing) {
for (const item of data) {
existing.data.set(key(item), item)
}
return
function key(item: Data) {
switch (item.type) {
case "session":
return "session"
case "message":
return `message/${item.data.id}`
case "part":
return `part/${item.data.messageID}/${item.data.id}`
case "session_diff":
return "session_diff"
case "model":
return "model"
}
}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const account = yield* Account.Service
const bus = yield* Bus.Service
const cfg = yield* Config.Service
const http = yield* HttpClient.HttpClient
const httpOk = HttpClient.filterStatusOk(http)
const provider = yield* Provider.Service
const session = yield* Session.Service
function sync(sessionID: SessionID, data: Data[]): Effect.Effect<void> {
return Effect.gen(function* () {
if (disabled) return
const s = yield* InstanceState.get(state)
const existing = s.queue.get(sessionID)
if (existing) {
for (const item of data) {
existing.data.set(key(item), item)
}
const next = new Map(data.map((item) => [key(item), item]))
s.queue.set(sessionID, { data: next })
yield* flush(sessionID).pipe(
Effect.delay(1000),
Effect.catchCause((cause) =>
Effect.sync(() => {
log.error("share flush failed", { sessionID, cause })
}),
),
Effect.forkIn(s.scope),
)
})
}
const state: InstanceState.InstanceState<State> = yield* InstanceState.make<State>(
Effect.fn("ShareNext.state")(function* (_ctx) {
const cache: State = { queue: new Map(), scope: yield* Scope.make() }
yield* Effect.addFinalizer(() =>
Scope.close(cache.scope, Exit.void).pipe(
Effect.andThen(
Effect.sync(() => {
cache.queue.clear()
}),
),
),
)
if (disabled) return cache
const watch = <D extends { type: string }>(
def: D,
fn: (evt: { properties: any }) => Effect.Effect<void, unknown>,
) =>
bus.subscribe(def as never).pipe(
Stream.runForEach((evt) =>
fn(evt).pipe(
Effect.catchCause((cause) =>
Effect.sync(() => {
log.error("share subscriber failed", { type: def.type, cause })
}),
),
),
),
Effect.forkScoped,
)
yield* watch(Session.Event.Updated, (evt) =>
Effect.gen(function* () {
const info = yield* session.get(evt.properties.sessionID)
yield* sync(info.id, [{ type: "session", data: info }])
}),
)
yield* watch(MessageV2.Event.Updated, (evt) =>
Effect.gen(function* () {
const info = evt.properties.info
yield* sync(info.sessionID, [{ type: "message", data: info }])
if (info.role !== "user") return
const model = yield* provider.getModel(info.model.providerID, info.model.modelID)
yield* sync(info.sessionID, [{ type: "model", data: [model] }])
}),
)
yield* watch(MessageV2.Event.PartUpdated, (evt) =>
sync(evt.properties.part.sessionID, [{ type: "part", data: evt.properties.part }]),
)
yield* watch(Session.Event.Diff, (evt) =>
sync(evt.properties.sessionID, [{ type: "session_diff", data: evt.properties.diff }]),
)
yield* watch(Session.Event.Deleted, (evt) => remove(evt.properties.sessionID))
return cache
}),
)
const request = Effect.fn("ShareNext.request")(function* () {
const headers: Record<string, string> = {}
const active = yield* account.active()
if (Option.isNone(active) || !active.value.active_org_id) {
const baseUrl = (yield* cfg.get()).enterprise?.url ?? "https://opncd.ai"
return { headers, api: legacyApi, baseUrl } satisfies Req
return
}
const token = yield* account.token(active.value.id)
if (Option.isNone(token)) {
throw new Error("No active account token available for sharing")
}
headers.authorization = `Bearer ${token.value}`
headers["x-org-id"] = active.value.active_org_id
return { headers, api: consoleApi, baseUrl: active.value.url } satisfies Req
})
const get = Effect.fnUntraced(function* (sessionID: SessionID) {
const row = yield* db((db) =>
db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
)
if (!row) return
return { id: row.id, secret: row.secret, url: row.url } satisfies Share
})
const flush = Effect.fn("ShareNext.flush")(function* (sessionID: SessionID) {
if (disabled) return
const s = yield* InstanceState.get(state)
const queued = s.queue.get(sessionID)
if (!queued) return
s.queue.delete(sessionID)
const share = yield* get(sessionID)
if (!share) return
const req = yield* request()
const res = yield* HttpClientRequest.post(`${req.baseUrl}${req.api.sync(share.id)}`).pipe(
HttpClientRequest.setHeaders(req.headers),
HttpClientRequest.bodyJson({ secret: share.secret, data: Array.from(queued.data.values()) }),
Effect.flatMap((r) => http.execute(r)),
)
if (res.status >= 400) {
log.warn("failed to sync share", { sessionID, shareID: share.id, status: res.status })
}
})
const full = Effect.fn("ShareNext.full")(function* (sessionID: SessionID) {
log.info("full sync", { sessionID })
const info = yield* session.get(sessionID)
const diffs = yield* session.diff(sessionID)
const messages = yield* Effect.sync(() => Array.from(MessageV2.stream(sessionID)))
const models = yield* Effect.forEach(
Array.from(
new Map(
messages
.filter((msg) => msg.info.role === "user")
.map((msg) => (msg.info as SDK.UserMessage).model)
.map((item) => [`${item.providerID}/${item.modelID}`, item] as const),
).values(),
),
(item) => provider.getModel(ProviderID.make(item.providerID), ModelID.make(item.modelID)),
{ concurrency: 8 },
)
yield* sync(sessionID, [
{ type: "session", data: info },
...messages.map((item) => ({ type: "message" as const, data: item.info })),
...messages.flatMap((item) => item.parts.map((part) => ({ type: "part" as const, data: part }))),
{ type: "session_diff", data: diffs },
{ type: "model", data: models },
])
})
const init = Effect.fn("ShareNext.init")(function* () {
if (disabled) return
yield* InstanceState.get(state)
})
const url = Effect.fn("ShareNext.url")(function* () {
return (yield* request()).baseUrl
})
const create = Effect.fn("ShareNext.create")(function* (sessionID: SessionID) {
if (disabled) return { id: "", url: "", secret: "" }
log.info("creating share", { sessionID })
const req = yield* request()
const result = yield* HttpClientRequest.post(`${req.baseUrl}${req.api.create}`).pipe(
HttpClientRequest.setHeaders(req.headers),
HttpClientRequest.bodyJson({ sessionID }),
Effect.flatMap((r) => httpOk.execute(r)),
Effect.flatMap(HttpClientResponse.schemaBodyJson(ShareSchema)),
)
yield* db((db) =>
db
.insert(SessionShareTable)
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
.onConflictDoUpdate({
target: SessionShareTable.session_id,
set: { id: result.id, secret: result.secret, url: result.url },
})
.run(),
)
const s = yield* InstanceState.get(state)
yield* full(sessionID).pipe(
const next = new Map(data.map((item) => [key(item), item]))
s.queue.set(sessionID, { data: next })
yield* flush(sessionID).pipe(
Effect.delay(1000),
Effect.catchCause((cause) =>
Effect.sync(() => {
log.error("share full sync failed", { sessionID, cause })
log.error("share flush failed", { sessionID, cause })
}),
),
Effect.forkIn(s.scope),
)
return result
})
}
const remove = Effect.fn("ShareNext.remove")(function* (sessionID: SessionID) {
if (disabled) return
log.info("removing share", { sessionID })
const share = yield* get(sessionID)
if (!share) return
const state: InstanceState.InstanceState<State> = yield* InstanceState.make<State>(
Effect.fn("ShareNext.state")(function* (_ctx) {
const cache: State = { queue: new Map(), scope: yield* Scope.make() }
const req = yield* request()
yield* HttpClientRequest.delete(`${req.baseUrl}${req.api.remove(share.id)}`).pipe(
HttpClientRequest.setHeaders(req.headers),
HttpClientRequest.bodyJson({ secret: share.secret }),
Effect.flatMap((r) => httpOk.execute(r)),
yield* Effect.addFinalizer(() =>
Scope.close(cache.scope, Exit.void).pipe(
Effect.andThen(
Effect.sync(() => {
cache.queue.clear()
}),
),
),
)
yield* db((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
})
if (disabled) return cache
return Service.of({ init, url, request, create, remove })
}),
)
const watch = <D extends { type: string }>(
def: D,
fn: (evt: { properties: any }) => Effect.Effect<void, unknown>,
) =>
bus.subscribe(def as never).pipe(
Stream.runForEach((evt) =>
fn(evt).pipe(
Effect.catchCause((cause) =>
Effect.sync(() => {
log.error("share subscriber failed", { type: def.type, cause })
}),
),
),
),
Effect.forkScoped,
)
export const defaultLayer = layer.pipe(
Layer.provide(Bus.layer),
Layer.provide(Account.defaultLayer),
Layer.provide(Config.defaultLayer),
Layer.provide(FetchHttpClient.layer),
Layer.provide(Provider.defaultLayer),
Layer.provide(Session.defaultLayer),
)
}
yield* watch(Session.Event.Updated, (evt) =>
Effect.gen(function* () {
const info = yield* session.get(evt.properties.sessionID)
yield* sync(info.id, [{ type: "session", data: info }])
}),
)
yield* watch(MessageV2.Event.Updated, (evt) =>
Effect.gen(function* () {
const info = evt.properties.info
yield* sync(info.sessionID, [{ type: "message", data: info }])
if (info.role !== "user") return
const model = yield* provider.getModel(info.model.providerID, info.model.modelID)
yield* sync(info.sessionID, [{ type: "model", data: [model] }])
}),
)
yield* watch(MessageV2.Event.PartUpdated, (evt) =>
sync(evt.properties.part.sessionID, [{ type: "part", data: evt.properties.part }]),
)
yield* watch(Session.Event.Diff, (evt) =>
sync(evt.properties.sessionID, [{ type: "session_diff", data: evt.properties.diff }]),
)
yield* watch(Session.Event.Deleted, (evt) => remove(evt.properties.sessionID))
return cache
}),
)
const request = Effect.fn("ShareNext.request")(function* () {
const headers: Record<string, string> = {}
const active = yield* account.active()
if (Option.isNone(active) || !active.value.active_org_id) {
const baseUrl = (yield* cfg.get()).enterprise?.url ?? "https://opncd.ai"
return { headers, api: legacyApi, baseUrl } satisfies Req
}
const token = yield* account.token(active.value.id)
if (Option.isNone(token)) {
throw new Error("No active account token available for sharing")
}
headers.authorization = `Bearer ${token.value}`
headers["x-org-id"] = active.value.active_org_id
return { headers, api: consoleApi, baseUrl: active.value.url } satisfies Req
})
const get = Effect.fnUntraced(function* (sessionID: SessionID) {
const row = yield* db((db) =>
db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
)
if (!row) return
return { id: row.id, secret: row.secret, url: row.url } satisfies Share
})
const flush = Effect.fn("ShareNext.flush")(function* (sessionID: SessionID) {
if (disabled) return
const s = yield* InstanceState.get(state)
const queued = s.queue.get(sessionID)
if (!queued) return
s.queue.delete(sessionID)
const share = yield* get(sessionID)
if (!share) return
const req = yield* request()
const res = yield* HttpClientRequest.post(`${req.baseUrl}${req.api.sync(share.id)}`).pipe(
HttpClientRequest.setHeaders(req.headers),
HttpClientRequest.bodyJson({ secret: share.secret, data: Array.from(queued.data.values()) }),
Effect.flatMap((r) => http.execute(r)),
)
if (res.status >= 400) {
log.warn("failed to sync share", { sessionID, shareID: share.id, status: res.status })
}
})
const full = Effect.fn("ShareNext.full")(function* (sessionID: SessionID) {
log.info("full sync", { sessionID })
const info = yield* session.get(sessionID)
const diffs = yield* session.diff(sessionID)
const messages = yield* Effect.sync(() => Array.from(MessageV2.stream(sessionID)))
const models = yield* Effect.forEach(
Array.from(
new Map(
messages
.filter((msg) => msg.info.role === "user")
.map((msg) => (msg.info as SDK.UserMessage).model)
.map((item) => [`${item.providerID}/${item.modelID}`, item] as const),
).values(),
),
(item) => provider.getModel(ProviderID.make(item.providerID), ModelID.make(item.modelID)),
{ concurrency: 8 },
)
yield* sync(sessionID, [
{ type: "session", data: info },
...messages.map((item) => ({ type: "message" as const, data: item.info })),
...messages.flatMap((item) => item.parts.map((part) => ({ type: "part" as const, data: part }))),
{ type: "session_diff", data: diffs },
{ type: "model", data: models },
])
})
const init = Effect.fn("ShareNext.init")(function* () {
if (disabled) return
yield* InstanceState.get(state)
})
const url = Effect.fn("ShareNext.url")(function* () {
return (yield* request()).baseUrl
})
// Create (or refresh) a share for a session: register it with the share
// server, persist the returned id/secret/url locally, then kick off a full
// sync in the background. When sharing is disabled, returns an empty stub.
const create = Effect.fn("ShareNext.create")(function* (sessionID: SessionID) {
if (disabled) return { id: "", url: "", secret: "" }
log.info("creating share", { sessionID })
const req = yield* request()
// httpOk fails the effect on non-2xx, so `result` is only reached on success.
const result = yield* HttpClientRequest.post(`${req.baseUrl}${req.api.create}`).pipe(
HttpClientRequest.setHeaders(req.headers),
HttpClientRequest.bodyJson({ sessionID }),
Effect.flatMap((r) => httpOk.execute(r)),
Effect.flatMap(HttpClientResponse.schemaBodyJson(ShareSchema)),
)
// Upsert keyed on session_id: re-creating a share replaces the old credentials.
yield* db((db) =>
db
.insert(SessionShareTable)
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
.onConflictDoUpdate({
target: SessionShareTable.session_id,
set: { id: result.id, secret: result.secret, url: result.url },
})
.run(),
)
const s = yield* InstanceState.get(state)
// Fork the full sync into the instance scope so create() returns immediately;
// failures are logged rather than propagated to the caller.
yield* full(sessionID).pipe(
Effect.catchCause((cause) =>
Effect.sync(() => {
log.error("share full sync failed", { sessionID, cause })
}),
),
Effect.forkIn(s.scope),
)
return result
})
// Remove a session's share: delete it on the share server (authenticated via
// the stored secret), then drop the local record. No-op when sharing is
// disabled or the session has no share.
const remove = Effect.fn("ShareNext.remove")(function* (sessionID: SessionID) {
if (disabled) return
log.info("removing share", { sessionID })
const share = yield* get(sessionID)
if (!share) return
const req = yield* request()
// httpOk fails the effect on non-2xx, so the local row below is only
// deleted after the server confirms removal.
yield* HttpClientRequest.delete(`${req.baseUrl}${req.api.remove(share.id)}`).pipe(
HttpClientRequest.setHeaders(req.headers),
HttpClientRequest.bodyJson({ secret: share.secret }),
Effect.flatMap((r) => httpOk.execute(r)),
)
yield* db((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
})
return Service.of({ init, url, request, create, remove })
}),
)
// The share layer with all of its dependencies pre-provided, ready for use
// at the application edge. NOTE(review): Layer.provide order can matter when
// provided layers depend on one another — keep this ordering as-is.
export const defaultLayer = layer.pipe(
Layer.provide(Bus.layer),
Layer.provide(Account.defaultLayer),
Layer.provide(Config.defaultLayer),
Layer.provide(FetchHttpClient.layer),
Layer.provide(Provider.defaultLayer),
Layer.provide(Session.defaultLayer),
)

View File

@@ -12,7 +12,7 @@ import { Global } from "@/global"
import { Permission } from "@/permission"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { Config } from "../config"
import { ConfigMarkdown } from "../config/markdown"
import { ConfigMarkdown } from "../config"
import { Glob } from "@opencode-ai/shared/util/glob"
import { Log } from "../util"
import { Discovery } from "./discovery"

View File

@@ -27,148 +27,146 @@ export const NotFoundError = NamedError.create(
const log = Log.create({ service: "db" })
export namespace Database {
export function getChannelPath() {
if (["latest", "beta", "prod"].includes(CHANNEL) || Flag.OPENCODE_DISABLE_CHANNEL_DB)
return path.join(Global.Path.data, "opencode.db")
const safe = CHANNEL.replace(/[^a-zA-Z0-9._-]/g, "-")
return path.join(Global.Path.data, `opencode-${safe}.db`)
export function getChannelPath() {
if (["latest", "beta", "prod"].includes(CHANNEL) || Flag.OPENCODE_DISABLE_CHANNEL_DB)
return path.join(Global.Path.data, "opencode.db")
const safe = CHANNEL.replace(/[^a-zA-Z0-9._-]/g, "-")
return path.join(Global.Path.data, `opencode-${safe}.db`)
}
export const Path = iife(() => {
if (Flag.OPENCODE_DB) {
if (Flag.OPENCODE_DB === ":memory:" || path.isAbsolute(Flag.OPENCODE_DB)) return Flag.OPENCODE_DB
return path.join(Global.Path.data, Flag.OPENCODE_DB)
}
return getChannelPath()
})
export const Path = iife(() => {
if (Flag.OPENCODE_DB) {
if (Flag.OPENCODE_DB === ":memory:" || path.isAbsolute(Flag.OPENCODE_DB)) return Flag.OPENCODE_DB
return path.join(Global.Path.data, Flag.OPENCODE_DB)
}
return getChannelPath()
})
export type Transaction = SQLiteTransaction<"sync", void>
export type Transaction = SQLiteTransaction<"sync", void>
type Client = SQLiteBunDatabase
type Client = SQLiteBunDatabase
type Journal = { sql: string; timestamp: number; name: string }[]
type Journal = { sql: string; timestamp: number; name: string }[]
function time(tag: string) {
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(tag)
if (!match) return 0
return Date.UTC(
Number(match[1]),
Number(match[2]) - 1,
Number(match[3]),
Number(match[4]),
Number(match[5]),
Number(match[6]),
)
}
function time(tag: string) {
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(tag)
if (!match) return 0
return Date.UTC(
Number(match[1]),
Number(match[2]) - 1,
Number(match[3]),
Number(match[4]),
Number(match[5]),
Number(match[6]),
)
}
function migrations(dir: string): Journal {
const dirs = readdirSync(dir, { withFileTypes: true })
.filter((entry) => entry.isDirectory())
.map((entry) => entry.name)
function migrations(dir: string): Journal {
const dirs = readdirSync(dir, { withFileTypes: true })
.filter((entry) => entry.isDirectory())
.map((entry) => entry.name)
const sql = dirs
.map((name) => {
const file = path.join(dir, name, "migration.sql")
if (!existsSync(file)) return
return {
sql: readFileSync(file, "utf-8"),
timestamp: time(name),
name,
}
})
.filter(Boolean) as Journal
return sql.sort((a, b) => a.timestamp - b.timestamp)
}
export const Client = lazy(() => {
log.info("opening database", { path: Path })
const db = init(Path)
db.run("PRAGMA journal_mode = WAL")
db.run("PRAGMA synchronous = NORMAL")
db.run("PRAGMA busy_timeout = 5000")
db.run("PRAGMA cache_size = -64000")
db.run("PRAGMA foreign_keys = ON")
db.run("PRAGMA wal_checkpoint(PASSIVE)")
// Apply schema migrations
const entries =
typeof OPENCODE_MIGRATIONS !== "undefined"
? OPENCODE_MIGRATIONS
: migrations(path.join(import.meta.dirname, "../../migration"))
if (entries.length > 0) {
log.info("applying migrations", {
count: entries.length,
mode: typeof OPENCODE_MIGRATIONS !== "undefined" ? "bundled" : "dev",
})
if (Flag.OPENCODE_SKIP_MIGRATIONS) {
for (const item of entries) {
item.sql = "select 1;"
}
const sql = dirs
.map((name) => {
const file = path.join(dir, name, "migration.sql")
if (!existsSync(file)) return
return {
sql: readFileSync(file, "utf-8"),
timestamp: time(name),
name,
}
migrate(db, entries)
}
})
.filter(Boolean) as Journal
return db
})
return sql.sort((a, b) => a.timestamp - b.timestamp)
}
export function close() {
Client().$client.close()
Client.reset()
}
export const Client = lazy(() => {
log.info("opening database", { path: Path })
export type TxOrDb = Transaction | Client
const db = init(Path)
const ctx = LocalContext.create<{
tx: TxOrDb
effects: (() => void | Promise<void>)[]
}>("database")
db.run("PRAGMA journal_mode = WAL")
db.run("PRAGMA synchronous = NORMAL")
db.run("PRAGMA busy_timeout = 5000")
db.run("PRAGMA cache_size = -64000")
db.run("PRAGMA foreign_keys = ON")
db.run("PRAGMA wal_checkpoint(PASSIVE)")
export function use<T>(callback: (trx: TxOrDb) => T): T {
try {
return callback(ctx.use().tx)
} catch (err) {
if (err instanceof LocalContext.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const result = ctx.provide({ effects, tx: Client() }, () => callback(Client()))
for (const effect of effects) void effect()
return result
// Apply schema migrations
const entries =
typeof OPENCODE_MIGRATIONS !== "undefined"
? OPENCODE_MIGRATIONS
: migrations(path.join(import.meta.dirname, "../../migration"))
if (entries.length > 0) {
log.info("applying migrations", {
count: entries.length,
mode: typeof OPENCODE_MIGRATIONS !== "undefined" ? "bundled" : "dev",
})
if (Flag.OPENCODE_SKIP_MIGRATIONS) {
for (const item of entries) {
item.sql = "select 1;"
}
throw err
}
migrate(db, entries)
}
export function effect(fn: () => any | Promise<any>) {
const bound = InstanceState.bind(fn)
try {
ctx.use().effects.push(bound)
} catch {
void bound()
}
}
return db
})
type NotPromise<T> = T extends Promise<any> ? never : T
export function close() {
Client().$client.close()
Client.reset()
}
export function transaction<T>(
callback: (tx: TxOrDb) => NotPromise<T>,
options?: {
behavior?: "deferred" | "immediate" | "exclusive"
},
): NotPromise<T> {
try {
return callback(ctx.use().tx)
} catch (err) {
if (err instanceof LocalContext.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const txCallback = InstanceState.bind((tx: TxOrDb) => ctx.provide({ tx, effects }, () => callback(tx)))
const result = Client().transaction(txCallback, { behavior: options?.behavior })
for (const effect of effects) void effect()
return result as NotPromise<T>
}
throw err
export type TxOrDb = Transaction | Client
const ctx = LocalContext.create<{
tx: TxOrDb
effects: (() => void | Promise<void>)[]
}>("database")
export function use<T>(callback: (trx: TxOrDb) => T): T {
try {
return callback(ctx.use().tx)
} catch (err) {
if (err instanceof LocalContext.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const result = ctx.provide({ effects, tx: Client() }, () => callback(Client()))
for (const effect of effects) effect()
return result
}
throw err
}
}
export function effect(fn: () => any | Promise<any>) {
const bound = InstanceState.bind(fn)
try {
ctx.use().effects.push(bound)
} catch {
bound()
}
}
type NotPromise<T> = T extends Promise<any> ? never : T
export function transaction<T>(
callback: (tx: TxOrDb) => NotPromise<T>,
options?: {
behavior?: "deferred" | "immediate" | "exclusive"
},
): NotPromise<T> {
try {
return callback(ctx.use().tx)
} catch (err) {
if (err instanceof LocalContext.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const txCallback = InstanceState.bind((tx: TxOrDb) => ctx.provide({ tx, effects }, () => callback(tx)))
const result = Client().transaction(txCallback, { behavior: options?.behavior })
for (const effect of effects) effect()
return result as NotPromise<T>
}
throw err
}
}

View File

@@ -0,0 +1,26 @@
// Barrel for the storage layer: re-exports the sub-modules as namespaces,
// surfaces commonly used drizzle-orm query operators so callers can import
// them from here, and flattens NotFoundError for convenience.
export * as JsonMigration from "./json-migration"
export * as Database from "./db"
export * as Storage from "./storage"
// Commonly used drizzle-orm comparison/aggregate/ordering operators.
export {
asc,
eq,
and,
or,
inArray,
desc,
not,
sql,
isNull,
isNotNull,
count,
like,
exists,
between,
gt,
gte,
lt,
lte,
ne,
} from "drizzle-orm"
export type { SQL } from "drizzle-orm"
// Flat re-export (also reachable as Storage.NotFoundError via the namespace above).
export { NotFoundError } from "./storage"

View File

@@ -10,47 +10,24 @@ import { existsSync } from "fs"
import { Filesystem } from "../util"
import { Glob } from "@opencode-ai/shared/util/glob"
export namespace JsonMigration {
const log = Log.create({ service: "json-migration" })
const log = Log.create({ service: "json-migration" })
export type Progress = {
current: number
total: number
label: string
}
export type Progress = {
current: number
total: number
label: string
}
type Options = {
progress?: (event: Progress) => void
}
type Options = {
progress?: (event: Progress) => void
}
export async function run(db: SQLiteBunDatabase<any, any> | NodeSQLiteDatabase<any, any>, options?: Options) {
const storageDir = path.join(Global.Path.data, "storage")
export async function run(db: SQLiteBunDatabase<any, any> | NodeSQLiteDatabase<any, any>, options?: Options) {
const storageDir = path.join(Global.Path.data, "storage")
if (!existsSync(storageDir)) {
log.info("storage directory does not exist, skipping migration")
return {
projects: 0,
sessions: 0,
messages: 0,
parts: 0,
todos: 0,
permissions: 0,
shares: 0,
errors: [] as string[],
}
}
log.info("starting json to sqlite migration", { storageDir })
const start = performance.now()
// const db = drizzle({ client: sqlite })
// Optimize SQLite for bulk inserts
db.run("PRAGMA journal_mode = WAL")
db.run("PRAGMA synchronous = OFF")
db.run("PRAGMA cache_size = 10000")
db.run("PRAGMA temp_store = MEMORY")
const stats = {
if (!existsSync(storageDir)) {
log.info("storage directory does not exist, skipping migration")
return {
projects: 0,
sessions: 0,
messages: 0,
@@ -60,370 +37,391 @@ export namespace JsonMigration {
shares: 0,
errors: [] as string[],
}
const orphans = {
sessions: 0,
todos: 0,
permissions: 0,
shares: 0,
}
const errs = stats.errors
const batchSize = 1000
const now = Date.now()
async function list(pattern: string) {
return Glob.scan(pattern, { cwd: storageDir, absolute: true })
}
async function read(files: string[], start: number, end: number) {
const count = end - start
// oxlint-disable-next-line unicorn/no-new-array -- pre-allocated for index-based batch fill
const tasks = new Array(count)
for (let i = 0; i < count; i++) {
tasks[i] = Filesystem.readJson(files[start + i])
}
const results = await Promise.allSettled(tasks)
// oxlint-disable-next-line unicorn/no-new-array -- pre-allocated for index-based batch fill
const items = new Array(count)
for (let i = 0; i < results.length; i++) {
const result = results[i]
if (result.status === "fulfilled") {
items[i] = result.value
continue
}
errs.push(`failed to read ${files[start + i]}: ${result.reason}`)
}
return items
}
function insert(values: any[], table: any, label: string) {
if (values.length === 0) return 0
try {
db.insert(table).values(values).onConflictDoNothing().run()
return values.length
} catch (e) {
errs.push(`failed to migrate ${label} batch: ${e}`)
return 0
}
}
// Pre-scan all files upfront to avoid repeated glob operations
log.info("scanning files...")
const [projectFiles, sessionFiles, messageFiles, partFiles, todoFiles, permFiles, shareFiles] = await Promise.all([
list("project/*.json"),
list("session/*/*.json"),
list("message/*/*.json"),
list("part/*/*.json"),
list("todo/*.json"),
list("permission/*.json"),
list("session_share/*.json"),
])
log.info("file scan complete", {
projects: projectFiles.length,
sessions: sessionFiles.length,
messages: messageFiles.length,
parts: partFiles.length,
todos: todoFiles.length,
permissions: permFiles.length,
shares: shareFiles.length,
})
const total = Math.max(
1,
projectFiles.length +
sessionFiles.length +
messageFiles.length +
partFiles.length +
todoFiles.length +
permFiles.length +
shareFiles.length,
)
const progress = options?.progress
let current = 0
const step = (label: string, count: number) => {
current = Math.min(total, current + count)
progress?.({ current, total, label })
}
progress?.({ current, total, label: "starting" })
db.run("BEGIN TRANSACTION")
// Migrate projects first (no FK deps)
// Derive all IDs from file paths, not JSON content
const projectIds = new Set<string>()
const projectValues = [] as any[]
for (let i = 0; i < projectFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, projectFiles.length)
const batch = await read(projectFiles, i, end)
projectValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const id = path.basename(projectFiles[i + j], ".json")
projectIds.add(id)
projectValues.push({
id,
worktree: data.worktree ?? "/",
vcs: data.vcs,
name: data.name ?? undefined,
icon_url: data.icon?.url,
icon_color: data.icon?.color,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
time_initialized: data.time?.initialized,
sandboxes: data.sandboxes ?? [],
commands: data.commands,
})
}
stats.projects += insert(projectValues, ProjectTable, "project")
step("projects", end - i)
}
log.info("migrated projects", { count: stats.projects, duration: Math.round(performance.now() - start) })
// Migrate sessions (depends on projects)
// Derive all IDs from directory/file paths, not JSON content, since earlier
// migrations may have moved sessions to new directories without updating the JSON
const sessionProjects = sessionFiles.map((file) => path.basename(path.dirname(file)))
const sessionIds = new Set<string>()
const sessionValues = [] as any[]
for (let i = 0; i < sessionFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, sessionFiles.length)
const batch = await read(sessionFiles, i, end)
sessionValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const id = path.basename(sessionFiles[i + j], ".json")
const projectID = sessionProjects[i + j]
if (!projectIds.has(projectID)) {
orphans.sessions++
continue
}
sessionIds.add(id)
sessionValues.push({
id,
project_id: projectID,
parent_id: data.parentID ?? null,
slug: data.slug ?? "",
directory: data.directory ?? "",
title: data.title ?? "",
version: data.version ?? "",
share_url: data.share?.url ?? null,
summary_additions: data.summary?.additions ?? null,
summary_deletions: data.summary?.deletions ?? null,
summary_files: data.summary?.files ?? null,
summary_diffs: data.summary?.diffs ?? null,
revert: data.revert ?? null,
permission: data.permission ?? null,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
time_compacting: data.time?.compacting ?? null,
time_archived: data.time?.archived ?? null,
})
}
stats.sessions += insert(sessionValues, SessionTable, "session")
step("sessions", end - i)
}
log.info("migrated sessions", { count: stats.sessions })
if (orphans.sessions > 0) {
log.warn("skipped orphaned sessions", { count: orphans.sessions })
}
// Migrate messages using pre-scanned file map
const allMessageFiles = [] as string[]
const allMessageSessions = [] as string[]
const messageSessions = new Map<string, string>()
for (const file of messageFiles) {
const sessionID = path.basename(path.dirname(file))
if (!sessionIds.has(sessionID)) continue
allMessageFiles.push(file)
allMessageSessions.push(sessionID)
}
for (let i = 0; i < allMessageFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, allMessageFiles.length)
const batch = await read(allMessageFiles, i, end)
// oxlint-disable-next-line unicorn/no-new-array -- pre-allocated for index-based batch fill
const values = new Array(batch.length)
let count = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const file = allMessageFiles[i + j]
const id = path.basename(file, ".json")
const sessionID = allMessageSessions[i + j]
messageSessions.set(id, sessionID)
const rest = data
delete rest.id
delete rest.sessionID
values[count++] = {
id,
session_id: sessionID,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
data: rest,
}
}
values.length = count
stats.messages += insert(values, MessageTable, "message")
step("messages", end - i)
}
log.info("migrated messages", { count: stats.messages })
// Migrate parts using pre-scanned file map
for (let i = 0; i < partFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, partFiles.length)
const batch = await read(partFiles, i, end)
// oxlint-disable-next-line unicorn/no-new-array -- pre-allocated for index-based batch fill
const values = new Array(batch.length)
let count = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const file = partFiles[i + j]
const id = path.basename(file, ".json")
const messageID = path.basename(path.dirname(file))
const sessionID = messageSessions.get(messageID)
if (!sessionID) {
errs.push(`part missing message session: ${file}`)
continue
}
if (!sessionIds.has(sessionID)) continue
const rest = data
delete rest.id
delete rest.messageID
delete rest.sessionID
values[count++] = {
id,
message_id: messageID,
session_id: sessionID,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
data: rest,
}
}
values.length = count
stats.parts += insert(values, PartTable, "part")
step("parts", end - i)
}
log.info("migrated parts", { count: stats.parts })
// Migrate todos
const todoSessions = todoFiles.map((file) => path.basename(file, ".json"))
for (let i = 0; i < todoFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, todoFiles.length)
const batch = await read(todoFiles, i, end)
const values = [] as any[]
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const sessionID = todoSessions[i + j]
if (!sessionIds.has(sessionID)) {
orphans.todos++
continue
}
if (!Array.isArray(data)) {
errs.push(`todo not an array: ${todoFiles[i + j]}`)
continue
}
for (let position = 0; position < data.length; position++) {
const todo = data[position]
if (!todo?.content || !todo?.status || !todo?.priority) continue
values.push({
session_id: sessionID,
content: todo.content,
status: todo.status,
priority: todo.priority,
position,
time_created: now,
time_updated: now,
})
}
}
stats.todos += insert(values, TodoTable, "todo")
step("todos", end - i)
}
log.info("migrated todos", { count: stats.todos })
if (orphans.todos > 0) {
log.warn("skipped orphaned todos", { count: orphans.todos })
}
// Migrate permissions
const permProjects = permFiles.map((file) => path.basename(file, ".json"))
const permValues = [] as any[]
for (let i = 0; i < permFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, permFiles.length)
const batch = await read(permFiles, i, end)
permValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const projectID = permProjects[i + j]
if (!projectIds.has(projectID)) {
orphans.permissions++
continue
}
permValues.push({ project_id: projectID, data })
}
stats.permissions += insert(permValues, PermissionTable, "permission")
step("permissions", end - i)
}
log.info("migrated permissions", { count: stats.permissions })
if (orphans.permissions > 0) {
log.warn("skipped orphaned permissions", { count: orphans.permissions })
}
// Migrate session shares
const shareSessions = shareFiles.map((file) => path.basename(file, ".json"))
const shareValues = [] as any[]
for (let i = 0; i < shareFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, shareFiles.length)
const batch = await read(shareFiles, i, end)
shareValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const sessionID = shareSessions[i + j]
if (!sessionIds.has(sessionID)) {
orphans.shares++
continue
}
if (!data?.id || !data?.secret || !data?.url) {
errs.push(`session_share missing id/secret/url: ${shareFiles[i + j]}`)
continue
}
shareValues.push({ session_id: sessionID, id: data.id, secret: data.secret, url: data.url })
}
stats.shares += insert(shareValues, SessionShareTable, "session_share")
step("shares", end - i)
}
log.info("migrated session shares", { count: stats.shares })
if (orphans.shares > 0) {
log.warn("skipped orphaned session shares", { count: orphans.shares })
}
db.run("COMMIT")
log.info("json migration complete", {
projects: stats.projects,
sessions: stats.sessions,
messages: stats.messages,
parts: stats.parts,
todos: stats.todos,
permissions: stats.permissions,
shares: stats.shares,
errorCount: stats.errors.length,
duration: Math.round(performance.now() - start),
})
if (stats.errors.length > 0) {
log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
}
progress?.({ current: total, total, label: "complete" })
return stats
}
log.info("starting json to sqlite migration", { storageDir })
const start = performance.now()
// const db = drizzle({ client: sqlite })
// Optimize SQLite for bulk inserts
db.run("PRAGMA journal_mode = WAL")
db.run("PRAGMA synchronous = OFF")
db.run("PRAGMA cache_size = 10000")
db.run("PRAGMA temp_store = MEMORY")
const stats = {
projects: 0,
sessions: 0,
messages: 0,
parts: 0,
todos: 0,
permissions: 0,
shares: 0,
errors: [] as string[],
}
const orphans = {
sessions: 0,
todos: 0,
permissions: 0,
shares: 0,
}
const errs = stats.errors
const batchSize = 1000
const now = Date.now()
async function list(pattern: string) {
return Glob.scan(pattern, { cwd: storageDir, absolute: true })
}
async function read(files: string[], start: number, end: number) {
const count = end - start
// oxlint-disable-next-line unicorn/no-new-array -- pre-allocated for index-based batch fill
const tasks = new Array(count)
for (let i = 0; i < count; i++) {
tasks[i] = Filesystem.readJson(files[start + i])
}
const results = await Promise.allSettled(tasks)
// oxlint-disable-next-line unicorn/no-new-array -- pre-allocated for index-based batch fill
const items = new Array(count)
for (let i = 0; i < results.length; i++) {
const result = results[i]
if (result.status === "fulfilled") {
items[i] = result.value
continue
}
errs.push(`failed to read ${files[start + i]}: ${result.reason}`)
}
return items
}
function insert(values: any[], table: any, label: string) {
if (values.length === 0) return 0
try {
db.insert(table).values(values).onConflictDoNothing().run()
return values.length
} catch (e) {
errs.push(`failed to migrate ${label} batch: ${e}`)
return 0
}
}
// Pre-scan all files upfront to avoid repeated glob operations
log.info("scanning files...")
const [projectFiles, sessionFiles, messageFiles, partFiles, todoFiles, permFiles, shareFiles] = await Promise.all([
list("project/*.json"),
list("session/*/*.json"),
list("message/*/*.json"),
list("part/*/*.json"),
list("todo/*.json"),
list("permission/*.json"),
list("session_share/*.json"),
])
log.info("file scan complete", {
projects: projectFiles.length,
sessions: sessionFiles.length,
messages: messageFiles.length,
parts: partFiles.length,
todos: todoFiles.length,
permissions: permFiles.length,
shares: shareFiles.length,
})
const total = Math.max(
1,
projectFiles.length +
sessionFiles.length +
messageFiles.length +
partFiles.length +
todoFiles.length +
permFiles.length +
shareFiles.length,
)
const progress = options?.progress
let current = 0
const step = (label: string, count: number) => {
current = Math.min(total, current + count)
progress?.({ current, total, label })
}
progress?.({ current, total, label: "starting" })
db.run("BEGIN TRANSACTION")
// Migrate projects first (no FK deps)
// Derive all IDs from file paths, not JSON content
const projectIds = new Set<string>()
const projectValues = [] as any[]
for (let i = 0; i < projectFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, projectFiles.length)
const batch = await read(projectFiles, i, end)
projectValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const id = path.basename(projectFiles[i + j], ".json")
projectIds.add(id)
projectValues.push({
id,
worktree: data.worktree ?? "/",
vcs: data.vcs,
name: data.name ?? undefined,
icon_url: data.icon?.url,
icon_color: data.icon?.color,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
time_initialized: data.time?.initialized,
sandboxes: data.sandboxes ?? [],
commands: data.commands,
})
}
stats.projects += insert(projectValues, ProjectTable, "project")
step("projects", end - i)
}
log.info("migrated projects", { count: stats.projects, duration: Math.round(performance.now() - start) })
// Migrate sessions (depends on projects)
// Derive all IDs from directory/file paths, not JSON content, since earlier
// migrations may have moved sessions to new directories without updating the JSON
const sessionProjects = sessionFiles.map((file) => path.basename(path.dirname(file)))
const sessionIds = new Set<string>()
const sessionValues = [] as any[]
for (let i = 0; i < sessionFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, sessionFiles.length)
const batch = await read(sessionFiles, i, end)
sessionValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const id = path.basename(sessionFiles[i + j], ".json")
const projectID = sessionProjects[i + j]
if (!projectIds.has(projectID)) {
orphans.sessions++
continue
}
sessionIds.add(id)
sessionValues.push({
id,
project_id: projectID,
parent_id: data.parentID ?? null,
slug: data.slug ?? "",
directory: data.directory ?? "",
title: data.title ?? "",
version: data.version ?? "",
share_url: data.share?.url ?? null,
summary_additions: data.summary?.additions ?? null,
summary_deletions: data.summary?.deletions ?? null,
summary_files: data.summary?.files ?? null,
summary_diffs: data.summary?.diffs ?? null,
revert: data.revert ?? null,
permission: data.permission ?? null,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
time_compacting: data.time?.compacting ?? null,
time_archived: data.time?.archived ?? null,
})
}
stats.sessions += insert(sessionValues, SessionTable, "session")
step("sessions", end - i)
}
log.info("migrated sessions", { count: stats.sessions })
if (orphans.sessions > 0) {
log.warn("skipped orphaned sessions", { count: orphans.sessions })
}
// Migrate messages using pre-scanned file map
const allMessageFiles = [] as string[]
const allMessageSessions = [] as string[]
const messageSessions = new Map<string, string>()
for (const file of messageFiles) {
const sessionID = path.basename(path.dirname(file))
if (!sessionIds.has(sessionID)) continue
allMessageFiles.push(file)
allMessageSessions.push(sessionID)
}
for (let i = 0; i < allMessageFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, allMessageFiles.length)
const batch = await read(allMessageFiles, i, end)
// oxlint-disable-next-line unicorn/no-new-array -- pre-allocated for index-based batch fill
const values = new Array(batch.length)
let count = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const file = allMessageFiles[i + j]
const id = path.basename(file, ".json")
const sessionID = allMessageSessions[i + j]
messageSessions.set(id, sessionID)
const rest = data
delete rest.id
delete rest.sessionID
values[count++] = {
id,
session_id: sessionID,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
data: rest,
}
}
values.length = count
stats.messages += insert(values, MessageTable, "message")
step("messages", end - i)
}
log.info("migrated messages", { count: stats.messages })
// Migrate parts using pre-scanned file map
for (let i = 0; i < partFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, partFiles.length)
const batch = await read(partFiles, i, end)
// oxlint-disable-next-line unicorn/no-new-array -- pre-allocated for index-based batch fill
const values = new Array(batch.length)
let count = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const file = partFiles[i + j]
const id = path.basename(file, ".json")
const messageID = path.basename(path.dirname(file))
const sessionID = messageSessions.get(messageID)
if (!sessionID) {
errs.push(`part missing message session: ${file}`)
continue
}
if (!sessionIds.has(sessionID)) continue
const rest = data
delete rest.id
delete rest.messageID
delete rest.sessionID
values[count++] = {
id,
message_id: messageID,
session_id: sessionID,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
data: rest,
}
}
values.length = count
stats.parts += insert(values, PartTable, "part")
step("parts", end - i)
}
log.info("migrated parts", { count: stats.parts })
// Migrate todos
const todoSessions = todoFiles.map((file) => path.basename(file, ".json"))
for (let i = 0; i < todoFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, todoFiles.length)
const batch = await read(todoFiles, i, end)
const values = [] as any[]
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const sessionID = todoSessions[i + j]
if (!sessionIds.has(sessionID)) {
orphans.todos++
continue
}
if (!Array.isArray(data)) {
errs.push(`todo not an array: ${todoFiles[i + j]}`)
continue
}
for (let position = 0; position < data.length; position++) {
const todo = data[position]
if (!todo?.content || !todo?.status || !todo?.priority) continue
values.push({
session_id: sessionID,
content: todo.content,
status: todo.status,
priority: todo.priority,
position,
time_created: now,
time_updated: now,
})
}
}
stats.todos += insert(values, TodoTable, "todo")
step("todos", end - i)
}
log.info("migrated todos", { count: stats.todos })
if (orphans.todos > 0) {
log.warn("skipped orphaned todos", { count: orphans.todos })
}
// Migrate permissions
const permProjects = permFiles.map((file) => path.basename(file, ".json"))
const permValues = [] as any[]
for (let i = 0; i < permFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, permFiles.length)
const batch = await read(permFiles, i, end)
permValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const projectID = permProjects[i + j]
if (!projectIds.has(projectID)) {
orphans.permissions++
continue
}
permValues.push({ project_id: projectID, data })
}
stats.permissions += insert(permValues, PermissionTable, "permission")
step("permissions", end - i)
}
log.info("migrated permissions", { count: stats.permissions })
if (orphans.permissions > 0) {
log.warn("skipped orphaned permissions", { count: orphans.permissions })
}
// Migrate session shares
const shareSessions = shareFiles.map((file) => path.basename(file, ".json"))
const shareValues = [] as any[]
for (let i = 0; i < shareFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, shareFiles.length)
const batch = await read(shareFiles, i, end)
shareValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const sessionID = shareSessions[i + j]
if (!sessionIds.has(sessionID)) {
orphans.shares++
continue
}
if (!data?.id || !data?.secret || !data?.url) {
errs.push(`session_share missing id/secret/url: ${shareFiles[i + j]}`)
continue
}
shareValues.push({ session_id: sessionID, id: data.id, secret: data.secret, url: data.url })
}
stats.shares += insert(shareValues, SessionShareTable, "session_share")
step("shares", end - i)
}
log.info("migrated session shares", { count: stats.shares })
if (orphans.shares > 0) {
log.warn("skipped orphaned session shares", { count: orphans.shares })
}
db.run("COMMIT")
log.info("json migration complete", {
projects: stats.projects,
sessions: stats.sessions,
messages: stats.messages,
parts: stats.parts,
todos: stats.todos,
permissions: stats.permissions,
shares: stats.shares,
errorCount: stats.errors.length,
duration: Math.round(performance.now() - start),
})
if (stats.errors.length > 0) {
log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
}
progress?.({ current: total, total, label: "complete" })
return stats
}

View File

@@ -7,327 +7,325 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { Effect, Exit, Layer, Option, RcMap, Schema, Context, TxReentrantLock } from "effect"
import { Git } from "@/git"
export namespace Storage {
const log = Log.create({ service: "storage" })
const log = Log.create({ service: "storage" })
type Migration = (
dir: string,
fs: AppFileSystem.Interface,
git: Git.Interface,
) => Effect.Effect<void, AppFileSystem.Error>
type Migration = (
dir: string,
fs: AppFileSystem.Interface,
git: Git.Interface,
) => Effect.Effect<void, AppFileSystem.Error>
export const NotFoundError = NamedError.create(
"NotFoundError",
z.object({
message: z.string(),
export const NotFoundError = NamedError.create(
"NotFoundError",
z.object({
message: z.string(),
}),
)
export type Error = AppFileSystem.Error | InstanceType<typeof NotFoundError>
const RootFile = Schema.Struct({
path: Schema.optional(
Schema.Struct({
root: Schema.optional(Schema.String),
}),
)
),
})
export type Error = AppFileSystem.Error | InstanceType<typeof NotFoundError>
const SessionFile = Schema.Struct({
id: Schema.String,
})
const RootFile = Schema.Struct({
path: Schema.optional(
Schema.Struct({
root: Schema.optional(Schema.String),
}),
),
})
const MessageFile = Schema.Struct({
id: Schema.String,
})
const SessionFile = Schema.Struct({
id: Schema.String,
})
const DiffFile = Schema.Struct({
additions: Schema.Number,
deletions: Schema.Number,
})
const MessageFile = Schema.Struct({
id: Schema.String,
})
const SummaryFile = Schema.Struct({
id: Schema.String,
projectID: Schema.String,
summary: Schema.Struct({ diffs: Schema.Array(DiffFile) }),
})
const DiffFile = Schema.Struct({
additions: Schema.Number,
deletions: Schema.Number,
})
const decodeRoot = Schema.decodeUnknownOption(RootFile)
const decodeSession = Schema.decodeUnknownOption(SessionFile)
const decodeMessage = Schema.decodeUnknownOption(MessageFile)
const decodeSummary = Schema.decodeUnknownOption(SummaryFile)
const SummaryFile = Schema.Struct({
id: Schema.String,
projectID: Schema.String,
summary: Schema.Struct({ diffs: Schema.Array(DiffFile) }),
})
export interface Interface {
readonly remove: (key: string[]) => Effect.Effect<void, AppFileSystem.Error>
readonly read: <T>(key: string[]) => Effect.Effect<T, Error>
readonly update: <T>(key: string[], fn: (draft: T) => void) => Effect.Effect<T, Error>
readonly write: <T>(key: string[], content: T) => Effect.Effect<void, AppFileSystem.Error>
readonly list: (prefix: string[]) => Effect.Effect<string[][], AppFileSystem.Error>
}
const decodeRoot = Schema.decodeUnknownOption(RootFile)
const decodeSession = Schema.decodeUnknownOption(SessionFile)
const decodeMessage = Schema.decodeUnknownOption(MessageFile)
const decodeSummary = Schema.decodeUnknownOption(SummaryFile)
export class Service extends Context.Service<Service, Interface>()("@opencode/Storage") {}
export interface Interface {
readonly remove: (key: string[]) => Effect.Effect<void, AppFileSystem.Error>
readonly read: <T>(key: string[]) => Effect.Effect<T, Error>
readonly update: <T>(key: string[], fn: (draft: T) => void) => Effect.Effect<T, Error>
readonly write: <T>(key: string[], content: T) => Effect.Effect<void, AppFileSystem.Error>
readonly list: (prefix: string[]) => Effect.Effect<string[][], AppFileSystem.Error>
/** Maps a storage key (path segments) to its JSON file under the storage root. */
function file(dir: string, key: string[]) {
  return `${path.join(dir, ...key)}.json`
}
/**
 * Detects "file not found" failures from either a raw Node error
 * (code === "ENOENT") or a wrapped platform error whose reason is
 * tagged "NotFound". Anything else — including non-objects — is not
 * treated as a missing file.
 */
function missing(err: unknown) {
  if (typeof err !== "object" || err === null) return false
  if ("code" in err && err.code === "ENOENT") return true
  if ("reason" in err && typeof err.reason === "object" && err.reason !== null && "_tag" in err.reason) {
    return err.reason._tag === "NotFound"
  }
  return false
}
export class Service extends Context.Service<Service, Interface>()("@opencode/Storage") {}
/** Parses the migration marker file's contents; any non-numeric content means "no migrations run" (0). */
function parseMigration(text: string) {
  const parsed = Number.parseInt(text, 10)
  if (Number.isNaN(parsed)) return 0
  return parsed
}
function file(dir: string, key: string[]) {
return path.join(dir, ...key) + ".json"
}
const MIGRATIONS: Migration[] = [
Effect.fn("Storage.migration.1")(function* (dir: string, fs: AppFileSystem.Interface, git: Git.Interface) {
const project = path.resolve(dir, "../project")
if (!(yield* fs.isDir(project))) return
const projectDirs = yield* fs.glob("*", {
cwd: project,
include: "all",
})
for (const projectDir of projectDirs) {
const full = path.join(project, projectDir)
if (!(yield* fs.isDir(full))) continue
log.info(`migrating project ${projectDir}`)
let projectID = projectDir
let worktree = "/"
function missing(err: unknown) {
if (!err || typeof err !== "object") return false
if ("code" in err && err.code === "ENOENT") return true
if ("reason" in err && err.reason && typeof err.reason === "object" && "_tag" in err.reason) {
return err.reason._tag === "NotFound"
}
return false
}
function parseMigration(text: string) {
const value = Number.parseInt(text, 10)
return Number.isNaN(value) ? 0 : value
}
const MIGRATIONS: Migration[] = [
Effect.fn("Storage.migration.1")(function* (dir: string, fs: AppFileSystem.Interface, git: Git.Interface) {
const project = path.resolve(dir, "../project")
if (!(yield* fs.isDir(project))) return
const projectDirs = yield* fs.glob("*", {
cwd: project,
include: "all",
})
for (const projectDir of projectDirs) {
const full = path.join(project, projectDir)
if (!(yield* fs.isDir(full))) continue
log.info(`migrating project ${projectDir}`)
let projectID = projectDir
let worktree = "/"
if (projectID !== "global") {
for (const msgFile of yield* fs.glob("storage/session/message/*/*.json", {
cwd: full,
absolute: true,
})) {
const json = decodeRoot(yield* fs.readJson(msgFile), { onExcessProperty: "preserve" })
const root = Option.isSome(json) ? json.value.path?.root : undefined
if (!root) continue
worktree = root
break
}
if (!worktree) continue
if (!(yield* fs.isDir(worktree))) continue
const result = yield* git.run(["rev-list", "--max-parents=0", "--all"], {
cwd: worktree,
})
const [id] = result
.text()
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted()
if (!id) continue
projectID = id
yield* fs.writeWithDirs(
path.join(dir, "project", projectID + ".json"),
JSON.stringify(
{
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
},
null,
2,
),
)
log.info(`migrating sessions for project ${projectID}`)
for (const sessionFile of yield* fs.glob("storage/session/info/*.json", {
cwd: full,
absolute: true,
})) {
const dest = path.join(dir, "session", projectID, path.basename(sessionFile))
log.info("copying", { sessionFile, dest })
const session = yield* fs.readJson(sessionFile)
const info = decodeSession(session, { onExcessProperty: "preserve" })
yield* fs.writeWithDirs(dest, JSON.stringify(session, null, 2))
if (Option.isNone(info)) continue
log.info(`migrating messages for session ${info.value.id}`)
for (const msgFile of yield* fs.glob(`storage/session/message/${info.value.id}/*.json`, {
cwd: full,
absolute: true,
})) {
const next = path.join(dir, "message", info.value.id, path.basename(msgFile))
log.info("copying", {
msgFile,
dest: next,
})
const message = yield* fs.readJson(msgFile)
const item = decodeMessage(message, { onExcessProperty: "preserve" })
yield* fs.writeWithDirs(next, JSON.stringify(message, null, 2))
if (Option.isNone(item)) continue
log.info(`migrating parts for message ${item.value.id}`)
for (const partFile of yield* fs.glob(`storage/session/part/${info.value.id}/${item.value.id}/*.json`, {
cwd: full,
absolute: true,
})) {
const out = path.join(dir, "part", item.value.id, path.basename(partFile))
const part = yield* fs.readJson(partFile)
log.info("copying", {
partFile,
dest: out,
})
yield* fs.writeWithDirs(out, JSON.stringify(part, null, 2))
}
}
}
if (projectID !== "global") {
for (const msgFile of yield* fs.glob("storage/session/message/*/*.json", {
cwd: full,
absolute: true,
})) {
const json = decodeRoot(yield* fs.readJson(msgFile), { onExcessProperty: "preserve" })
const root = Option.isSome(json) ? json.value.path?.root : undefined
if (!root) continue
worktree = root
break
}
}
}),
Effect.fn("Storage.migration.2")(function* (dir: string, fs: AppFileSystem.Interface) {
for (const item of yield* fs.glob("session/*/*.json", {
cwd: dir,
absolute: true,
})) {
const raw = yield* fs.readJson(item)
const session = decodeSummary(raw, { onExcessProperty: "preserve" })
if (Option.isNone(session)) continue
const diffs = session.value.summary.diffs
if (!worktree) continue
if (!(yield* fs.isDir(worktree))) continue
const result = yield* git.run(["rev-list", "--max-parents=0", "--all"], {
cwd: worktree,
})
const [id] = result
.text()
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted()
if (!id) continue
projectID = id
yield* fs.writeWithDirs(
path.join(dir, "session_diff", session.value.id + ".json"),
JSON.stringify(diffs, null, 2),
)
yield* fs.writeWithDirs(
path.join(dir, "session", session.value.projectID, session.value.id + ".json"),
path.join(dir, "project", projectID + ".json"),
JSON.stringify(
{
...(raw as Record<string, unknown>),
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
},
null,
2,
),
)
}
}),
]
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const git = yield* Git.Service
const locks = yield* RcMap.make({
lookup: () => TxReentrantLock.make(),
idleTimeToLive: 0,
})
const state = yield* Effect.cached(
Effect.gen(function* () {
const dir = path.join(Global.Path.data, "storage")
const marker = path.join(dir, "migration")
const migration = yield* fs.readFileString(marker).pipe(
Effect.map(parseMigration),
Effect.catchIf(missing, () => Effect.succeed(0)),
Effect.orElseSucceed(() => 0),
)
for (let i = migration; i < MIGRATIONS.length; i++) {
log.info("running migration", { index: i })
const step = MIGRATIONS[i]!
const exit = yield* Effect.exit(step(dir, fs, git))
if (Exit.isFailure(exit)) {
log.error("failed to run migration", { index: i, cause: exit.cause })
break
log.info(`migrating sessions for project ${projectID}`)
for (const sessionFile of yield* fs.glob("storage/session/info/*.json", {
cwd: full,
absolute: true,
})) {
const dest = path.join(dir, "session", projectID, path.basename(sessionFile))
log.info("copying", { sessionFile, dest })
const session = yield* fs.readJson(sessionFile)
const info = decodeSession(session, { onExcessProperty: "preserve" })
yield* fs.writeWithDirs(dest, JSON.stringify(session, null, 2))
if (Option.isNone(info)) continue
log.info(`migrating messages for session ${info.value.id}`)
for (const msgFile of yield* fs.glob(`storage/session/message/${info.value.id}/*.json`, {
cwd: full,
absolute: true,
})) {
const next = path.join(dir, "message", info.value.id, path.basename(msgFile))
log.info("copying", {
msgFile,
dest: next,
})
const message = yield* fs.readJson(msgFile)
const item = decodeMessage(message, { onExcessProperty: "preserve" })
yield* fs.writeWithDirs(next, JSON.stringify(message, null, 2))
if (Option.isNone(item)) continue
log.info(`migrating parts for message ${item.value.id}`)
for (const partFile of yield* fs.glob(`storage/session/part/${info.value.id}/${item.value.id}/*.json`, {
cwd: full,
absolute: true,
})) {
const out = path.join(dir, "part", item.value.id, path.basename(partFile))
const part = yield* fs.readJson(partFile)
log.info("copying", {
partFile,
dest: out,
})
yield* fs.writeWithDirs(out, JSON.stringify(part, null, 2))
}
yield* fs.writeWithDirs(marker, String(i + 1))
}
return { dir }
}
}
}
}),
Effect.fn("Storage.migration.2")(function* (dir: string, fs: AppFileSystem.Interface) {
for (const item of yield* fs.glob("session/*/*.json", {
cwd: dir,
absolute: true,
})) {
const raw = yield* fs.readJson(item)
const session = decodeSummary(raw, { onExcessProperty: "preserve" })
if (Option.isNone(session)) continue
const diffs = session.value.summary.diffs
yield* fs.writeWithDirs(
path.join(dir, "session_diff", session.value.id + ".json"),
JSON.stringify(diffs, null, 2),
)
yield* fs.writeWithDirs(
path.join(dir, "session", session.value.projectID, session.value.id + ".json"),
JSON.stringify(
{
...(raw as Record<string, unknown>),
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
},
},
null,
2,
),
)
}
}),
]
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const git = yield* Git.Service
const locks = yield* RcMap.make({
lookup: () => TxReentrantLock.make(),
idleTimeToLive: 0,
})
const state = yield* Effect.cached(
Effect.gen(function* () {
const dir = path.join(Global.Path.data, "storage")
const marker = path.join(dir, "migration")
const migration = yield* fs.readFileString(marker).pipe(
Effect.map(parseMigration),
Effect.catchIf(missing, () => Effect.succeed(0)),
Effect.orElseSucceed(() => 0),
)
for (let i = migration; i < MIGRATIONS.length; i++) {
log.info("running migration", { index: i })
const step = MIGRATIONS[i]!
const exit = yield* Effect.exit(step(dir, fs, git))
if (Exit.isFailure(exit)) {
log.error("failed to run migration", { index: i, cause: exit.cause })
break
}
yield* fs.writeWithDirs(marker, String(i + 1))
}
return { dir }
}),
)
const fail = (target: string): Effect.Effect<never, InstanceType<typeof NotFoundError>> =>
Effect.fail(new NotFoundError({ message: `Resource not found: ${target}` }))
const wrap = <A>(target: string, body: Effect.Effect<A, AppFileSystem.Error>) =>
body.pipe(Effect.catchIf(missing, () => fail(target)))
const writeJson = Effect.fnUntraced(function* (target: string, content: unknown) {
yield* fs.writeWithDirs(target, JSON.stringify(content, null, 2))
})
const withResolved = <A, E>(
key: string[],
fn: (target: string, rw: TxReentrantLock.TxReentrantLock) => Effect.Effect<A, E>,
): Effect.Effect<A, E | AppFileSystem.Error> =>
Effect.scoped(
Effect.gen(function* () {
const target = file((yield* state).dir, key)
return yield* fn(target, yield* RcMap.get(locks, target))
}),
)
const fail = (target: string): Effect.Effect<never, InstanceType<typeof NotFoundError>> =>
Effect.fail(new NotFoundError({ message: `Resource not found: ${target}` }))
const remove: Interface["remove"] = Effect.fn("Storage.remove")(function* (key: string[]) {
yield* withResolved(key, (target, rw) =>
TxReentrantLock.withWriteLock(rw, fs.remove(target).pipe(Effect.catchIf(missing, () => Effect.void))),
)
})
const wrap = <A>(target: string, body: Effect.Effect<A, AppFileSystem.Error>) =>
body.pipe(Effect.catchIf(missing, () => fail(target)))
const writeJson = Effect.fnUntraced(function* (target: string, content: unknown) {
yield* fs.writeWithDirs(target, JSON.stringify(content, null, 2))
})
const withResolved = <A, E>(
key: string[],
fn: (target: string, rw: TxReentrantLock.TxReentrantLock) => Effect.Effect<A, E>,
): Effect.Effect<A, E | AppFileSystem.Error> =>
Effect.scoped(
Effect.gen(function* () {
const target = file((yield* state).dir, key)
return yield* fn(target, yield* RcMap.get(locks, target))
}),
const read: Interface["read"] = <T>(key: string[]) =>
Effect.gen(function* () {
const value = yield* withResolved(key, (target, rw) =>
TxReentrantLock.withReadLock(rw, wrap(target, fs.readJson(target))),
)
return value as T
})
const remove: Interface["remove"] = Effect.fn("Storage.remove")(function* (key: string[]) {
yield* withResolved(key, (target, rw) =>
TxReentrantLock.withWriteLock(rw, fs.remove(target).pipe(Effect.catchIf(missing, () => Effect.void))),
const update: Interface["update"] = <T>(key: string[], fn: (draft: T) => void) =>
Effect.gen(function* () {
const value = yield* withResolved(key, (target, rw) =>
TxReentrantLock.withWriteLock(
rw,
Effect.gen(function* () {
const content = yield* wrap(target, fs.readJson(target))
fn(content as T)
yield* writeJson(target, content)
return content
}),
),
)
return value as T
})
const read: Interface["read"] = <T>(key: string[]) =>
Effect.gen(function* () {
const value = yield* withResolved(key, (target, rw) =>
TxReentrantLock.withReadLock(rw, wrap(target, fs.readJson(target))),
)
return value as T
})
const update: Interface["update"] = <T>(key: string[], fn: (draft: T) => void) =>
Effect.gen(function* () {
const value = yield* withResolved(key, (target, rw) =>
TxReentrantLock.withWriteLock(
rw,
Effect.gen(function* () {
const content = yield* wrap(target, fs.readJson(target))
fn(content as T)
yield* writeJson(target, content)
return content
}),
),
)
return value as T
})
const write: Interface["write"] = (key: string[], content: unknown) =>
Effect.gen(function* () {
yield* withResolved(key, (target, rw) => TxReentrantLock.withWriteLock(rw, writeJson(target, content)))
})
const list: Interface["list"] = Effect.fn("Storage.list")(function* (prefix: string[]) {
const dir = (yield* state).dir
const cwd = path.join(dir, ...prefix)
const result = yield* fs
.glob("**/*", {
cwd,
include: "file",
})
.pipe(Effect.catch(() => Effect.succeed<string[]>([])))
return result
.map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)])
.toSorted((a, b) => a.join("/").localeCompare(b.join("/")))
const write: Interface["write"] = (key: string[], content: unknown) =>
Effect.gen(function* () {
yield* withResolved(key, (target, rw) => TxReentrantLock.withWriteLock(rw, writeJson(target, content)))
})
return Service.of({
remove,
read,
update,
write,
list,
})
}),
)
const list: Interface["list"] = Effect.fn("Storage.list")(function* (prefix: string[]) {
const dir = (yield* state).dir
const cwd = path.join(dir, ...prefix)
const result = yield* fs
.glob("**/*", {
cwd,
include: "file",
})
.pipe(Effect.catch(() => Effect.succeed<string[]>([])))
return result
.map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)])
.toSorted((a, b) => a.join("/").localeCompare(b.join("/")))
})
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer), Layer.provide(Git.defaultLayer))
}
return Service.of({
remove,
read,
update,
write,
list,
})
}),
)
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer), Layer.provide(Git.defaultLayer))

View File

@@ -1,6 +1,6 @@
import z from "zod"
import type { ZodObject } from "zod"
import { Database, eq } from "@/storage/db"
import { Database, eq } from "@/storage"
import { GlobalBus } from "@/bus/global"
import { Bus as ProjectBus } from "@/bus"
import { BusEvent } from "@/bus/bus-event"

View File

@@ -3,7 +3,7 @@ import * as path from "path"
import { Effect } from "effect"
import { Tool } from "./tool"
import { Bus } from "../bus"
import { FileWatcher } from "../file/watcher"
import { FileWatcher } from "../file"
import { Instance } from "../project/instance"
import { Patch } from "../patch"
import { createTwoFilesPatch, diffLines } from "diff"

View File

@@ -11,10 +11,10 @@ import { LSP } from "../lsp"
import { createTwoFilesPatch, diffLines } from "diff"
import DESCRIPTION from "./edit.txt"
import { File } from "../file"
import { FileWatcher } from "../file/watcher"
import { FileWatcher } from "../file"
import { Bus } from "../bus"
import { Format } from "../format"
import { FileTime } from "../file/time"
import { FileTime } from "../file"
import { Instance } from "../project/instance"
import { Snapshot } from "@/snapshot"
import { assertExternalDirectoryEffect } from "./external-directory"

View File

@@ -1,6 +1,6 @@
import path from "path"
import { Effect } from "effect"
import { EffectLogger } from "@/effect/logger"
import { EffectLogger } from "@/effect"
import { InstanceState } from "@/effect"
import type { Tool } from "./tool"
import { Instance } from "../project/instance"

View File

@@ -4,7 +4,7 @@ import { Effect, Option } from "effect"
import * as Stream from "effect/Stream"
import { InstanceState } from "@/effect"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { Ripgrep } from "../file/ripgrep"
import { Ripgrep } from "../file"
import { assertExternalDirectoryEffect } from "./external-directory"
import DESCRIPTION from "./glob.txt"
import { Tool } from "./tool"

View File

@@ -3,7 +3,7 @@ import z from "zod"
import { Effect, Option } from "effect"
import { InstanceState } from "@/effect"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { Ripgrep } from "../file/ripgrep"
import { Ripgrep } from "../file"
import { assertExternalDirectoryEffect } from "./external-directory"
import DESCRIPTION from "./grep.txt"
import { Tool } from "./tool"

View File

@@ -7,7 +7,7 @@ import { createInterface } from "readline"
import { Tool } from "./tool"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { LSP } from "../lsp"
import { FileTime } from "../file/time"
import { FileTime } from "../file"
import DESCRIPTION from "./read.txt"
import { Instance } from "../project/instance"
import { assertExternalDirectoryEffect } from "./external-directory"

View File

@@ -33,13 +33,13 @@ import { Effect, Layer, Context } from "effect"
import { FetchHttpClient, HttpClient } from "effect/unstable/http"
import { ChildProcessSpawner } from "effect/unstable/process/ChildProcessSpawner"
import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner"
import { Ripgrep } from "../file/ripgrep"
import { Ripgrep } from "../file"
import { Format } from "../format"
import { InstanceState } from "@/effect"
import { Question } from "../question"
import { Todo } from "../session/todo"
import { LSP } from "../lsp"
import { FileTime } from "../file/time"
import { FileTime } from "../file"
import { Instruction } from "../session/instruction"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { Bus } from "../bus"

View File

@@ -3,8 +3,8 @@ import { pathToFileURL } from "url"
import z from "zod"
import { Effect } from "effect"
import * as Stream from "effect/Stream"
import { EffectLogger } from "@/effect/logger"
import { Ripgrep } from "../file/ripgrep"
import { EffectLogger } from "@/effect"
import { Ripgrep } from "../file"
import { Skill } from "../skill"
import { Tool } from "./tool"

View File

@@ -7,9 +7,9 @@ import { createTwoFilesPatch } from "diff"
import DESCRIPTION from "./write.txt"
import { Bus } from "../bus"
import { File } from "../file"
import { FileWatcher } from "../file/watcher"
import { FileWatcher } from "../file"
import { Format } from "../format"
import { FileTime } from "../file/time"
import { FileTime } from "../file"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { Instance } from "../project/instance"
import { trimDiff } from "./edit"

View File

@@ -1,5 +1,5 @@
import path from "path"
import { Process } from "."
import * as Process from "./process"
export async function extractZip(zipPath: string, destDir: string) {
if (process.platform === "win32") {

View File

@@ -3,8 +3,8 @@ import { NamedError } from "@opencode-ai/shared/util/error"
import { Global } from "../global"
import { Instance } from "../project/instance"
import { InstanceBootstrap } from "../project/bootstrap"
import { Project } from "../project/project"
import { Database, eq } from "../storage/db"
import { Project } from "../project"
import { Database, eq } from "../storage"
import { ProjectTable } from "../project/project.sql"
import type { ProjectID } from "../project/schema"
import { Log } from "../util"

View File

@@ -3,7 +3,7 @@ import { Effect, Layer, Option } from "effect"
import { AccountRepo } from "../../src/account/repo"
import { AccessToken, AccountID, OrgID, RefreshToken } from "../../src/account/schema"
import { Database } from "../../src/storage/db"
import { Database } from "../../src/storage"
import { testEffect } from "../lib/effect"
const truncate = Layer.effectDiscard(

View File

@@ -15,7 +15,7 @@ import {
RefreshToken,
UserCode,
} from "../../src/account/schema"
import { Database } from "../../src/storage/db"
import { Database } from "../../src/storage"
import { testEffect } from "../lib/effect"
const truncate = Layer.effectDiscard(

Some files were not shown because too many files have changed in this diff. Show More