Compare commits

...

10 Commits

Author SHA1 Message Date
Kit Langton
9b8dbeda78 refactor(session): effectify SessionSummary service
Convert SessionSummary to Effect service pattern with Interface,
Service, Layer, and async facades. Use Session.Service and Bus.Service
directly instead of facades.
2026-03-30 21:36:54 -04:00
Kit Langton
a898c2ea3a refactor(storage): effectify Storage service (#20132) 2026-03-31 01:16:02 +00:00
Kit Langton
bf777298c8 fix(theme): darken muted text in catppuccin tui themes (#20161) 2026-03-30 21:06:05 -04:00
Luke Parker
93fad99f7f smarter changelog (#20138) 2026-03-31 00:05:46 +00:00
opencode
057848deb8 release: v1.3.9 2026-03-30 23:43:58 +00:00
Luke Parker
1de06452d3 fix(plugin): properly resolve entrypoints without leading dot (#20140) 2026-03-31 09:21:17 +10:00
Frank
58f60629a1 wip: zen 2026-03-30 19:04:42 -04:00
Frank
39a47c9b8c wip: zen 2026-03-30 18:50:09 -04:00
opencode-agent[bot]
ea88044f2e chore: generate 2026-03-30 21:49:45 +00:00
Kit Langton
e6f6f7aff1 refactor: replace Filesystem util with AppFileSystem service (#20127) 2026-03-30 21:48:28 +00:00
44 changed files with 1295 additions and 654 deletions

View File

@@ -1,22 +1,19 @@
---
model: opencode/kimi-k2.5
model: opencode/gpt-5.4
---
Create `UPCOMING_CHANGELOG.md` from the structured changelog input below.
If `UPCOMING_CHANGELOG.md` already exists, ignore its current contents completely.
Do not preserve, merge, or reuse text from the existing file.
Any command arguments are passed directly to `bun script/changelog.ts`.
Use `--from` / `-f` and `--to` / `-t` to preview a specific release range.
The input already contains the exact commit range since the last non-draft release.
The commits are already filtered to the release-relevant packages and grouped into
the release sections. Do not fetch GitHub releases, PRs, or build your own commit list.
The input may also include a `## Community Contributors Input` section.
Before writing any entry you keep, inspect the real diff with
`git show --stat --format='' <hash>` or `git show --format='' <hash>` so the
summary reflects the actual user-facing change and not just the commit message.
`git show --stat --format='' <hash>` or `git show --format='' <hash>` so you can
understand the actual code changes and not just the commit message (they may be misleading).
Do not use `git log` or author metadata when deciding attribution.
Rules:
@@ -38,7 +35,12 @@ Rules:
- Do not add, remove, rewrite, or reorder contributor names or commit titles in that block
- Do not derive the thank-you section from the main summary bullets
- Do not include the heading `## Community Contributors Input` in the final file
- Focus on writing the least words to get your point across - users will skim read the changelog, so we should be precise
## Changelog Input
**Importantly, the changelog is for users (who are at least slightly technical), they may use the TUI, Desktop, SDK, Plugins and so forth. Be thorough in understanding flow on effects may not be immediately apparent. e.g. a package upgrade looks internal but may patch a bug. Or a refactor may also stabilise some race condition that fixes bugs for users. The PR title/body + commit message will give you the authors context, usually containing the outcome not just technical detail**
!`bun script/changelog.ts $ARGUMENTS`
<changelog_input>
!`bun script/raw-changelog.ts $ARGUMENTS`
</changelog_input>

View File

@@ -26,7 +26,7 @@
},
"packages/app": {
"name": "@opencode-ai/app",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -79,7 +79,7 @@
},
"packages/console/app": {
"name": "@opencode-ai/console-app",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@cloudflare/vite-plugin": "1.15.2",
"@ibm/plex": "6.4.1",
@@ -113,7 +113,7 @@
},
"packages/console/core": {
"name": "@opencode-ai/console-core",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@aws-sdk/client-sts": "3.782.0",
"@jsx-email/render": "1.1.1",
@@ -140,7 +140,7 @@
},
"packages/console/function": {
"name": "@opencode-ai/console-function",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@ai-sdk/anthropic": "3.0.64",
"@ai-sdk/openai": "3.0.48",
@@ -164,7 +164,7 @@
},
"packages/console/mail": {
"name": "@opencode-ai/console-mail",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",
@@ -188,7 +188,7 @@
},
"packages/desktop": {
"name": "@opencode-ai/desktop",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/app": "workspace:*",
"@opencode-ai/ui": "workspace:*",
@@ -221,7 +221,7 @@
},
"packages/desktop-electron": {
"name": "@opencode-ai/desktop-electron",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/app": "workspace:*",
"@opencode-ai/ui": "workspace:*",
@@ -252,7 +252,7 @@
},
"packages/enterprise": {
"name": "@opencode-ai/enterprise",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/ui": "workspace:*",
"@opencode-ai/util": "workspace:*",
@@ -281,7 +281,7 @@
},
"packages/function": {
"name": "@opencode-ai/function",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "catalog:",
@@ -297,7 +297,7 @@
},
"packages/opencode": {
"name": "opencode",
"version": "1.3.8",
"version": "1.3.9",
"bin": {
"opencode": "./bin/opencode",
},
@@ -423,7 +423,7 @@
},
"packages/plugin": {
"name": "@opencode-ai/plugin",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"zod": "catalog:",
@@ -457,7 +457,7 @@
},
"packages/sdk/js": {
"name": "@opencode-ai/sdk",
"version": "1.3.8",
"version": "1.3.9",
"devDependencies": {
"@hey-api/openapi-ts": "0.90.10",
"@tsconfig/node22": "catalog:",
@@ -468,7 +468,7 @@
},
"packages/slack": {
"name": "@opencode-ai/slack",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"@slack/bolt": "^3.17.1",
@@ -503,7 +503,7 @@
},
"packages/ui": {
"name": "@opencode-ai/ui",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -550,7 +550,7 @@
},
"packages/util": {
"name": "@opencode-ai/util",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"zod": "catalog:",
},
@@ -561,7 +561,7 @@
},
"packages/web": {
"name": "@opencode-ai/web",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@astrojs/cloudflare": "12.6.3",
"@astrojs/markdown-remark": "6.3.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/app",
"version": "1.3.8",
"version": "1.3.9",
"description": "",
"type": "module",
"exports": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-app",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -471,7 +471,7 @@ export async function handler(
reqModel,
providerModel: modelProvider.model,
adjustCacheUsage: providerProps.adjustCacheUsage,
safetyIdentifier: ip,
safetyIdentifier: modelProvider.safetyIdentifier ? ip : undefined,
workspaceID: authInfo?.workspaceID,
}
if (format === "anthropic") return anthropicHelper(opts)

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/console-core",
"version": "1.3.8",
"version": "1.3.9",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -37,6 +37,7 @@ export namespace ZenData {
disabled: z.boolean().optional(),
storeModel: z.string().optional(),
payloadModifier: z.record(z.string(), z.any()).optional(),
safetyIdentifier: z.boolean().optional(),
}),
),
})
@@ -49,7 +50,6 @@ export namespace ZenData {
payloadModifier: z.record(z.string(), z.any()).optional(),
payloadMappings: z.record(z.string(), z.string()).optional(),
adjustCacheUsage: z.boolean().optional(),
safetyIdentifier: z.boolean().optional(),
})
const ModelsSchema = z.object({

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-function",
"version": "1.3.8",
"version": "1.3.9",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-mail",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",

View File

@@ -1,7 +1,7 @@
{
"name": "@opencode-ai/desktop-electron",
"private": true,
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"homepage": "https://opencode.ai",

View File

@@ -1,7 +1,7 @@
{
"name": "@opencode-ai/desktop",
"private": true,
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/enterprise",
"version": "1.3.8",
"version": "1.3.9",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -1,7 +1,7 @@
id = "opencode"
name = "OpenCode"
description = "The open source coding agent."
version = "1.3.8"
version = "1.3.9"
schema_version = 1
authors = ["Anomaly"]
repository = "https://github.com/anomalyco/opencode"
@@ -11,26 +11,26 @@ name = "OpenCode"
icon = "./icons/opencode.svg"
[agent_servers.opencode.targets.darwin-aarch64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-darwin-arm64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-darwin-arm64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.darwin-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-darwin-x64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-darwin-x64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-aarch64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-linux-arm64.tar.gz"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-linux-arm64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-linux-x64.tar.gz"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-linux-x64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.windows-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-windows-x64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-windows-x64.zip"
cmd = "./opencode.exe"
args = ["acp"]

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/function",
"version": "1.3.8",
"version": "1.3.9",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View File

@@ -1,6 +1,6 @@
{
"$schema": "https://json.schemastore.org/package.json",
"version": "1.3.8",
"version": "1.3.9",
"name": "opencode",
"type": "module",
"license": "MIT",

View File

@@ -393,7 +393,7 @@ export namespace Agent {
)
export const defaultLayer = layer.pipe(
Layer.provide(Auth.layer),
Layer.provide(Auth.defaultLayer),
Layer.provide(Config.defaultLayer),
Layer.provide(Skill.defaultLayer),
)

View File

@@ -3,7 +3,7 @@ import { Effect, Layer, Record, Result, Schema, ServiceMap } from "effect"
import { makeRuntime } from "@/effect/run-service"
import { zod } from "@/util/effect-zod"
import { Global } from "../global"
import { Filesystem } from "../util/filesystem"
import { AppFileSystem } from "../filesystem"
export const OAUTH_DUMMY_KEY = "opencode-oauth-dummy-key"
@@ -53,17 +53,13 @@ export namespace Auth {
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const fsys = yield* AppFileSystem.Service
const decode = Schema.decodeUnknownOption(Info)
const all = Effect.fn("Auth.all")(() =>
Effect.tryPromise({
try: async () => {
const data = await Filesystem.readJson<Record<string, unknown>>(file).catch(() => ({}))
return Record.filterMap(data, (value) => Result.fromOption(decode(value), () => undefined))
},
catch: fail("Failed to read auth data"),
}),
)
const all = Effect.fn("Auth.all")(function* () {
const data = (yield* fsys.readJson(file).pipe(Effect.orElseSucceed(() => ({})))) as Record<string, unknown>
return Record.filterMap(data, (value) => Result.fromOption(decode(value), () => undefined))
})
const get = Effect.fn("Auth.get")(function* (providerID: string) {
return (yield* all())[providerID]
@@ -74,10 +70,9 @@ export namespace Auth {
const data = yield* all()
if (norm !== key) delete data[key]
delete data[norm + "/"]
yield* Effect.tryPromise({
try: () => Filesystem.writeJson(file, { ...data, [norm]: info }, 0o600),
catch: fail("Failed to write auth data"),
})
yield* fsys
.writeJson(file, { ...data, [norm]: info }, 0o600)
.pipe(Effect.mapError(fail("Failed to write auth data")))
})
const remove = Effect.fn("Auth.remove")(function* (key: string) {
@@ -85,17 +80,16 @@ export namespace Auth {
const data = yield* all()
delete data[key]
delete data[norm]
yield* Effect.tryPromise({
try: () => Filesystem.writeJson(file, data, 0o600),
catch: fail("Failed to write auth data"),
})
yield* fsys.writeJson(file, data, 0o600).pipe(Effect.mapError(fail("Failed to write auth data")))
})
return Service.of({ get, all, set, remove })
}),
)
const { runPromise } = makeRuntime(Service, layer)
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function get(providerID: string) {
return runPromise((service) => service.get(providerID))

View File

@@ -62,8 +62,8 @@
"light": "frappeText"
},
"textMuted": {
"dark": "frappeSubtext1",
"light": "frappeSubtext1"
"dark": "frappeOverlay2",
"light": "frappeOverlay2"
},
"background": {
"dark": "frappeBase",

View File

@@ -62,8 +62,8 @@
"light": "macText"
},
"textMuted": {
"dark": "macSubtext1",
"light": "macSubtext1"
"dark": "macOverlay2",
"light": "macOverlay2"
},
"background": {
"dark": "macBase",

View File

@@ -63,7 +63,7 @@
"success": { "dark": "darkGreen", "light": "lightGreen" },
"info": { "dark": "darkTeal", "light": "lightTeal" },
"text": { "dark": "darkText", "light": "lightText" },
"textMuted": { "dark": "darkSubtext1", "light": "lightSubtext1" },
"textMuted": { "dark": "darkOverlay2", "light": "lightOverlay2" },
"background": { "dark": "darkBase", "light": "lightBase" },
"backgroundPanel": { "dark": "darkMantle", "light": "lightMantle" },
"backgroundElement": { "dark": "darkCrust", "light": "lightCrust" },

View File

@@ -1540,7 +1540,7 @@ export namespace Config {
export const defaultLayer = layer.pipe(
Layer.provide(AppFileSystem.defaultLayer),
Layer.provide(Auth.layer),
Layer.provide(Auth.defaultLayer),
Layer.provide(Account.defaultLayer),
)

View File

@@ -541,7 +541,7 @@ export namespace File {
const exists = yield* appFs.existsSafe(full)
if (!exists) return { type: "text" as const, content: "" }
const mimeType = Filesystem.mimeType(full)
const mimeType = AppFileSystem.mimeType(full)
const encode = knownText ? false : shouldEncode(mimeType)
if (encode && !isImage(mimeType)) return { type: "binary" as const, content: "", mimeType }

View File

@@ -1,9 +1,9 @@
import { DateTime, Effect, Layer, Semaphore, ServiceMap } from "effect"
import { DateTime, Effect, Layer, Option, Semaphore, ServiceMap } from "effect"
import { InstanceState } from "@/effect/instance-state"
import { makeRuntime } from "@/effect/run-service"
import { AppFileSystem } from "@/filesystem"
import { Flag } from "@/flag/flag"
import type { SessionID } from "@/session/schema"
import { Filesystem } from "../util/filesystem"
import { Log } from "../util/log"
export namespace FileTime {
@@ -12,21 +12,9 @@ export namespace FileTime {
export type Stamp = {
readonly read: Date
readonly mtime: number | undefined
readonly ctime: number | undefined
readonly size: number | undefined
}
const stamp = Effect.fnUntraced(function* (file: string) {
const stat = Filesystem.stat(file)
const size = typeof stat?.size === "bigint" ? Number(stat.size) : stat?.size
return {
read: yield* DateTime.nowAsDate,
mtime: stat?.mtime?.getTime(),
ctime: stat?.ctime?.getTime(),
size,
}
})
const session = (reads: Map<SessionID, Map<string, Stamp>>, sessionID: SessionID) => {
const value = reads.get(sessionID)
if (value) return value
@@ -53,7 +41,17 @@ export namespace FileTime {
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const fsys = yield* AppFileSystem.Service
const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK
const stamp = Effect.fnUntraced(function* (file: string) {
const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.succeed(undefined)))
return {
read: yield* DateTime.nowAsDate,
mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined,
size: info ? Number(info.size) : undefined,
}
})
const state = yield* InstanceState.make<State>(
Effect.fn("FileTime.state")(() =>
Effect.succeed({
@@ -92,7 +90,7 @@ export namespace FileTime {
if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`)
const next = yield* stamp(filepath)
const changed = next.mtime !== time.mtime || next.ctime !== time.ctime || next.size !== time.size
const changed = next.mtime !== time.mtime || next.size !== time.size
if (!changed) return
throw new Error(
@@ -108,7 +106,9 @@ export namespace FileTime {
}),
).pipe(Layer.orDie)
const { runPromise } = makeRuntime(Service, layer)
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
const { runPromise } = makeRuntime(Service, defaultLayer)
export function read(sessionID: SessionID, file: string) {
return runPromise((s) => s.read(sessionID, file))

View File

@@ -45,9 +45,9 @@ export function pluginSource(spec: string): PluginSource {
}
function resolveExportPath(raw: string, dir: string) {
if (raw.startsWith("./") || raw.startsWith("../")) return path.resolve(dir, raw)
if (raw.startsWith("file://")) return fileURLToPath(raw)
return raw
if (path.isAbsolute(raw)) return raw
return path.resolve(dir, raw)
}
function extractExportValue(value: unknown): string | undefined {
@@ -93,7 +93,7 @@ function resolvePackageEntrypoint(spec: string, kind: PluginKind, pkg: PluginPac
function targetPath(target: string) {
if (target.startsWith("file://")) return fileURLToPath(target)
if (path.isAbsolute(target) || /^[A-Za-z]:[\\/]/.test(target)) return target
if (path.isAbsolute(target)) return target
}
async function resolveDirectoryIndex(dir: string) {

View File

@@ -230,7 +230,7 @@ export namespace ProviderAuth {
}),
)
export const defaultLayer = layer.pipe(Layer.provide(Auth.layer))
export const defaultLayer = layer.pipe(Layer.provide(Auth.defaultLayer))
const { runPromise } = makeRuntime(Service, defaultLayer)

View File

@@ -436,13 +436,13 @@ export const SessionRoutes = lazy(() =>
validator(
"param",
z.object({
sessionID: SessionSummary.diff.schema.shape.sessionID,
sessionID: SessionSummary.DiffInput.shape.sessionID,
}),
),
validator(
"query",
z.object({
messageID: SessionSummary.diff.schema.shape.messageID,
messageID: SessionSummary.DiffInput.shape.messageID,
}),
),
async (c) => {

View File

@@ -294,12 +294,10 @@ export namespace SessionProcessor {
}
ctx.snapshot = undefined
}
yield* Effect.promise(() =>
SessionSummary.summarize({
sessionID: ctx.sessionID,
messageID: ctx.assistantMessage.parentID,
}),
).pipe(Effect.ignoreCause({ log: true, message: "session summary failed" }), Effect.forkDetach)
SessionSummary.summarize({
sessionID: ctx.sessionID,
messageID: ctx.assistantMessage.parentID,
})
if (
!ctx.assistantMessage.summary &&
isOverflow({ cfg: yield* config.get(), tokens: usage.tokens, model: ctx.model })

View File

@@ -1704,7 +1704,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
Layer.provide(Permission.layer),
Layer.provide(MCP.defaultLayer),
Layer.provide(LSP.defaultLayer),
Layer.provide(FileTime.layer),
Layer.provide(FileTime.defaultLayer),
Layer.provide(ToolRegistry.defaultLayer),
Layer.provide(Truncate.layer),
Layer.provide(AppFileSystem.defaultLayer),

View File

@@ -1,14 +1,12 @@
import { fn } from "@/util/fn"
import z from "zod"
import { Effect, Layer, ServiceMap } from "effect"
import { makeRuntime } from "@/effect/run-service"
import { Bus } from "@/bus"
import { Snapshot } from "@/snapshot"
import { Storage } from "@/storage/storage"
import { Session } from "."
import { MessageV2 } from "./message-v2"
import { SessionID, MessageID } from "./schema"
import { Snapshot } from "@/snapshot"
import { Storage } from "@/storage/storage"
import { Bus } from "@/bus"
import { NotFoundError } from "@/storage/db"
export namespace SessionSummary {
function unquoteGitPath(input: string) {
@@ -67,103 +65,121 @@ export namespace SessionSummary {
return Buffer.from(bytes).toString()
}
export const summarize = fn(
z.object({
sessionID: SessionID.zod,
messageID: MessageID.zod,
}),
async (input) => {
await Session.messages({ sessionID: input.sessionID })
.then((all) =>
Promise.all([
summarizeSession({ sessionID: input.sessionID, messages: all }),
summarizeMessage({ messageID: input.messageID, messages: all }),
]),
)
.catch((err) => {
if (NotFoundError.isInstance(err)) return
throw err
})
},
)
async function summarizeSession(input: { sessionID: SessionID; messages: MessageV2.WithParts[] }) {
const diffs = await computeDiff({ messages: input.messages })
await Session.setSummary({
sessionID: input.sessionID,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
},
})
await Storage.write(["session_diff", input.sessionID], diffs)
Bus.publish(Session.Event.Diff, {
sessionID: input.sessionID,
diff: diffs,
})
export interface Interface {
readonly summarize: (input: { sessionID: SessionID; messageID: MessageID }) => Effect.Effect<void>
readonly diff: (input: { sessionID: SessionID; messageID?: MessageID }) => Effect.Effect<Snapshot.FileDiff[]>
readonly computeDiff: (input: { messages: MessageV2.WithParts[] }) => Effect.Effect<Snapshot.FileDiff[]>
}
async function summarizeMessage(input: { messageID: string; messages: MessageV2.WithParts[] }) {
const messages = input.messages.filter(
(m) => m.info.id === input.messageID || (m.info.role === "assistant" && m.info.parentID === input.messageID),
)
const msgWithParts = messages.find((m) => m.info.id === input.messageID)
if (!msgWithParts || msgWithParts.info.role !== "user") return
const userMsg = msgWithParts.info
const diffs = await computeDiff({ messages })
userMsg.summary = {
...userMsg.summary,
diffs,
}
await Session.updateMessage(userMsg)
}
export class Service extends ServiceMap.Service<Service, Interface>()("@opencode/SessionSummary") {}
export const diff = fn(
z.object({
sessionID: SessionID.zod,
messageID: MessageID.zod.optional(),
}),
async (input) => {
const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", input.sessionID]).catch(() => [])
const next = diffs.map((item) => {
const file = unquoteGitPath(item.file)
if (file === item.file) return item
return {
...item,
file,
}
})
const changed = next.some((item, i) => item.file !== diffs[i]?.file)
if (changed) Storage.write(["session_diff", input.sessionID], next).catch(() => {})
return next
},
)
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const sessions = yield* Session.Service
const snapshot = yield* Snapshot.Service
const storage = yield* Storage.Service
const bus = yield* Bus.Service
export async function computeDiff(input: { messages: MessageV2.WithParts[] }) {
let from: string | undefined
let to: string | undefined
// scan assistant messages to find earliest from and latest to
// snapshot
for (const item of input.messages) {
if (!from) {
for (const part of item.parts) {
if (part.type === "step-start" && part.snapshot) {
from = part.snapshot
break
const computeDiff = Effect.fn("SessionSummary.computeDiff")(function* (input: {
messages: MessageV2.WithParts[]
}) {
let from: string | undefined
let to: string | undefined
for (const item of input.messages) {
if (!from) {
for (const part of item.parts) {
if (part.type === "step-start" && part.snapshot) {
from = part.snapshot
break
}
}
}
for (const part of item.parts) {
if (part.type === "step-finish" && part.snapshot) to = part.snapshot
}
}
}
if (from && to) return yield* snapshot.diffFull(from, to)
return []
})
for (const part of item.parts) {
if (part.type === "step-finish" && part.snapshot) {
to = part.snapshot
}
}
}
const summarize = Effect.fn("SessionSummary.summarize")(function* (input: {
sessionID: SessionID
messageID: MessageID
}) {
const all = yield* sessions.messages({ sessionID: input.sessionID })
if (!all.length) return
if (from && to) return Snapshot.diffFull(from, to)
return []
const diffs = yield* computeDiff({ messages: all })
yield* sessions.setSummary({
sessionID: input.sessionID,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
},
})
yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore)
yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs })
const messages = all.filter(
(m) =>
m.info.id === input.messageID || (m.info.role === "assistant" && m.info.parentID === input.messageID),
)
const target = messages.find((m) => m.info.id === input.messageID)
if (!target || target.info.role !== "user") return
const msgDiffs = yield* computeDiff({ messages })
target.info.summary = { ...target.info.summary, diffs: msgDiffs }
yield* sessions.updateMessage(target.info)
})
const diff = Effect.fn("SessionSummary.diff")(function* (input: {
sessionID: SessionID
messageID?: MessageID
}) {
const diffs = yield* storage.read<Snapshot.FileDiff[]>(["session_diff", input.sessionID]).pipe(
Effect.catch(() => Effect.succeed([] as Snapshot.FileDiff[])),
)
const next = diffs.map((item) => {
const file = unquoteGitPath(item.file)
if (file === item.file) return item
return { ...item, file }
})
const changed = next.some((item, i) => item.file !== diffs[i]?.file)
if (changed) yield* storage.write(["session_diff", input.sessionID], next).pipe(Effect.ignore)
return next
})
return Service.of({ summarize, diff, computeDiff })
}),
)
export const defaultLayer = Layer.unwrap(
Effect.sync(() =>
layer.pipe(
Layer.provide(Session.defaultLayer),
Layer.provide(Snapshot.defaultLayer),
Layer.provide(Storage.defaultLayer),
Layer.provide(Bus.layer),
),
),
)
const { runPromise } = makeRuntime(Service, defaultLayer)
export const summarize = (input: { sessionID: SessionID; messageID: MessageID }) =>
void runPromise((svc) => svc.summarize(input)).catch(() => {})
export const DiffInput = z.object({
sessionID: SessionID.zod,
messageID: MessageID.zod.optional(),
})
export async function diff(input: z.infer<typeof DiffInput>) {
return runPromise((svc) => svc.diff(input))
}
export async function computeDiff(input: { messages: MessageV2.WithParts[] }) {
return runPromise((svc) => svc.computeDiff(input))
}
}

View File

@@ -11,7 +11,7 @@ import { makeRuntime } from "@/effect/run-service"
import { Flag } from "@/flag/flag"
import { Global } from "@/global"
import { Permission } from "@/permission"
import { Filesystem } from "@/util/filesystem"
import { AppFileSystem } from "@/filesystem"
import { Config } from "../config/config"
import { ConfigMarkdown } from "../config/markdown"
import { Glob } from "../util/glob"
@@ -139,28 +139,20 @@ export namespace Skill {
config: Config.Interface,
discovery: Discovery.Interface,
bus: Bus.Interface,
fsys: AppFileSystem.Interface,
directory: string,
worktree: string,
) {
if (!Flag.OPENCODE_DISABLE_EXTERNAL_SKILLS) {
for (const dir of EXTERNAL_DIRS) {
const root = path.join(Global.Path.home, dir)
const isDir = yield* Effect.promise(() => Filesystem.isDir(root))
if (!isDir) continue
if (!(yield* fsys.isDir(root))) continue
yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "global" })
}
const upDirs = yield* Effect.promise(async () => {
const dirs: string[] = []
for await (const root of Filesystem.up({
targets: EXTERNAL_DIRS,
start: directory,
stop: worktree,
})) {
dirs.push(root)
}
return dirs
})
const upDirs = yield* fsys
.up({ targets: EXTERNAL_DIRS, start: directory, stop: worktree })
.pipe(Effect.catch(() => Effect.succeed([] as string[])))
for (const root of upDirs) {
yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "project" })
@@ -176,8 +168,7 @@ export namespace Skill {
for (const item of cfg.skills?.paths ?? []) {
const expanded = item.startsWith("~/") ? path.join(os.homedir(), item.slice(2)) : item
const dir = path.isAbsolute(expanded) ? expanded : path.join(directory, expanded)
const isDir = yield* Effect.promise(() => Filesystem.isDir(dir))
if (!isDir) {
if (!(yield* fsys.isDir(dir))) {
log.warn("skill path not found", { path: dir })
continue
}
@@ -198,16 +189,17 @@ export namespace Skill {
export class Service extends ServiceMap.Service<Service, Interface>()("@opencode/Skill") {}
export const layer: Layer.Layer<Service, never, Discovery.Service | Config.Service | Bus.Service> = Layer.effect(
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const discovery = yield* Discovery.Service
const config = yield* Config.Service
const bus = yield* Bus.Service
const fsys = yield* AppFileSystem.Service
const state = yield* InstanceState.make(
Effect.fn("Skill.state")(function* (ctx) {
const s: State = { skills: {}, dirs: new Set() }
yield* loadSkills(s, config, discovery, bus, ctx.directory, ctx.worktree)
yield* loadSkills(s, config, discovery, bus, fsys, ctx.directory, ctx.worktree)
return s
}),
)
@@ -238,10 +230,11 @@ export namespace Skill {
}),
)
export const defaultLayer: Layer.Layer<Service> = layer.pipe(
export const defaultLayer = layer.pipe(
Layer.provide(Discovery.defaultLayer),
Layer.provide(Config.defaultLayer),
Layer.provide(Bus.layer),
Layer.provide(AppFileSystem.defaultLayer),
)
export function fmt(list: Info[], opts: { verbose: boolean }) {

View File

@@ -1,19 +1,17 @@
import { Log } from "../util/log"
import path from "path"
import fs from "fs/promises"
import { Global } from "../global"
import { Filesystem } from "../util/filesystem"
import { lazy } from "../util/lazy"
import { Lock } from "../util/lock"
import { NamedError } from "@opencode-ai/util/error"
import z from "zod"
import { Glob } from "../util/glob"
import { git } from "@/util/git"
import { AppFileSystem } from "@/filesystem"
import { makeRuntime } from "@/effect/run-service"
import { Effect, Exit, Layer, Option, RcMap, Schema, ServiceMap, TxReentrantLock } from "effect"
export namespace Storage {
const log = Log.create({ service: "storage" })
type Migration = (dir: string) => Promise<void>
type Migration = (dir: string, fs: AppFileSystem.Interface) => Effect.Effect<void, AppFileSystem.Error>
export const NotFoundError = NamedError.create(
"NotFoundError",
@@ -22,36 +20,101 @@ export namespace Storage {
}),
)
export type Error = AppFileSystem.Error | InstanceType<typeof NotFoundError>
const RootFile = Schema.Struct({
path: Schema.optional(
Schema.Struct({
root: Schema.optional(Schema.String),
}),
),
})
const SessionFile = Schema.Struct({
id: Schema.String,
})
const MessageFile = Schema.Struct({
id: Schema.String,
})
const DiffFile = Schema.Struct({
additions: Schema.Number,
deletions: Schema.Number,
})
const SummaryFile = Schema.Struct({
id: Schema.String,
projectID: Schema.String,
summary: Schema.Struct({ diffs: Schema.Array(DiffFile) }),
})
const decodeRoot = Schema.decodeUnknownOption(RootFile)
const decodeSession = Schema.decodeUnknownOption(SessionFile)
const decodeMessage = Schema.decodeUnknownOption(MessageFile)
const decodeSummary = Schema.decodeUnknownOption(SummaryFile)
/**
 * Storage service contract: JSON documents addressed by hierarchical
 * string-array keys, each resolved to a `<...key>.json` path on disk.
 */
export interface Interface {
  /** Delete the document at `key`; a file that is already gone is not an error. */
  readonly remove: (key: string[]) => Effect.Effect<void, AppFileSystem.Error>
  /** Read and parse the document at `key`; fails with NotFoundError when absent. */
  readonly read: <T>(key: string[]) => Effect.Effect<T, Error>
  /** Read-modify-write: mutate the stored value in place via `fn`, persist, return it. */
  readonly update: <T>(key: string[], fn: (draft: T) => void) => Effect.Effect<T, Error>
  /** Serialize `content` as JSON and store it at `key`, creating parent directories. */
  readonly write: <T>(key: string[], content: T) => Effect.Effect<void, AppFileSystem.Error>
  /** List stored keys under `prefix`, sorted; a missing prefix yields an empty list. */
  readonly list: (prefix: string[]) => Effect.Effect<string[][], AppFileSystem.Error>
}
export class Service extends ServiceMap.Service<Service, Interface>()("@opencode/Storage") {}
/** Resolve a storage key to its on-disk JSON file path under `dir`. */
function file(dir: string, key: string[]) {
  const base = path.join(dir, ...key)
  return `${base}.json`
}
/**
 * Recognize "file does not exist" failures from either a raw Node error
 * (code === "ENOENT") or a platform error wrapping a NotFound reason.
 */
function missing(err: unknown) {
  if (typeof err !== "object" || err === null) return false
  const record = err as { code?: unknown; reason?: unknown }
  if (record.code === "ENOENT") return true
  const reason = record.reason
  if (reason && typeof reason === "object" && "_tag" in reason) {
    return (reason as { _tag: unknown })._tag === "NotFound"
  }
  return false
}
/** Parse the migration-marker file contents; anything non-numeric counts as 0. */
function parseMigration(text: string) {
  const parsed = Number.parseInt(text, 10)
  if (Number.isNaN(parsed)) return 0
  return parsed
}
const MIGRATIONS: Migration[] = [
async (dir) => {
Effect.fn("Storage.migration.1")(function* (dir: string, fs: AppFileSystem.Interface) {
const project = path.resolve(dir, "../project")
if (!(await Filesystem.isDir(project))) return
const projectDirs = await Glob.scan("*", {
if (!(yield* fs.isDir(project))) return
const projectDirs = yield* fs.glob("*", {
cwd: project,
include: "all",
})
for (const projectDir of projectDirs) {
const fullPath = path.join(project, projectDir)
if (!(await Filesystem.isDir(fullPath))) continue
const full = path.join(project, projectDir)
if (!(yield* fs.isDir(full))) continue
log.info(`migrating project ${projectDir}`)
let projectID = projectDir
const fullProjectDir = path.join(project, projectDir)
let worktree = "/"
if (projectID !== "global") {
for (const msgFile of await Glob.scan("storage/session/message/*/*.json", {
cwd: path.join(project, projectDir),
for (const msgFile of yield* fs.glob("storage/session/message/*/*.json", {
cwd: full,
absolute: true,
})) {
const json = await Filesystem.readJson<any>(msgFile)
worktree = json.path?.root
if (worktree) break
const json = decodeRoot(yield* fs.readJson(msgFile), { onExcessProperty: "preserve" })
const root = Option.isSome(json) ? json.value.path?.root : undefined
if (!root) continue
worktree = root
break
}
if (!worktree) continue
if (!(await Filesystem.isDir(worktree))) continue
const result = await git(["rev-list", "--max-parents=0", "--all"], {
cwd: worktree,
})
if (!(yield* fs.isDir(worktree))) continue
const result = yield* Effect.promise(() =>
git(["rev-list", "--max-parents=0", "--all"], {
cwd: worktree,
}),
)
const [id] = result
.text()
.split("\n")
@@ -61,157 +124,230 @@ export namespace Storage {
if (!id) continue
projectID = id
await Filesystem.writeJson(path.join(dir, "project", projectID + ".json"), {
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
})
yield* fs.writeWithDirs(
path.join(dir, "project", projectID + ".json"),
JSON.stringify(
{
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
},
null,
2,
),
)
log.info(`migrating sessions for project ${projectID}`)
for (const sessionFile of await Glob.scan("storage/session/info/*.json", {
cwd: fullProjectDir,
for (const sessionFile of yield* fs.glob("storage/session/info/*.json", {
cwd: full,
absolute: true,
})) {
const dest = path.join(dir, "session", projectID, path.basename(sessionFile))
log.info("copying", {
sessionFile,
dest,
})
const session = await Filesystem.readJson<any>(sessionFile)
await Filesystem.writeJson(dest, session)
log.info(`migrating messages for session ${session.id}`)
for (const msgFile of await Glob.scan(`storage/session/message/${session.id}/*.json`, {
cwd: fullProjectDir,
log.info("copying", { sessionFile, dest })
const session = yield* fs.readJson(sessionFile)
const info = decodeSession(session, { onExcessProperty: "preserve" })
yield* fs.writeWithDirs(dest, JSON.stringify(session, null, 2))
if (Option.isNone(info)) continue
log.info(`migrating messages for session ${info.value.id}`)
for (const msgFile of yield* fs.glob(`storage/session/message/${info.value.id}/*.json`, {
cwd: full,
absolute: true,
})) {
const dest = path.join(dir, "message", session.id, path.basename(msgFile))
const next = path.join(dir, "message", info.value.id, path.basename(msgFile))
log.info("copying", {
msgFile,
dest,
dest: next,
})
const message = await Filesystem.readJson<any>(msgFile)
await Filesystem.writeJson(dest, message)
const message = yield* fs.readJson(msgFile)
const item = decodeMessage(message, { onExcessProperty: "preserve" })
yield* fs.writeWithDirs(next, JSON.stringify(message, null, 2))
if (Option.isNone(item)) continue
log.info(`migrating parts for message ${message.id}`)
for (const partFile of await Glob.scan(`storage/session/part/${session.id}/${message.id}/*.json`, {
cwd: fullProjectDir,
log.info(`migrating parts for message ${item.value.id}`)
for (const partFile of yield* fs.glob(`storage/session/part/${info.value.id}/${item.value.id}/*.json`, {
cwd: full,
absolute: true,
})) {
const dest = path.join(dir, "part", message.id, path.basename(partFile))
const part = await Filesystem.readJson(partFile)
const out = path.join(dir, "part", item.value.id, path.basename(partFile))
const part = yield* fs.readJson(partFile)
log.info("copying", {
partFile,
dest,
dest: out,
})
await Filesystem.writeJson(dest, part)
yield* fs.writeWithDirs(out, JSON.stringify(part, null, 2))
}
}
}
}
}
},
async (dir) => {
for (const item of await Glob.scan("session/*/*.json", {
}),
Effect.fn("Storage.migration.2")(function* (dir: string, fs: AppFileSystem.Interface) {
for (const item of yield* fs.glob("session/*/*.json", {
cwd: dir,
absolute: true,
})) {
const session = await Filesystem.readJson<any>(item)
if (!session.projectID) continue
if (!session.summary?.diffs) continue
const { diffs } = session.summary
await Filesystem.write(path.join(dir, "session_diff", session.id + ".json"), JSON.stringify(diffs))
await Filesystem.writeJson(path.join(dir, "session", session.projectID, session.id + ".json"), {
...session,
summary: {
additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
},
})
const raw = yield* fs.readJson(item)
const session = decodeSummary(raw, { onExcessProperty: "preserve" })
if (Option.isNone(session)) continue
const diffs = session.value.summary.diffs
yield* fs.writeWithDirs(
path.join(dir, "session_diff", session.value.id + ".json"),
JSON.stringify(diffs, null, 2),
)
yield* fs.writeWithDirs(
path.join(dir, "session", session.value.projectID, session.value.id + ".json"),
JSON.stringify(
{
...(raw as Record<string, unknown>),
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
},
},
null,
2,
),
)
}
},
}),
]
const state = lazy(async () => {
const dir = path.join(Global.Path.data, "storage")
const migration = await Filesystem.readJson<string>(path.join(dir, "migration"))
.then((x) => parseInt(x))
.catch(() => 0)
for (let index = migration; index < MIGRATIONS.length; index++) {
log.info("running migration", { index })
const migration = MIGRATIONS[index]
await migration(dir).catch(() => log.error("failed to run migration", { index }))
await Filesystem.write(path.join(dir, "migration"), (index + 1).toString())
}
return {
dir,
}
})
// Live implementation of the Storage service.
//
// Documents live under `<Global.Path.data>/storage` as pretty-printed JSON.
// Every operation resolves its key to a concrete file path and takes that
// path's reentrant lock — reads under the read lock, mutations under the
// write lock. Outstanding migrations run at most once per runtime because
// the `state` effect is cached.
export const layer = Layer.effect(
  Service,
  Effect.gen(function* () {
    const fs = yield* AppFileSystem.Service
    // One lock per resolved file path; the entry is dropped as soon as no
    // caller holds it (idleTimeToLive: 0).
    const locks = yield* RcMap.make({
      lookup: () => TxReentrantLock.make(),
      idleTimeToLive: 0,
    })
    // Cached startup step: compute the storage dir and apply pending
    // migrations, advancing the "migration" marker after each success.
    const state = yield* Effect.cached(
      Effect.gen(function* () {
        const dir = path.join(Global.Path.data, "storage")
        const marker = path.join(dir, "migration")
        // A missing or unreadable marker means "no migrations applied yet".
        const migration = yield* fs.readFileString(marker).pipe(
          Effect.map(parseMigration),
          Effect.catchIf(missing, () => Effect.succeed(0)),
          Effect.orElseSucceed(() => 0),
        )
        for (let i = migration; i < MIGRATIONS.length; i++) {
          log.info("running migration", { index: i })
          const step = MIGRATIONS[i]!
          const exit = yield* Effect.exit(step(dir, fs))
          if (Exit.isFailure(exit)) {
            // Stop without advancing the marker so the failed migration is
            // retried on the next startup.
            log.error("failed to run migration", { index: i, cause: exit.cause })
            break
          }
          yield* fs.writeWithDirs(marker, String(i + 1))
        }
        return { dir }
      }),
    )
    const fail = (target: string): Effect.Effect<never, InstanceType<typeof NotFoundError>> =>
      Effect.fail(new NotFoundError({ message: `Resource not found: ${target}` }))
    // Translate filesystem "missing file" failures into NotFoundError.
    const wrap = <A>(target: string, body: Effect.Effect<A, AppFileSystem.Error>) =>
      body.pipe(Effect.catchIf(missing, () => fail(target)))
    const writeJson = Effect.fnUntraced(function* (target: string, content: unknown) {
      yield* fs.writeWithDirs(target, JSON.stringify(content, null, 2))
    })
    // Resolve `key` to its file path and lease that path's lock for the
    // duration of `fn`; the surrounding scope releases the RcMap entry.
    const withResolved = <A, E>(
      key: string[],
      fn: (target: string, rw: TxReentrantLock.TxReentrantLock) => Effect.Effect<A, E>,
    ): Effect.Effect<A, E | AppFileSystem.Error> =>
      Effect.scoped(
        Effect.gen(function* () {
          const target = file((yield* state).dir, key)
          return yield* fn(target, yield* RcMap.get(locks, target))
        }),
      )
    // Removing a file that is already gone succeeds silently.
    const remove: Interface["remove"] = Effect.fn("Storage.remove")(function* (key: string[]) {
      yield* withResolved(key, (target, rw) =>
        TxReentrantLock.withWriteLock(rw, fs.remove(target).pipe(Effect.catchIf(missing, () => Effect.void))),
      )
    })
    const read: Interface["read"] = <T>(key: string[]) =>
      Effect.gen(function* () {
        const value = yield* withResolved(key, (target, rw) =>
          TxReentrantLock.withReadLock(rw, wrap(target, fs.readJson(target))),
        )
        return value as T
      })
    // Read-modify-write under the exclusive lock; fails with NotFoundError
    // when the document does not exist yet.
    const update: Interface["update"] = <T>(key: string[], fn: (draft: T) => void) =>
      Effect.gen(function* () {
        const value = yield* withResolved(key, (target, rw) =>
          TxReentrantLock.withWriteLock(
            rw,
            Effect.gen(function* () {
              const content = yield* wrap(target, fs.readJson(target))
              fn(content as T)
              yield* writeJson(target, content)
              return content
            }),
          ),
        )
        return value as T
      })
    const write: Interface["write"] = (key: string[], content: unknown) =>
      Effect.gen(function* () {
        yield* withResolved(key, (target, rw) => TxReentrantLock.withWriteLock(rw, writeJson(target, content)))
      })
    // Enumerate stored keys under `prefix`: glob every file, strip the
    // trailing ".json" (slice(0, -5)), and sort by the joined key path.
    // A prefix directory that does not exist yields an empty list.
    const list: Interface["list"] = Effect.fn("Storage.list")(function* (prefix: string[]) {
      const dir = (yield* state).dir
      const cwd = path.join(dir, ...prefix)
      const result = yield* fs
        .glob("**/*", {
          cwd,
          include: "file",
        })
        .pipe(Effect.catch(() => Effect.succeed<string[]>([])))
      return result
        .map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)])
        .toSorted((a, b) => a.join("/").localeCompare(b.join("/")))
    })
    return Service.of({
      remove,
      read,
      update,
      write,
      list,
    })
  }),
)
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function remove(key: string[]) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
await fs.unlink(target).catch(() => {})
})
return runPromise((svc) => svc.remove(key))
}
export async function read<T>(key: string[]) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.read(target)
const result = await Filesystem.readJson<T>(target)
return result as T
})
return runPromise((svc) => svc.read<T>(key))
}
export async function update<T>(key: string[], fn: (draft: T) => void) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.write(target)
const content = await Filesystem.readJson<T>(target)
fn(content as T)
await Filesystem.writeJson(target, content)
return content
})
return runPromise((svc) => svc.update<T>(key, fn))
}
export async function write<T>(key: string[], content: T) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.write(target)
await Filesystem.writeJson(target, content)
})
}
async function withErrorHandling<T>(body: () => Promise<T>) {
return body().catch((e) => {
if (!(e instanceof Error)) throw e
const errnoException = e as NodeJS.ErrnoException
if (errnoException.code === "ENOENT") {
throw new NotFoundError({ message: `Resource not found: ${errnoException.path}` })
}
throw e
})
return runPromise((svc) => svc.write(key, content))
}
export async function list(prefix: string[]) {
const dir = await state().then((x) => x.dir)
try {
const result = await Glob.scan("**/*", {
cwd: path.join(dir, ...prefix),
include: "file",
}).then((results) => results.map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)]))
result.sort()
return result
} catch {
return []
}
return runPromise((svc) => svc.list(prefix))
}
}

View File

@@ -331,6 +331,117 @@ describe("plugin.loader.shared", () => {
}
})
test("loads npm server plugin from package server export without leading dot", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
const mod = path.join(dir, "mods", "acme-plugin")
const dist = path.join(mod, "dist")
const mark = path.join(dir, "server-called.txt")
await fs.mkdir(dist, { recursive: true })
await Bun.write(
path.join(mod, "package.json"),
JSON.stringify(
{
name: "acme-plugin",
type: "module",
exports: {
".": "./index.js",
"./server": "dist/server.js",
},
},
null,
2,
),
)
await Bun.write(path.join(mod, "index.js"), 'import "./main-throws.js"\nexport default {}\n')
await Bun.write(path.join(mod, "main-throws.js"), 'throw new Error("main loaded")\n')
await Bun.write(
path.join(dist, "server.js"),
[
"export default {",
" server: async () => {",
` await Bun.write(${JSON.stringify(mark)}, "called")`,
" return {}",
" },",
"}",
"",
].join("\n"),
)
await Bun.write(path.join(dir, "opencode.json"), JSON.stringify({ plugin: ["acme-plugin@1.0.0"] }, null, 2))
return {
mod,
mark,
}
},
})
const install = spyOn(BunProc, "install").mockResolvedValue(tmp.extra.mod)
try {
const errors = await errs(tmp.path)
expect(errors).toHaveLength(0)
expect(await Bun.file(tmp.extra.mark).text()).toBe("called")
} finally {
install.mockRestore()
}
})
test("loads npm server plugin from package main without leading dot", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
const mod = path.join(dir, "mods", "acme-plugin")
const dist = path.join(mod, "dist")
const mark = path.join(dir, "main-called.txt")
await fs.mkdir(dist, { recursive: true })
await Bun.write(
path.join(mod, "package.json"),
JSON.stringify(
{
name: "acme-plugin",
type: "module",
main: "dist/index.js",
},
null,
2,
),
)
await Bun.write(
path.join(dist, "index.js"),
[
"export default {",
" server: async () => {",
` await Bun.write(${JSON.stringify(mark)}, "called")`,
" return {}",
" },",
"}",
"",
].join("\n"),
)
await Bun.write(path.join(dir, "opencode.json"), JSON.stringify({ plugin: ["acme-plugin@1.0.0"] }, null, 2))
return {
mod,
mark,
}
},
})
const install = spyOn(BunProc, "install").mockResolvedValue(tmp.extra.mod)
try {
const errors = await errs(tmp.path)
expect(errors).toHaveLength(0)
expect(await Bun.file(tmp.extra.mark).text()).toBe("called")
} finally {
install.mockRestore()
}
})
test("does not use npm package exports dot for server entry", async () => {
await using tmp = await tmpdir({
init: async (dir) => {

View File

@@ -13,6 +13,18 @@ afterEach(async () => {
await Instance.disposeAll()
})
async function withoutWatcher<T>(fn: () => Promise<T>) {
if (process.platform !== "win32") return fn()
const prev = process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER
process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER = "true"
try {
return await fn()
} finally {
if (prev === undefined) delete process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER
else process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER = prev
}
}
async function fill(sessionID: SessionID, count: number, time = (i: number) => Date.now() + i) {
const ids = [] as MessageID[]
for (let i = 0; i < count; i++) {
@@ -42,86 +54,94 @@ async function fill(sessionID: SessionID, count: number, time = (i: number) => D
describe("session messages endpoint", () => {
test("returns cursor headers for older pages", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const ids = await fill(session.id, 5)
const app = Server.Default()
await withoutWatcher(() =>
Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const ids = await fill(session.id, 5)
const app = Server.Default()
const a = await app.request(`/session/${session.id}/message?limit=2`)
expect(a.status).toBe(200)
const aBody = (await a.json()) as MessageV2.WithParts[]
expect(aBody.map((item) => item.info.id)).toEqual(ids.slice(-2))
const cursor = a.headers.get("x-next-cursor")
expect(cursor).toBeTruthy()
expect(a.headers.get("link")).toContain('rel="next"')
const a = await app.request(`/session/${session.id}/message?limit=2`)
expect(a.status).toBe(200)
const aBody = (await a.json()) as MessageV2.WithParts[]
expect(aBody.map((item) => item.info.id)).toEqual(ids.slice(-2))
const cursor = a.headers.get("x-next-cursor")
expect(cursor).toBeTruthy()
expect(a.headers.get("link")).toContain('rel="next"')
const b = await app.request(`/session/${session.id}/message?limit=2&before=${encodeURIComponent(cursor!)}`)
expect(b.status).toBe(200)
const bBody = (await b.json()) as MessageV2.WithParts[]
expect(bBody.map((item) => item.info.id)).toEqual(ids.slice(-4, -2))
const b = await app.request(`/session/${session.id}/message?limit=2&before=${encodeURIComponent(cursor!)}`)
expect(b.status).toBe(200)
const bBody = (await b.json()) as MessageV2.WithParts[]
expect(bBody.map((item) => item.info.id)).toEqual(ids.slice(-4, -2))
await Session.remove(session.id)
},
})
await Session.remove(session.id)
},
}),
)
})
test("keeps full-history responses when limit is omitted", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const ids = await fill(session.id, 3)
const app = Server.Default()
await withoutWatcher(() =>
Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const ids = await fill(session.id, 3)
const app = Server.Default()
const res = await app.request(`/session/${session.id}/message`)
expect(res.status).toBe(200)
const body = (await res.json()) as MessageV2.WithParts[]
expect(body.map((item) => item.info.id)).toEqual(ids)
const res = await app.request(`/session/${session.id}/message`)
expect(res.status).toBe(200)
const body = (await res.json()) as MessageV2.WithParts[]
expect(body.map((item) => item.info.id)).toEqual(ids)
await Session.remove(session.id)
},
})
await Session.remove(session.id)
},
}),
)
})
test("rejects invalid cursors and missing sessions", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const app = Server.Default()
await withoutWatcher(() =>
Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const app = Server.Default()
const bad = await app.request(`/session/${session.id}/message?limit=2&before=bad`)
expect(bad.status).toBe(400)
const bad = await app.request(`/session/${session.id}/message?limit=2&before=bad`)
expect(bad.status).toBe(400)
const miss = await app.request(`/session/ses_missing/message?limit=2`)
expect(miss.status).toBe(404)
const miss = await app.request(`/session/ses_missing/message?limit=2`)
expect(miss.status).toBe(404)
await Session.remove(session.id)
},
})
await Session.remove(session.id)
},
}),
)
})
test("does not truncate large legacy limit requests", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
await fill(session.id, 520)
const app = Server.Default()
await withoutWatcher(() =>
Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
await fill(session.id, 520)
const app = Server.Default()
const res = await app.request(`/session/${session.id}/message?limit=510`)
expect(res.status).toBe(200)
const body = (await res.json()) as MessageV2.WithParts[]
expect(body).toHaveLength(510)
const res = await app.request(`/session/${session.id}/message?limit=510`)
expect(res.status).toBe(200)
const body = (await res.json()) as MessageV2.WithParts[]
expect(body).toHaveLength(510)
await Session.remove(session.id)
},
})
await Session.remove(session.id)
},
}),
)
})
})

View File

@@ -0,0 +1,295 @@
import { describe, expect, test } from "bun:test"
import fs from "fs/promises"
import path from "path"
import { Effect, Layer, ManagedRuntime } from "effect"
import { AppFileSystem } from "../../src/filesystem"
import { Global } from "../../src/global"
import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture"
const dir = path.join(Global.Path.data, "storage")
/**
 * Run `fn` with a unique key prefix under the storage dir, then delete
 * everything written beneath that prefix — whether `fn` succeeded or not.
 */
async function withScope<T>(fn: (root: string[]) => Promise<T>) {
  const scope = ["storage_test", crypto.randomUUID()]
  const target = path.join(dir, ...scope)
  try {
    return await fn(scope)
  } finally {
    await fs.rm(target, { recursive: true, force: true })
  }
}
/**
 * Remap a path that points at (or under) Global.Path.data into `root`,
 * leaving every other path untouched. Lets tests sandbox storage I/O.
 */
function map(root: string, file: string) {
  if (file === Global.Path.data) return root
  const prefix = Global.Path.data + path.sep
  if (!file.startsWith(prefix)) return file
  return path.join(root, path.relative(Global.Path.data, file))
}
// AppFileSystem layer for tests: delegates to the default filesystem but
// rewrites each path argument through `map`, so anything addressed under
// Global.Path.data is redirected into `root` instead.
function layer(root: string) {
  return Layer.effect(
    AppFileSystem.Service,
    Effect.gen(function* () {
      const fs = yield* AppFileSystem.Service
      return AppFileSystem.Service.of({
        ...fs,
        isDir: (file) => fs.isDir(map(root, file)),
        readJson: (file) => fs.readJson(map(root, file)),
        writeWithDirs: (file, content, mode) => fs.writeWithDirs(map(root, file), content, mode),
        readFileString: (file) => fs.readFileString(map(root, file)),
        remove: (file) => fs.remove(map(root, file)),
        // glob only needs its cwd remapped; patterns stay relative.
        glob: (pattern, options) =>
          fs.glob(pattern, options?.cwd ? { ...options, cwd: map(root, options.cwd) } : options),
      })
    }),
  ).pipe(Layer.provide(AppFileSystem.defaultLayer))
}
/**
 * Build a ManagedRuntime hosting the Storage layer on top of the
 * path-remapping filesystem for `root`, hand `fn` a runner for Storage
 * effects, and always dispose the runtime afterwards.
 */
async function withStorage<T>(
  root: string,
  fn: (run: <A, E>(body: Effect.Effect<A, E, Storage.Service>) => Promise<A>) => Promise<T>,
) {
  const runtime = ManagedRuntime.make(Storage.layer.pipe(Layer.provide(layer(root))))
  try {
    return await fn((body) => runtime.runPromise(body))
  } finally {
    await runtime.dispose()
  }
}
/** Persist `value` as pretty-printed JSON, creating parent directories first. */
async function write(file: string, value: unknown) {
  const parent = path.dirname(file)
  await fs.mkdir(parent, { recursive: true })
  await fs.writeFile(file, JSON.stringify(value, null, 2))
}
/** Write a raw string to `file`, creating parent directories first. */
async function text(file: string, value: string) {
  const parent = path.dirname(file)
  await fs.mkdir(parent, { recursive: true })
  await fs.writeFile(file, value)
}
/** True when `file` can be stat'ed (file or directory); false on any failure. */
async function exists(file: string) {
  try {
    await fs.stat(file)
    return true
  } catch {
    return false
  }
}
describe("Storage", () => {
test("round-trips JSON content", async () => {
await withScope(async (root) => {
const key = [...root, "session_diff", "roundtrip"]
const value = [{ file: "a.ts", additions: 2, deletions: 1 }]
await Storage.write(key, value)
expect(await Storage.read<typeof value>(key)).toEqual(value)
})
})
test("maps missing reads to NotFoundError", async () => {
await withScope(async (root) => {
await expect(Storage.read([...root, "missing", "value"])).rejects.toMatchObject({ name: "NotFoundError" })
})
})
test("update on missing key throws NotFoundError", async () => {
await withScope(async (root) => {
await expect(
Storage.update<{ value: number }>([...root, "missing", "key"], (draft) => {
draft.value += 1
}),
).rejects.toMatchObject({ name: "NotFoundError" })
})
})
test("write overwrites existing value", async () => {
await withScope(async (root) => {
const key = [...root, "overwrite", "test"]
await Storage.write<{ v: number }>(key, { v: 1 })
await Storage.write<{ v: number }>(key, { v: 2 })
expect(await Storage.read<{ v: number }>(key)).toEqual({ v: 2 })
})
})
test("remove on missing key is a no-op", async () => {
await withScope(async (root) => {
await expect(Storage.remove([...root, "nonexistent", "key"])).resolves.toBeUndefined()
})
})
test("list on missing prefix returns empty", async () => {
await withScope(async (root) => {
expect(await Storage.list([...root, "nonexistent"])).toEqual([])
})
})
test("serializes concurrent updates for the same key", async () => {
await withScope(async (root) => {
const key = [...root, "counter", "shared"]
await Storage.write(key, { value: 0 })
await Promise.all(
Array.from({ length: 25 }, () =>
Storage.update<{ value: number }>(key, (draft) => {
draft.value += 1
}),
),
)
expect(await Storage.read<{ value: number }>(key)).toEqual({ value: 25 })
})
})
test("concurrent reads do not block each other", async () => {
await withScope(async (root) => {
const key = [...root, "concurrent", "reads"]
await Storage.write(key, { ok: true })
const results = await Promise.all(Array.from({ length: 10 }, () => Storage.read(key)))
expect(results).toHaveLength(10)
for (const r of results) expect(r).toEqual({ ok: true })
})
})
test("nested keys create deep paths", async () => {
await withScope(async (root) => {
const key = [...root, "a", "b", "c", "deep"]
await Storage.write<{ nested: boolean }>(key, { nested: true })
expect(await Storage.read<{ nested: boolean }>(key)).toEqual({ nested: true })
expect(await Storage.list([...root, "a"])).toEqual([key])
})
})
test("lists and removes stored entries", async () => {
await withScope(async (root) => {
const a = [...root, "list", "a"]
const b = [...root, "list", "b"]
const prefix = [...root, "list"]
await Storage.write(b, { value: 2 })
await Storage.write(a, { value: 1 })
expect(await Storage.list(prefix)).toEqual([a, b])
await Storage.remove(a)
expect(await Storage.list(prefix)).toEqual([b])
await expect(Storage.read(a)).rejects.toMatchObject({ name: "NotFoundError" })
})
})
test("migration 2 runs when marker contents are invalid", async () => {
await using tmp = await tmpdir()
const storage = path.join(tmp.path, "storage")
const diffs = [
{ additions: 2, deletions: 1 },
{ additions: 3, deletions: 4 },
]
await text(path.join(storage, "migration"), "wat")
await write(path.join(storage, "session", "proj_test", "ses_test.json"), {
id: "ses_test",
projectID: "proj_test",
title: "legacy",
summary: { diffs },
})
await withStorage(tmp.path, async (run) => {
expect(await run(Storage.Service.use((svc) => svc.list(["session_diff"])))).toEqual([
["session_diff", "ses_test"],
])
expect(await run(Storage.Service.use((svc) => svc.read<typeof diffs>(["session_diff", "ses_test"])))).toEqual(
diffs,
)
expect(
await run(
Storage.Service.use((svc) =>
svc.read<{
id: string
projectID: string
title: string
summary: {
additions: number
deletions: number
}
}>(["session", "proj_test", "ses_test"]),
),
),
).toEqual({
id: "ses_test",
projectID: "proj_test",
title: "legacy",
summary: {
additions: 5,
deletions: 5,
},
})
})
expect(await Bun.file(path.join(storage, "migration")).text()).toBe("2")
})
test("migration 1 tolerates malformed legacy records", async () => {
await using tmp = await tmpdir({ git: true })
const storage = path.join(tmp.path, "storage")
const legacy = path.join(tmp.path, "project", "legacy")
await write(path.join(legacy, "storage", "session", "message", "probe", "0.json"), [])
await write(path.join(legacy, "storage", "session", "message", "probe", "1.json"), {
path: { root: tmp.path },
})
await write(path.join(legacy, "storage", "session", "info", "ses_legacy.json"), {
id: "ses_legacy",
title: "legacy",
})
await write(path.join(legacy, "storage", "session", "message", "ses_legacy", "msg_legacy.json"), {
role: "user",
text: "hello",
})
await withStorage(tmp.path, async (run) => {
const projects = await run(Storage.Service.use((svc) => svc.list(["project"])))
expect(projects).toHaveLength(1)
const project = projects[0]![1]
expect(await run(Storage.Service.use((svc) => svc.list(["session", project])))).toEqual([
["session", project, "ses_legacy"],
])
expect(
await run(
Storage.Service.use((svc) => svc.read<{ id: string; title: string }>(["session", project, "ses_legacy"])),
),
).toEqual({
id: "ses_legacy",
title: "legacy",
})
expect(
await run(
Storage.Service.use((svc) =>
svc.read<{ role: string; text: string }>(["message", "ses_legacy", "msg_legacy"]),
),
),
).toEqual({
role: "user",
text: "hello",
})
})
expect(await Bun.file(path.join(storage, "migration")).text()).toBe("2")
})
test("failed migrations do not advance the marker", async () => {
await using tmp = await tmpdir()
const storage = path.join(tmp.path, "storage")
const legacy = path.join(tmp.path, "project", "legacy")
await text(path.join(legacy, "storage", "session", "message", "probe", "0.json"), "{")
await withStorage(tmp.path, async (run) => {
expect(await run(Storage.Service.use((svc) => svc.list(["project"])))).toEqual([])
})
expect(await exists(path.join(storage, "migration"))).toBe(false)
})
})

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/plugin",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/sdk",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/slack",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/ui",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"exports": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/util",
"version": "1.3.8",
"version": "1.3.9",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -2,7 +2,7 @@
"name": "@opencode-ai/web",
"type": "module",
"license": "MIT",
"version": "1.3.8",
"version": "1.3.9",
"scripts": {
"dev": "astro dev",
"dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev",

View File

@@ -1,249 +1,40 @@
#!/usr/bin/env bun
import { $ } from "bun"
import { rm } from "fs/promises"
import path from "path"
import { parseArgs } from "util"
type Release = {
tag_name: string
draft: boolean
}
const root = path.resolve(import.meta.dir, "..")
const file = path.join(root, "UPCOMING_CHANGELOG.md")
const { values, positionals } = parseArgs({
args: Bun.argv.slice(2),
options: {
from: { type: "string", short: "f" },
to: { type: "string", short: "t" },
variant: { type: "string", default: "low" },
quiet: { type: "boolean", default: false },
print: { type: "boolean", default: false },
help: { type: "boolean", short: "h", default: false },
},
allowPositionals: true,
})
const args = [...positionals]
type Commit = {
hash: string
author: string | null
message: string
areas: Set<string>
}
if (values.from) args.push("--from", values.from)
if (values.to) args.push("--to", values.to)
type User = Map<string, Set<string>>
type Diff = {
sha: string
login: string | null
message: string
}
const repo = process.env.GH_REPO ?? "anomalyco/opencode"
const bot = ["actions-user", "opencode", "opencode-agent[bot]"]
const team = [
...(await Bun.file(new URL("../.github/TEAM_MEMBERS", import.meta.url))
.text()
.then((x) => x.split(/\r?\n/).map((x) => x.trim()))
.then((x) => x.filter((x) => x && !x.startsWith("#")))),
...bot,
]
const order = ["Core", "TUI", "Desktop", "SDK", "Extensions"] as const
const sections = {
core: "Core",
tui: "TUI",
app: "Desktop",
tauri: "Desktop",
sdk: "SDK",
plugin: "SDK",
"extensions/zed": "Extensions",
"extensions/vscode": "Extensions",
github: "Extensions",
} as const
function ref(input: string) {
if (input === "HEAD") return input
if (input.startsWith("v")) return input
if (input.match(/^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/)) return `v${input}`
return input
}
/**
 * Resolve the most recent non-draft GitHub release tag for `repo` and
 * return it with the leading "v" stripped. Throws when no published
 * release appears in the first 100 results.
 */
async function latest() {
  const data = await $`gh api "/repos/${repo}/releases?per_page=100"`.json()
  const release = (data as Release[]).find((item) => !item.draft)
  if (!release) throw new Error("No releases found")
  return release.tag_name.replace(/^v/, "")
}
/**
 * Collect all commits between `base` and `head` from the GitHub compare
 * API, paging 100 at a time until an empty or short page signals the end.
 * Each entry keeps the sha, the author's login (null when GitHub cannot
 * attribute the commit), and the full commit message.
 */
async function diff(base: string, head: string) {
  const list: Diff[] = []
  for (let page = 1; ; page++) {
    const text =
      await $`gh api "/repos/${repo}/compare/${base}...${head}?per_page=100&page=${page}" --jq '.commits[] | {sha: .sha, login: .author.login, message: .commit.message}'`.text()
    // gh emits one JSON object per line; blank lines are dropped.
    const batch = text
      .split("\n")
      .filter(Boolean)
      .map((line) => JSON.parse(line) as Diff)
    if (batch.length === 0) break
    list.push(...batch)
    if (batch.length < 100) break
  }
  return list
}
/** Map the set of areas a commit touched to a single changelog section heading; the first match in the priority list wins. */
function section(areas: Set<string>) {
  // NOTE(review): this list mirrors the keys of `sections` and must stay in sync with it.
  const priority = ["core", "tui", "app", "tauri", "sdk", "plugin", "extensions/zed", "extensions/vscode", "github"]
  for (const area of priority) {
    if (areas.has(area)) return sections[area as keyof typeof sections]
  }
  // Defensive fallback for an unmapped area.
  return "Core"
}
/**
 * Remove commit/revert pairs that cancel each other out.
 * The input is in git-log order (newest first), so a `Revert "X"` is normally
 * encountered before X itself; when the matching X arrives both are dropped.
 * Unmatched reverts and ordinary commits survive.
 */
function reverted(commits: Commit[]) {
  // Keyed by commit message (or by the full `Revert "..."` message for pending reverts).
  const seen = new Map<string, Commit>()
  for (const commit of commits) {
    const match = commit.message.match(/^Revert "(.+)"$/)
    if (match) {
      const msg = match[1]!
      // Original already recorded (revert is older than the original): cancel it.
      if (seen.has(msg)) seen.delete(msg)
      else seen.set(commit.message, commit)
      continue
    }
    const revert = `Revert "${commit.message}"`
    if (seen.has(revert)) {
      // A newer revert of this commit is pending: drop both.
      seen.delete(revert)
      continue
    }
    seen.set(commit.message, commit)
  }
  return [...seen.values()]
}
/**
 * Build the filtered commit list for the release range from..to.
 * Combines the GitHub compare API (author logins, commit titles) with local
 * git history (touched file paths), tags each commit with the package areas
 * it changed, drops housekeeping commits, and cancels revert pairs.
 */
async function commits(from: string, to: string) {
  const base = ref(from)
  const head = ref(to)
  // sha -> { login, first line of commit message }, from the GitHub compare API.
  const data = new Map<string, { login: string | null; message: string }>()
  for (const item of await diff(base, head)) {
    data.set(item.sha, { login: item.login, message: item.message.split("\n")[0] ?? "" })
  }
  // Local history restricted to the release-relevant package paths.
  const log =
    await $`git log ${base}..${head} --format=%H -- packages/opencode packages/sdk packages/plugin packages/desktop packages/app sdks/vscode packages/extensions github`.text()
  const list: Commit[] = []
  for (const hash of log.split("\n").filter(Boolean)) {
    const item = data.get(hash)
    // Keep only commits that are also part of the GitHub compare range.
    if (!item) continue
    if (item.message.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
    // Classify by touched files; each file maps to at most one area (first prefix match wins).
    const diff = await $`git diff-tree --no-commit-id --name-only -r ${hash}`.text()
    const areas = new Set<string>()
    for (const file of diff.split("\n").filter(Boolean)) {
      if (file.startsWith("packages/opencode/src/cli/cmd/")) areas.add("tui")
      else if (file.startsWith("packages/opencode/")) areas.add("core")
      else if (file.startsWith("packages/desktop/src-tauri/")) areas.add("tauri")
      else if (file.startsWith("packages/desktop/") || file.startsWith("packages/app/")) areas.add("app")
      else if (file.startsWith("packages/sdk/") || file.startsWith("packages/plugin/")) areas.add("sdk")
      else if (file.startsWith("packages/extensions/")) areas.add("extensions/zed")
      else if (file.startsWith("sdks/vscode/") || file.startsWith("github/")) areas.add("extensions/vscode")
    }
    if (areas.size === 0) continue
    list.push({
      hash: hash.slice(0, 7),
      author: item.login,
      message: item.message,
      areas,
    })
  }
  return reverted(list)
}
/** Collect non-team commit authors in the range, mapped to the set of commit titles they contributed. */
async function contributors(from: string, to: string) {
  const base = ref(from)
  const head = ref(to)
  const users: User = new Map()
  for (const item of await diff(base, head)) {
    const title = item.message.split("\n")[0] ?? ""
    // Skip anonymous commits, team/bot accounts, and housekeeping prefixes.
    if (!item.login || team.includes(item.login)) continue
    if (title.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
    if (!users.has(item.login)) users.set(item.login, new Set())
    users.get(item.login)!.add(title)
  }
  return users
}
/** Extract the "**Thank you to ...**" section from an already-published release body, or undefined when unavailable. */
async function published(to: string) {
  // HEAD is never a published release, so there is nothing to reuse.
  if (to === "HEAD") return
  const body = await $`gh release view ${ref(to)} --repo ${repo} --json body --jq .body`.text().catch(() => "")
  if (!body) return
  const lines = body.split(/\r?\n/)
  const start = lines.findIndex((line) => line.startsWith("**Thank you to "))
  if (start < 0) return
  // Everything from the marker to the end of the body is the thanks section.
  return lines.slice(start).join("\n").trim()
}
/**
 * Build the community-thanks lines: reuse the section from an already-published
 * release when `reuse` is set, otherwise derive it from contributors in range.
 */
async function thanks(from: string, to: string, reuse: boolean) {
  const release = reuse ? await published(to) : undefined
  if (release) return release.split(/\r?\n/)
  const users = await contributors(from, to)
  if (users.size === 0) return []
  const lines = [`**Thank you to ${users.size} community contributor${users.size > 1 ? "s" : ""}:**`]
  for (const [name, commits] of users) {
    lines.push(`- @${name}:`)
    for (const commit of commits) lines.push(`  - ${commit}`)
  }
  return lines
}
/** Render the changelog text: range header, per-section commit bullets in `order`, then the thanks block. */
function format(from: string, to: string, list: Commit[], thanks: string[]) {
  const grouped = new Map<string, string[]>()
  for (const title of order) grouped.set(title, [])
  for (const commit of list) {
    const title = section(commit.areas)
    // Attribute only non-team authors.
    const attr = commit.author && !team.includes(commit.author) ? ` (@${commit.author})` : ""
    grouped.get(title)!.push(`- \`${commit.hash}\` ${commit.message}${attr}`)
  }
  const lines = [`Last release: ${ref(from)}`, `Target ref: ${to}`, ""]
  if (list.length === 0) {
    lines.push("No notable changes.")
  }
  for (const title of order) {
    const entries = grouped.get(title)
    if (!entries || entries.length === 0) continue
    lines.push(`## ${title}`)
    lines.push(...entries)
    lines.push("")
  }
  if (thanks.length > 0) {
    // Ensure exactly one blank line before the contributors section.
    if (lines.at(-1) !== "") lines.push("")
    lines.push("## Community Contributors Input")
    lines.push("")
    lines.push(...thanks)
  }
  // Drop a trailing blank line, if any.
  if (lines.at(-1) === "") lines.pop()
  return lines.join("\n")
}
if (import.meta.main) {
const { values } = parseArgs({
args: Bun.argv.slice(2),
options: {
from: { type: "string", short: "f" },
to: { type: "string", short: "t", default: "HEAD" },
help: { type: "boolean", short: "h", default: false },
},
})
if (values.help) {
console.log(`
if (values.help) {
console.log(`
Usage: bun script/changelog.ts [options]
Generates UPCOMING_CHANGELOG.md by running the opencode changelog command.
Options:
-f, --from <version> Starting version (default: latest non-draft GitHub release)
-t, --to <ref> Ending ref (default: HEAD)
--variant <name> Thinking variant for opencode run (default: low)
--quiet Suppress opencode command output unless it fails
--print Print the generated UPCOMING_CHANGELOG.md after success
-h, --help Show this help message
Examples:
@@ -251,11 +42,35 @@ Examples:
bun script/changelog.ts --from 1.0.200
bun script/changelog.ts -f 1.0.200 -t 1.0.205
`)
process.exit(0)
}
const to = values.to!
const from = values.from ?? (await latest())
const list = await commits(from, to)
console.log(format(from, to, list, await thanks(from, to, !values.from)))
process.exit(0)
}
await rm(file, { force: true })
const quiet = values.quiet
const cmd = ["opencode", "run"]
cmd.push("--variant", values.variant)
cmd.push("--command", "changelog", "--", ...args)
const proc = Bun.spawn(cmd, {
cwd: root,
stdin: "inherit",
stdout: quiet ? "pipe" : "inherit",
stderr: quiet ? "pipe" : "inherit",
})
const [out, err] = quiet
? await Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()])
: ["", ""]
const code = await proc.exited
if (code === 0) {
if (values.print) process.stdout.write(await Bun.file(file).text())
process.exit(0)
}
if (quiet) {
if (out) process.stdout.write(out)
if (err) process.stderr.write(err)
}
process.exit(code)

261
script/raw-changelog.ts Normal file
View File

@@ -0,0 +1,261 @@
#!/usr/bin/env bun
import { $ } from "bun"
import { parseArgs } from "util"
// Subset of a GitHub release as returned by the REST API.
type Release = {
  tag_name: string
  draft: boolean
}
// A release-relevant commit plus the package areas its files touched.
type Commit = {
  hash: string
  author: string | null
  message: string
  areas: Set<string>
}
// GitHub login -> set of commit titles that login contributed.
type User = Map<string, Set<string>>
// One commit record from the GitHub compare API.
type Diff = {
  sha: string
  login: string | null
  message: string
}
// Target repository; overridable (e.g. for forks) via GH_REPO.
const repo = process.env.GH_REPO ?? "anomalyco/opencode"
// Bot accounts that never count as community contributors.
const bot = ["actions-user", "opencode", "opencode-agent[bot]"]
// Team logins read from .github/TEAM_MEMBERS (blank lines and # comments ignored), plus bots.
const team = [
  ...(await Bun.file(new URL("../.github/TEAM_MEMBERS", import.meta.url))
    .text()
    .then((x) => x.split(/\r?\n/).map((x) => x.trim()))
    .then((x) => x.filter((x) => x && !x.startsWith("#")))),
  ...bot,
]
// Section headings in output order.
const order = ["Core", "TUI", "Desktop", "SDK", "Extensions"] as const
// Area key -> changelog section heading.
const sections = {
  core: "Core",
  tui: "TUI",
  app: "Desktop",
  tauri: "Desktop",
  sdk: "SDK",
  plugin: "SDK",
  "extensions/zed": "Extensions",
  "extensions/vscode": "Extensions",
  github: "Extensions",
} as const
/**
 * Normalize a version-ish string into a git ref.
 * "HEAD" and anything already starting with "v" pass through; a bare semver
 * (optionally with pre-release/build metadata) gains a "v" prefix; any other
 * ref (branch, sha) is returned unchanged.
 */
function ref(input: string) {
  if (input === "HEAD" || input.startsWith("v")) return input
  const semver = /^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/
  return semver.test(input) ? `v${input}` : input
}
/**
 * Resolve the most recent published (non-draft) GitHub release and return its
 * version without the leading "v". Throws when no published release exists.
 */
async function latest() {
  const releases = (await $`gh api "/repos/${repo}/releases?per_page=100"`.json()) as Release[]
  // The API returns newest-first, so the first non-draft entry is the latest published release.
  const current = releases.find((release) => !release.draft)
  if (!current) throw new Error("No releases found")
  return current.tag_name.replace(/^v/, "")
}
/**
 * Page through the GitHub compare API for base...head and collect every commit
 * as a Diff record (sha, author login, full commit message).
 */
async function diff(base: string, head: string) {
  const collected: Diff[] = []
  let page = 1
  while (true) {
    const text =
      await $`gh api "/repos/${repo}/compare/${base}...${head}?per_page=100&page=${page}" --jq '.commits[] | {sha: .sha, login: .author.login, message: .commit.message}'`.text()
    // --jq prints one JSON object per line; blank output means we ran past the last page.
    const batch: Diff[] = []
    for (const line of text.split("\n")) {
      if (!line) continue
      batch.push(JSON.parse(line) as Diff)
    }
    if (batch.length === 0) break
    collected.push(...batch)
    // A short page is necessarily the final one.
    if (batch.length < 100) break
    page++
  }
  return collected
}
function section(areas: Set<string>) {
const priority = ["core", "tui", "app", "tauri", "sdk", "plugin", "extensions/zed", "extensions/vscode", "github"]
for (const area of priority) {
if (areas.has(area)) return sections[area as keyof typeof sections]
}
return "Core"
}
/**
 * Drop commit/revert pairs that cancel each other out.
 * Input is newest-first (git-log order), so a `Revert "X"` is normally seen
 * before X itself; once the matching X arrives both entries vanish. Reverts
 * with no matching commit in range are kept as ordinary entries.
 */
function reverted(commits: Commit[]) {
  const pending = new Map<string, Commit>()
  for (const commit of commits) {
    const reverts = commit.message.match(/^Revert "(.+)"$/)
    if (reverts) {
      const target = reverts[1]!
      // Original already recorded: the revert cancels it in place.
      if (pending.has(target)) pending.delete(target)
      else pending.set(commit.message, commit)
    } else if (pending.has(`Revert "${commit.message}"`)) {
      // A newer revert of this commit is pending: drop both sides.
      pending.delete(`Revert "${commit.message}"`)
    } else {
      pending.set(commit.message, commit)
    }
  }
  return [...pending.values()]
}
/**
 * Build the filtered commit list for the release range from..to.
 * Combines the GitHub compare API (author logins, commit titles) with local
 * git history (touched file paths), tags each commit with the package areas
 * it changed, drops housekeeping commits, and cancels revert pairs.
 */
async function commits(from: string, to: string) {
  const base = ref(from)
  const head = ref(to)
  // sha -> { login, first line of commit message }, from the GitHub compare API.
  const data = new Map<string, { login: string | null; message: string }>()
  for (const item of await diff(base, head)) {
    data.set(item.sha, { login: item.login, message: item.message.split("\n")[0] ?? "" })
  }
  // Local history restricted to the release-relevant package paths.
  const log =
    await $`git log ${base}..${head} --format=%H -- packages/opencode packages/sdk packages/plugin packages/desktop packages/app sdks/vscode packages/extensions github`.text()
  const list: Commit[] = []
  for (const hash of log.split("\n").filter(Boolean)) {
    const item = data.get(hash)
    // Keep only commits that are also part of the GitHub compare range.
    if (!item) continue
    if (item.message.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
    // Classify by touched files; each file maps to at most one area (first prefix match wins).
    const diff = await $`git diff-tree --no-commit-id --name-only -r ${hash}`.text()
    const areas = new Set<string>()
    for (const file of diff.split("\n").filter(Boolean)) {
      if (file.startsWith("packages/opencode/src/cli/cmd/")) areas.add("tui")
      else if (file.startsWith("packages/opencode/")) areas.add("core")
      else if (file.startsWith("packages/desktop/src-tauri/")) areas.add("tauri")
      else if (file.startsWith("packages/desktop/") || file.startsWith("packages/app/")) areas.add("app")
      else if (file.startsWith("packages/sdk/") || file.startsWith("packages/plugin/")) areas.add("sdk")
      else if (file.startsWith("packages/extensions/")) areas.add("extensions/zed")
      else if (file.startsWith("sdks/vscode/") || file.startsWith("github/")) areas.add("extensions/vscode")
    }
    if (areas.size === 0) continue
    list.push({
      hash: hash.slice(0, 7),
      author: item.login,
      message: item.message,
      areas,
    })
  }
  return reverted(list)
}
/**
 * Collect non-team commit authors between two refs, mapped to the set of
 * commit titles (first message lines) each one contributed.
 */
async function contributors(from: string, to: string) {
  const users: User = new Map()
  for (const item of await diff(ref(from), ref(to))) {
    const title = item.message.split("\n")[0] ?? ""
    // Skip anonymous commits, team/bot accounts, and housekeeping prefixes.
    if (!item.login) continue
    if (team.includes(item.login)) continue
    if (title.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
    const titles = users.get(item.login) ?? new Set<string>()
    titles.add(title)
    users.set(item.login, titles)
  }
  return users
}
/**
 * Extract the "**Thank you to ...**" section from an already-published release
 * body, or undefined when the release or the section cannot be found.
 */
async function published(to: string) {
  // HEAD is never a published release, so there is nothing to look up.
  if (to === "HEAD") return
  const body = await $`gh release view ${ref(to)} --repo ${repo} --json body --jq .body`.text().catch(() => "")
  if (!body) return
  const lines = body.split(/\r?\n/)
  const start = lines.findIndex((line) => line.startsWith("**Thank you to "))
  // Everything from the marker to the end of the body is the thanks section.
  return start >= 0 ? lines.slice(start).join("\n").trim() : undefined
}
/**
 * Build the community-thanks lines for the changelog: reuse the section from
 * an already-published release when `reuse` is set, otherwise derive one from
 * the contributors found in the range. Empty array when there is no one to thank.
 */
async function thanks(from: string, to: string, reuse: boolean) {
  if (reuse) {
    const existing = await published(to)
    if (existing) return existing.split(/\r?\n/)
  }
  const users = await contributors(from, to)
  if (users.size === 0) return []
  const plural = users.size > 1 ? "s" : ""
  const lines = [`**Thank you to ${users.size} community contributor${plural}:**`]
  for (const [login, titles] of users) {
    lines.push(`- @${login}:`)
    for (const title of titles) lines.push(`  - ${title}`)
  }
  return lines
}
/**
 * Render the changelog text: range header, per-section commit bullets in the
 * fixed `order`, then the community-thanks block. Empty sections are omitted
 * and the result never ends with a blank line.
 */
function format(from: string, to: string, list: Commit[], thanks: string[]) {
  const buckets = new Map<string, string[]>()
  for (const title of order) buckets.set(title, [])
  for (const commit of list) {
    // Attribute only non-team authors.
    const suffix = commit.author && !team.includes(commit.author) ? ` (@${commit.author})` : ""
    buckets.get(section(commit.areas))!.push(`- \`${commit.hash}\` ${commit.message}${suffix}`)
  }
  const lines = [`Last release: ${ref(from)}`, `Target ref: ${to}`, ""]
  if (list.length === 0) lines.push("No notable changes.")
  for (const title of order) {
    const entries = buckets.get(title)
    if (!entries || entries.length === 0) continue
    lines.push(`## ${title}`, ...entries, "")
  }
  if (thanks.length > 0) {
    // Ensure exactly one blank line before the contributors section.
    if (lines.at(-1) !== "") lines.push("")
    lines.push("## Community Contributors Input", "", ...thanks)
  }
  // Trim a trailing blank line, if any.
  if (lines.at(-1) === "") lines.pop()
  return lines.join("\n")
}
// CLI entry point: runs only when this file is executed directly, not when imported.
if (import.meta.main) {
  const { values } = parseArgs({
    args: Bun.argv.slice(2),
    options: {
      from: { type: "string", short: "f" },
      to: { type: "string", short: "t", default: "HEAD" },
      help: { type: "boolean", short: "h", default: false },
    },
  })
  if (values.help) {
    console.log(`
Usage: bun script/raw-changelog.ts [options]
Options:
  -f, --from <version>  Starting version (default: latest non-draft GitHub release)
  -t, --to <ref>        Ending ref (default: HEAD)
  -h, --help            Show this help message
Examples:
  bun script/raw-changelog.ts
  bun script/raw-changelog.ts --from 1.0.200
  bun script/raw-changelog.ts -f 1.0.200 -t 1.0.205
`)
    process.exit(0)
  }
  // `to` always has a value thanks to the "HEAD" default above.
  const to = values.to!
  // Default the range start to the latest published GitHub release.
  const from = values.from ?? (await latest())
  const list = await commits(from, to)
  // Reuse the published thanks section only when no explicit --from was given.
  console.log(format(from, to, list, await thanks(from, to, !values.from)))
}

View File

@@ -7,7 +7,7 @@ const output = [`version=${Script.version}`]
if (!Script.preview) {
const sha = process.env.GITHUB_SHA ?? (await $`git rev-parse HEAD`.text()).trim()
await $`opencode run --command changelog -- --to ${sha}`.cwd(process.cwd())
await $`bun script/changelog.ts --to ${sha}`.cwd(process.cwd())
const file = `${process.cwd()}/UPCOMING_CHANGELOG.md`
const body = await Bun.file(file)
.text()

View File

@@ -2,7 +2,7 @@
"name": "opencode",
"displayName": "opencode",
"description": "opencode for VS Code",
"version": "1.3.8",
"version": "1.3.9",
"publisher": "sst-dev",
"repository": {
"type": "git",