Compare commits

...

7 Commits

Author SHA1 Message Date
Kit Langton
26f64ad5ab refactor(session): effectify SessionRevert service
Convert SessionRevert to Effect service pattern. Use Session.Service,
Snapshot.Service, Storage.Service, and Bus.Service directly.
2026-03-30 21:37:10 -04:00
Kit Langton
12c8493729 refactor(session): effectify SessionRevert service
Convert SessionRevert to Effect service pattern with Interface,
Service, Layer, and async facades. Use Session.Service and Bus.Service
directly instead of facades.
2026-03-30 21:30:15 -04:00
Kit Langton
a898c2ea3a refactor(storage): effectify Storage service (#20132) 2026-03-31 01:16:02 +00:00
Kit Langton
bf777298c8 fix(theme): darken muted text in catppuccin tui themes (#20161) 2026-03-30 21:06:05 -04:00
Luke Parker
93fad99f7f smarter changelog (#20138) 2026-03-31 00:05:46 +00:00
opencode
057848deb8 release: v1.3.9 2026-03-30 23:43:58 +00:00
Luke Parker
1de06452d3 fix(plugin): properly resolve entrypoints without leading dot (#20140) 2026-03-31 09:21:17 +10:00
33 changed files with 1415 additions and 593 deletions

View File

@@ -1,22 +1,19 @@
---
model: opencode/kimi-k2.5
model: opencode/gpt-5.4
---
Create `UPCOMING_CHANGELOG.md` from the structured changelog input below.
If `UPCOMING_CHANGELOG.md` already exists, ignore its current contents completely.
Do not preserve, merge, or reuse text from the existing file.
Any command arguments are passed directly to `bun script/changelog.ts`.
Use `--from` / `-f` and `--to` / `-t` to preview a specific release range.
The input already contains the exact commit range since the last non-draft release.
The commits are already filtered to the release-relevant packages and grouped into
the release sections. Do not fetch GitHub releases, PRs, or build your own commit list.
The input may also include a `## Community Contributors Input` section.
Before writing any entry you keep, inspect the real diff with
`git show --stat --format='' <hash>` or `git show --format='' <hash>` so the
summary reflects the actual user-facing change and not just the commit message.
`git show --stat --format='' <hash>` or `git show --format='' <hash>` so you can
understand the actual code changes and not just the commit message (they may be misleading).
Do not use `git log` or author metadata when deciding attribution.
Rules:
@@ -38,7 +35,12 @@ Rules:
- Do not add, remove, rewrite, or reorder contributor names or commit titles in that block
- Do not derive the thank-you section from the main summary bullets
- Do not include the heading `## Community Contributors Input` in the final file
- Focus on using the fewest words to get your point across - users will skim-read the changelog, so we should be precise
## Changelog Input
**Importantly, the changelog is for users (who are at least slightly technical); they may use the TUI, Desktop, SDK, Plugins and so forth. Be thorough in understanding flow-on effects, which may not be immediately apparent. e.g. a package upgrade looks internal but may patch a bug. Or a refactor may also stabilise some race condition that fixes bugs for users. The PR title/body + commit message will give you the author's context, usually containing the outcome not just technical detail**
!`bun script/changelog.ts $ARGUMENTS`
<changelog_input>
!`bun script/raw-changelog.ts $ARGUMENTS`
</changelog_input>

View File

@@ -26,7 +26,7 @@
},
"packages/app": {
"name": "@opencode-ai/app",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -79,7 +79,7 @@
},
"packages/console/app": {
"name": "@opencode-ai/console-app",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@cloudflare/vite-plugin": "1.15.2",
"@ibm/plex": "6.4.1",
@@ -113,7 +113,7 @@
},
"packages/console/core": {
"name": "@opencode-ai/console-core",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@aws-sdk/client-sts": "3.782.0",
"@jsx-email/render": "1.1.1",
@@ -140,7 +140,7 @@
},
"packages/console/function": {
"name": "@opencode-ai/console-function",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@ai-sdk/anthropic": "3.0.64",
"@ai-sdk/openai": "3.0.48",
@@ -164,7 +164,7 @@
},
"packages/console/mail": {
"name": "@opencode-ai/console-mail",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",
@@ -188,7 +188,7 @@
},
"packages/desktop": {
"name": "@opencode-ai/desktop",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/app": "workspace:*",
"@opencode-ai/ui": "workspace:*",
@@ -221,7 +221,7 @@
},
"packages/desktop-electron": {
"name": "@opencode-ai/desktop-electron",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/app": "workspace:*",
"@opencode-ai/ui": "workspace:*",
@@ -252,7 +252,7 @@
},
"packages/enterprise": {
"name": "@opencode-ai/enterprise",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/ui": "workspace:*",
"@opencode-ai/util": "workspace:*",
@@ -281,7 +281,7 @@
},
"packages/function": {
"name": "@opencode-ai/function",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "catalog:",
@@ -297,7 +297,7 @@
},
"packages/opencode": {
"name": "opencode",
"version": "1.3.8",
"version": "1.3.9",
"bin": {
"opencode": "./bin/opencode",
},
@@ -423,7 +423,7 @@
},
"packages/plugin": {
"name": "@opencode-ai/plugin",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"zod": "catalog:",
@@ -457,7 +457,7 @@
},
"packages/sdk/js": {
"name": "@opencode-ai/sdk",
"version": "1.3.8",
"version": "1.3.9",
"devDependencies": {
"@hey-api/openapi-ts": "0.90.10",
"@tsconfig/node22": "catalog:",
@@ -468,7 +468,7 @@
},
"packages/slack": {
"name": "@opencode-ai/slack",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"@slack/bolt": "^3.17.1",
@@ -503,7 +503,7 @@
},
"packages/ui": {
"name": "@opencode-ai/ui",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -550,7 +550,7 @@
},
"packages/util": {
"name": "@opencode-ai/util",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"zod": "catalog:",
},
@@ -561,7 +561,7 @@
},
"packages/web": {
"name": "@opencode-ai/web",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@astrojs/cloudflare": "12.6.3",
"@astrojs/markdown-remark": "6.3.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/app",
"version": "1.3.8",
"version": "1.3.9",
"description": "",
"type": "module",
"exports": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-app",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/console-core",
"version": "1.3.8",
"version": "1.3.9",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-function",
"version": "1.3.8",
"version": "1.3.9",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-mail",
"version": "1.3.8",
"version": "1.3.9",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",

View File

@@ -1,7 +1,7 @@
{
"name": "@opencode-ai/desktop-electron",
"private": true,
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"homepage": "https://opencode.ai",

View File

@@ -1,7 +1,7 @@
{
"name": "@opencode-ai/desktop",
"private": true,
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/enterprise",
"version": "1.3.8",
"version": "1.3.9",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -1,7 +1,7 @@
id = "opencode"
name = "OpenCode"
description = "The open source coding agent."
version = "1.3.8"
version = "1.3.9"
schema_version = 1
authors = ["Anomaly"]
repository = "https://github.com/anomalyco/opencode"
@@ -11,26 +11,26 @@ name = "OpenCode"
icon = "./icons/opencode.svg"
[agent_servers.opencode.targets.darwin-aarch64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-darwin-arm64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-darwin-arm64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.darwin-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-darwin-x64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-darwin-x64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-aarch64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-linux-arm64.tar.gz"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-linux-arm64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-linux-x64.tar.gz"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-linux-x64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.windows-x86_64]
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-windows-x64.zip"
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.9/opencode-windows-x64.zip"
cmd = "./opencode.exe"
args = ["acp"]

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/function",
"version": "1.3.8",
"version": "1.3.9",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View File

@@ -1,6 +1,6 @@
{
"$schema": "https://json.schemastore.org/package.json",
"version": "1.3.8",
"version": "1.3.9",
"name": "opencode",
"type": "module",
"license": "MIT",

View File

@@ -62,8 +62,8 @@
"light": "frappeText"
},
"textMuted": {
"dark": "frappeSubtext1",
"light": "frappeSubtext1"
"dark": "frappeOverlay2",
"light": "frappeOverlay2"
},
"background": {
"dark": "frappeBase",

View File

@@ -62,8 +62,8 @@
"light": "macText"
},
"textMuted": {
"dark": "macSubtext1",
"light": "macSubtext1"
"dark": "macOverlay2",
"light": "macOverlay2"
},
"background": {
"dark": "macBase",

View File

@@ -63,7 +63,7 @@
"success": { "dark": "darkGreen", "light": "lightGreen" },
"info": { "dark": "darkTeal", "light": "lightTeal" },
"text": { "dark": "darkText", "light": "lightText" },
"textMuted": { "dark": "darkSubtext1", "light": "lightSubtext1" },
"textMuted": { "dark": "darkOverlay2", "light": "lightOverlay2" },
"background": { "dark": "darkBase", "light": "lightBase" },
"backgroundPanel": { "dark": "darkMantle", "light": "lightMantle" },
"backgroundElement": { "dark": "darkCrust", "light": "lightCrust" },

View File

@@ -45,9 +45,9 @@ export function pluginSource(spec: string): PluginSource {
}
function resolveExportPath(raw: string, dir: string) {
if (raw.startsWith("./") || raw.startsWith("../")) return path.resolve(dir, raw)
if (raw.startsWith("file://")) return fileURLToPath(raw)
return raw
if (path.isAbsolute(raw)) return raw
return path.resolve(dir, raw)
}
function extractExportValue(value: unknown): string | undefined {
@@ -93,7 +93,7 @@ function resolvePackageEntrypoint(spec: string, kind: PluginKind, pkg: PluginPac
function targetPath(target: string) {
if (target.startsWith("file://")) return fileURLToPath(target)
if (path.isAbsolute(target) || /^[A-Za-z]:[\\/]/.test(target)) return target
if (path.isAbsolute(target)) return target
}
async function resolveDirectoryIndex(dir: string) {

View File

@@ -1,12 +1,14 @@
import z from "zod"
import { SessionID, MessageID, PartID } from "./schema"
import { Snapshot } from "../snapshot"
import { MessageV2 } from "./message-v2"
import { Session } from "."
import { Log } from "../util/log"
import { SyncEvent } from "../sync"
import { Storage } from "@/storage/storage"
import { Effect, Layer, ServiceMap } from "effect"
import { makeRuntime } from "@/effect/run-service"
import { Bus } from "../bus"
import { Snapshot } from "../snapshot"
import { Storage } from "@/storage/storage"
import { SyncEvent } from "../sync"
import { Log } from "../util/log"
import { Session } from "."
import { MessageV2 } from "./message-v2"
import { SessionID, MessageID, PartID } from "./schema"
import { SessionPrompt } from "./prompt"
import { SessionSummary } from "./summary"
@@ -20,116 +22,152 @@ export namespace SessionRevert {
})
export type RevertInput = z.infer<typeof RevertInput>
export async function revert(input: RevertInput) {
await SessionPrompt.assertNotBusy(input.sessionID)
const all = await Session.messages({ sessionID: input.sessionID })
let lastUser: MessageV2.User | undefined
const session = await Session.get(input.sessionID)
export interface Interface {
readonly revert: (input: RevertInput) => Effect.Effect<Session.Info>
readonly unrevert: (input: { sessionID: SessionID }) => Effect.Effect<Session.Info>
readonly cleanup: (session: Session.Info) => Effect.Effect<void>
}
let revert: Session.Info["revert"]
const patches: Snapshot.Patch[] = []
for (const msg of all) {
if (msg.info.role === "user") lastUser = msg.info
const remaining = []
for (const part of msg.parts) {
if (revert) {
if (part.type === "patch") {
patches.push(part)
}
continue
}
export class Service extends ServiceMap.Service<Service, Interface>()("@opencode/SessionRevert") {}
if (!revert) {
if ((msg.info.id === input.messageID && !input.partID) || part.id === input.partID) {
// if no useful parts left in message, same as reverting whole message
const partID = remaining.some((item) => ["text", "tool"].includes(item.type)) ? input.partID : undefined
revert = {
messageID: !partID && lastUser ? lastUser.id : msg.info.id,
partID,
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const sessions = yield* Session.Service
const snap = yield* Snapshot.Service
const storage = yield* Storage.Service
const bus = yield* Bus.Service
const revert = Effect.fn("SessionRevert.revert")(function* (input: RevertInput) {
yield* Effect.promise(() => SessionPrompt.assertNotBusy(input.sessionID))
const all = yield* sessions.messages({ sessionID: input.sessionID })
let lastUser: MessageV2.User | undefined
const session = yield* sessions.get(input.sessionID)
let rev: Session.Info["revert"]
const patches: Snapshot.Patch[] = []
for (const msg of all) {
if (msg.info.role === "user") lastUser = msg.info
const remaining = []
for (const part of msg.parts) {
if (rev) {
if (part.type === "patch") patches.push(part)
continue
}
if (!rev) {
if ((msg.info.id === input.messageID && !input.partID) || part.id === input.partID) {
const partID = remaining.some((item) => ["text", "tool"].includes(item.type)) ? input.partID : undefined
rev = {
messageID: !partID && lastUser ? lastUser.id : msg.info.id,
partID,
}
}
remaining.push(part)
}
}
remaining.push(part)
}
}
}
if (revert) {
const session = await Session.get(input.sessionID)
revert.snapshot = session.revert?.snapshot ?? (await Snapshot.track())
await Snapshot.revert(patches)
if (revert.snapshot) revert.diff = await Snapshot.diff(revert.snapshot)
const rangeMessages = all.filter((msg) => msg.info.id >= revert!.messageID)
const diffs = await SessionSummary.computeDiff({ messages: rangeMessages })
await Storage.write(["session_diff", input.sessionID], diffs)
Bus.publish(Session.Event.Diff, {
sessionID: input.sessionID,
diff: diffs,
if (!rev) return session
rev.snapshot = session.revert?.snapshot ?? (yield* snap.track())
yield* snap.revert(patches)
if (rev.snapshot) rev.diff = yield* snap.diff(rev.snapshot as string)
const range = all.filter((msg) => msg.info.id >= rev!.messageID)
const diffs = yield* Effect.promise(() => SessionSummary.computeDiff({ messages: range }))
yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore)
yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs })
yield* sessions.setRevert({
sessionID: input.sessionID,
revert: rev,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
},
})
return yield* sessions.get(input.sessionID)
})
return Session.setRevert({
sessionID: input.sessionID,
revert,
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
files: diffs.length,
},
const unrevert = Effect.fn("SessionRevert.unrevert")(function* (input: { sessionID: SessionID }) {
log.info("unreverting", input)
yield* Effect.promise(() => SessionPrompt.assertNotBusy(input.sessionID))
const session = yield* sessions.get(input.sessionID)
if (!session.revert) return session
if (session.revert.snapshot) yield* snap.restore(session.revert!.snapshot!)
yield* sessions.clearRevert(input.sessionID)
return yield* sessions.get(input.sessionID)
})
}
return session
const cleanup = Effect.fn("SessionRevert.cleanup")(function* (session: Session.Info) {
if (!session.revert) return
const sessionID = session.id
const msgs = yield* sessions.messages({ sessionID })
const messageID = session.revert.messageID
const remove = [] as MessageV2.WithParts[]
let target: MessageV2.WithParts | undefined
for (const msg of msgs) {
if (msg.info.id < messageID) continue
if (msg.info.id > messageID) {
remove.push(msg)
continue
}
if (session.revert.partID) {
target = msg
continue
}
remove.push(msg)
}
for (const msg of remove) {
SyncEvent.run(MessageV2.Event.Removed, {
sessionID,
messageID: msg.info.id,
})
}
if (session.revert.partID && target) {
const partID = session.revert.partID
const idx = target.parts.findIndex((part) => part.id === partID)
if (idx >= 0) {
const removeParts = target.parts.slice(idx)
target.parts = target.parts.slice(0, idx)
for (const part of removeParts) {
SyncEvent.run(MessageV2.Event.PartRemoved, {
sessionID,
messageID: target.info.id,
partID: part.id,
})
}
}
}
yield* sessions.clearRevert(sessionID)
})
return Service.of({ revert, unrevert, cleanup })
}),
)
export const defaultLayer = Layer.unwrap(
Effect.sync(() =>
layer.pipe(
Layer.provide(Session.defaultLayer),
Layer.provide(Snapshot.defaultLayer),
Layer.provide(Storage.defaultLayer),
Layer.provide(Bus.layer),
),
),
)
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function revert(input: RevertInput) {
return runPromise((svc) => svc.revert(input))
}
export async function unrevert(input: { sessionID: SessionID }) {
log.info("unreverting", input)
await SessionPrompt.assertNotBusy(input.sessionID)
const session = await Session.get(input.sessionID)
if (!session.revert) return session
if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot)
return Session.clearRevert(input.sessionID)
return runPromise((svc) => svc.unrevert(input))
}
export async function cleanup(session: Session.Info) {
if (!session.revert) return
const sessionID = session.id
const msgs = await Session.messages({ sessionID })
const messageID = session.revert.messageID
const remove = [] as MessageV2.WithParts[]
let target: MessageV2.WithParts | undefined
for (const msg of msgs) {
if (msg.info.id < messageID) {
continue
}
if (msg.info.id > messageID) {
remove.push(msg)
continue
}
if (session.revert.partID) {
target = msg
continue
}
remove.push(msg)
}
for (const msg of remove) {
SyncEvent.run(MessageV2.Event.Removed, {
sessionID: sessionID,
messageID: msg.info.id,
})
}
if (session.revert.partID && target) {
const partID = session.revert.partID
const removeStart = target.parts.findIndex((part) => part.id === partID)
if (removeStart >= 0) {
const preserveParts = target.parts.slice(0, removeStart)
const removeParts = target.parts.slice(removeStart)
target.parts = preserveParts
for (const part of removeParts) {
SyncEvent.run(MessageV2.Event.PartRemoved, {
sessionID: sessionID,
messageID: target.info.id,
partID: part.id,
})
}
}
}
await Session.clearRevert(sessionID)
return runPromise((svc) => svc.cleanup(session))
}
}

View File

@@ -1,19 +1,17 @@
import { Log } from "../util/log"
import path from "path"
import fs from "fs/promises"
import { Global } from "../global"
import { Filesystem } from "../util/filesystem"
import { lazy } from "../util/lazy"
import { Lock } from "../util/lock"
import { NamedError } from "@opencode-ai/util/error"
import z from "zod"
import { Glob } from "../util/glob"
import { git } from "@/util/git"
import { AppFileSystem } from "@/filesystem"
import { makeRuntime } from "@/effect/run-service"
import { Effect, Exit, Layer, Option, RcMap, Schema, ServiceMap, TxReentrantLock } from "effect"
export namespace Storage {
const log = Log.create({ service: "storage" })
type Migration = (dir: string) => Promise<void>
type Migration = (dir: string, fs: AppFileSystem.Interface) => Effect.Effect<void, AppFileSystem.Error>
export const NotFoundError = NamedError.create(
"NotFoundError",
@@ -22,36 +20,101 @@ export namespace Storage {
}),
)
export type Error = AppFileSystem.Error | InstanceType<typeof NotFoundError>
const RootFile = Schema.Struct({
path: Schema.optional(
Schema.Struct({
root: Schema.optional(Schema.String),
}),
),
})
const SessionFile = Schema.Struct({
id: Schema.String,
})
const MessageFile = Schema.Struct({
id: Schema.String,
})
const DiffFile = Schema.Struct({
additions: Schema.Number,
deletions: Schema.Number,
})
const SummaryFile = Schema.Struct({
id: Schema.String,
projectID: Schema.String,
summary: Schema.Struct({ diffs: Schema.Array(DiffFile) }),
})
const decodeRoot = Schema.decodeUnknownOption(RootFile)
const decodeSession = Schema.decodeUnknownOption(SessionFile)
const decodeMessage = Schema.decodeUnknownOption(MessageFile)
const decodeSummary = Schema.decodeUnknownOption(SummaryFile)
export interface Interface {
readonly remove: (key: string[]) => Effect.Effect<void, AppFileSystem.Error>
readonly read: <T>(key: string[]) => Effect.Effect<T, Error>
readonly update: <T>(key: string[], fn: (draft: T) => void) => Effect.Effect<T, Error>
readonly write: <T>(key: string[], content: T) => Effect.Effect<void, AppFileSystem.Error>
readonly list: (prefix: string[]) => Effect.Effect<string[][], AppFileSystem.Error>
}
export class Service extends ServiceMap.Service<Service, Interface>()("@opencode/Storage") {}
function file(dir: string, key: string[]) {
return path.join(dir, ...key) + ".json"
}
function missing(err: unknown) {
if (!err || typeof err !== "object") return false
if ("code" in err && err.code === "ENOENT") return true
if ("reason" in err && err.reason && typeof err.reason === "object" && "_tag" in err.reason) {
return err.reason._tag === "NotFound"
}
return false
}
function parseMigration(text: string) {
const value = Number.parseInt(text, 10)
return Number.isNaN(value) ? 0 : value
}
const MIGRATIONS: Migration[] = [
async (dir) => {
Effect.fn("Storage.migration.1")(function* (dir: string, fs: AppFileSystem.Interface) {
const project = path.resolve(dir, "../project")
if (!(await Filesystem.isDir(project))) return
const projectDirs = await Glob.scan("*", {
if (!(yield* fs.isDir(project))) return
const projectDirs = yield* fs.glob("*", {
cwd: project,
include: "all",
})
for (const projectDir of projectDirs) {
const fullPath = path.join(project, projectDir)
if (!(await Filesystem.isDir(fullPath))) continue
const full = path.join(project, projectDir)
if (!(yield* fs.isDir(full))) continue
log.info(`migrating project ${projectDir}`)
let projectID = projectDir
const fullProjectDir = path.join(project, projectDir)
let worktree = "/"
if (projectID !== "global") {
for (const msgFile of await Glob.scan("storage/session/message/*/*.json", {
cwd: path.join(project, projectDir),
for (const msgFile of yield* fs.glob("storage/session/message/*/*.json", {
cwd: full,
absolute: true,
})) {
const json = await Filesystem.readJson<any>(msgFile)
worktree = json.path?.root
if (worktree) break
const json = decodeRoot(yield* fs.readJson(msgFile), { onExcessProperty: "preserve" })
const root = Option.isSome(json) ? json.value.path?.root : undefined
if (!root) continue
worktree = root
break
}
if (!worktree) continue
if (!(await Filesystem.isDir(worktree))) continue
const result = await git(["rev-list", "--max-parents=0", "--all"], {
cwd: worktree,
})
if (!(yield* fs.isDir(worktree))) continue
const result = yield* Effect.promise(() =>
git(["rev-list", "--max-parents=0", "--all"], {
cwd: worktree,
}),
)
const [id] = result
.text()
.split("\n")
@@ -61,157 +124,230 @@ export namespace Storage {
if (!id) continue
projectID = id
await Filesystem.writeJson(path.join(dir, "project", projectID + ".json"), {
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
})
yield* fs.writeWithDirs(
path.join(dir, "project", projectID + ".json"),
JSON.stringify(
{
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
},
null,
2,
),
)
log.info(`migrating sessions for project ${projectID}`)
for (const sessionFile of await Glob.scan("storage/session/info/*.json", {
cwd: fullProjectDir,
for (const sessionFile of yield* fs.glob("storage/session/info/*.json", {
cwd: full,
absolute: true,
})) {
const dest = path.join(dir, "session", projectID, path.basename(sessionFile))
log.info("copying", {
sessionFile,
dest,
})
const session = await Filesystem.readJson<any>(sessionFile)
await Filesystem.writeJson(dest, session)
log.info(`migrating messages for session ${session.id}`)
for (const msgFile of await Glob.scan(`storage/session/message/${session.id}/*.json`, {
cwd: fullProjectDir,
log.info("copying", { sessionFile, dest })
const session = yield* fs.readJson(sessionFile)
const info = decodeSession(session, { onExcessProperty: "preserve" })
yield* fs.writeWithDirs(dest, JSON.stringify(session, null, 2))
if (Option.isNone(info)) continue
log.info(`migrating messages for session ${info.value.id}`)
for (const msgFile of yield* fs.glob(`storage/session/message/${info.value.id}/*.json`, {
cwd: full,
absolute: true,
})) {
const dest = path.join(dir, "message", session.id, path.basename(msgFile))
const next = path.join(dir, "message", info.value.id, path.basename(msgFile))
log.info("copying", {
msgFile,
dest,
dest: next,
})
const message = await Filesystem.readJson<any>(msgFile)
await Filesystem.writeJson(dest, message)
const message = yield* fs.readJson(msgFile)
const item = decodeMessage(message, { onExcessProperty: "preserve" })
yield* fs.writeWithDirs(next, JSON.stringify(message, null, 2))
if (Option.isNone(item)) continue
log.info(`migrating parts for message ${message.id}`)
for (const partFile of await Glob.scan(`storage/session/part/${session.id}/${message.id}/*.json`, {
cwd: fullProjectDir,
log.info(`migrating parts for message ${item.value.id}`)
for (const partFile of yield* fs.glob(`storage/session/part/${info.value.id}/${item.value.id}/*.json`, {
cwd: full,
absolute: true,
})) {
const dest = path.join(dir, "part", message.id, path.basename(partFile))
const part = await Filesystem.readJson(partFile)
const out = path.join(dir, "part", item.value.id, path.basename(partFile))
const part = yield* fs.readJson(partFile)
log.info("copying", {
partFile,
dest,
dest: out,
})
await Filesystem.writeJson(dest, part)
yield* fs.writeWithDirs(out, JSON.stringify(part, null, 2))
}
}
}
}
}
},
async (dir) => {
for (const item of await Glob.scan("session/*/*.json", {
}),
Effect.fn("Storage.migration.2")(function* (dir: string, fs: AppFileSystem.Interface) {
for (const item of yield* fs.glob("session/*/*.json", {
cwd: dir,
absolute: true,
})) {
const session = await Filesystem.readJson<any>(item)
if (!session.projectID) continue
if (!session.summary?.diffs) continue
const { diffs } = session.summary
await Filesystem.write(path.join(dir, "session_diff", session.id + ".json"), JSON.stringify(diffs))
await Filesystem.writeJson(path.join(dir, "session", session.projectID, session.id + ".json"), {
...session,
summary: {
additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
},
})
const raw = yield* fs.readJson(item)
const session = decodeSummary(raw, { onExcessProperty: "preserve" })
if (Option.isNone(session)) continue
const diffs = session.value.summary.diffs
yield* fs.writeWithDirs(
path.join(dir, "session_diff", session.value.id + ".json"),
JSON.stringify(diffs, null, 2),
)
yield* fs.writeWithDirs(
path.join(dir, "session", session.value.projectID, session.value.id + ".json"),
JSON.stringify(
{
...(raw as Record<string, unknown>),
summary: {
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
},
},
null,
2,
),
)
}
},
}),
]
const state = lazy(async () => {
const dir = path.join(Global.Path.data, "storage")
const migration = await Filesystem.readJson<string>(path.join(dir, "migration"))
.then((x) => parseInt(x))
.catch(() => 0)
for (let index = migration; index < MIGRATIONS.length; index++) {
log.info("running migration", { index })
const migration = MIGRATIONS[index]
await migration(dir).catch(() => log.error("failed to run migration", { index }))
await Filesystem.write(path.join(dir, "migration"), (index + 1).toString())
}
return {
dir,
}
})
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const locks = yield* RcMap.make({
lookup: () => TxReentrantLock.make(),
idleTimeToLive: 0,
})
const state = yield* Effect.cached(
Effect.gen(function* () {
const dir = path.join(Global.Path.data, "storage")
const marker = path.join(dir, "migration")
const migration = yield* fs.readFileString(marker).pipe(
Effect.map(parseMigration),
Effect.catchIf(missing, () => Effect.succeed(0)),
Effect.orElseSucceed(() => 0),
)
for (let i = migration; i < MIGRATIONS.length; i++) {
log.info("running migration", { index: i })
const step = MIGRATIONS[i]!
const exit = yield* Effect.exit(step(dir, fs))
if (Exit.isFailure(exit)) {
log.error("failed to run migration", { index: i, cause: exit.cause })
break
}
yield* fs.writeWithDirs(marker, String(i + 1))
}
return { dir }
}),
)
const fail = (target: string): Effect.Effect<never, InstanceType<typeof NotFoundError>> =>
Effect.fail(new NotFoundError({ message: `Resource not found: ${target}` }))
const wrap = <A>(target: string, body: Effect.Effect<A, AppFileSystem.Error>) =>
body.pipe(Effect.catchIf(missing, () => fail(target)))
const writeJson = Effect.fnUntraced(function* (target: string, content: unknown) {
yield* fs.writeWithDirs(target, JSON.stringify(content, null, 2))
})
const withResolved = <A, E>(
key: string[],
fn: (target: string, rw: TxReentrantLock.TxReentrantLock) => Effect.Effect<A, E>,
): Effect.Effect<A, E | AppFileSystem.Error> =>
Effect.scoped(
Effect.gen(function* () {
const target = file((yield* state).dir, key)
return yield* fn(target, yield* RcMap.get(locks, target))
}),
)
const remove: Interface["remove"] = Effect.fn("Storage.remove")(function* (key: string[]) {
yield* withResolved(key, (target, rw) =>
TxReentrantLock.withWriteLock(rw, fs.remove(target).pipe(Effect.catchIf(missing, () => Effect.void))),
)
})
const read: Interface["read"] = <T>(key: string[]) =>
Effect.gen(function* () {
const value = yield* withResolved(key, (target, rw) =>
TxReentrantLock.withReadLock(rw, wrap(target, fs.readJson(target))),
)
return value as T
})
const update: Interface["update"] = <T>(key: string[], fn: (draft: T) => void) =>
Effect.gen(function* () {
const value = yield* withResolved(key, (target, rw) =>
TxReentrantLock.withWriteLock(
rw,
Effect.gen(function* () {
const content = yield* wrap(target, fs.readJson(target))
fn(content as T)
yield* writeJson(target, content)
return content
}),
),
)
return value as T
})
const write: Interface["write"] = (key: string[], content: unknown) =>
Effect.gen(function* () {
yield* withResolved(key, (target, rw) => TxReentrantLock.withWriteLock(rw, writeJson(target, content)))
})
// List every stored key under `prefix`, sorted by joined path.
// slice(0, -5) strips the ".json" suffix before re-splitting into key segments.
// Glob failures (e.g. the prefix directory does not exist) yield an empty list.
const list: Interface["list"] = Effect.fn("Storage.list")(function* (prefix: string[]) {
  const dir = (yield* state).dir
  const cwd = path.join(dir, ...prefix)
  const result = yield* fs
    .glob("**/*", {
      cwd,
      include: "file",
    })
    .pipe(Effect.catch(() => Effect.succeed<string[]>([])))
  return result
    .map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)])
    .toSorted((a, b) => a.join("/").localeCompare(b.join("/")))
})
return Service.of({
remove,
read,
update,
write,
list,
})
}),
)
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
const { runPromise } = makeRuntime(Service, defaultLayer)
/**
 * Async facade over Storage.Service.remove for non-Effect callers.
 * Removing a key that does not exist is a no-op (the service swallows it).
 *
 * NOTE(review): the diff merge left the removed legacy filesystem body in front
 * of the new service call, making the second return unreachable; only the
 * service-backed implementation is kept.
 */
export async function remove(key: string[]) {
  return runPromise((svc) => svc.remove(key))
}
/**
 * Async facade over Storage.Service.read for non-Effect callers.
 * Rejects with NotFoundError when the key does not exist.
 *
 * NOTE(review): the legacy direct-filesystem body shadowed the new service call
 * (unreachable second return, a diff-merge artifact); only the service path is kept.
 */
export async function read<T>(key: string[]) {
  return runPromise((svc) => svc.read<T>(key))
}
/**
 * Async facade over Storage.Service.update for non-Effect callers.
 * Reads the stored value, applies `fn` to the draft in place, persists and
 * resolves with the mutated value; rejects with NotFoundError when missing.
 *
 * NOTE(review): the legacy lock/Filesystem body made the new service call
 * unreachable (diff-merge artifact); only the service path is kept.
 */
export async function update<T>(key: string[], fn: (draft: T) => void) {
  return runPromise((svc) => svc.update<T>(key, fn))
}
/**
 * Async facade over Storage.Service.write for non-Effect callers.
 *
 * NOTE(review): the merged diff misplaced the new `runPromise` line inside
 * `withErrorHandling`, where `key`/`content` are out of scope (a compile
 * error). It is restored here to `write`; `withErrorHandling` is kept intact
 * for the remaining legacy facades.
 */
export async function write<T>(key: string[], content: T) {
  return runPromise((svc) => svc.write(key, content))
}
/**
 * Legacy helper for the direct-filesystem facades: maps ENOENT failures from
 * `body` to NotFoundError and rethrows everything else unchanged.
 */
async function withErrorHandling<T>(body: () => Promise<T>) {
  return body().catch((e) => {
    if (!(e instanceof Error)) throw e
    const errnoException = e as NodeJS.ErrnoException
    if (errnoException.code === "ENOENT") {
      throw new NotFoundError({ message: `Resource not found: ${errnoException.path}` })
    }
    throw e
  })
}
/**
 * Async facade over Storage.Service.list for non-Effect callers.
 * Resolves with the sorted key arrays under `prefix` (empty when none exist).
 *
 * NOTE(review): the legacy Glob-based body shadowed the new service call
 * (unreachable return, diff-merge artifact); only the service path is kept.
 */
export async function list(prefix: string[]) {
  return runPromise((svc) => svc.list(prefix))
}
}

View File

@@ -331,6 +331,117 @@ describe("plugin.loader.shared", () => {
}
})
test("loads npm server plugin from package server export without leading dot", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
const mod = path.join(dir, "mods", "acme-plugin")
const dist = path.join(mod, "dist")
const mark = path.join(dir, "server-called.txt")
await fs.mkdir(dist, { recursive: true })
await Bun.write(
path.join(mod, "package.json"),
JSON.stringify(
{
name: "acme-plugin",
type: "module",
exports: {
".": "./index.js",
"./server": "dist/server.js",
},
},
null,
2,
),
)
await Bun.write(path.join(mod, "index.js"), 'import "./main-throws.js"\nexport default {}\n')
await Bun.write(path.join(mod, "main-throws.js"), 'throw new Error("main loaded")\n')
await Bun.write(
path.join(dist, "server.js"),
[
"export default {",
" server: async () => {",
` await Bun.write(${JSON.stringify(mark)}, "called")`,
" return {}",
" },",
"}",
"",
].join("\n"),
)
await Bun.write(path.join(dir, "opencode.json"), JSON.stringify({ plugin: ["acme-plugin@1.0.0"] }, null, 2))
return {
mod,
mark,
}
},
})
const install = spyOn(BunProc, "install").mockResolvedValue(tmp.extra.mod)
try {
const errors = await errs(tmp.path)
expect(errors).toHaveLength(0)
expect(await Bun.file(tmp.extra.mark).text()).toBe("called")
} finally {
install.mockRestore()
}
})
test("loads npm server plugin from package main without leading dot", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
const mod = path.join(dir, "mods", "acme-plugin")
const dist = path.join(mod, "dist")
const mark = path.join(dir, "main-called.txt")
await fs.mkdir(dist, { recursive: true })
await Bun.write(
path.join(mod, "package.json"),
JSON.stringify(
{
name: "acme-plugin",
type: "module",
main: "dist/index.js",
},
null,
2,
),
)
await Bun.write(
path.join(dist, "index.js"),
[
"export default {",
" server: async () => {",
` await Bun.write(${JSON.stringify(mark)}, "called")`,
" return {}",
" },",
"}",
"",
].join("\n"),
)
await Bun.write(path.join(dir, "opencode.json"), JSON.stringify({ plugin: ["acme-plugin@1.0.0"] }, null, 2))
return {
mod,
mark,
}
},
})
const install = spyOn(BunProc, "install").mockResolvedValue(tmp.extra.mod)
try {
const errors = await errs(tmp.path)
expect(errors).toHaveLength(0)
expect(await Bun.file(tmp.extra.mark).text()).toBe("called")
} finally {
install.mockRestore()
}
})
test("does not use npm package exports dot for server entry", async () => {
await using tmp = await tmpdir({
init: async (dir) => {

View File

@@ -13,6 +13,18 @@ afterEach(async () => {
await Instance.disposeAll()
})
// Run `fn` with the file watcher disabled via env var, but only on Windows;
// other platforms run `fn` untouched. The previous env value is always restored.
async function withoutWatcher<T>(fn: () => Promise<T>) {
  if (process.platform !== "win32") return fn()
  const saved = process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER
  process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER = "true"
  try {
    return await fn()
  } finally {
    if (saved === undefined) {
      delete process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER
    } else {
      process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER = saved
    }
  }
}
async function fill(sessionID: SessionID, count: number, time = (i: number) => Date.now() + i) {
const ids = [] as MessageID[]
for (let i = 0; i < count; i++) {
@@ -42,86 +54,94 @@ async function fill(sessionID: SessionID, count: number, time = (i: number) => D
describe("session messages endpoint", () => {
test("returns cursor headers for older pages", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const ids = await fill(session.id, 5)
const app = Server.Default()
await withoutWatcher(() =>
Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const ids = await fill(session.id, 5)
const app = Server.Default()
const a = await app.request(`/session/${session.id}/message?limit=2`)
expect(a.status).toBe(200)
const aBody = (await a.json()) as MessageV2.WithParts[]
expect(aBody.map((item) => item.info.id)).toEqual(ids.slice(-2))
const cursor = a.headers.get("x-next-cursor")
expect(cursor).toBeTruthy()
expect(a.headers.get("link")).toContain('rel="next"')
const a = await app.request(`/session/${session.id}/message?limit=2`)
expect(a.status).toBe(200)
const aBody = (await a.json()) as MessageV2.WithParts[]
expect(aBody.map((item) => item.info.id)).toEqual(ids.slice(-2))
const cursor = a.headers.get("x-next-cursor")
expect(cursor).toBeTruthy()
expect(a.headers.get("link")).toContain('rel="next"')
const b = await app.request(`/session/${session.id}/message?limit=2&before=${encodeURIComponent(cursor!)}`)
expect(b.status).toBe(200)
const bBody = (await b.json()) as MessageV2.WithParts[]
expect(bBody.map((item) => item.info.id)).toEqual(ids.slice(-4, -2))
const b = await app.request(`/session/${session.id}/message?limit=2&before=${encodeURIComponent(cursor!)}`)
expect(b.status).toBe(200)
const bBody = (await b.json()) as MessageV2.WithParts[]
expect(bBody.map((item) => item.info.id)).toEqual(ids.slice(-4, -2))
await Session.remove(session.id)
},
})
await Session.remove(session.id)
},
}),
)
})
test("keeps full-history responses when limit is omitted", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const ids = await fill(session.id, 3)
const app = Server.Default()
await withoutWatcher(() =>
Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const ids = await fill(session.id, 3)
const app = Server.Default()
const res = await app.request(`/session/${session.id}/message`)
expect(res.status).toBe(200)
const body = (await res.json()) as MessageV2.WithParts[]
expect(body.map((item) => item.info.id)).toEqual(ids)
const res = await app.request(`/session/${session.id}/message`)
expect(res.status).toBe(200)
const body = (await res.json()) as MessageV2.WithParts[]
expect(body.map((item) => item.info.id)).toEqual(ids)
await Session.remove(session.id)
},
})
await Session.remove(session.id)
},
}),
)
})
test("rejects invalid cursors and missing sessions", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const app = Server.Default()
await withoutWatcher(() =>
Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const app = Server.Default()
const bad = await app.request(`/session/${session.id}/message?limit=2&before=bad`)
expect(bad.status).toBe(400)
const bad = await app.request(`/session/${session.id}/message?limit=2&before=bad`)
expect(bad.status).toBe(400)
const miss = await app.request(`/session/ses_missing/message?limit=2`)
expect(miss.status).toBe(404)
const miss = await app.request(`/session/ses_missing/message?limit=2`)
expect(miss.status).toBe(404)
await Session.remove(session.id)
},
})
await Session.remove(session.id)
},
}),
)
})
test("does not truncate large legacy limit requests", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
await fill(session.id, 520)
const app = Server.Default()
await withoutWatcher(() =>
Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
await fill(session.id, 520)
const app = Server.Default()
const res = await app.request(`/session/${session.id}/message?limit=510`)
expect(res.status).toBe(200)
const body = (await res.json()) as MessageV2.WithParts[]
expect(body).toHaveLength(510)
const res = await app.request(`/session/${session.id}/message?limit=510`)
expect(res.status).toBe(200)
const body = (await res.json()) as MessageV2.WithParts[]
expect(body).toHaveLength(510)
await Session.remove(session.id)
},
})
await Session.remove(session.id)
},
}),
)
})
})

View File

@@ -10,9 +10,59 @@ import { Instance } from "../../src/project/instance"
import { MessageID, PartID } from "../../src/session/schema"
import { tmpdir } from "../fixture/fixture"
const projectRoot = path.join(__dirname, "../..")
Log.init({ print: false })
// Persist a user message on `sessionID` stamped with a fixed openai/gpt-4 model.
function user(sessionID: string, agent = "default") {
  const info = {
    id: MessageID.ascending(),
    role: "user" as const,
    sessionID: sessionID as any,
    agent,
    model: { providerID: ProviderID.make("openai"), modelID: ModelID.make("gpt-4") },
    time: { created: Date.now() },
  }
  return Session.updateMessage(info)
}
// Persist an assistant message replying to `parentID`, with zeroed cost/token
// counters, cwd/root both pointing at `dir`, and finish marked "end_turn".
function assistant(sessionID: string, parentID: string, dir: string) {
  return Session.updateMessage({
    id: MessageID.ascending(),
    role: "assistant" as const,
    sessionID: sessionID as any,
    mode: "default",
    agent: "default",
    path: { cwd: dir, root: dir },
    cost: 0,
    tokens: { output: 0, input: 0, reasoning: 0, cache: { read: 0, write: 0 } },
    modelID: ModelID.make("gpt-4"),
    providerID: ProviderID.make("openai"),
    parentID: parentID as any,
    time: { created: Date.now() },
    finish: "end_turn",
  })
}
function text(sessionID: string, messageID: string, content: string) {
return Session.updatePart({
id: PartID.ascending(),
messageID: messageID as any,
sessionID: sessionID as any,
type: "text" as const,
text: content,
})
}
// Persist a completed bash tool part (fixed callID "call-1") on the given message.
function tool(sessionID: string, messageID: string) {
  return Session.updatePart({
    id: PartID.ascending(),
    messageID: messageID as any,
    sessionID: sessionID as any,
    type: "tool" as const,
    tool: "bash",
    callID: "call-1",
    state: { status: "completed" as const, input: {}, output: "done", title: "", metadata: {}, time: { start: 0, end: 1 } },
  })
}
describe("revert + compact workflow", () => {
test("should properly handle compact command after revert", async () => {
await using tmp = await tmpdir({ git: true })
@@ -283,4 +333,98 @@ describe("revert + compact workflow", () => {
},
})
})
test("cleanup with partID removes parts from the revert point onward", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const sid = session.id
const u1 = await user(sid)
const p1 = await text(sid, u1.id, "first part")
const p2 = await tool(sid, u1.id)
const p3 = await text(sid, u1.id, "third part")
// Set revert state pointing at a specific part
await Session.setRevert({
sessionID: sid,
revert: { messageID: u1.id, partID: p2.id },
summary: { additions: 0, deletions: 0, files: 0 },
})
const info = await Session.get(sid)
await SessionRevert.cleanup(info)
const msgs = await Session.messages({ sessionID: sid })
expect(msgs.length).toBe(1)
// Only the first part should remain (before the revert partID)
expect(msgs[0].parts.length).toBe(1)
expect(msgs[0].parts[0].id).toBe(p1.id)
const cleared = await Session.get(sid)
expect(cleared.revert).toBeUndefined()
},
})
})
test("cleanup removes messages after revert point but keeps earlier ones", async () => {
await using tmp = await tmpdir({ git: true })
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const sid = session.id
const u1 = await user(sid)
await text(sid, u1.id, "hello")
const a1 = await assistant(sid, u1.id, tmp.path)
await text(sid, a1.id, "hi back")
const u2 = await user(sid)
await text(sid, u2.id, "second question")
const a2 = await assistant(sid, u2.id, tmp.path)
await text(sid, a2.id, "second answer")
// Revert from u2 onward
await Session.setRevert({
sessionID: sid,
revert: { messageID: u2.id },
summary: { additions: 0, deletions: 0, files: 0 },
})
const info = await Session.get(sid)
await SessionRevert.cleanup(info)
const msgs = await Session.messages({ sessionID: sid })
const ids = msgs.map((m) => m.info.id)
expect(ids).toContain(u1.id)
expect(ids).toContain(a1.id)
expect(ids).not.toContain(u2.id)
expect(ids).not.toContain(a2.id)
},
})
})
// cleanup must leave the session untouched when no revert marker is set.
test("cleanup is a no-op when session has no revert state", async () => {
  await using tmp = await tmpdir({ git: true })
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      const session = await Session.create({})
      const sid = session.id
      const u1 = await user(sid)
      await text(sid, u1.id, "hello")
      const info = await Session.get(sid)
      expect(info.revert).toBeUndefined()
      await SessionRevert.cleanup(info)
      // The sole user message must survive.
      const msgs = await Session.messages({ sessionID: sid })
      expect(msgs.length).toBe(1)
    },
  })
})
})

View File

@@ -0,0 +1,295 @@
import { describe, expect, test } from "bun:test"
import fs from "fs/promises"
import path from "path"
import { Effect, Layer, ManagedRuntime } from "effect"
import { AppFileSystem } from "../../src/filesystem"
import { Global } from "../../src/global"
import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture"
const dir = path.join(Global.Path.data, "storage")
// Run `fn` with a unique key prefix under the storage dir and always delete
// that subtree afterwards, even when `fn` throws.
async function withScope<T>(fn: (root: string[]) => Promise<T>) {
  const root = ["storage_test", crypto.randomUUID()]
  try {
    return await fn(root)
  } finally {
    await fs.rm(path.join(dir, ...root), { recursive: true, force: true })
  }
}
// Redirect any path under Global.Path.data to live beneath `root`;
// unrelated paths pass through unchanged.
function map(root: string, file: string) {
  const base = Global.Path.data
  if (file === base) return root
  if (!file.startsWith(base + path.sep)) return file
  return path.join(root, path.relative(base, file))
}
// AppFileSystem layer that rewrites every path under Global.Path.data into
// `root` (see `map`), so Storage reads/writes stay inside the test directory.
// All other operations delegate to the real default filesystem layer.
function layer(root: string) {
  return Layer.effect(
    AppFileSystem.Service,
    Effect.gen(function* () {
      const fs = yield* AppFileSystem.Service
      return AppFileSystem.Service.of({
        ...fs,
        isDir: (file) => fs.isDir(map(root, file)),
        readJson: (file) => fs.readJson(map(root, file)),
        writeWithDirs: (file, content, mode) => fs.writeWithDirs(map(root, file), content, mode),
        readFileString: (file) => fs.readFileString(map(root, file)),
        remove: (file) => fs.remove(map(root, file)),
        glob: (pattern, options) =>
          fs.glob(pattern, options?.cwd ? { ...options, cwd: map(root, options.cwd) } : options),
      })
    }),
  ).pipe(Layer.provide(AppFileSystem.defaultLayer))
}
// Build a dedicated Storage runtime on top of the redirected filesystem layer,
// hand `fn` a promise-based runner for Storage effects, and always dispose the
// runtime afterwards.
async function withStorage<T>(
  root: string,
  fn: (run: <A, E>(body: Effect.Effect<A, E, Storage.Service>) => Promise<A>) => Promise<T>,
) {
  const rt = ManagedRuntime.make(Storage.layer.pipe(Layer.provide(layer(root))))
  try {
    return await fn((body) => rt.runPromise(body))
  } finally {
    await rt.dispose()
  }
}
// Write `value` as pretty-printed JSON, creating parent directories first.
async function write(file: string, value: unknown) {
  const parent = path.dirname(file)
  await fs.mkdir(parent, { recursive: true })
  await Bun.write(file, JSON.stringify(value, null, 2))
}
// Write raw text to `file`, creating parent directories first.
async function text(file: string, value: string) {
  const parent = path.dirname(file)
  await fs.mkdir(parent, { recursive: true })
  await Bun.write(file, value)
}
// True when `file` can be stat-ed; any stat failure counts as absent.
async function exists(file: string) {
  try {
    await fs.stat(file)
    return true
  } catch {
    return false
  }
}
describe("Storage", () => {
test("round-trips JSON content", async () => {
await withScope(async (root) => {
const key = [...root, "session_diff", "roundtrip"]
const value = [{ file: "a.ts", additions: 2, deletions: 1 }]
await Storage.write(key, value)
expect(await Storage.read<typeof value>(key)).toEqual(value)
})
})
test("maps missing reads to NotFoundError", async () => {
await withScope(async (root) => {
await expect(Storage.read([...root, "missing", "value"])).rejects.toMatchObject({ name: "NotFoundError" })
})
})
test("update on missing key throws NotFoundError", async () => {
await withScope(async (root) => {
await expect(
Storage.update<{ value: number }>([...root, "missing", "key"], (draft) => {
draft.value += 1
}),
).rejects.toMatchObject({ name: "NotFoundError" })
})
})
test("write overwrites existing value", async () => {
await withScope(async (root) => {
const key = [...root, "overwrite", "test"]
await Storage.write<{ v: number }>(key, { v: 1 })
await Storage.write<{ v: number }>(key, { v: 2 })
expect(await Storage.read<{ v: number }>(key)).toEqual({ v: 2 })
})
})
test("remove on missing key is a no-op", async () => {
await withScope(async (root) => {
await expect(Storage.remove([...root, "nonexistent", "key"])).resolves.toBeUndefined()
})
})
test("list on missing prefix returns empty", async () => {
await withScope(async (root) => {
expect(await Storage.list([...root, "nonexistent"])).toEqual([])
})
})
test("serializes concurrent updates for the same key", async () => {
await withScope(async (root) => {
const key = [...root, "counter", "shared"]
await Storage.write(key, { value: 0 })
await Promise.all(
Array.from({ length: 25 }, () =>
Storage.update<{ value: number }>(key, (draft) => {
draft.value += 1
}),
),
)
expect(await Storage.read<{ value: number }>(key)).toEqual({ value: 25 })
})
})
test("concurrent reads do not block each other", async () => {
await withScope(async (root) => {
const key = [...root, "concurrent", "reads"]
await Storage.write(key, { ok: true })
const results = await Promise.all(Array.from({ length: 10 }, () => Storage.read(key)))
expect(results).toHaveLength(10)
for (const r of results) expect(r).toEqual({ ok: true })
})
})
test("nested keys create deep paths", async () => {
await withScope(async (root) => {
const key = [...root, "a", "b", "c", "deep"]
await Storage.write<{ nested: boolean }>(key, { nested: true })
expect(await Storage.read<{ nested: boolean }>(key)).toEqual({ nested: true })
expect(await Storage.list([...root, "a"])).toEqual([key])
})
})
test("lists and removes stored entries", async () => {
await withScope(async (root) => {
const a = [...root, "list", "a"]
const b = [...root, "list", "b"]
const prefix = [...root, "list"]
await Storage.write(b, { value: 2 })
await Storage.write(a, { value: 1 })
expect(await Storage.list(prefix)).toEqual([a, b])
await Storage.remove(a)
expect(await Storage.list(prefix)).toEqual([b])
await expect(Storage.read(a)).rejects.toMatchObject({ name: "NotFoundError" })
})
})
test("migration 2 runs when marker contents are invalid", async () => {
await using tmp = await tmpdir()
const storage = path.join(tmp.path, "storage")
const diffs = [
{ additions: 2, deletions: 1 },
{ additions: 3, deletions: 4 },
]
await text(path.join(storage, "migration"), "wat")
await write(path.join(storage, "session", "proj_test", "ses_test.json"), {
id: "ses_test",
projectID: "proj_test",
title: "legacy",
summary: { diffs },
})
await withStorage(tmp.path, async (run) => {
expect(await run(Storage.Service.use((svc) => svc.list(["session_diff"])))).toEqual([
["session_diff", "ses_test"],
])
expect(await run(Storage.Service.use((svc) => svc.read<typeof diffs>(["session_diff", "ses_test"])))).toEqual(
diffs,
)
expect(
await run(
Storage.Service.use((svc) =>
svc.read<{
id: string
projectID: string
title: string
summary: {
additions: number
deletions: number
}
}>(["session", "proj_test", "ses_test"]),
),
),
).toEqual({
id: "ses_test",
projectID: "proj_test",
title: "legacy",
summary: {
additions: 5,
deletions: 5,
},
})
})
expect(await Bun.file(path.join(storage, "migration")).text()).toBe("2")
})
test("migration 1 tolerates malformed legacy records", async () => {
await using tmp = await tmpdir({ git: true })
const storage = path.join(tmp.path, "storage")
const legacy = path.join(tmp.path, "project", "legacy")
await write(path.join(legacy, "storage", "session", "message", "probe", "0.json"), [])
await write(path.join(legacy, "storage", "session", "message", "probe", "1.json"), {
path: { root: tmp.path },
})
await write(path.join(legacy, "storage", "session", "info", "ses_legacy.json"), {
id: "ses_legacy",
title: "legacy",
})
await write(path.join(legacy, "storage", "session", "message", "ses_legacy", "msg_legacy.json"), {
role: "user",
text: "hello",
})
await withStorage(tmp.path, async (run) => {
const projects = await run(Storage.Service.use((svc) => svc.list(["project"])))
expect(projects).toHaveLength(1)
const project = projects[0]![1]
expect(await run(Storage.Service.use((svc) => svc.list(["session", project])))).toEqual([
["session", project, "ses_legacy"],
])
expect(
await run(
Storage.Service.use((svc) => svc.read<{ id: string; title: string }>(["session", project, "ses_legacy"])),
),
).toEqual({
id: "ses_legacy",
title: "legacy",
})
expect(
await run(
Storage.Service.use((svc) =>
svc.read<{ role: string; text: string }>(["message", "ses_legacy", "msg_legacy"]),
),
),
).toEqual({
role: "user",
text: "hello",
})
})
expect(await Bun.file(path.join(storage, "migration")).text()).toBe("2")
})
test("failed migrations do not advance the marker", async () => {
await using tmp = await tmpdir()
const storage = path.join(tmp.path, "storage")
const legacy = path.join(tmp.path, "project", "legacy")
await text(path.join(legacy, "storage", "session", "message", "probe", "0.json"), "{")
await withStorage(tmp.path, async (run) => {
expect(await run(Storage.Service.use((svc) => svc.list(["project"])))).toEqual([])
})
expect(await exists(path.join(storage, "migration"))).toBe(false)
})
})

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/plugin",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/sdk",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/slack",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/ui",
"version": "1.3.8",
"version": "1.3.9",
"type": "module",
"license": "MIT",
"exports": {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/util",
"version": "1.3.8",
"version": "1.3.9",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -2,7 +2,7 @@
"name": "@opencode-ai/web",
"type": "module",
"license": "MIT",
"version": "1.3.8",
"version": "1.3.9",
"scripts": {
"dev": "astro dev",
"dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev",

View File

@@ -1,249 +1,40 @@
#!/usr/bin/env bun
import { $ } from "bun"
import { rm } from "fs/promises"
import path from "path"
import { parseArgs } from "util"
type Release = {
tag_name: string
draft: boolean
}
const root = path.resolve(import.meta.dir, "..")
const file = path.join(root, "UPCOMING_CHANGELOG.md")
const { values, positionals } = parseArgs({
args: Bun.argv.slice(2),
options: {
from: { type: "string", short: "f" },
to: { type: "string", short: "t" },
variant: { type: "string", default: "low" },
quiet: { type: "boolean", default: false },
print: { type: "boolean", default: false },
help: { type: "boolean", short: "h", default: false },
},
allowPositionals: true,
})
const args = [...positionals]
type Commit = {
hash: string
author: string | null
message: string
areas: Set<string>
}
if (values.from) args.push("--from", values.from)
if (values.to) args.push("--to", values.to)
type User = Map<string, Set<string>>
type Diff = {
sha: string
login: string | null
message: string
}
const repo = process.env.GH_REPO ?? "anomalyco/opencode"
const bot = ["actions-user", "opencode", "opencode-agent[bot]"]
const team = [
...(await Bun.file(new URL("../.github/TEAM_MEMBERS", import.meta.url))
.text()
.then((x) => x.split(/\r?\n/).map((x) => x.trim()))
.then((x) => x.filter((x) => x && !x.startsWith("#")))),
...bot,
]
// Changelog section headings in display order.
const order = ["Core", "TUI", "Desktop", "SDK", "Extensions"] as const
// Maps a commit "area" (derived from which paths a commit touched) to its section.
const sections = {
  core: "Core",
  tui: "TUI",
  app: "Desktop",
  tauri: "Desktop",
  sdk: "SDK",
  plugin: "SDK",
  "extensions/zed": "Extensions",
  "extensions/vscode": "Extensions",
  github: "Extensions",
} as const
// Normalize a user-supplied ref: a bare semver like "1.2.3" (optionally with a
// pre-release/build suffix) gets the "v" tag prefix; HEAD, tags that already
// start with "v", and anything else pass through unchanged.
function ref(input: string) {
  const semver = /^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/
  return semver.test(input) ? `v${input}` : input
}
// Fetch up to 100 releases via the GitHub API and return the first non-draft
// tag (most recent first, per API ordering) with its "v" prefix stripped.
// Throws when no non-draft release exists.
async function latest() {
  const data = await $`gh api "/repos/${repo}/releases?per_page=100"`.json()
  const release = (data as Release[]).find((item) => !item.draft)
  if (!release) throw new Error("No releases found")
  return release.tag_name.replace(/^v/, "")
}
// Page through the GitHub compare API for `base...head`, collecting
// {sha, login, message} for every commit in the range. Stops on an empty or
// short (final) page.
async function diff(base: string, head: string) {
  const list: Diff[] = []
  for (let page = 1; ; page++) {
    const text =
      await $`gh api "/repos/${repo}/compare/${base}...${head}?per_page=100&page=${page}" --jq '.commits[] | {sha: .sha, login: .author.login, message: .commit.message}'`.text()
    const batch = text
      .split("\n")
      .filter(Boolean)
      .map((line) => JSON.parse(line) as Diff)
    if (batch.length === 0) break
    list.push(...batch)
    if (batch.length < 100) break
  }
  return list
}
// Pick the changelog section for a commit's areas; earlier priority entries win
// when a commit spans several areas, and "Core" is the fallback.
function section(areas: Set<string>) {
  const priority = ["core", "tui", "app", "tauri", "sdk", "plugin", "extensions/zed", "extensions/vscode", "github"]
  const hit = priority.find((area) => areas.has(area))
  return hit ? sections[hit as keyof typeof sections] : "Core"
}
// Cancel out revert pairs: a commit and its `Revert "…"` counterpart remove
// each other regardless of order; unmatched commits (including unmatched
// reverts) survive in encounter order. Duplicate messages overwrite.
function reverted(commits: Commit[]) {
  const seen = new Map<string, Commit>()
  for (const commit of commits) {
    const match = commit.message.match(/^Revert "(.+)"$/)
    if (match) {
      const original = match[1]!
      if (seen.has(original)) {
        seen.delete(original)
      } else {
        seen.set(commit.message, commit)
      }
    } else {
      const counterpart = `Revert "${commit.message}"`
      if (seen.has(counterpart)) {
        seen.delete(counterpart)
      } else {
        seen.set(commit.message, commit)
      }
    }
  }
  return [...seen.values()]
}
// Build the filtered, classified commit list for `from..to`:
// - the compare API supplies author logins and first message lines
// - `git log` restricts hashes to the release-relevant package paths
// - ignore/test/chore/ci/release commits are dropped
// - touched files are bucketed into "areas"; commits hitting none are dropped
// Finally, revert pairs cancel each other via `reverted`.
async function commits(from: string, to: string) {
  const base = ref(from)
  const head = ref(to)
  const data = new Map<string, { login: string | null; message: string }>()
  for (const item of await diff(base, head)) {
    data.set(item.sha, { login: item.login, message: item.message.split("\n")[0] ?? "" })
  }
  const log =
    await $`git log ${base}..${head} --format=%H -- packages/opencode packages/sdk packages/plugin packages/desktop packages/app sdks/vscode packages/extensions github`.text()
  const list: Commit[] = []
  for (const hash of log.split("\n").filter(Boolean)) {
    const item = data.get(hash)
    if (!item) continue
    if (item.message.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
    const diff = await $`git diff-tree --no-commit-id --name-only -r ${hash}`.text()
    const areas = new Set<string>()
    for (const file of diff.split("\n").filter(Boolean)) {
      // Order matters: the cli/cmd prefix must be tested before packages/opencode/.
      if (file.startsWith("packages/opencode/src/cli/cmd/")) areas.add("tui")
      else if (file.startsWith("packages/opencode/")) areas.add("core")
      else if (file.startsWith("packages/desktop/src-tauri/")) areas.add("tauri")
      else if (file.startsWith("packages/desktop/") || file.startsWith("packages/app/")) areas.add("app")
      else if (file.startsWith("packages/sdk/") || file.startsWith("packages/plugin/")) areas.add("sdk")
      else if (file.startsWith("packages/extensions/")) areas.add("extensions/zed")
      else if (file.startsWith("sdks/vscode/") || file.startsWith("github/")) areas.add("extensions/vscode")
    }
    if (areas.size === 0) continue
    list.push({
      hash: hash.slice(0, 7),
      author: item.login,
      message: item.message,
      areas,
    })
  }
  return reverted(list)
}
// Collect non-team, non-bot contributors in the range, mapping each login to
// the set of their commit titles (ignore/test/chore/ci/release titles skipped).
async function contributors(from: string, to: string) {
  const base = ref(from)
  const head = ref(to)
  const users: User = new Map()
  for (const item of await diff(base, head)) {
    const title = item.message.split("\n")[0] ?? ""
    if (!item.login || team.includes(item.login)) continue
    if (title.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
    if (!users.has(item.login)) users.set(item.login, new Set())
    users.get(item.login)!.add(title)
  }
  return users
}
// For a tagged release, extract the previously published "Thank you to …"
// section from its release body so credits can be reused verbatim.
// Returns undefined for HEAD, a missing release, or a body without the section.
async function published(to: string) {
  if (to === "HEAD") return
  const body = await $`gh release view ${ref(to)} --repo ${repo} --json body --jq .body`.text().catch(() => "")
  if (!body) return
  const lines = body.split(/\r?\n/)
  const start = lines.findIndex((line) => line.startsWith("**Thank you to "))
  if (start < 0) return
  return lines.slice(start).join("\n").trim()
}
// Produce contributor-credit lines: reuse the published release section when
// `reuse` allows and one exists, otherwise rebuild from the compare range.
// Returns [] when there are no community contributors.
async function thanks(from: string, to: string, reuse: boolean) {
  const release = reuse ? await published(to) : undefined
  if (release) return release.split(/\r?\n/)
  const users = await contributors(from, to)
  if (users.size === 0) return []
  const lines = [`**Thank you to ${users.size} community contributor${users.size > 1 ? "s" : ""}:**`]
  for (const [name, commits] of users) {
    lines.push(`- @${name}:`)
    for (const commit of commits) lines.push(`  - ${commit}`)
  }
  return lines
}
// Render the final changelog text: range header, commit bullets grouped into
// sections in the fixed `order` (non-team authors attributed with @login),
// a "No notable changes." fallback, the optional contributor-input section,
// and no trailing blank line.
function format(from: string, to: string, list: Commit[], thanks: string[]) {
  const grouped = new Map<string, string[]>()
  for (const title of order) grouped.set(title, [])
  for (const commit of list) {
    const title = section(commit.areas)
    const attr = commit.author && !team.includes(commit.author) ? ` (@${commit.author})` : ""
    grouped.get(title)!.push(`- \`${commit.hash}\` ${commit.message}${attr}`)
  }
  const lines = [`Last release: ${ref(from)}`, `Target ref: ${to}`, ""]
  if (list.length === 0) {
    lines.push("No notable changes.")
  }
  for (const title of order) {
    const entries = grouped.get(title)
    if (!entries || entries.length === 0) continue
    lines.push(`## ${title}`)
    lines.push(...entries)
    lines.push("")
  }
  if (thanks.length > 0) {
    if (lines.at(-1) !== "") lines.push("")
    lines.push("## Community Contributors Input")
    lines.push("")
    lines.push(...thanks)
  }
  if (lines.at(-1) === "") lines.pop()
  return lines.join("\n")
}
if (import.meta.main) {
const { values } = parseArgs({
args: Bun.argv.slice(2),
options: {
from: { type: "string", short: "f" },
to: { type: "string", short: "t", default: "HEAD" },
help: { type: "boolean", short: "h", default: false },
},
})
if (values.help) {
console.log(`
if (values.help) {
console.log(`
Usage: bun script/changelog.ts [options]
Generates UPCOMING_CHANGELOG.md by running the opencode changelog command.
Options:
-f, --from <version> Starting version (default: latest non-draft GitHub release)
-t, --to <ref> Ending ref (default: HEAD)
--variant <name> Thinking variant for opencode run (default: low)
--quiet Suppress opencode command output unless it fails
--print Print the generated UPCOMING_CHANGELOG.md after success
-h, --help Show this help message
Examples:
@@ -251,11 +42,35 @@ Examples:
bun script/changelog.ts --from 1.0.200
bun script/changelog.ts -f 1.0.200 -t 1.0.205
`)
process.exit(0)
}
const to = values.to!
const from = values.from ?? (await latest())
const list = await commits(from, to)
console.log(format(from, to, list, await thanks(from, to, !values.from)))
process.exit(0)
}
await rm(file, { force: true })
const quiet = values.quiet
const cmd = ["opencode", "run"]
cmd.push("--variant", values.variant)
cmd.push("--command", "changelog", "--", ...args)
const proc = Bun.spawn(cmd, {
cwd: root,
stdin: "inherit",
stdout: quiet ? "pipe" : "inherit",
stderr: quiet ? "pipe" : "inherit",
})
const [out, err] = quiet
? await Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()])
: ["", ""]
const code = await proc.exited
if (code === 0) {
if (values.print) process.stdout.write(await Bun.file(file).text())
process.exit(0)
}
if (quiet) {
if (out) process.stdout.write(out)
if (err) process.stderr.write(err)
}
process.exit(code)

261
script/raw-changelog.ts Normal file
View File

@@ -0,0 +1,261 @@
#!/usr/bin/env bun
import { $ } from "bun"
import { parseArgs } from "util"
// Shape of a GitHub release object as returned by the releases API
// (only the fields this script reads).
type Release = {
  tag_name: string
  draft: boolean
}
// One changelog-worthy commit: short hash, GitHub login (null when the
// author could not be resolved), first line of the message, and the
// package "areas" its touched files map to.
type Commit = {
  hash: string
  author: string | null
  message: string
  areas: Set<string>
}
// GitHub login -> set of commit titles; used for the thank-you section.
type User = Map<string, Set<string>>
// Raw commit record emitted by the GitHub compare API (via jq).
type Diff = {
  sha: string
  login: string | null
  message: string
}
// Target repository; overridable for forks via GH_REPO.
const repo = process.env.GH_REPO ?? "anomalyco/opencode"
// Automation accounts whose commits never count as community contributions.
const bot = ["actions-user", "opencode", "opencode-agent[bot]"]
// Team logins: .github/TEAM_MEMBERS (blank lines and #-comments stripped)
// plus the bot accounts above.
const team = [
  ...(await Bun.file(new URL("../.github/TEAM_MEMBERS", import.meta.url))
    .text()
    .then((x) => x.split(/\r?\n/).map((x) => x.trim()))
    .then((x) => x.filter((x) => x && !x.startsWith("#")))),
  ...bot,
]
// Changelog section headings, in render order.
const order = ["Core", "TUI", "Desktop", "SDK", "Extensions"] as const
// Maps a commit "area" (derived from touched file paths) to its section heading.
const sections = {
  core: "Core",
  tui: "TUI",
  app: "Desktop",
  tauri: "Desktop",
  sdk: "SDK",
  plugin: "SDK",
  "extensions/zed": "Extensions",
  "extensions/vscode": "Extensions",
  github: "Extensions",
} as const
function ref(input: string) {
  // Normalize a user-supplied version or ref into a git ref:
  // "HEAD" and anything already v-prefixed pass through; a bare
  // semver (with optional pre-release/build suffix) gains a "v";
  // everything else (branches, shas) is returned untouched.
  if (input === "HEAD" || input.startsWith("v")) return input
  const semver = /^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/
  return semver.test(input) ? `v${input}` : input
}
async function latest() {
  // Resolve the most recent non-draft GitHub release tag, stripped of
  // its leading "v". Throws when the repo has no published releases.
  const releases = (await $`gh api "/repos/${repo}/releases?per_page=100"`.json()) as Release[]
  const found = releases.find((release) => !release.draft)
  if (!found) throw new Error("No releases found")
  return found.tag_name.replace(/^v/, "")
}
// Pages through the GitHub compare API for base...head and returns every
// commit in the range as flat {sha, login, message} records.
async function diff(base: string, head: string) {
  const list: Diff[] = []
  for (let page = 1; ; page++) {
    // --jq flattens each commit to one JSON object per line with only the
    // fields we need; login is null when GitHub cannot resolve the author.
    const text =
      await $`gh api "/repos/${repo}/compare/${base}...${head}?per_page=100&page=${page}" --jq '.commits[] | {sha: .sha, login: .author.login, message: .commit.message}'`.text()
    const batch = text
      .split("\n")
      .filter(Boolean)
      .map((line) => JSON.parse(line) as Diff)
    if (batch.length === 0) break
    list.push(...batch)
    // A short page is necessarily the last one; stop without an extra request.
    if (batch.length < 100) break
  }
  return list
}
function section(areas: Set<string>) {
  // Map a commit's touched areas to a single changelog section: the first
  // hit in priority order wins; commits matching nothing fall back to "Core".
  const priority = ["core", "tui", "app", "tauri", "sdk", "plugin", "extensions/zed", "extensions/vscode", "github"]
  const hit = priority.find((area) => areas.has(area))
  return hit ? sections[hit as keyof typeof sections] : "Core"
}
function reverted(commits: Commit[]) {
  // Cancel out commit/revert pairs (matched by exact message) in either
  // encounter order, preserving the original ordering of the survivors.
  const kept = new Map<string, Commit>()
  for (const commit of commits) {
    const unwrapped = commit.message.match(/^Revert "(.+)"$/)
    if (unwrapped) {
      // A revert: if its target is already kept, the pair annihilates;
      // otherwise keep the revert itself (its target may appear later).
      const target = unwrapped[1]!
      if (kept.has(target)) {
        kept.delete(target)
      } else {
        kept.set(commit.message, commit)
      }
      continue
    }
    // A normal commit: drop it if a matching revert was kept earlier.
    const wrapper = `Revert "${commit.message}"`
    if (kept.has(wrapper)) {
      kept.delete(wrapper)
    } else {
      kept.set(commit.message, commit)
    }
  }
  return [...kept.values()]
}
// Builds the filtered commit list for the range: commits that touched the
// release-relevant package paths, minus housekeeping commits and
// commit/revert pairs (via reverted()).
async function commits(from: string, to: string) {
  const base = ref(from)
  const head = ref(to)
  // Compare-API data keyed by full sha so logins can be attached to git log output;
  // only the first line of each message is kept.
  const data = new Map<string, { login: string | null; message: string }>()
  for (const item of await diff(base, head)) {
    data.set(item.sha, { login: item.login, message: item.message.split("\n")[0] ?? "" })
  }
  // Local git log limited to the release-relevant paths.
  const log =
    await $`git log ${base}..${head} --format=%H -- packages/opencode packages/sdk packages/plugin packages/desktop packages/app sdks/vscode packages/extensions github`.text()
  const list: Commit[] = []
  for (const hash of log.split("\n").filter(Boolean)) {
    const item = data.get(hash)
    // Commits unknown to the GitHub compare range are dropped.
    if (!item) continue
    // Housekeeping prefixes are excluded from the changelog.
    if (item.message.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
    // NOTE(review): this local shadows the diff() helper above; here it is
    // the per-commit file listing.
    const diff = await $`git diff-tree --no-commit-id --name-only -r ${hash}`.text()
    const areas = new Set<string>()
    for (const file of diff.split("\n").filter(Boolean)) {
      // First match wins per file: cli/cmd is TUI, the rest of the core
      // package is Core, src-tauri beats the broader desktop prefix, etc.
      if (file.startsWith("packages/opencode/src/cli/cmd/")) areas.add("tui")
      else if (file.startsWith("packages/opencode/")) areas.add("core")
      else if (file.startsWith("packages/desktop/src-tauri/")) areas.add("tauri")
      else if (file.startsWith("packages/desktop/") || file.startsWith("packages/app/")) areas.add("app")
      else if (file.startsWith("packages/sdk/") || file.startsWith("packages/plugin/")) areas.add("sdk")
      else if (file.startsWith("packages/extensions/")) areas.add("extensions/zed")
      // NOTE(review): github/ files are tagged "extensions/vscode" rather than
      // the "github" area; both map to the Extensions section, but confirm
      // this is intentional.
      else if (file.startsWith("sdks/vscode/") || file.startsWith("github/")) areas.add("extensions/vscode")
    }
    // Files outside every known area (e.g. only the paths above but no match) are skipped.
    if (areas.size === 0) continue
    list.push({
      hash: hash.slice(0, 7),
      author: item.login,
      message: item.message,
      areas,
    })
  }
  return reverted(list)
}
async function contributors(from: string, to: string) {
  // Collect commit titles per non-team GitHub login in the range,
  // skipping housekeeping commits; feeds the thank-you section.
  const users: User = new Map()
  const housekeeping = /^(ignore:|test:|chore:|ci:|release:)/i
  for (const item of await diff(ref(from), ref(to))) {
    const login = item.login
    if (!login || team.includes(login)) continue
    const title = item.message.split("\n")[0] ?? ""
    if (housekeeping.test(title)) continue
    const titles = users.get(login) ?? new Set<string>()
    titles.add(title)
    users.set(login, titles)
  }
  return users
}
async function published(to: string) {
  // Extract the thank-you block from an already-published GitHub release
  // body, if one exists; undefined for HEAD, missing releases, or bodies
  // without a "**Thank you to " section.
  if (to === "HEAD") return
  const body = await $`gh release view ${ref(to)} --repo ${repo} --json body --jq .body`.text().catch(() => "")
  if (!body) return
  const rows = body.split(/\r?\n/)
  const index = rows.findIndex((row) => row.startsWith("**Thank you to "))
  if (index === -1) return
  return rows.slice(index).join("\n").trim()
}
async function thanks(from: string, to: string, reuse: boolean) {
  // Produce the thank-you lines: when allowed, reuse the block already
  // published on the target release; otherwise rebuild it from the
  // community contributors in the range. Empty when there are none.
  if (reuse) {
    const existing = await published(to)
    if (existing) return existing.split(/\r?\n/)
  }
  const users = await contributors(from, to)
  if (users.size === 0) return []
  const plural = users.size > 1 ? "s" : ""
  const lines = [`**Thank you to ${users.size} community contributor${plural}:**`]
  for (const [name, titles] of users) {
    lines.push(`- @${name}:`)
    for (const title of titles) lines.push(`  - ${title}`)
  }
  return lines
}
function format(from: string, to: string, list: Commit[], thanks: string[]) {
  // Render the changelog body: a two-line range header, one "## Section"
  // group per populated section (in fixed `order`), then an optional
  // community thank-you block. Returns a single newline-joined string.
  const buckets = new Map<string, string[]>()
  for (const title of order) buckets.set(title, [])
  for (const commit of list) {
    // Credit non-team authors inline; team commits get no attribution suffix.
    const credit = commit.author && !team.includes(commit.author) ? ` (@${commit.author})` : ""
    buckets.get(section(commit.areas))!.push(`- \`${commit.hash}\` ${commit.message}${credit}`)
  }
  const out = [`Last release: ${ref(from)}`, `Target ref: ${to}`, ""]
  if (list.length === 0) out.push("No notable changes.")
  for (const title of order) {
    const entries = buckets.get(title)
    if (!entries || entries.length === 0) continue
    out.push(`## ${title}`, ...entries, "")
  }
  if (thanks.length > 0) {
    // Ensure a blank separator line before the contributors section.
    if (out.at(-1) !== "") out.push("")
    out.push("## Community Contributors Input", "", ...thanks)
  }
  // Drop a dangling trailing blank line, if any.
  if (out.at(-1) === "") out.pop()
  return out.join("\n")
}
// CLI entry point: prints the raw grouped changelog for a release range
// to stdout. Only runs when executed directly (not when imported).
if (import.meta.main) {
  const { values } = parseArgs({
    args: Bun.argv.slice(2),
    options: {
      from: { type: "string", short: "f" },
      to: { type: "string", short: "t", default: "HEAD" },
      help: { type: "boolean", short: "h", default: false },
    },
  })
  if (values.help) {
    console.log(`
Usage: bun script/raw-changelog.ts [options]
Options:
-f, --from <version> Starting version (default: latest non-draft GitHub release)
-t, --to <ref> Ending ref (default: HEAD)
-h, --help Show this help message
Examples:
bun script/raw-changelog.ts
bun script/raw-changelog.ts --from 1.0.200
bun script/raw-changelog.ts -f 1.0.200 -t 1.0.205
`)
    process.exit(0)
  }
  // parseArgs guarantees `to` via its default, hence the non-null assertion.
  const to = values.to!
  // Default the range start to the latest published (non-draft) release.
  const from = values.from ?? (await latest())
  const list = await commits(from, to)
  // Reuse the published thank-you block only when --from was NOT given
  // (i.e. the range matches the last real release).
  console.log(format(from, to, list, await thanks(from, to, !values.from)))
}

View File

@@ -7,7 +7,7 @@ const output = [`version=${Script.version}`]
if (!Script.preview) {
const sha = process.env.GITHUB_SHA ?? (await $`git rev-parse HEAD`.text()).trim()
await $`opencode run --command changelog -- --to ${sha}`.cwd(process.cwd())
await $`bun script/changelog.ts --to ${sha}`.cwd(process.cwd())
const file = `${process.cwd()}/UPCOMING_CHANGELOG.md`
const body = await Bun.file(file)
.text()

View File

@@ -2,7 +2,7 @@
"name": "opencode",
"displayName": "opencode",
"description": "opencode for VS Code",
"version": "1.3.8",
"version": "1.3.9",
"publisher": "sst-dev",
"repository": {
"type": "git",