Merge branch 'dev' into sqlite2

This commit is contained in:
Dax Raad
2026-01-29 13:23:48 -05:00
76 changed files with 1148 additions and 678 deletions

View File

@@ -28,6 +28,10 @@ interface SessionStats {
tokens: {
input: number
output: number
cache: {
read: number
write: number
}
}
cost: number
}
@@ -175,6 +179,10 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
tokens: {
input: number
output: number
cache: {
read: number
write: number
}
}
cost: number
}
@@ -188,7 +196,7 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
if (!sessionModelUsage[modelKey]) {
sessionModelUsage[modelKey] = {
messages: 0,
tokens: { input: 0, output: 0 },
tokens: { input: 0, output: 0, cache: { read: 0, write: 0 } },
cost: 0,
}
}
@@ -205,6 +213,8 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
sessionModelUsage[modelKey].tokens.input += message.info.tokens.input || 0
sessionModelUsage[modelKey].tokens.output +=
(message.info.tokens.output || 0) + (message.info.tokens.reasoning || 0)
sessionModelUsage[modelKey].tokens.cache.read += message.info.tokens.cache?.read || 0
sessionModelUsage[modelKey].tokens.cache.write += message.info.tokens.cache?.write || 0
}
}
@@ -219,7 +229,12 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
messageCount: messages.length,
sessionCost,
sessionTokens,
sessionTotalTokens: sessionTokens.input + sessionTokens.output + sessionTokens.reasoning,
sessionTotalTokens:
sessionTokens.input +
sessionTokens.output +
sessionTokens.reasoning +
sessionTokens.cache.read +
sessionTokens.cache.write,
sessionToolUsage,
sessionModelUsage,
earliestTime: cutoffTime > 0 ? session.time.updated : session.time.created,
@@ -250,13 +265,15 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
if (!stats.modelUsage[model]) {
stats.modelUsage[model] = {
messages: 0,
tokens: { input: 0, output: 0 },
tokens: { input: 0, output: 0, cache: { read: 0, write: 0 } },
cost: 0,
}
}
stats.modelUsage[model].messages += usage.messages
stats.modelUsage[model].tokens.input += usage.tokens.input
stats.modelUsage[model].tokens.output += usage.tokens.output
stats.modelUsage[model].tokens.cache.read += usage.tokens.cache.read
stats.modelUsage[model].tokens.cache.write += usage.tokens.cache.write
stats.modelUsage[model].cost += usage.cost
}
}
@@ -270,7 +287,12 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
}
stats.days = effectiveDays
stats.costPerDay = stats.totalCost / effectiveDays
const totalTokens = stats.totalTokens.input + stats.totalTokens.output + stats.totalTokens.reasoning
const totalTokens =
stats.totalTokens.input +
stats.totalTokens.output +
stats.totalTokens.reasoning +
stats.totalTokens.cache.read +
stats.totalTokens.cache.write
stats.tokensPerSession = filteredSessions.length > 0 ? totalTokens / filteredSessions.length : 0
sessionTotalTokens.sort((a, b) => a - b)
const mid = Math.floor(sessionTotalTokens.length / 2)
@@ -337,6 +359,8 @@ export function displayStats(stats: SessionStats, toolLimit?: number, modelLimit
console.log(renderRow(" Messages", usage.messages.toLocaleString()))
console.log(renderRow(" Input Tokens", formatNumber(usage.tokens.input)))
console.log(renderRow(" Output Tokens", formatNumber(usage.tokens.output)))
console.log(renderRow(" Cache Read", formatNumber(usage.tokens.cache.read)))
console.log(renderRow(" Cache Write", formatNumber(usage.tokens.cache.write)))
console.log(renderRow(" Cost", `$${usage.cost.toFixed(4)}`))
console.log("├────────────────────────────────────────────────────────┤")
}

View File

@@ -93,8 +93,11 @@ export function Prompt(props: PromptProps) {
let promptPartTypeId = 0
sdk.event.on(TuiEvent.PromptAppend.type, (evt) => {
if (!input || input.isDestroyed) return
input.insertText(evt.properties.text)
setTimeout(() => {
// setTimeout is a workaround and needs to be addressed properly
if (!input || input.isDestroyed) return
input.getLayoutNode().markDirty()
input.gotoBufferEnd()
renderer.requestRender()
@@ -924,6 +927,8 @@ export function Prompt(props: PromptProps) {
// Force layout update and render for the pasted content
setTimeout(() => {
// setTimeout is a workaround and needs to be addressed properly
if (!input || input.isDestroyed) return
input.getLayoutNode().markDirty()
renderer.requestRender()
}, 0)
@@ -935,6 +940,8 @@ export function Prompt(props: PromptProps) {
}
props.ref?.(ref)
setTimeout(() => {
// setTimeout is a workaround and needs to be addressed properly
if (!input || input.isDestroyed) return
input.cursorColor = theme.text
}, 0)
}}

View File

@@ -34,9 +34,8 @@ export const { use: useKeybind, provider: KeybindProvider } = createSimpleContex
timeout = setTimeout(() => {
if (!store.leader) return
leader(false)
if (focus) {
focus.focus()
}
if (!focus || focus.isDestroyed) return
focus.focus()
}, 2000)
return
}

View File

@@ -275,7 +275,8 @@ export function Session() {
function toBottom() {
setTimeout(() => {
if (scroll) scroll.scrollTo(scroll.scrollHeight)
if (!scroll || scroll.isDestroyed) return
scroll.scrollTo(scroll.scrollHeight)
}, 50)
}

View File

@@ -68,6 +68,7 @@ export function DialogExportOptions(props: DialogExportOptionsProps) {
onMount(() => {
dialog.setSize("medium")
setTimeout(() => {
if (!textarea || textarea.isDestroyed) return
textarea.focus()
}, 1)
textarea.gotoLineEnd()

View File

@@ -27,6 +27,7 @@ export function DialogPrompt(props: DialogPromptProps) {
onMount(() => {
dialog.setSize("medium")
setTimeout(() => {
if (!textarea || textarea.isDestroyed) return
textarea.focus()
}, 1)
textarea.gotoLineEnd()

View File

@@ -241,7 +241,11 @@ export function DialogSelect<T>(props: DialogSelectProps<T>) {
focusedTextColor={theme.textMuted}
ref={(r) => {
input = r
setTimeout(() => input.focus(), 1)
setTimeout(() => {
if (!input) return
if (input.isDestroyed) return
input.focus()
}, 1)
}}
placeholder={props.placeholder ?? "Search"}
/>

View File

@@ -540,6 +540,7 @@ export namespace Config {
codesearch: PermissionAction.optional(),
lsp: PermissionRule.optional(),
doom_loop: PermissionAction.optional(),
skill: PermissionRule.optional(),
})
.catchall(PermissionRule)
.or(PermissionAction),
@@ -559,6 +560,11 @@ export namespace Config {
})
export type Command = z.infer<typeof Command>
export const Skills = z.object({
paths: z.array(z.string()).optional().describe("Additional paths to skill folders"),
})
export type Skills = z.infer<typeof Skills>
export const Agent = z
.object({
model: z.string().optional(),
@@ -894,6 +900,7 @@ export namespace Config {
.record(z.string(), Command)
.optional()
.describe("Command configuration, see https://opencode.ai/docs/commands"),
skills: Skills.optional().describe("Additional skill folder paths"),
watcher: z
.object({
ignore: z.array(z.string()).optional(),

View File

@@ -14,7 +14,9 @@ export namespace ConfigMarkdown {
return Array.from(template.matchAll(SHELL_REGEX))
}
export function preprocessFrontmatter(content: string): string {
// other coding agents like claude code allow invalid yaml in their
// frontmatter, so we need to fall back to a more permissive parser for those cases
export function fallbackSanitization(content: string): string {
const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---/)
if (!match) return content
@@ -53,7 +55,7 @@ export namespace ConfigMarkdown {
// if value contains a colon, convert to block scalar
if (value.includes(":")) {
result.push(`${key}: |`)
result.push(`${key}: |-`)
result.push(` ${value}`)
continue
}
@@ -66,20 +68,23 @@ export namespace ConfigMarkdown {
}
export async function parse(filePath: string) {
const raw = await Bun.file(filePath).text()
const template = preprocessFrontmatter(raw)
const template = await Bun.file(filePath).text()
try {
const md = matter(template)
return md
} catch (err) {
throw new FrontmatterError(
{
path: filePath,
message: `${filePath}: Failed to parse YAML frontmatter: ${err instanceof Error ? err.message : String(err)}`,
},
{ cause: err },
)
} catch {
try {
return matter(fallbackSanitization(template))
} catch (err) {
throw new FrontmatterError(
{
path: filePath,
message: `${filePath}: Failed to parse YAML frontmatter: ${err instanceof Error ? err.message : String(err)}`,
},
{ cause: err },
)
}
}
}

View File

@@ -209,7 +209,10 @@ export namespace Ripgrep {
hidden?: boolean
follow?: boolean
maxDepth?: number
signal?: AbortSignal
}) {
input.signal?.throwIfAborted()
const args = [await filepath(), "--files", "--glob=!.git/*"]
if (input.follow !== false) args.push("--follow")
if (input.hidden !== false) args.push("--hidden")
@@ -235,6 +238,7 @@ export namespace Ripgrep {
stdout: "pipe",
stderr: "ignore",
maxBuffer: 1024 * 1024 * 20,
signal: input.signal,
})
const reader = proc.stdout.getReader()
@@ -243,6 +247,8 @@ export namespace Ripgrep {
try {
while (true) {
input.signal?.throwIfAborted()
const { done, value } = await reader.read()
if (done) break
@@ -261,11 +267,13 @@ export namespace Ripgrep {
reader.releaseLock()
await proc.exited
}
input.signal?.throwIfAborted()
}
export async function tree(input: { cwd: string; limit?: number }) {
export async function tree(input: { cwd: string; limit?: number; signal?: AbortSignal }) {
log.info("tree", input)
const files = await Array.fromAsync(Ripgrep.files({ cwd: input.cwd }))
const files = await Array.fromAsync(Ripgrep.files({ cwd: input.cwd, signal: input.signal }))
interface Node {
path: string[]
children: Node[]

View File

@@ -38,7 +38,6 @@ export namespace Flag {
export const OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT = truthy("OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT")
export const OPENCODE_ENABLE_EXA =
truthy("OPENCODE_ENABLE_EXA") || OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_EXA")
export const OPENCODE_EXPERIMENTAL_BASH_MAX_OUTPUT_LENGTH = number("OPENCODE_EXPERIMENTAL_BASH_MAX_OUTPUT_LENGTH")
export const OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS = number("OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS")
export const OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX = number("OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX")
export const OPENCODE_EXPERIMENTAL_OXFMT = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_OXFMT")

View File

@@ -40,22 +40,25 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
},
}
// TODO: re-enable once messages api has higher rate limits
// TODO: move some of this hackiness to models.dev presets once we have a better grasp of things here...
const base = baseURL ?? model.api.url
const claude = model.id.includes("claude")
const url = iife(() => {
if (!claude) return base
if (base.endsWith("/v1")) return base
if (base.endsWith("/")) return `${base}v1`
return `${base}/v1`
})
// const base = baseURL ?? model.api.url
// const claude = model.id.includes("claude")
// const url = iife(() => {
// if (!claude) return base
// if (base.endsWith("/v1")) return base
// if (base.endsWith("/")) return `${base}v1`
// return `${base}/v1`
// })
model.api.url = url
model.api.npm = claude ? "@ai-sdk/anthropic" : "@ai-sdk/github-copilot"
// model.api.url = url
// model.api.npm = claude ? "@ai-sdk/anthropic" : "@ai-sdk/github-copilot"
model.api.npm = "@ai-sdk/github-copilot"
}
}
return {
baseURL,
apiKey: "",
async fetch(request: RequestInfo | URL, init?: RequestInit) {
const info = await getAuth()

View File

@@ -977,7 +977,7 @@ export namespace Provider {
...model.headers,
}
const key = Bun.hash.xxHash32(JSON.stringify({ npm: model.api.npm, options }))
const key = Bun.hash.xxHash32(JSON.stringify({ providerID: model.providerID, npm: model.api.npm, options }))
const existing = s.sdk.get(key)
if (existing) return existing

View File

@@ -284,8 +284,8 @@ export namespace ProviderTransform {
if (id.includes("glm-4.7")) return 1.0
if (id.includes("minimax-m2")) return 1.0
if (id.includes("kimi-k2")) {
// kimi-k2-thinking & kimi-k2.5
if (id.includes("thinking") || id.includes("k2.")) {
// kimi-k2-thinking, kimi-k2.5 & kimi-k2p5
if (id.includes("thinking") || id.includes("k2.") || id.includes("k2p")) {
return 1.0
}
return 0.6
@@ -296,7 +296,7 @@ export namespace ProviderTransform {
export function topP(model: Provider.Model) {
const id = model.id.toLowerCase()
if (id.includes("qwen")) return 1
if (id.includes("minimax-m2") || id.includes("kimi-k2.5") || id.includes("gemini")) {
if (id.includes("minimax-m2") || id.includes("kimi-k2.5") || id.includes("kimi-k2p5") || id.includes("gemini")) {
return 0.95
}
return undefined
@@ -319,7 +319,14 @@ export namespace ProviderTransform {
if (!model.capabilities.reasoning) return {}
const id = model.id.toLowerCase()
if (id.includes("deepseek") || id.includes("minimax") || id.includes("glm") || id.includes("mistral")) return {}
if (
id.includes("deepseek") ||
id.includes("minimax") ||
id.includes("glm") ||
id.includes("mistral") ||
id.includes("kimi")
)
return {}
// see: https://docs.x.ai/docs/guides/reasoning#control-how-hard-the-model-thinks
if (id.includes("grok") && id.includes("grok-3-mini")) {
@@ -428,13 +435,13 @@ export namespace ProviderTransform {
high: {
thinking: {
type: "enabled",
budgetTokens: 16000,
budgetTokens: Math.min(16_000, Math.floor(model.limit.output / 2 - 1)),
},
},
max: {
thinking: {
type: "enabled",
budgetTokens: 31999,
budgetTokens: Math.min(31_999, model.limit.output - 1),
},
},
}

View File

@@ -539,7 +539,7 @@ export namespace Server {
})
response.headers.set(
"Content-Security-Policy",
"default-src 'self'; script-src 'self' 'wasm-unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; connect-src 'self' data:",
"default-src 'self'; script-src 'self' 'wasm-unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; media-src 'self' data:; connect-src 'self' data:",
)
return response
}) as unknown as Hono,

View File

@@ -99,6 +99,16 @@ export namespace Session {
}
}
// Derive the title for a forked session: if the title already carries a
// "(fork #N)" suffix, bump the counter; otherwise append "(fork #1)".
function getForkedTitle(title: string): string {
  const forked = /^(.+) \(fork #(\d+)\)$/.exec(title)
  if (!forked) return `${title} (fork #1)`
  const [, base, count] = forked
  return `${base} (fork #${Number(count) + 1})`
}
export const Info = z
.object({
id: Identifier.schema("session"),
@@ -201,8 +211,12 @@ export namespace Session {
messageID: Identifier.schema("message").optional(),
}),
async (input) => {
const original = await get(input.sessionID)
if (!original) throw new Error("session not found")
const title = getForkedTitle(original.title)
const session = await createNext({
directory: Instance.directory,
title,
})
const msgs = await messages({ sessionID: input.sessionID })
const idMap = new Map<string, string>()

View File

@@ -150,20 +150,14 @@ export namespace LLM {
},
)
const maxOutputTokens = isCodex ? undefined : undefined
log.info("max_output_tokens", {
tokens: ProviderTransform.maxOutputTokens(
input.model.api.npm,
params.options,
input.model.limit.output,
OUTPUT_TOKEN_MAX,
),
modelOptions: params.options,
outputLimit: input.model.limit.output,
})
// tokens = 32000
// outputLimit = 64000
// modelOptions={"reasoningEffort":"minimal"}
const maxOutputTokens = isCodex
? undefined
: ProviderTransform.maxOutputTokens(
input.model.api.npm,
params.options,
input.model.limit.output,
OUTPUT_TOKEN_MAX,
)
const tools = await resolveTools(input)
@@ -270,7 +264,13 @@ export namespace LLM {
extractReasoningMiddleware({ tagName: "think", startWithReasoning: false }),
],
}),
experimental_telemetry: { isEnabled: cfg.experimental?.openTelemetry },
experimental_telemetry: {
isEnabled: cfg.experimental?.openTelemetry,
metadata: {
userId: cfg.username ?? "unknown",
sessionId: input.sessionID,
},
},
})
}

View File

@@ -1,5 +1,6 @@
import z from "zod"
import path from "path"
import os from "os"
import { Config } from "../config/config"
import { Instance } from "../project/instance"
import { NamedError } from "@opencode-ai/util/error"
@@ -40,6 +41,7 @@ export namespace Skill {
const OPENCODE_SKILL_GLOB = new Bun.Glob("{skill,skills}/**/SKILL.md")
const CLAUDE_SKILL_GLOB = new Bun.Glob("skills/**/SKILL.md")
const SKILL_GLOB = new Bun.Glob("**/SKILL.md")
export const state = Instance.state(async () => {
const skills: Record<string, Info> = {}
@@ -122,6 +124,25 @@ export namespace Skill {
}
}
// Scan additional skill paths from config
const config = await Config.get()
for (const skillPath of config.skills?.paths ?? []) {
const expanded = skillPath.startsWith("~/") ? path.join(os.homedir(), skillPath.slice(2)) : skillPath
const resolved = path.isAbsolute(expanded) ? expanded : path.join(Instance.directory, expanded)
if (!(await Filesystem.isDir(resolved))) {
log.warn("skill path not found", { path: resolved })
continue
}
for await (const match of SKILL_GLOB.scan({
cwd: resolved,
absolute: true,
onlyFiles: true,
followSymlinks: true,
})) {
await addSkill(match)
}
}
return skills
})

View File

@@ -38,6 +38,7 @@ export const GlobTool = Tool.define("glob", {
for await (const file of Ripgrep.files({
cwd: search,
glob: [params.pattern],
signal: ctx.abort,
})) {
if (files.length >= limit) {
truncated = true

View File

@@ -54,6 +54,7 @@ export const GrepTool = Tool.define("grep", {
const proc = Bun.spawn([rgPath, ...args], {
stdout: "pipe",
stderr: "pipe",
signal: ctx.abort,
})
const output = await new Response(proc.stdout).text()

View File

@@ -56,7 +56,7 @@ export const ListTool = Tool.define("list", {
const ignoreGlobs = IGNORE_PATTERNS.map((p) => `!${p}*`).concat(params.ignore?.map((p) => `!${p}`) || [])
const files = []
for await (const file of Ripgrep.files({ cwd: searchPath, glob: ignoreGlobs })) {
for await (const file of Ripgrep.files({ cwd: searchPath, glob: ignoreGlobs, signal: ctx.abort })) {
files.push(file)
if (files.length >= LIMIT) break
}

View File

@@ -62,12 +62,11 @@ export const SkillTool = Tool.define("skill", async (ctx) => {
always: [params.name],
metadata: {},
})
// Load and parse skill content
const parsed = await ConfigMarkdown.parse(skill.location)
const content = (await ConfigMarkdown.parse(skill.location)).content
const dir = path.dirname(skill.location)
// Format output similar to plugin pattern
const output = [`## Skill: ${skill.name}`, "", `**Base directory**: ${dir}`, "", parsed.content.trim()].join("\n")
const output = [`## Skill: ${skill.name}`, "", `**Base directory**: ${dir}`, "", content.trim()].join("\n")
return {
title: `Loaded skill: ${skill.name}`,