Mirror of https://github.com/anomalyco/opencode.git (synced 2026-02-21 16:24:21 +00:00)

Compare commits: 21 commits, snapshot-n...update-sta
| Author | SHA1 | Date |
|---|---|---|
|  | 4ac88762ae |  |
|  | cfe2d30a26 |  |
|  | d2898141be |  |
|  | 1c2416b6de |  |
|  | d86c10816d |  |
|  | 04a634a80d |  |
|  | 1eb6caa3c6 |  |
|  | 1a329ba47d |  |
|  | 8d781b08ce |  |
|  | 8b99ac6513 |  |
|  | 63a469d0ce |  |
|  | ae98be83b3 |  |
|  | a3181d5fbd |  |
|  | 998c8bf3a5 |  |
|  | d2d7a37bca |  |
|  | 8ad60b1ec2 |  |
|  | 01d518708a |  |
|  | ae50f24c06 |  |
|  | d32dd4d7fd |  |
|  | cb5a0de42f |  |
|  | f2090b26c1 |  |
bun.lock (30 changed lines)

@@ -25,7 +25,7 @@
},
"packages/app": {
"name": "@opencode-ai/app",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -75,7 +75,7 @@
},
"packages/console/app": {
"name": "@opencode-ai/console-app",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@cloudflare/vite-plugin": "1.15.2",
"@ibm/plex": "6.4.1",
@@ -109,7 +109,7 @@
},
"packages/console/core": {
"name": "@opencode-ai/console-core",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@aws-sdk/client-sts": "3.782.0",
"@jsx-email/render": "1.1.1",
@@ -136,7 +136,7 @@
},
"packages/console/function": {
"name": "@opencode-ai/console-function",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@ai-sdk/anthropic": "2.0.0",
"@ai-sdk/openai": "2.0.2",
@@ -160,7 +160,7 @@
},
"packages/console/mail": {
"name": "@opencode-ai/console-mail",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",
@@ -184,7 +184,7 @@
},
"packages/desktop": {
"name": "@opencode-ai/desktop",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@opencode-ai/app": "workspace:*",
"@opencode-ai/ui": "workspace:*",
@@ -217,7 +217,7 @@
},
"packages/enterprise": {
"name": "@opencode-ai/enterprise",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@opencode-ai/ui": "workspace:*",
"@opencode-ai/util": "workspace:*",
@@ -246,7 +246,7 @@
},
"packages/function": {
"name": "@opencode-ai/function",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "catalog:",
@@ -262,7 +262,7 @@
},
"packages/opencode": {
"name": "opencode",
- "version": "1.2.7",
+ "version": "1.2.9",
"bin": {
"opencode": "./bin/opencode",
},
@@ -376,7 +376,7 @@
},
"packages/plugin": {
"name": "@opencode-ai/plugin",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"zod": "catalog:",
@@ -396,7 +396,7 @@
},
"packages/sdk/js": {
"name": "@opencode-ai/sdk",
- "version": "1.2.7",
+ "version": "1.2.9",
"devDependencies": {
"@hey-api/openapi-ts": "0.90.10",
"@tsconfig/node22": "catalog:",
@@ -407,7 +407,7 @@
},
"packages/slack": {
"name": "@opencode-ai/slack",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"@slack/bolt": "^3.17.1",
@@ -420,7 +420,7 @@
},
"packages/ui": {
"name": "@opencode-ai/ui",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -462,7 +462,7 @@
},
"packages/util": {
"name": "@opencode-ai/util",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"zod": "catalog:",
},
@@ -473,7 +473,7 @@
},
"packages/web": {
"name": "@opencode-ai/web",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@astrojs/cloudflare": "12.6.3",
"@astrojs/markdown-remark": "6.3.1",

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/app",
- "version": "1.2.7",
+ "version": "1.2.9",
"description": "",
"type": "module",
"exports": {

@@ -418,7 +418,7 @@ export const SettingsGeneral: Component = () => {

return (
<div class="flex flex-col h-full overflow-y-auto no-scrollbar px-4 pb-10 sm:px-10 sm:pb-10">
- <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-raised-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
+ <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
<div class="flex flex-col gap-1 pt-6 pb-8">
<h2 class="text-16-medium text-text-strong">{language.t("settings.tab.general")}</h2>
</div>

@@ -370,7 +370,7 @@ export const SettingsKeybinds: Component = () => {

return (
<div class="flex flex-col h-full overflow-y-auto no-scrollbar px-4 pb-10 sm:px-10 sm:pb-10">
- <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-raised-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
+ <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
<div class="flex flex-col gap-4 pt-6 pb-6 max-w-[720px]">
<div class="flex items-center justify-between gap-4">
<h2 class="text-16-medium text-text-strong">{language.t("settings.shortcuts.title")}</h2>

@@ -59,7 +59,7 @@ export const SettingsModels: Component = () => {

return (
<div class="flex flex-col h-full overflow-y-auto no-scrollbar px-4 pb-10 sm:px-10 sm:pb-10">
- <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-raised-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
+ <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
<div class="flex flex-col gap-4 pt-6 pb-6 max-w-[720px]">
<h2 class="text-16-medium text-text-strong">{language.t("settings.models.title")}</h2>
<div class="flex items-center gap-2 px-3 h-9 rounded-lg bg-surface-base">

@@ -177,7 +177,7 @@ export const SettingsPermissions: Component = () => {

return (
<div class="flex flex-col h-full overflow-y-auto no-scrollbar">
- <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-raised-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
+ <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
<div class="flex flex-col gap-1 px-4 py-8 sm:p-8 max-w-[720px]">
<h2 class="text-16-medium text-text-strong">{language.t("settings.permissions.title")}</h2>
<p class="text-14-regular text-text-weak">{language.t("settings.permissions.description")}</p>

@@ -132,7 +132,7 @@ export const SettingsProviders: Component = () => {

return (
<div class="flex flex-col h-full overflow-y-auto no-scrollbar px-4 pb-10 sm:px-10 sm:pb-10">
- <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-raised-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
+ <div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
<div class="flex flex-col gap-1 pt-6 pb-8 max-w-[720px]">
<h2 class="text-16-medium text-text-strong">{language.t("settings.providers.title")}</h2>
</div>

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-app",
- "version": "1.2.7",
+ "version": "1.2.9",
"type": "module",
"license": "MIT",
"scripts": {

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/console-core",
- "version": "1.2.7",
+ "version": "1.2.9",
"private": true,
"type": "module",
"license": "MIT",

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-function",
- "version": "1.2.7",
+ "version": "1.2.9",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-mail",
- "version": "1.2.7",
+ "version": "1.2.9",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",

@@ -1,7 +1,7 @@
{
"name": "@opencode-ai/desktop",
"private": true,
- "version": "1.2.7",
+ "version": "1.2.9",
"type": "module",
"license": "MIT",
"scripts": {

@@ -40,7 +40,9 @@ use crate::windows::{LoadingWindow, MainWindow};
#[derive(Clone, serde::Serialize, specta::Type, Debug)]
struct ServerReadyData {
url: String,
+ username: Option<String>,
password: Option<String>,
+ is_sidecar: bool
}

#[derive(Clone, Copy, serde::Serialize, specta::Type, Debug)]
@@ -605,6 +607,7 @@ async fn initialize(app: AppHandle) {
child,
health_check,
url,
+ username,
password,
} => {
let app = app.clone();
@@ -631,7 +634,7 @@ async fn initialize(app: AppHandle) {

app.state::<ServerState>().set_child(Some(child));

- Ok(ServerReadyData { url, password })
+ Ok(ServerReadyData { url, username,password, is_sidecar: true })
}
.map(move |res| {
let _ = server_ready_tx.send(res);
@@ -641,7 +644,9 @@ async fn initialize(app: AppHandle) {
ServerConnection::Existing { url } => {
let _ = server_ready_tx.send(Ok(ServerReadyData {
url: url.to_string(),
+ username: None,
password: None,
+ is_sidecar: false,
}));
None
}
@@ -719,6 +724,7 @@ enum ServerConnection {
},
CLI {
url: String,
+ username: Option<String>,
password: Option<String>,
child: CommandChild,
health_check: server::HealthCheck,
@@ -730,11 +736,15 @@ async fn setup_server_connection(app: AppHandle) -> ServerConnection {

tracing::info!(?custom_url, "Attempting server connection");

- if let Some(url) = custom_url
- && server::check_health_or_ask_retry(&app, &url).await
+ if let Some(url) = &custom_url
+ && server::check_health_or_ask_retry(&app, url).await
{
tracing::info!(%url, "Connected to custom server");
- return ServerConnection::Existing { url: url.clone() };
+ // If the default server is already local, no need to also spawn a sidecar
+ if server::is_localhost_url(url) {
+ return ServerConnection::Existing { url: url.clone() };
+ }
+ // Remote default server: fall through and also spawn a local sidecar
}

let local_port = get_sidecar_port();
@@ -755,6 +765,7 @@ async fn setup_server_connection(app: AppHandle) -> ServerConnection {

ServerConnection::CLI {
url: local_url,
+ username: Some("opencode".to_string()),
password: Some(password),
child,
health_check,

@@ -150,7 +150,7 @@ pub async fn check_health(url: &str, password: Option<&str>) -> bool {
return false;
};

- let mut builder = reqwest::Client::builder().timeout(Duration::from_secs(3));
+ let mut builder = reqwest::Client::builder().timeout(Duration::from_secs(7));

if url_is_localhost(&url) {
// Some environments set proxy variables (HTTP_PROXY/HTTPS_PROXY/ALL_PROXY) without
@@ -178,6 +178,10 @@ pub async fn check_health(url: &str, password: Option<&str>) -> bool {
.unwrap_or(false)
}

+ pub fn is_localhost_url(url: &str) -> bool {
+ reqwest::Url::parse(url).is_ok_and(|u| url_is_localhost(&u))
+ }
+
fn url_is_localhost(url: &reqwest::Url) -> bool {
url.host_str().is_some_and(|host| {
host.eq_ignore_ascii_case("localhost")

@@ -35,7 +35,9 @@ export type LoadingWindowComplete = null;

export type ServerReadyData = {
url: string,
+ username: string | null,
password: string | null,
+ is_sidecar: boolean,
};

export type SqliteMigrationProgress = { type: "InProgress"; value: number } | { type: "Done" };

@@ -23,7 +23,7 @@ import { relaunch } from "@tauri-apps/plugin-process"
import { open as shellOpen } from "@tauri-apps/plugin-shell"
import { Store } from "@tauri-apps/plugin-store"
import { check, type Update } from "@tauri-apps/plugin-updater"
- import { type Accessor, createResource, type JSX, onCleanup, onMount, Show } from "solid-js"
+ import { createResource, type JSX, onCleanup, onMount, Show } from "solid-js"
import { render } from "solid-js/web"
import pkg from "../package.json"
import { initI18n, t } from "./i18n"
@@ -31,7 +31,7 @@ import { UPDATER_ENABLED } from "./updater"
import { webviewZoom } from "./webview-zoom"
import "./styles.css"
import { Channel } from "@tauri-apps/api/core"
- import { commands, type InitStep } from "./bindings"
+ import { commands, ServerReadyData, type InitStep } from "./bindings"
import { createMenu } from "./menu"

const root = document.getElementById("root")
@@ -452,16 +452,19 @@ render(() => {
<AppBaseProviders>
<ServerGate>
{(data) => {
- const server: ServerConnection.Sidecar = {
- displayName: "Local Server",
- type: "sidecar",
- variant: "base",
- http: {
- url: data().url,
- username: "opencode",
- password: data().password ?? undefined,
- },
+ const http = {
+ url: data.url,
+ username: data.username ?? undefined,
+ password: data.password ?? undefined,
+ }
+ const server: ServerConnection.Any = data.is_sidecar
+ ? {
+ displayName: "Local Server",
+ type: "sidecar",
+ variant: "base",
+ http,
+ }
+ : { type: "http", http }

function Inner() {
const cmd = useCommand()
@@ -485,10 +488,8 @@ render(() => {
)
}, root!)

- type ServerReadyData = { url: string; password: string | null }

// Gate component that waits for the server to be ready
- function ServerGate(props: { children: (data: Accessor<ServerReadyData>) => JSX.Element }) {
+ function ServerGate(props: { children: (data: ServerReadyData) => JSX.Element }) {
const [serverData] = createResource(() => commands.awaitInitialization(new Channel<InitStep>() as any))

return (
@@ -516,7 +517,7 @@ function ServerGate(props: { children: (data: Accessor<ServerReadyData>) => JSX.
</div>
}
>
- {(data) => props.children(data)}
+ {(data) => props.children(data())}
</Show>
</Show>
)

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/enterprise",
- "version": "1.2.7",
+ "version": "1.2.9",
"private": true,
"type": "module",
"license": "MIT",

@@ -1,7 +1,7 @@
id = "opencode"
name = "OpenCode"
description = "The open source coding agent."
- version = "1.2.7"
+ version = "1.2.9"
schema_version = 1
authors = ["Anomaly"]
repository = "https://github.com/anomalyco/opencode"
@@ -11,26 +11,26 @@ name = "OpenCode"
icon = "./icons/opencode.svg"

[agent_servers.opencode.targets.darwin-aarch64]
- archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.7/opencode-darwin-arm64.zip"
+ archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.9/opencode-darwin-arm64.zip"
cmd = "./opencode"
args = ["acp"]

[agent_servers.opencode.targets.darwin-x86_64]
- archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.7/opencode-darwin-x64.zip"
+ archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.9/opencode-darwin-x64.zip"
cmd = "./opencode"
args = ["acp"]

[agent_servers.opencode.targets.linux-aarch64]
- archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.7/opencode-linux-arm64.tar.gz"
+ archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.9/opencode-linux-arm64.tar.gz"
cmd = "./opencode"
args = ["acp"]

[agent_servers.opencode.targets.linux-x86_64]
- archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.7/opencode-linux-x64.tar.gz"
+ archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.9/opencode-linux-x64.tar.gz"
cmd = "./opencode"
args = ["acp"]

[agent_servers.opencode.targets.windows-x86_64]
- archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.7/opencode-windows-x64.zip"
+ archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.9/opencode-windows-x64.zip"
cmd = "./opencode.exe"
args = ["acp"]

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/function",
- "version": "1.2.7",
+ "version": "1.2.9",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

@@ -1,6 +1,6 @@
{
"$schema": "https://json.schemastore.org/package.json",
- "version": "1.2.7",
+ "version": "1.2.9",
"name": "opencode",
"type": "module",
"license": "MIT",

@@ -6,6 +6,10 @@ import { Database } from "../../storage/db"
import { SessionTable } from "../../session/session.sql"
import { Project } from "../../project/project"
import { Instance } from "../../project/instance"
+ import { inArray } from "drizzle-orm"
+ import { MessageTable, PartTable } from "../../session/session.sql"
+ import type { MessageV2 } from "../../session/message-v2"
+ import { and, eq, gte } from "drizzle-orm"

interface SessionStats {
totalSessions: number
@@ -20,7 +24,7 @@ interface SessionStats {
write: number
}
}
- toolUsage: Record<string, number>
+ toolUsage: Record<string, { calls: number; errors: number }>
modelUsage: Record<
string,
{
@@ -34,6 +38,7 @@
}
}
cost: number
+ toolUsage: Record<string, { calls: number; errors: number }>
}
>
dateRange: {
@@ -62,6 +67,11 @@ export const StatsCommand = cmd({
.option("models", {
describe: "show model statistics (default: hidden). Pass a number to show top N, otherwise shows all",
})
+ .option("model", {
+ describe: "filter models to show (can be used multiple times)",
+ type: "array",
+ string: true,
+ })
.option("project", {
describe: "filter by project (default: all projects, empty string: current project)",
type: "string",
@@ -72,13 +82,20 @@
const stats = await aggregateSessionStats(args.days, args.project)

let modelLimit: number | undefined
+ let modelFilter: string[] | undefined

if (args.models === true) {
modelLimit = Infinity
} else if (typeof args.models === "number") {
modelLimit = args.models
}

- displayStats(stats, args.tools, modelLimit)
+ if (args.model && args.model.length > 0) {
+ modelFilter = args.model as string[]
+ modelLimit = modelLimit ?? Infinity
+ }
+
+ displayStats(stats, args.tools, modelLimit, modelFilter)
})
},
})
@@ -86,14 +103,7 @@ export const StatsCommand = cmd({
async function getCurrentProject(): Promise<Project.Info> {
return Instance.project
}

- async function getAllSessions(): Promise<Session.Info[]> {
- const rows = Database.use((db) => db.select().from(SessionTable).all())
- return rows.map((row) => Session.fromRow(row))
- }
-
export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {
- const sessions = await getAllSessions()
const MS_IN_DAY = 24 * 60 * 60 * 1000

const cutoffTime = (() => {
@@ -112,17 +122,34 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
return days
})()

- let filteredSessions = cutoffTime > 0 ? sessions.filter((session) => session.time.updated >= cutoffTime) : sessions

+ let projectID: string | undefined
if (projectFilter !== undefined) {
if (projectFilter === "") {
const currentProject = await getCurrentProject()
- filteredSessions = filteredSessions.filter((session) => session.projectID === currentProject.id)
+ projectID = currentProject.id
} else {
- filteredSessions = filteredSessions.filter((session) => session.projectID === projectFilter)
+ projectID = projectFilter
}
}

+ const rows = Database.use((db) => {
+ const conditions = []
+ if (cutoffTime > 0) {
+ conditions.push(gte(SessionTable.time_updated, cutoffTime))
+ }
+ if (projectID !== undefined) {
+ conditions.push(eq(SessionTable.project_id, projectID))
+ }
+
+ const baseQuery = db.select().from(SessionTable)
+ if (conditions.length > 0) {
+ return baseQuery.where(and(...conditions)).all()
+ }
+ return baseQuery.all()
+ })
+
+ const filteredSessions = rows.map((row) => Session.fromRow(row))
+
const stats: SessionStats = {
totalSessions: filteredSessions.length,
totalMessages: 0,
@@ -162,16 +189,58 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin

const sessionTotalTokens: number[] = []

- const BATCH_SIZE = 20
+ const BATCH_SIZE = 100
for (let i = 0; i < filteredSessions.length; i += BATCH_SIZE) {
const batch = filteredSessions.slice(i, i + BATCH_SIZE)
+ const sessionIds = batch.map((s) => s.id)

- const batchPromises = batch.map(async (session) => {
- const messages = await Session.messages({ sessionID: session.id })
+ // Bulk fetch messages for this batch of sessions
+ const messageRows = Database.use((db) =>
+ db.select().from(MessageTable).where(inArray(MessageTable.session_id, sessionIds)).all(),
+ )
+
+ // Group messages by session_id
+ const messagesBySession = new Map<string, typeof messageRows>()
+ const messageIds = messageRows.map((r) => r.id)
+
+ for (const row of messageRows) {
+ const msgs = messagesBySession.get(row.session_id) || []
+ msgs.push(row)
+ messagesBySession.set(row.session_id, msgs)
+ }
+
+ // Bulk fetch parts for all these messages
+ let partRows: (typeof PartTable.$inferSelect)[] = []
+ if (messageIds.length > 0) {
+ // Chunk message IDs if there are too many for a single IN clause (SQLite has limits)
+ const PART_BATCH_SIZE = 500
+ for (let j = 0; j < messageIds.length; j += PART_BATCH_SIZE) {
+ const idBatch = messageIds.slice(j, j + PART_BATCH_SIZE)
+ const parts = Database.use((db) =>
+ db.select().from(PartTable).where(inArray(PartTable.message_id, idBatch)).all(),
+ )
+ partRows.push(...parts)
+ }
+ }
+
+ // Group parts by message_id
+ const partsByMessage = new Map<string, MessageV2.Part[]>()
+ for (const row of partRows) {
+ const parts = partsByMessage.get(row.message_id) || []
+ parts.push({ ...row.data, id: row.id, sessionID: row.session_id, messageID: row.message_id } as MessageV2.Part)
+ partsByMessage.set(row.message_id, parts)
+ }
+
+ const batchResults = batch.map((session) => {
+ const rawMessages = messagesBySession.get(session.id) || []
+ const messages = rawMessages.map((row) => ({
+ info: { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info,
+ parts: partsByMessage.get(row.id) || [],
+ }))
+
let sessionCost = 0
let sessionTokens = { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }
- let sessionToolUsage: Record<string, number> = {}
+ let sessionToolUsage: Record<string, { calls: number; errors: number }> = {}
let sessionModelUsage: Record<
string,
{
@@ -185,6 +254,7 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
}
}
cost: number
+ toolUsage: Record<string, { calls: number; errors: number }>
}
> = {}
@@ -198,6 +268,7 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
messages: 0,
tokens: { input: 0, output: 0, cache: { read: 0, write: 0 } },
cost: 0,
+ toolUsage: {},
}
}
sessionModelUsage[modelKey].messages++
@@ -216,11 +287,22 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
sessionModelUsage[modelKey].tokens.cache.read += message.info.tokens.cache?.read || 0
sessionModelUsage[modelKey].tokens.cache.write += message.info.tokens.cache?.write || 0
}
}

for (const part of message.parts) {
if (part.type === "tool" && part.tool) {
- sessionToolUsage[part.tool] = (sessionToolUsage[part.tool] || 0) + 1
+ const isError =
+ part.state && part.state.status === "error" && part.state.error !== "Tool execution aborted"
+
+ if (!sessionToolUsage[part.tool]) sessionToolUsage[part.tool] = { calls: 0, errors: 0 }
+ sessionToolUsage[part.tool].calls++
+ if (isError) sessionToolUsage[part.tool].errors++
+
+ if (!sessionModelUsage[modelKey].toolUsage[part.tool]) {
+ sessionModelUsage[modelKey].toolUsage[part.tool] = { calls: 0, errors: 0 }
+ }
+ sessionModelUsage[modelKey].toolUsage[part.tool].calls++
+ if (isError) sessionModelUsage[modelKey].toolUsage[part.tool].errors++
}
}
}
}
@@ -242,8 +324,6 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
}
})

- const batchResults = await Promise.all(batchPromises)
-
for (const result of batchResults) {
earliestTime = Math.min(earliestTime, result.earliestTime)
latestTime = Math.max(latestTime, result.latestTime)
@@ -258,7 +338,9 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
stats.totalTokens.cache.write += result.sessionTokens.cache.write

for (const [tool, count] of Object.entries(result.sessionToolUsage)) {
- stats.toolUsage[tool] = (stats.toolUsage[tool] || 0) + count
+ if (!stats.toolUsage[tool]) stats.toolUsage[tool] = { calls: 0, errors: 0 }
+ stats.toolUsage[tool].calls += count.calls
+ stats.toolUsage[tool].errors += count.errors
}

for (const [model, usage] of Object.entries(result.sessionModelUsage)) {
@@ -267,6 +349,7 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
messages: 0,
tokens: { input: 0, output: 0, cache: { read: 0, write: 0 } },
cost: 0,
+ toolUsage: {},
}
}
stats.modelUsage[model].messages += usage.messages
@@ -275,6 +358,14 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
stats.modelUsage[model].tokens.cache.read += usage.tokens.cache.read
stats.modelUsage[model].tokens.cache.write += usage.tokens.cache.write
stats.modelUsage[model].cost += usage.cost
+
+ for (const [tool, toolUsage] of Object.entries(usage.toolUsage)) {
+ if (!stats.modelUsage[model].toolUsage[tool]) {
+ stats.modelUsage[model].toolUsage[tool] = { calls: 0, errors: 0 }
+ }
+ stats.modelUsage[model].toolUsage[tool].calls += toolUsage.calls
+ stats.modelUsage[model].toolUsage[tool].errors += toolUsage.errors
+ }
}
}
}
@@ -306,7 +397,7 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
return stats
}

- export function displayStats(stats: SessionStats, toolLimit?: number, modelLimit?: number) {
+ export function displayStats(stats: SessionStats, toolLimit?: number, modelLimit?: number, modelFilter?: string[]) {
const width = 56

function renderRow(label: string, value: string): string {
@@ -346,43 +437,73 @@ export function displayStats(stats: SessionStats, toolLimit?: number, modelLimit
console.log()

// Model Usage section
- if (modelLimit !== undefined && Object.keys(stats.modelUsage).length > 0) {
- const sortedModels = Object.entries(stats.modelUsage).sort(([, a], [, b]) => b.messages - a.messages)
- const modelsToDisplay = modelLimit === Infinity ? sortedModels : sortedModels.slice(0, modelLimit)
-
- console.log("┌────────────────────────────────────────────────────────┐")
- console.log("│ MODEL USAGE │")
- console.log("├────────────────────────────────────────────────────────┤")
-
- for (const [model, usage] of modelsToDisplay) {
- console.log(`│ ${model.padEnd(54)} │`)
- console.log(renderRow(" Messages", usage.messages.toLocaleString()))
- console.log(renderRow(" Input Tokens", formatNumber(usage.tokens.input)))
- console.log(renderRow(" Output Tokens", formatNumber(usage.tokens.output)))
- console.log(renderRow(" Cache Read", formatNumber(usage.tokens.cache.read)))
- console.log(renderRow(" Cache Write", formatNumber(usage.tokens.cache.write)))
- console.log(renderRow(" Cost", `$${usage.cost.toFixed(4)}`))
- console.log("├────────────────────────────────────────────────────────┤")
- // Remove last separator and add bottom border
- process.stdout.write("\x1B[1A") // Move up one line
- console.log("└────────────────────────────────────────────────────────┘")
- }
+ if ((modelLimit !== undefined || modelFilter !== undefined) && Object.keys(stats.modelUsage).length > 0) {
+ let sortedModels = Object.entries(stats.modelUsage).sort(([, a], [, b]) => b.messages - a.messages)
+
+ if (modelFilter && modelFilter.length > 0) {
+ sortedModels = sortedModels.filter(([model]) => modelFilter.some((filter) => model.includes(filter)))
+ }
+
+ const modelsToDisplay =
+ modelLimit === Infinity || modelLimit === undefined ? sortedModels : sortedModels.slice(0, modelLimit)
+
+ if (modelsToDisplay.length > 0) {
+ console.log("┌────────────────────────────────────────────────────────┐")
+ console.log("│ MODEL USAGE │")
+ console.log("├────────────────────────────────────────────────────────┤")
+
+ for (const [model, usage] of modelsToDisplay) {
+ console.log(`│ ${model.padEnd(54)} │`)
+ console.log(renderRow(" Messages", usage.messages.toLocaleString()))
+ console.log(renderRow(" Input Tokens", formatNumber(usage.tokens.input)))
+ console.log(renderRow(" Output Tokens", formatNumber(usage.tokens.output)))
+ console.log(renderRow(" Cache Read", formatNumber(usage.tokens.cache.read)))
+ console.log(renderRow(" Cache Write", formatNumber(usage.tokens.cache.write)))
+ console.log(renderRow(" Cost", `$${usage.cost.toFixed(4)}`))
+
+ if (Object.keys(usage.toolUsage).length > 0) {
+ console.log(`│ │`)
+ console.log(`│ Tool Call Rate Error Rate │`)
+
+ const totalModelTools = Object.values(usage.toolUsage).reduce((sum, t) => sum + t.calls, 0)
+ const sortedTools = Object.entries(usage.toolUsage).sort((a, b) => b[1].calls - a[1].calls)
+
+ for (const [tool, toolStats] of sortedTools) {
+ const callRate = ((toolStats.calls / totalModelTools) * 100).toFixed(1) + "%"
+ const errorRate = toolStats.calls > 0 ? ((toolStats.errors / toolStats.calls) * 100).toFixed(1) + "%" : "0%"
+
+ const toolName = tool.length > 22 ? tool.substring(0, 20) + ".." : tool
+ const paddedTool = toolName.padEnd(24)
+ const callStr = callRate.padStart(13)
+ const errStr = errorRate.padStart(15)
+
+ console.log(`│ ${paddedTool}${callStr}${errStr} │`)
+ }
+ }
+
+ console.log("├────────────────────────────────────────────────────────┤")
+ }
+ // Remove last separator and add bottom border
+ process.stdout.write("\x1B[1A")
+ console.log("└────────────────────────────────────────────────────────┘")
+ }
console.log()

// Tool Usage section
if (Object.keys(stats.toolUsage).length > 0) {
- const sortedTools = Object.entries(stats.toolUsage).sort(([, a], [, b]) => b - a)
+ const sortedTools = Object.entries(stats.toolUsage).sort(([, a], [, b]) => b.calls - a.calls)
const toolsToDisplay = toolLimit ? sortedTools.slice(0, toolLimit) : sortedTools

console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ TOOL USAGE │")
console.log("├────────────────────────────────────────────────────────┤")

- const maxCount = Math.max(...toolsToDisplay.map(([, count]) => count))
- const totalToolUsage = Object.values(stats.toolUsage).reduce((a, b) => a + b, 0)
+ const maxCount = Math.max(...toolsToDisplay.map(([, toolStats]) => toolStats.calls))
+ const totalToolUsage = Object.values(stats.toolUsage).reduce((a, b) => a + b.calls, 0)

- for (const [tool, count] of toolsToDisplay) {
+ for (const [tool, toolStats] of toolsToDisplay) {
+ const count = toolStats.calls
const barLength = Math.max(1, Math.floor((count / maxCount) * 20))
const bar = "█".repeat(barLength)
const percentage = ((count / totalToolUsage) * 100).toFixed(1)

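A minimal usage sketch (not part of the diff) of the reworked stats helpers above; the import path is an assumption, but the call signatures match the exported `aggregateSessionStats` and `displayStats` shown in this file:

```ts
// Hypothetical driver for the new model filter; adjust the import path to wherever the stats command lives.
import { aggregateSessionStats, displayStats } from "./stats"

// Last 30 days, current project only (an empty string means "current project", per the --project option).
const stats = await aggregateSessionStats(30, "")

// Show every tool, lift the model limit, and keep only models whose key contains "claude".
displayStats(stats, undefined, Infinity, ["claude"])
```
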
@@ -2,8 +2,7 @@ import path from "path"
import { Global } from "@/global"
import { Filesystem } from "@/util/filesystem"
import { onMount } from "solid-js"
- import { createStore, produce } from "solid-js/store"
- import { clone } from "remeda"
+ import { createStore, produce, unwrap } from "solid-js/store"
import { createSimpleContext } from "../../context/helper"
import { appendFile, writeFile } from "fs/promises"
import type { AgentPart, FilePart, TextPart } from "@opencode-ai/sdk/v2"
@@ -83,7 +82,7 @@ export const { use: usePromptHistory, provider: PromptHistoryProvider } = create
return store.history.at(store.index)
},
append(item: PromptInfo) {
- const entry = clone(item)
+ const entry = structuredClone(unwrap(item))
let trimmed = false
setStore(
produce((draft) => {

@@ -2,8 +2,7 @@ import path from "path"
import { Global } from "@/global"
import { Filesystem } from "@/util/filesystem"
import { onMount } from "solid-js"
- import { createStore, produce } from "solid-js/store"
- import { clone } from "remeda"
+ import { createStore, produce, unwrap } from "solid-js/store"
import { createSimpleContext } from "../../context/helper"
import { appendFile, writeFile } from "fs/promises"
import type { PromptInfo } from "./history"
@@ -53,7 +52,7 @@ export const { use: usePromptStash, provider: PromptStashProvider } = createSimp
return store.entries
},
push(entry: Omit<StashEntry, "timestamp">) {
- const stash = clone({ ...entry, timestamp: Date.now() })
+ const stash = structuredClone(unwrap({ ...entry, timestamp: Date.now() }))
let trimmed = false
setStore(
produce((draft) => {

@@ -11,7 +11,7 @@ import {
tool,
jsonSchema,
} from "ai"
- import { clone, mergeDeep, pipe } from "remeda"
+ import { mergeDeep, pipe } from "remeda"
import { ProviderTransform } from "@/provider/transform"
import { Config } from "@/config/config"
import { Instance } from "@/project/instance"
@@ -80,15 +80,11 @@ export namespace LLM {
)

const header = system[0]
- const original = clone(system)
await Plugin.trigger(
"experimental.chat.system.transform",
{ sessionID: input.sessionID, model: input.model },
{ system },
)
- if (system.length === 0) {
- system.push(...original)
- }
// rejoin to maintain 2-part structure for caching if header unchanged
if (system.length > 2 && system[0] === header) {
const rest = system.slice(1)

@@ -22,7 +22,6 @@ import PROMPT_PLAN from "../session/prompt/plan.txt"
import BUILD_SWITCH from "../session/prompt/build-switch.txt"
import MAX_STEPS from "../session/prompt/max-steps.txt"
import { defer } from "../util/defer"
- import { clone } from "remeda"
import { ToolRegistry } from "../tool/registry"
import { MCP } from "../mcp"
import { LSP } from "../lsp"
@@ -627,11 +626,9 @@ export namespace SessionPrompt {
})
}

- const sessionMessages = clone(msgs)

// Ephemerally wrap queued user messages with a reminder to stay on track
if (step > 1 && lastFinished) {
- for (const msg of sessionMessages) {
+ for (const msg of msgs) {
if (msg.info.role !== "user" || msg.info.id <= lastFinished.id) continue
for (const part of msg.parts) {
if (part.type !== "text" || part.ignored || part.synthetic) continue
@@ -648,7 +645,7 @@ export namespace SessionPrompt {
}
}

- await Plugin.trigger("experimental.chat.messages.transform", {}, { messages: sessionMessages })
+ await Plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs })

// Build system prompt, adding structured output instruction if needed
const system = [...(await SystemPrompt.environment(model)), ...(await InstructionPrompt.system())]
@@ -664,7 +661,7 @@ export namespace SessionPrompt {
sessionID,
system,
messages: [
- ...MessageV2.toModelMessages(sessionMessages, model),
+ ...MessageV2.toModelMessages(msgs, model),
...(isLastStep
? [
{
@@ -909,7 +906,12 @@ export namespace SessionPrompt {
title: "",
metadata,
output: truncated.content,
- attachments,
+ attachments: attachments.map((attachment) => ({
+ ...attachment,
+ id: Identifier.ascending("part"),
+ sessionID: ctx.sessionID,
+ messageID: input.processor.message.id,
+ })),
content: result.content, // directly return content to preserve ordering when outputting to model
}
}

@@ -307,7 +307,6 @@ describe("session.llm.stream", () => {
expect(url.pathname.startsWith("/v1/")).toBe(true)
expect(url.pathname.endsWith("/chat/completions")).toBe(true)
expect(headers.get("Authorization")).toBe("Bearer test-key")
- expect(headers.get("User-Agent") ?? "").toMatch(/^opencode\//)

expect(body.model).toBe(resolved.api.id)
expect(body.temperature).toBe(0.4)

@@ -1,104 +0,0 @@
(file deleted; its "session.prompt missing file" tests were moved into packages/opencode/test/session/prompt.test.ts, added below)

@@ -1,56 +0,0 @@
(file deleted; its "session.prompt special characters" test was moved into packages/opencode/test/session/prompt.test.ts, added below)

@@ -1,68 +0,0 @@
(file deleted; its "session.prompt agent variant" test was moved into packages/opencode/test/session/prompt.test.ts, added below)

packages/opencode/test/session/prompt.test.ts (new file, 211 lines)
@@ -0,0 +1,211 @@
import path from "path"
import { describe, expect, test } from "bun:test"
import { fileURLToPath } from "url"
import { Instance } from "../../src/project/instance"
import { Session } from "../../src/session"
import { MessageV2 } from "../../src/session/message-v2"
import { SessionPrompt } from "../../src/session/prompt"
import { Log } from "../../src/util/log"
import { tmpdir } from "../fixture/fixture"

Log.init({ print: false })

describe("session.prompt missing file", () => {
test("does not fail the prompt when a file part is missing", async () => {
await using tmp = await tmpdir({
git: true,
config: {
agent: {
build: {
model: "openai/gpt-5.2",
},
},
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})

const missing = path.join(tmp.path, "does-not-exist.ts")
const msg = await SessionPrompt.prompt({
sessionID: session.id,
agent: "build",
noReply: true,
parts: [
{ type: "text", text: "please review @does-not-exist.ts" },
{
type: "file",
mime: "text/plain",
url: `file://${missing}`,
filename: "does-not-exist.ts",
},
],
})

if (msg.info.role !== "user") throw new Error("expected user message")

const hasFailure = msg.parts.some(
(part) => part.type === "text" && part.synthetic && part.text.includes("Read tool failed to read"),
)
expect(hasFailure).toBe(true)

await Session.remove(session.id)
},
})
})

test("keeps stored part order stable when file resolution is async", async () => {
await using tmp = await tmpdir({
git: true,
config: {
agent: {
build: {
model: "openai/gpt-5.2",
},
},
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})

const missing = path.join(tmp.path, "still-missing.ts")
const msg = await SessionPrompt.prompt({
sessionID: session.id,
agent: "build",
noReply: true,
parts: [
{
type: "file",
mime: "text/plain",
url: `file://${missing}`,
filename: "still-missing.ts",
},
{ type: "text", text: "after-file" },
],
})

if (msg.info.role !== "user") throw new Error("expected user message")

const stored = await MessageV2.get({
sessionID: session.id,
messageID: msg.info.id,
})
const text = stored.parts.filter((part) => part.type === "text").map((part) => part.text)

expect(text[0]?.startsWith("Called the Read tool with the following input:")).toBe(true)
expect(text[1]?.includes("Read tool failed to read")).toBe(true)
expect(text[2]).toBe("after-file")

await Session.remove(session.id)
},
})
})
})

describe("session.prompt special characters", () => {
test("handles filenames with # character", async () => {
await using tmp = await tmpdir({
git: true,
init: async (dir) => {
await Bun.write(path.join(dir, "file#name.txt"), "special content\n")
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const template = "Read @file#name.txt"
const parts = await SessionPrompt.resolvePromptParts(template)
const fileParts = parts.filter((part) => part.type === "file")

expect(fileParts.length).toBe(1)
expect(fileParts[0].filename).toBe("file#name.txt")
expect(fileParts[0].url).toContain("%23")

const decodedPath = fileURLToPath(fileParts[0].url)
expect(decodedPath).toBe(path.join(tmp.path, "file#name.txt"))

const message = await SessionPrompt.prompt({
sessionID: session.id,
parts,
noReply: true,
})
const stored = await MessageV2.get({ sessionID: session.id, messageID: message.info.id })
const textParts = stored.parts.filter((part) => part.type === "text")
const hasContent = textParts.some((part) => part.text.includes("special content"))
expect(hasContent).toBe(true)

await Session.remove(session.id)
},
})
})
})

describe("session.prompt agent variant", () => {
test("applies agent variant only when using agent model", async () => {
const prev = process.env.OPENAI_API_KEY
process.env.OPENAI_API_KEY = "test-openai-key"

try {
await using tmp = await tmpdir({
git: true,
config: {
agent: {
build: {
model: "openai/gpt-5.2",
variant: "xhigh",
},
},
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})

const other = await SessionPrompt.prompt({
sessionID: session.id,
agent: "build",
model: { providerID: "opencode", modelID: "kimi-k2.5-free" },
noReply: true,
parts: [{ type: "text", text: "hello" }],
})
if (other.info.role !== "user") throw new Error("expected user message")
expect(other.info.variant).toBeUndefined()

const match = await SessionPrompt.prompt({
sessionID: session.id,
agent: "build",
noReply: true,
parts: [{ type: "text", text: "hello again" }],
})
if (match.info.role !== "user") throw new Error("expected user message")
expect(match.info.model).toEqual({ providerID: "openai", modelID: "gpt-5.2" })
expect(match.info.variant).toBe("xhigh")

const override = await SessionPrompt.prompt({
sessionID: session.id,
agent: "build",
noReply: true,
variant: "high",
parts: [{ type: "text", text: "hello third" }],
})
if (override.info.role !== "user") throw new Error("expected user message")
expect(override.info.variant).toBe("high")

await Session.remove(session.id)
},
})
} finally {
if (prev === undefined) delete process.env.OPENAI_API_KEY
else process.env.OPENAI_API_KEY = prev
}
})
})

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/plugin",
- "version": "1.2.7",
+ "version": "1.2.9",
"type": "module",
"license": "MIT",
"scripts": {

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/sdk",
- "version": "1.2.7",
+ "version": "1.2.9",
"type": "module",
"license": "MIT",
"scripts": {

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/slack",
- "version": "1.2.7",
+ "version": "1.2.9",
"type": "module",
"license": "MIT",
"scripts": {

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/ui",
- "version": "1.2.7",
+ "version": "1.2.9",
"type": "module",
"license": "MIT",
"exports": {

@@ -26,13 +26,16 @@
[data-slot="collapsible-arrow"] {
opacity: 0;
transition: opacity 0.15s ease;
+ will-change: opacity;
+ transform: translateZ(0);
}

[data-slot="collapsible-arrow-icon"] {
display: inline-flex;
color: var(--icon-weaker);
- transform: rotate(-90deg);
+ transform: translateZ(0) rotate(-90deg);
transition: transform 0.15s ease;
+ will-change: transform;
}

&:hover [data-slot="collapsible-arrow"] {
@@ -74,7 +77,7 @@
}

[data-slot="collapsible-arrow-icon"] {
- transform: rotate(0deg);
+ transform: translateZ(0) rotate(0deg);
}
}

@@ -179,6 +179,7 @@

[data-component="text-part"] {
width: 100%;
+ margin-top: 24px;

[data-slot="text-part-body"] {
margin-top: 0;
@@ -227,13 +228,18 @@
[data-component="reasoning-part"] {
width: 100%;
color: var(--text-base);
font-size: var(--font-size-small);
- line-height: var(--line-height-large);
+ line-height: var(--line-height-normal);

[data-component="markdown"] {
margin-top: 24px;
font-style: normal;
+ font-size: inherit;
color: var(--text-weak);

+ strong,
+ b {
+ color: var(--text-weak);
+ }
+
p:has(strong) {
margin-top: 24px;
@@ -129,6 +129,11 @@
gap: 8px;
flex-shrink: 0;

+ [data-slot="collapsible-arrow"] {
+ margin-left: -6px;
+ transform: translateY(2px);
+ }
+
[data-component="diff-changes"][data-variant="bars"] {
transform: translateY(1px);
}

@@ -117,6 +117,7 @@
|
||||
--surface-weak: var(--smoke-light-alpha-3);
|
||||
--surface-weaker: var(--smoke-light-alpha-4);
|
||||
--surface-strong: #ffffff;
|
||||
--surface-stronger-non-alpha: var(--surface-raised-stronger-non-alpha);
|
||||
--surface-raised-stronger-non-alpha: var(--white);
|
||||
--surface-brand-base: var(--yuzu-light-9);
|
||||
--surface-brand-hover: var(--yuzu-light-10);
|
||||
@@ -375,6 +376,7 @@
|
||||
--surface-weak: var(--smoke-dark-alpha-4);
|
||||
--surface-weaker: var(--smoke-dark-alpha-5);
|
||||
--surface-strong: var(--smoke-dark-alpha-7);
|
||||
--surface-stronger-non-alpha: var(--surface-raised-stronger-non-alpha);
|
||||
--surface-raised-stronger-non-alpha: var(--smoke-dark-3);
|
||||
--surface-brand-base: var(--yuzu-light-9);
|
||||
--surface-brand-hover: var(--yuzu-light-10);
|
||||
|
||||
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/util",
"version": "1.2.7",
"version": "1.2.9",
"private": true,
"type": "module",
"license": "MIT",

@@ -178,7 +178,30 @@ export default defineConfig({
"network",
"enterprise",
"troubleshooting",
"windows-wsl",
{
label: "Windows",
translations: {
en: "Windows",
ar: "Windows",
"bs-BA": "Windows",
"da-DK": "Windows",
"de-DE": "Windows",
"es-ES": "Windows",
"fr-FR": "Windows",
"it-IT": "Windows",
"ja-JP": "Windows",
"ko-KR": "Windows",
"nb-NO": "Windows",
"pl-PL": "Windows",
"pt-BR": "Windows",
"ru-RU": "Windows",
"th-TH": "Windows",
"tr-TR": "Windows",
"zh-CN": "Windows",
"zh-TW": "Windows",
},
link: "windows-wsl",
},
{
label: "Usage",
translations: {

@@ -2,7 +2,7 @@
"name": "@opencode-ai/web",
"type": "module",
"license": "MIT",
"version": "1.2.7",
"version": "1.2.9",
"scripts": {
"dev": "astro dev",
"dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev",

@@ -79,6 +79,32 @@ This creates two tools: `math_add` and `math_multiply`.

---

#### Name collisions with built-in tools

Custom tools are keyed by tool name. If a custom tool uses the same name as a built-in tool, the custom tool takes precedence.

For example, this file replaces the built-in `bash` tool:

```ts title=".opencode/tools/bash.ts"
import { tool } from "@opencode-ai/plugin"

export default tool({
description: "Restricted bash wrapper",
args: {
command: tool.schema.string(),
},
async execute(args) {
return `blocked: ${args.command}`
},
})
```

:::note
Prefer unique names unless you intentionally want to replace a built-in tool. If you want to disable a built-in tool rather than override it, use [permissions](/docs/permissions).
:::
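
For the permissions route, here is a minimal sketch of what that might look like. The `permission` block and the `"deny"` value are assumptions based on the permissions docs rather than something shown on this page, so treat it as illustrative only.

```json title="opencode.json"
{
  "permission": {
    "bash": "deny"
  }
}
```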

---

### Arguments

You can use `tool.schema`, which is just [Zod](https://zod.dev), to define argument types.
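
Since `tool.schema` is Zod, the usual combinators apply. The sketch below is illustrative rather than taken from these docs: the file name, tool name, and fields are hypothetical.

```ts title=".opencode/tools/forecast.ts"
import { tool } from "@opencode-ai/plugin"

// Hypothetical tool: shows string, bounded number, enum, and optional args.
export default tool({
  description: "Return a canned forecast for a city",
  args: {
    city: tool.schema.string().describe("City name"),
    days: tool.schema.number().int().min(1).max(7).describe("Number of days"),
    units: tool.schema.enum(["metric", "imperial"]).optional(),
  },
  async execute(args) {
    return `forecast for ${args.city}: sunny for ${args.days} day(s)`
  },
})
```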

@@ -135,6 +135,8 @@ Per usare Amazon Bedrock con OpenCode:

2. **Configura l'autenticazione** usando uno dei seguenti metodi:

***

#### Variabili d'ambiente (Avvio rapido)

Imposta una di queste variabili d'ambiente mentre esegui opencode:
@@ -157,6 +159,8 @@ Per usare Amazon Bedrock con OpenCode:
export AWS_REGION=us-east-1
```

***

#### File di configurazione (Consigliato)

Per configurazione specifica del progetto o persistente, usa `opencode.json`:
@@ -184,6 +188,8 @@ Per usare Amazon Bedrock con OpenCode:
Le opzioni del file di configurazione hanno la precedenza sulle variabili d'ambiente.
:::

***

#### Avanzato: VPC Endpoints

Se stai usando VPC endpoints per Bedrock:
@@ -207,12 +213,16 @@ Per usare Amazon Bedrock con OpenCode:
L'opzione `endpoint` è un alias per l'opzione generica `baseURL`, usando terminologia specifica AWS. Se vengono specificati sia `endpoint` sia `baseURL`, `endpoint` ha la precedenza.
:::

***

#### Metodi di autenticazione
- **`AWS_ACCESS_KEY_ID` / `AWS_SECRET_ACCESS_KEY`**: Crea un utente IAM e genera chiavi di accesso nella Console AWS
- **`AWS_PROFILE`**: Usa profili nominati da `~/.aws/credentials`. Configura prima con `aws configure --profile my-profile` o `aws sso login`
- **`AWS_BEARER_TOKEN_BEDROCK`**: Genera chiavi API a lungo termine dalla console Amazon Bedrock
- **`AWS_WEB_IDENTITY_TOKEN_FILE` / `AWS_ROLE_ARN`**: Per EKS IRSA (IAM Roles for Service Accounts) o altri ambienti Kubernetes con federazione OIDC. Queste variabili d'ambiente vengono iniettate automaticamente da Kubernetes quando usi le annotazioni del service account.

***

#### Precedenza autenticazione

Amazon Bedrock usa la seguente priorità di autenticazione:
@@ -230,7 +240,8 @@ Per usare Amazon Bedrock con OpenCode:
```

:::note
Per profili di inferenza personalizzati, usa il nome del modello e del provider nella chiave e imposta la proprietà `id` all'arn. Questo assicura una cache corretta:
Per profili di inferenza personalizzati, usa il nome del modello e del provider nella chiave e imposta la proprietà `id` all'arn. Questo assicura una cache corretta.
:::

```json title="opencode.json"
{
@@ -248,8 +259,6 @@ Per profili di inferenza personalizzati, usa il nome del modello e del provider
}
```

:::

---

### Anthropic
@@ -1161,6 +1170,8 @@ Per usare Kimi K2 di Moonshot AI:
/models
```

---

### MiniMax

1. Vai alla [MiniMax API Console](https://platform.minimax.io/login), crea un account e genera una chiave API.

@@ -3,7 +3,7 @@ title: 엔터프라이즈
description: 조직에서 OpenCode를 안전하게 사용하는 방법입니다.
---

import config from "../../../config.mjs"
import config from "../../../../config.mjs"
export const email = `mailto:${config.email}`

OpenCode Enterprise는 코드와 데이터가 조직의 인프라 밖으로 나가지 않도록 보장하려는 조직을 위한 기능입니다. SSO 및 내부 AI gateway와 연동되는 중앙 config를 사용해 이를 구현할 수 있습니다.

@@ -131,6 +131,8 @@ OpenCode로 Amazon Bedrock을 사용하려면:

2. 다음 방법 중 하나를 사용하여 **설정**합니다:

---

### 환경 변수 (빠른 시작)

OpenCode를 실행하는 동안 다음 환경 변수 중 하나를 설정합니다:
@@ -153,6 +155,8 @@ OpenCode를 실행하는 동안 다음 환경 변수 중 하나를 설정합니
export AWS_REGION=us-east-1
```

---

#### 설정 파일 (권장)

프로젝트별 또는 영구 구성을 위해 `opencode.json`을 사용하십시오.
@@ -181,6 +185,8 @@ OpenCode를 실행하는 동안 다음 환경 변수 중 하나를 설정합니
구성 파일 옵션은 환경 변수보다 우선 순위가 높습니다.
:::

---

#### 고급: VPC 엔드포인트

Bedrock의 VPC 엔드포인트를 사용하는 경우:
@@ -204,6 +210,8 @@ Bedrock의 VPC 엔드포인트를 사용하는 경우:
`endpoint` 옵션은 일반적인 `baseURL` 옵션의 별칭입니다. `endpoint`와 `baseURL` 둘 다 지정된 경우 `endpoint`가 우선합니다.
:::

---

#### 인증 방법

- **`AWS_ACCESS_KEY_ID`/`AWS_SECRET_ACCESS_KEY`**: IAM 사용자 및 AWS 콘솔에서 액세스 키 생성
@@ -211,6 +219,8 @@ Bedrock의 VPC 엔드포인트를 사용하는 경우:
- **`AWS_BEARER_TOKEN_BEDROCK`**: Amazon Bedrock 콘솔에서 임시 API 키 생성
- **`AWS_WEB_IDENTITY_TOKEN_FILE` / `AWS_ROLE_ARN`**: EKS IRSA (서비스 계정용 IAM 역할) 또는 다른 Kubernetes 환경의 OIDC 연동. 이 환경 변수는 서비스 계정을 사용할 때 Kubernetes에 의해 자동으로 주입됩니다.

---

#### 인증 우선 순위

Amazon Bedrock은 다음과 같은 인증 우선 순위를 사용합니다.
@@ -229,7 +239,8 @@ Amazon Bedrock은 다음과 같은 인증 우선 순위를 사용합니다.
```

:::note
사용자 정의 추론 프로필(custom inference profiles)의 경우, 키에 모델과 공급자 이름을 사용하고 `id` 속성에 ARN을 설정하십시오. 이렇게 하면 올바른 캐싱이 보장됩니다:
사용자 정의 추론 프로필(custom inference profiles)의 경우, 키에 모델과 공급자 이름을 사용하고 `id` 속성에 ARN을 설정하십시오. 이렇게 하면 올바른 캐싱이 보장됩니다.
:::

```json title="opencode.json"
{
@@ -247,8 +258,6 @@ Amazon Bedrock은 다음과 같은 인증 우선 순위를 사용합니다.
}
```

:::

---

#### Anthropic
@@ -657,6 +666,8 @@ GitLab Duo는 GitLab의 Anthropic 프록시를 통해 기본 도구 호출 기

**OAuth**를 선택하면 브라우저에서 권한 부여를 요청합니다.

---

### 개인 액세스 토큰 사용

1. [GitLab User Settings > Access Tokens](https://gitlab.com/-/user_settings/personal_access_tokens)로 이동

@@ -308,6 +308,10 @@ The `tool` helper creates a custom tool that opencode can call. It takes a Zod s

Your custom tools will be available to opencode alongside built-in tools.

:::note
If a plugin tool uses the same name as a built-in tool, the plugin tool takes precedence.
:::
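
As a sketch of what relying on that precedence could look like, the plugin below reuses the `tool` helper to shadow the built-in `bash` tool. The exact hook shape (a `tool` record returned from the plugin function) is an assumption inferred from these docs, so verify it against the plugin reference before depending on it.

```ts
import { type Plugin, tool } from "@opencode-ai/plugin"

// Assumed hook shape: returning a `tool` record keyed by tool name.
export const RestrictedBashPlugin: Plugin = async () => {
  return {
    tool: {
      // Same name as the built-in `bash` tool, so this one takes precedence.
      bash: tool({
        description: "Restricted bash wrapper",
        args: {
          command: tool.schema.string(),
        },
        async execute(args) {
          return `blocked: ${args.command}`
        },
      }),
    },
  }
}
```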

---

### Logging

@@ -135,6 +135,8 @@ To use Amazon Bedrock with OpenCode:

2. **Configure authentication** using one of the following methods:

***

#### Environment Variables (Quick Start)

Set one of these environment variables while running opencode:
@@ -157,6 +159,8 @@ To use Amazon Bedrock with OpenCode:
export AWS_REGION=us-east-1
```

***

#### Configuration File (Recommended)

For project-specific or persistent configuration, use `opencode.json`:
@@ -184,6 +188,8 @@ To use Amazon Bedrock with OpenCode:
Configuration file options take precedence over environment variables.
:::

***

#### Advanced: VPC Endpoints

If you're using VPC endpoints for Bedrock:
@@ -207,12 +213,16 @@ To use Amazon Bedrock with OpenCode:
The `endpoint` option is an alias for the generic `baseURL` option, using AWS-specific terminology. If both `endpoint` and `baseURL` are specified, `endpoint` takes precedence.
:::
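
As a rough illustration of the `endpoint` option, a config along these lines could point Bedrock traffic at a VPC endpoint. The provider key and option nesting are assumptions here (the actual config block is elided from this hunk), and the URL is a placeholder.

```json title="opencode.json"
{
  "provider": {
    "amazon-bedrock": {
      "options": {
        "region": "us-east-1",
        "endpoint": "https://<your-vpce-id>.bedrock-runtime.us-east-1.vpce.amazonaws.com"
      }
    }
  }
}
```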

***

#### Authentication Methods
- **`AWS_ACCESS_KEY_ID` / `AWS_SECRET_ACCESS_KEY`**: Create an IAM user and generate access keys in the AWS Console
- **`AWS_PROFILE`**: Use named profiles from `~/.aws/credentials`. First configure with `aws configure --profile my-profile` or `aws sso login`
- **`AWS_BEARER_TOKEN_BEDROCK`**: Generate long-term API keys from the Amazon Bedrock console
- **`AWS_WEB_IDENTITY_TOKEN_FILE` / `AWS_ROLE_ARN`**: For EKS IRSA (IAM Roles for Service Accounts) or other Kubernetes environments with OIDC federation. These environment variables are automatically injected by Kubernetes when using service account annotations.

***

#### Authentication Precedence

Amazon Bedrock uses the following authentication priority:
@@ -230,7 +240,8 @@ To use Amazon Bedrock with OpenCode:
```

:::note
For custom inference profiles, use the model and provider name in the key and set the `id` property to the arn. This ensures correct caching:
For custom inference profiles, use the model and provider name in the key and set the `id` property to the arn. This ensures correct caching.
:::

```json title="opencode.json"
{
@@ -248,8 +259,6 @@ For custom inference profiles, use the model and provider name in the key and se
}
```

:::

---

### Anthropic

@@ -2,7 +2,7 @@
"name": "opencode",
"displayName": "opencode",
"description": "opencode for VS Code",
"version": "1.2.7",
"version": "1.2.9",
"publisher": "sst-dev",
"repository": {
"type": "git",