mirror of
https://github.com/anomalyco/opencode.git
synced 2026-03-31 19:14:39 +00:00
Compare commits
22 Commits
production
...
opencode-s
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
eb05287d73 | ||
|
|
7792060bc1 | ||
|
|
434d82bbe2 | ||
|
|
2929774acb | ||
|
|
6e61a46a84 | ||
|
|
2daf4b805a | ||
|
|
7342e650c0 | ||
|
|
8c2e2ecc95 | ||
|
|
25a2b739e6 | ||
|
|
85c16926c4 | ||
|
|
2e78fdec43 | ||
|
|
1fcb920eb4 | ||
|
|
b1e89c344b | ||
|
|
befbedacdc | ||
|
|
2cc738fb17 | ||
|
|
71b20698bb | ||
|
|
3df18dcde1 | ||
|
|
a898c2ea3a | ||
|
|
bf777298c8 | ||
|
|
93fad99f7f | ||
|
|
057848deb8 | ||
|
|
1de06452d3 |
3
.github/workflows/test.yml
vendored
3
.github/workflows/test.yml
vendored
@@ -100,6 +100,9 @@ jobs:
|
||||
run: bun --cwd packages/app test:e2e:local
|
||||
env:
|
||||
CI: true
|
||||
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
|
||||
OPENCODE_E2E_MODEL: opencode/claude-haiku-4-5
|
||||
OPENCODE_E2E_REQUIRE_PAID: "true"
|
||||
timeout-minutes: 30
|
||||
|
||||
- name: Upload Playwright artifacts
|
||||
|
||||
@@ -1,22 +1,19 @@
|
||||
---
|
||||
model: opencode/kimi-k2.5
|
||||
model: opencode/gpt-5.4
|
||||
---
|
||||
|
||||
Create `UPCOMING_CHANGELOG.md` from the structured changelog input below.
|
||||
If `UPCOMING_CHANGELOG.md` already exists, ignore its current contents completely.
|
||||
Do not preserve, merge, or reuse text from the existing file.
|
||||
|
||||
Any command arguments are passed directly to `bun script/changelog.ts`.
|
||||
Use `--from` / `-f` and `--to` / `-t` to preview a specific release range.
|
||||
|
||||
The input already contains the exact commit range since the last non-draft release.
|
||||
The commits are already filtered to the release-relevant packages and grouped into
|
||||
the release sections. Do not fetch GitHub releases, PRs, or build your own commit list.
|
||||
The input may also include a `## Community Contributors Input` section.
|
||||
|
||||
Before writing any entry you keep, inspect the real diff with
|
||||
`git show --stat --format='' <hash>` or `git show --format='' <hash>` so the
|
||||
summary reflects the actual user-facing change and not just the commit message.
|
||||
`git show --stat --format='' <hash>` or `git show --format='' <hash>` so you can
|
||||
understand the actual code changes and not just the commit message (they may be misleading).
|
||||
Do not use `git log` or author metadata when deciding attribution.
|
||||
|
||||
Rules:
|
||||
@@ -38,7 +35,12 @@ Rules:
|
||||
- Do not add, remove, rewrite, or reorder contributor names or commit titles in that block
|
||||
- Do not derive the thank-you section from the main summary bullets
|
||||
- Do not include the heading `## Community Contributors Input` in the final file
|
||||
- Focus on writing the least words to get your point across - users will skim read the changelog, so we should be precise
|
||||
|
||||
## Changelog Input
|
||||
**Importantly, the changelog is for users (who are at least slightly technical), they may use the TUI, Desktop, SDK, Plugins and so forth. Be thorough in understanding flow on effects may not be immediately apparent. e.g. a package upgrade looks internal but may patch a bug. Or a refactor may also stabilise some race condition that fixes bugs for users. The PR title/body + commit message will give you the authors context, usually containing the outcome not just technical detail**
|
||||
|
||||
!`bun script/changelog.ts $ARGUMENTS`
|
||||
<changelog_input>
|
||||
|
||||
!`bun script/raw-changelog.ts $ARGUMENTS`
|
||||
|
||||
</changelog_input>
|
||||
|
||||
60
bun.lock
60
bun.lock
@@ -26,7 +26,7 @@
|
||||
},
|
||||
"packages/app": {
|
||||
"name": "@opencode-ai/app",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
@@ -79,7 +79,7 @@
|
||||
},
|
||||
"packages/console/app": {
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@cloudflare/vite-plugin": "1.15.2",
|
||||
"@ibm/plex": "6.4.1",
|
||||
@@ -113,7 +113,7 @@
|
||||
},
|
||||
"packages/console/core": {
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-sts": "3.782.0",
|
||||
"@jsx-email/render": "1.1.1",
|
||||
@@ -140,7 +140,7 @@
|
||||
},
|
||||
"packages/console/function": {
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@ai-sdk/anthropic": "3.0.64",
|
||||
"@ai-sdk/openai": "3.0.48",
|
||||
@@ -164,7 +164,7 @@
|
||||
},
|
||||
"packages/console/mail": {
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
@@ -188,7 +188,7 @@
|
||||
},
|
||||
"packages/desktop": {
|
||||
"name": "@opencode-ai/desktop",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@opencode-ai/app": "workspace:*",
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
@@ -221,7 +221,7 @@
|
||||
},
|
||||
"packages/desktop-electron": {
|
||||
"name": "@opencode-ai/desktop-electron",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@opencode-ai/app": "workspace:*",
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
@@ -252,7 +252,7 @@
|
||||
},
|
||||
"packages/enterprise": {
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
"@opencode-ai/util": "workspace:*",
|
||||
@@ -281,7 +281,7 @@
|
||||
},
|
||||
"packages/function": {
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@octokit/auth-app": "8.0.1",
|
||||
"@octokit/rest": "catalog:",
|
||||
@@ -297,7 +297,7 @@
|
||||
},
|
||||
"packages/opencode": {
|
||||
"name": "opencode",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"bin": {
|
||||
"opencode": "./bin/opencode",
|
||||
},
|
||||
@@ -338,8 +338,8 @@
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@opencode-ai/util": "workspace:*",
|
||||
"@openrouter/ai-sdk-provider": "2.3.3",
|
||||
"@opentui/core": "0.1.92",
|
||||
"@opentui/solid": "0.1.92",
|
||||
"@opentui/core": "0.1.93",
|
||||
"@opentui/solid": "0.1.93",
|
||||
"@parcel/watcher": "2.5.1",
|
||||
"@pierre/diffs": "catalog:",
|
||||
"@solid-primitives/event-bus": "1.1.2",
|
||||
@@ -423,22 +423,22 @@
|
||||
},
|
||||
"packages/plugin": {
|
||||
"name": "@opencode-ai/plugin",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"zod": "catalog:",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@opentui/core": "0.1.92",
|
||||
"@opentui/solid": "0.1.92",
|
||||
"@opentui/core": "0.1.93",
|
||||
"@opentui/solid": "0.1.93",
|
||||
"@tsconfig/node22": "catalog:",
|
||||
"@types/node": "catalog:",
|
||||
"@typescript/native-preview": "catalog:",
|
||||
"typescript": "catalog:",
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@opentui/core": ">=0.1.92",
|
||||
"@opentui/solid": ">=0.1.92",
|
||||
"@opentui/core": ">=0.1.93",
|
||||
"@opentui/solid": ">=0.1.93",
|
||||
},
|
||||
"optionalPeers": [
|
||||
"@opentui/core",
|
||||
@@ -457,7 +457,7 @@
|
||||
},
|
||||
"packages/sdk/js": {
|
||||
"name": "@opencode-ai/sdk",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"devDependencies": {
|
||||
"@hey-api/openapi-ts": "0.90.10",
|
||||
"@tsconfig/node22": "catalog:",
|
||||
@@ -468,7 +468,7 @@
|
||||
},
|
||||
"packages/slack": {
|
||||
"name": "@opencode-ai/slack",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@slack/bolt": "^3.17.1",
|
||||
@@ -503,7 +503,7 @@
|
||||
},
|
||||
"packages/ui": {
|
||||
"name": "@opencode-ai/ui",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
@@ -550,7 +550,7 @@
|
||||
},
|
||||
"packages/util": {
|
||||
"name": "@opencode-ai/util",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"zod": "catalog:",
|
||||
},
|
||||
@@ -561,7 +561,7 @@
|
||||
},
|
||||
"packages/web": {
|
||||
"name": "@opencode-ai/web",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@astrojs/cloudflare": "12.6.3",
|
||||
"@astrojs/markdown-remark": "6.3.1",
|
||||
@@ -1461,21 +1461,21 @@
|
||||
|
||||
"@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="],
|
||||
|
||||
"@opentui/core": ["@opentui/core@0.1.92", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.92", "@opentui/core-darwin-x64": "0.1.92", "@opentui/core-linux-arm64": "0.1.92", "@opentui/core-linux-x64": "0.1.92", "@opentui/core-win32-arm64": "0.1.92", "@opentui/core-win32-x64": "0.1.92", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-c+KdYAIH3M8n24RYaor+t7AQtKZ3l84L7xdP7DEaN4xtuYH8W08E6Gi+wUal4g+HSai3HS9irox68yFf0VPAxw=="],
|
||||
"@opentui/core": ["@opentui/core@0.1.93", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.93", "@opentui/core-darwin-x64": "0.1.93", "@opentui/core-linux-arm64": "0.1.93", "@opentui/core-linux-x64": "0.1.93", "@opentui/core-win32-arm64": "0.1.93", "@opentui/core-win32-x64": "0.1.93", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-HlTM16ZiBKN0mPBNMHSILkSrbzNku6Pg/ovIpVVkEPqLeWeSC2bfZS4Uhc0Ej1sckVVVoU9HKBJanfHvpP+pMg=="],
|
||||
|
||||
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.92", "", { "os": "darwin", "cpu": "arm64" }, "sha512-NX/qFRuc7My0pazyOrw9fdTXmU7omXcZzQuHcsaVnwssljaT52UYMrJ7mCKhSo69RhHw0lnGCymTorvz3XBdsA=="],
|
||||
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.93", "", { "os": "darwin", "cpu": "arm64" }, "sha512-4I2mwhXLqRNUv7tu88hA6cBGaGpLZXkAa8W0VqBiGDV+Tx337x4T+vbQ7G57OwKXT787oTrEOF9rOOrGLov6qw=="],
|
||||
|
||||
"@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.92", "", { "os": "darwin", "cpu": "x64" }, "sha512-Zb4jn33hOf167llINKLniOabQIycs14LPOBZnQ6l4khbeeTPVJdG8gy9PhlAyIQygDKmRTFncVlP0RP+L6C7og=="],
|
||||
"@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.93", "", { "os": "darwin", "cpu": "x64" }, "sha512-jvYMgcg47a5qLhSv1DnQiafEWBQ1UukGutmsYV1TvNuhWtuDXYLVy2AhKIHPzbB9JNrV0IpjbxUC8QnJaP3n8g=="],
|
||||
|
||||
"@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.92", "", { "os": "linux", "cpu": "arm64" }, "sha512-4VA1A91OTMPJ3LkAyaxKEZVJsk5jIc3Kz0gV2vip8p2aGLPpYHHpkFZpXP/FyzsnJzoSGftBeA6ya1GKa5bkXg=="],
|
||||
"@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.93", "", { "os": "linux", "cpu": "arm64" }, "sha512-bvFqRcPftmg14iYmMc3d63XC9rhe4yF7pJRApH6klLBKp27WX/LU0iSO4mvyX7qhy65gcmyy4Sj9dl5jNJ+vlA=="],
|
||||
|
||||
"@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.92", "", { "os": "linux", "cpu": "x64" }, "sha512-tr7va8hfKS1uY+TBmulQBoBlwijzJk56K/U/L9/tbHfW7oJctqxPVwEFHIh1HDcOQ3/UhMMWGvMfeG6cFiK8/A=="],
|
||||
"@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.93", "", { "os": "linux", "cpu": "x64" }, "sha512-/wJXhwtNxdcpshrRl1KouyGE54ODAHxRQgBHtnlM/F4bB8cjzOlq2Yc+5cv5DxRz4Q0nQZFCPefwpg2U6ZwNdA=="],
|
||||
|
||||
"@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.92", "", { "os": "win32", "cpu": "arm64" }, "sha512-34YM3uPtDjzUVeSnJWIK2J8mxyduzV7f3mYc4Hub0glNpUdM1jjzF2HvvvnrKK5ElzTsIcno3c3lOYT8yvG1Zg=="],
|
||||
"@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.93", "", { "os": "win32", "cpu": "arm64" }, "sha512-g3PQobfM2yFPSzkBKRKFp8FgTG4ulWyJcU+GYXjyYmxQIT+ZbOU7UfR//ImRq3/FxUAfUC/MhC6WwjqccjEqBw=="],
|
||||
|
||||
"@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.92", "", { "os": "win32", "cpu": "x64" }, "sha512-uk442kA2Vn0mmJHHqk5sPM+Zai/AN9sgl7egekhoEOUx2VK3gxftKsVlx2YVpCHTvTE/S+vnD2WpQaJk2SNjww=="],
|
||||
"@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.93", "", { "os": "win32", "cpu": "x64" }, "sha512-Spllte2W7q+WfB1zVHgHilVJNp+jpp77PkkxTWyMQNvT7vJNt9LABMNjGTGiJBBMkAuKvO0GgFNKxrda7tFKrQ=="],
|
||||
|
||||
"@opentui/solid": ["@opentui/solid@0.1.92", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.92", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.10", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.11" } }, "sha512-0Sx1+6zRpmMJ5oDEY0JS9b9+eGd/Q0fPndNllrQNnp7w2FCjpXmvHdBdq+pFI6kFp01MHq2ZOkUU5zX5/9YMSQ=="],
|
||||
"@opentui/solid": ["@opentui/solid@0.1.93", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.93", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.10", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.11" } }, "sha512-Qx+4qoLSjnRGoo/YY4sZJMyXj09Y5kaAMpVO+65Ax58MMj4TjABN4bOOiRT2KV7sKOMTjxiAgXAIaBuqBBJ0Qg=="],
|
||||
|
||||
"@oslojs/asn1": ["@oslojs/asn1@1.0.0", "", { "dependencies": { "@oslojs/binary": "1.0.0" } }, "sha512-zw/wn0sj0j0QKbIXfIlnEcTviaCzYOY3V5rAyjR6YtOByFtJiT574+8p9Wlach0lZH9fddD4yb9laEAIl4vXQA=="],
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"nodeModules": {
|
||||
"x86_64-linux": "sha256-5w+DwEvUrCly9LHZuTa1yTSD45X56cGJG8sds/N29mU=",
|
||||
"aarch64-linux": "sha256-pLhyzajYinBlFyGWwPypyC8gHEU8S7fVXIs6aqgBmhg=",
|
||||
"aarch64-darwin": "sha256-vN0sXYs7pLtpq7U9SorR2z6st/wMfHA3dybOnwIh1pU=",
|
||||
"x86_64-darwin": "sha256-P8fgyBcZJmY5VbNxNer/EL4r/F28dNxaqheaqNZH488="
|
||||
"x86_64-linux": "sha256-UuVbB5lTRB4bIcaKMc8CLSbQW7m9EjXgxYvxp/uO7Co=",
|
||||
"aarch64-linux": "sha256-8D7ReLRVb7NDd5PQTVxFhRLmlLbfjK007XgIhhpNKoE=",
|
||||
"aarch64-darwin": "sha256-M+z7C/eXfVqwDiGiiwKo/LT/m4dvCjL1Pblsr1kxoyI=",
|
||||
"x86_64-darwin": "sha256-RzZS6GMwYVDPK0W+K/mlebixNMs2+JRkMG9n8OFhd0c="
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,16 @@ import { createSdk, dirSlug, getWorktree, sessionPath } from "./utils"
|
||||
|
||||
export const settingsKey = "settings.v3"
|
||||
|
||||
const seedModel = (() => {
|
||||
const [providerID = "opencode", modelID = "big-pickle"] = (
|
||||
process.env.OPENCODE_E2E_MODEL ?? "opencode/big-pickle"
|
||||
).split("/")
|
||||
return {
|
||||
providerID: providerID || "opencode",
|
||||
modelID: modelID || "big-pickle",
|
||||
}
|
||||
})()
|
||||
|
||||
type TestFixtures = {
|
||||
sdk: ReturnType<typeof createSdk>
|
||||
gotoSession: (sessionID?: string) => Promise<void>
|
||||
@@ -125,7 +135,7 @@ export const test = base.extend<TestFixtures, WorkerFixtures>({
|
||||
|
||||
async function seedStorage(page: Page, input: { directory: string; extra?: string[] }) {
|
||||
await seedProjects(page, input)
|
||||
await page.addInitScript(() => {
|
||||
await page.addInitScript((model: { providerID: string; modelID: string }) => {
|
||||
const win = window as E2EWindow
|
||||
win.__opencode_e2e = {
|
||||
...win.__opencode_e2e,
|
||||
@@ -143,12 +153,12 @@ async function seedStorage(page: Page, input: { directory: string; extra?: strin
|
||||
localStorage.setItem(
|
||||
"opencode.global.dat:model",
|
||||
JSON.stringify({
|
||||
recent: [{ providerID: "opencode", modelID: "big-pickle" }],
|
||||
recent: [model],
|
||||
user: [],
|
||||
variant: {},
|
||||
}),
|
||||
)
|
||||
})
|
||||
}, seedModel)
|
||||
}
|
||||
|
||||
export { expect }
|
||||
|
||||
@@ -234,6 +234,7 @@ async function fileOverflow(page: Parameters<typeof test>[0]["page"]) {
|
||||
}
|
||||
|
||||
test("review applies inline comment clicks without horizontal overflow", async ({ page, withProject }) => {
|
||||
test.skip(true, "Flaky in CI for now.")
|
||||
test.setTimeout(180_000)
|
||||
|
||||
const tag = `review-comment-${Date.now()}`
|
||||
@@ -283,6 +284,7 @@ test("review applies inline comment clicks without horizontal overflow", async (
|
||||
})
|
||||
|
||||
test("review file comments submit on click without clipping actions", async ({ page, withProject }) => {
|
||||
test.skip(true, "Flaky in CI for now.")
|
||||
test.setTimeout(180_000)
|
||||
|
||||
const tag = `review-file-comment-${Date.now()}`
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/app",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"description": "",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
|
||||
@@ -71,7 +71,7 @@ const serverEnv = {
|
||||
OPENCODE_E2E_PROJECT_DIR: repoDir,
|
||||
OPENCODE_E2E_SESSION_TITLE: "E2E Session",
|
||||
OPENCODE_E2E_MESSAGE: "Seeded for UI e2e",
|
||||
OPENCODE_E2E_MODEL: "opencode/gpt-5-nano",
|
||||
OPENCODE_E2E_MODEL: process.env.OPENCODE_E2E_MODEL ?? "opencode/gpt-5-nano",
|
||||
OPENCODE_CLIENT: "app",
|
||||
OPENCODE_STRICT_CONFIG_DEPS: "true",
|
||||
} satisfies Record<string, string>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -10,7 +10,7 @@ if (!stage) throw new Error("Stage is required")
|
||||
const root = path.resolve(process.cwd(), "..", "..", "..")
|
||||
|
||||
// read the secret
|
||||
const ret = await $`bun sst secret list`.cwd(root).text()
|
||||
const ret = await $`bun sst secret list --stage frank`.cwd(root).text()
|
||||
const lines = ret.split("\n")
|
||||
const value = lines.find((line) => line.startsWith("ZEN_LIMITS"))?.split("=")[1]
|
||||
if (!value) throw new Error("ZEN_LIMITS not found")
|
||||
|
||||
@@ -12,7 +12,7 @@ const root = path.resolve(process.cwd(), "..", "..", "..")
|
||||
const PARTS = 30
|
||||
|
||||
// read the secret
|
||||
const ret = await $`bun sst secret list`.cwd(root).text()
|
||||
const ret = await $`bun sst secret list --stage frank`.cwd(root).text()
|
||||
const lines = ret.split("\n")
|
||||
const values = Array.from({ length: PARTS }, (_, i) => {
|
||||
const value = lines
|
||||
|
||||
@@ -6,7 +6,7 @@ import os from "os"
|
||||
import { Subscription } from "../src/subscription"
|
||||
|
||||
const root = path.resolve(process.cwd(), "..", "..", "..")
|
||||
const secrets = await $`bun sst secret list`.cwd(root).text()
|
||||
const secrets = await $`bun sst secret list --stage frank`.cwd(root).text()
|
||||
|
||||
// read value
|
||||
const lines = secrets.split("\n")
|
||||
@@ -25,4 +25,4 @@ const newValue = JSON.stringify(JSON.parse(await tempFile.text()))
|
||||
Subscription.validate(JSON.parse(newValue))
|
||||
|
||||
// update the secret
|
||||
await $`bun sst secret set ZEN_LIMITS ${newValue}`
|
||||
await $`bun sst secret set ZEN_LIMITS ${newValue} --stage frank`.cwd(root)
|
||||
|
||||
@@ -6,7 +6,7 @@ import os from "os"
|
||||
import { ZenData } from "../src/model"
|
||||
|
||||
const root = path.resolve(process.cwd(), "..", "..", "..")
|
||||
const models = await $`bun sst secret list`.cwd(root).text()
|
||||
const models = await $`bun sst secret list --stage frank`.cwd(root).text()
|
||||
const PARTS = 30
|
||||
|
||||
// read the line starting with "ZEN_MODELS"
|
||||
@@ -40,4 +40,4 @@ const newValues = Array.from({ length: PARTS }, (_, i) =>
|
||||
|
||||
const envFile = Bun.file(path.join(os.tmpdir(), `models-${Date.now()}.env`))
|
||||
await envFile.write(newValues.map((v, i) => `ZEN_MODELS${i + 1}="${v.replace(/"/g, '\\"')}"`).join("\n"))
|
||||
await $`bun sst secret load ${envFile.name}`.cwd(root)
|
||||
await $`bun sst secret load ${envFile.name} --stage frank`.cwd(root)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@opencode-ai/desktop-electron",
|
||||
"private": true,
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"homepage": "https://opencode.ai",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@opencode-ai/desktop",
|
||||
"private": true,
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
id = "opencode"
|
||||
name = "OpenCode"
|
||||
description = "The open source coding agent."
|
||||
version = "1.3.8"
|
||||
version = "1.3.10"
|
||||
schema_version = 1
|
||||
authors = ["Anomaly"]
|
||||
repository = "https://github.com/anomalyco/opencode"
|
||||
@@ -11,26 +11,26 @@ name = "OpenCode"
|
||||
icon = "./icons/opencode.svg"
|
||||
|
||||
[agent_servers.opencode.targets.darwin-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-darwin-arm64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.10/opencode-darwin-arm64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.darwin-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-darwin-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.10/opencode-darwin-x64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-linux-arm64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.10/opencode-linux-arm64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-linux-x64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.10/opencode-linux-x64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.windows-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.8/opencode-windows-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.3.10/opencode-windows-x64.zip"
|
||||
cmd = "./opencode.exe"
|
||||
args = ["acp"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"name": "opencode",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
@@ -102,8 +102,8 @@
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@opencode-ai/util": "workspace:*",
|
||||
"@openrouter/ai-sdk-provider": "2.3.3",
|
||||
"@opentui/core": "0.1.92",
|
||||
"@opentui/solid": "0.1.92",
|
||||
"@opentui/core": "0.1.93",
|
||||
"@opentui/solid": "0.1.93",
|
||||
"@parcel/watcher": "2.5.1",
|
||||
"@pierre/diffs": "catalog:",
|
||||
"@solid-primitives/event-bus": "1.1.2",
|
||||
|
||||
@@ -2,6 +2,7 @@ const dir = process.env.OPENCODE_E2E_PROJECT_DIR ?? process.cwd()
|
||||
const title = process.env.OPENCODE_E2E_SESSION_TITLE ?? "E2E Session"
|
||||
const text = process.env.OPENCODE_E2E_MESSAGE ?? "Seeded for UI e2e"
|
||||
const model = process.env.OPENCODE_E2E_MODEL ?? "opencode/gpt-5-nano"
|
||||
const requirePaid = process.env.OPENCODE_E2E_REQUIRE_PAID === "true"
|
||||
const parts = model.split("/")
|
||||
const providerID = parts[0] ?? "opencode"
|
||||
const modelID = parts[1] ?? "gpt-5-nano"
|
||||
@@ -11,6 +12,7 @@ const seed = async () => {
|
||||
const { Instance } = await import("../src/project/instance")
|
||||
const { InstanceBootstrap } = await import("../src/project/bootstrap")
|
||||
const { Config } = await import("../src/config/config")
|
||||
const { Provider } = await import("../src/provider/provider")
|
||||
const { Session } = await import("../src/session")
|
||||
const { MessageID, PartID } = await import("../src/session/schema")
|
||||
const { Project } = await import("../src/project/project")
|
||||
@@ -25,6 +27,19 @@ const seed = async () => {
|
||||
await Config.waitForDependencies()
|
||||
await ToolRegistry.ids()
|
||||
|
||||
if (requirePaid && providerID === "opencode" && !process.env.OPENCODE_API_KEY) {
|
||||
throw new Error("OPENCODE_API_KEY is required when OPENCODE_E2E_REQUIRE_PAID=true")
|
||||
}
|
||||
|
||||
const info = await Provider.getModel(ProviderID.make(providerID), ModelID.make(modelID))
|
||||
if (requirePaid) {
|
||||
const paid =
|
||||
info.cost.input > 0 || info.cost.output > 0 || info.cost.cache.read > 0 || info.cost.cache.write > 0
|
||||
if (!paid) {
|
||||
throw new Error(`OPENCODE_E2E_MODEL must resolve to a paid model: ${providerID}/${modelID}`)
|
||||
}
|
||||
}
|
||||
|
||||
const session = await Session.create({ title })
|
||||
const messageID = MessageID.ascending()
|
||||
const partID = PartID.ascending()
|
||||
|
||||
@@ -210,15 +210,13 @@ Fully migrated (single namespace, InstanceState where needed, flattened facade):
|
||||
- [x] `Vcs` — `project/vcs.ts`
|
||||
- [x] `Worktree` — `worktree/index.ts`
|
||||
|
||||
Still open and likely worth migrating:
|
||||
|
||||
- [x] `Session` — `session/index.ts`
|
||||
- [ ] `SessionProcessor` — blocked by AI SDK v6 PR (#18433)
|
||||
- [ ] `SessionPrompt` — blocked by AI SDK v6 PR (#18433)
|
||||
- [ ] `SessionCompaction` — blocked by AI SDK v6 PR (#18433)
|
||||
- [ ] `Provider` — blocked by AI SDK v6 PR (#18433)
|
||||
- [x] `SessionProcessor` — `session/processor.ts`
|
||||
- [x] `SessionPrompt` — `session/prompt.ts`
|
||||
- [x] `SessionCompaction` — `session/compaction.ts`
|
||||
- [x] `Provider` — `provider/provider.ts`
|
||||
|
||||
Other services not yet migrated:
|
||||
Still open:
|
||||
|
||||
- [ ] `SessionSummary` — `session/summary.ts`
|
||||
- [ ] `SessionTodo` — `session/todo.ts`
|
||||
@@ -235,7 +233,7 @@ Once individual tools are effectified, change `Tool.Info` (`tool/tool.ts`) so `i
|
||||
|
||||
1. Migrate each tool to return Effects
|
||||
2. Update `Tool.define()` factory to work with Effects
|
||||
3. Update `SessionPrompt` to `yield*` tool results instead of `await`ing — blocked by AI SDK v6 PR (#18433)
|
||||
3. Update `SessionPrompt` to `yield*` tool results instead of `await`ing
|
||||
|
||||
Individual tools, ordered by value:
|
||||
|
||||
|
||||
@@ -88,6 +88,7 @@ export default plugin
|
||||
- If package `exports` exists, loader only resolves `./tui` or `./server`; it never falls back to `exports["."]`.
|
||||
- For npm package specs, TUI does not use `package.json` `main` as a fallback entry.
|
||||
- `package.json` `main` is only used for server plugin entrypoint resolution.
|
||||
- If a configured plugin has no target-specific entrypoint, it is skipped with a warning (not a load failure).
|
||||
- If a package supports both server and TUI, use separate files and package `exports` (`./server` and `./tui`) so each target resolves to a target-only module.
|
||||
- File/path plugins must export a non-empty `id`.
|
||||
- npm plugins may omit `id`; package `name` is used.
|
||||
@@ -100,7 +101,10 @@ export default plugin
|
||||
|
||||
## Package manifest and install
|
||||
|
||||
Package manifest is read from `package.json` field `oc-plugin`.
|
||||
Install target detection is inferred from `package.json` entrypoints:
|
||||
|
||||
- `server` target when `exports["./server"]` exists or `main` is set.
|
||||
- `tui` target when `exports["./tui"]` exists.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -108,14 +112,20 @@ Example:
|
||||
{
|
||||
"name": "@acme/opencode-plugin",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"main": "./dist/server.js",
|
||||
"exports": {
|
||||
"./server": {
|
||||
"import": "./dist/server.js",
|
||||
"config": { "custom": true }
|
||||
},
|
||||
"./tui": {
|
||||
"import": "./dist/tui.js",
|
||||
"config": { "compact": true }
|
||||
}
|
||||
},
|
||||
"engines": {
|
||||
"opencode": "^1.0.0"
|
||||
},
|
||||
"oc-plugin": [
|
||||
["server", { "custom": true }],
|
||||
["tui", { "compact": true }]
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -144,12 +154,16 @@ npm plugins can declare a version compatibility range in `package.json` using th
|
||||
- Local installs resolve target dir inside `patchPluginConfig`.
|
||||
- For local scope, path is `<worktree>/.opencode` only when VCS is git and `worktree !== "/"`; otherwise `<directory>/.opencode`.
|
||||
- Root-worktree fallback (`worktree === "/"` uses `<directory>/.opencode`) is covered by regression tests.
|
||||
- `patchPluginConfig` applies all declared manifest targets (`server` and/or `tui`) in one call.
|
||||
- `patchPluginConfig` applies all detected targets (`server` and/or `tui`) in one call.
|
||||
- `patchPluginConfig` returns structured result unions (`ok`, `code`, fields by error kind) instead of custom thrown errors.
|
||||
- `patchPluginConfig` serializes per-target config writes with `Flock.acquire(...)`.
|
||||
- `patchPluginConfig` uses targeted `jsonc-parser` edits, so existing JSONC comments are preserved when plugin entries are added or replaced.
|
||||
- npm plugin package installs are executed with `--ignore-scripts`, so package `install` / `postinstall` lifecycle scripts are not run.
|
||||
- `exports["./server"].config` and `exports["./tui"].config` can provide default plugin options written on first install.
|
||||
- Without `--force`, an already-configured npm package name is a no-op.
|
||||
- With `--force`, replacement matches by package name. If the existing row is `[spec, options]`, those tuple options are kept.
|
||||
- Explicit npm specs with a version suffix (for example `pkg@1.2.3`) are pinned. Runtime install requests that exact version and does not run stale/latest checks for newer registry versions.
|
||||
- Bare npm specs (`pkg`) are treated as `latest` and can refresh when the cached version is stale.
|
||||
- Tuple targets in `oc-plugin` provide default options written into config.
|
||||
- A package can target `server`, `tui`, or both.
|
||||
- If a package targets both, each target must still resolve to a separate target-only module. Do not export `{ server, tui }` from one module.
|
||||
@@ -317,7 +331,6 @@ Slot notes:
|
||||
- `api.plugins.install(spec, { global? })` runs install -> manifest read -> config patch using the same helper flow as CLI install.
|
||||
- `api.plugins.install(...)` returns either `{ ok: false, message, missing? }` or `{ ok: true, dir, tui }`.
|
||||
- `api.plugins.install(...)` does not load plugins into the current session. Call `api.plugins.add(spec)` to load after install.
|
||||
- For packages that declare a tuple `tui` target in `oc-plugin`, `api.plugins.install(...)` stages those tuple options so a following `api.plugins.add(spec)` uses them.
|
||||
- If activation fails, the plugin can remain `enabled=true` and `active=false`.
|
||||
- `api.lifecycle.signal` is aborted before cleanup runs.
|
||||
- `api.lifecycle.onDispose(fn)` registers cleanup and returns an unregister function.
|
||||
|
||||
@@ -50,7 +50,7 @@ export namespace BunProc {
|
||||
}),
|
||||
)
|
||||
|
||||
export async function install(pkg: string, version = "latest") {
|
||||
export async function install(pkg: string, version = "latest", opts?: { ignoreScripts?: boolean }) {
|
||||
// Use lock to ensure only one install at a time
|
||||
using _ = await Lock.write("bun-install")
|
||||
|
||||
@@ -82,6 +82,7 @@ export namespace BunProc {
|
||||
"add",
|
||||
"--force",
|
||||
"--exact",
|
||||
...(opts?.ignoreScripts ? ["--ignore-scripts"] : []),
|
||||
// TODO: get rid of this case (see: https://github.com/oven-sh/bun/issues/19936)
|
||||
...(proxied() || process.env.CI ? ["--no-cache"] : []),
|
||||
"--cwd",
|
||||
|
||||
@@ -114,8 +114,8 @@ export function createPlugTask(input: PlugInput, dep: PlugDeps = defaultPlugDeps
|
||||
|
||||
if (manifest.code === "manifest_no_targets") {
|
||||
inspect.stop("No plugin targets found", 1)
|
||||
dep.log.error(`"${mod}" does not declare supported targets in package.json`)
|
||||
dep.log.info('Expected: "oc-plugin": ["server", "tui"] or tuples like [["tui", { ... }]].')
|
||||
dep.log.error(`"${mod}" does not expose plugin entrypoints in package.json`)
|
||||
dep.log.info('Expected one of: exports["./tui"], exports["./server"], or package.json main for server.')
|
||||
return false
|
||||
}
|
||||
|
||||
|
||||
@@ -250,7 +250,6 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
|
||||
const route = useRoute()
|
||||
const dimensions = useTerminalDimensions()
|
||||
const renderer = useRenderer()
|
||||
renderer.disableStdoutInterception()
|
||||
const dialog = useDialog()
|
||||
const local = useLocal()
|
||||
const kv = useKV()
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { BoxRenderable, TextareaRenderable, MouseEvent, PasteEvent, decodePasteBytes, t, dim, fg } from "@opentui/core"
|
||||
import { BoxRenderable, TextareaRenderable, MouseEvent, PasteEvent, decodePasteBytes } from "@opentui/core"
|
||||
import { createEffect, createMemo, type JSX, onMount, createSignal, onCleanup, on, Show, Switch, Match } from "solid-js"
|
||||
import "opentui-spinner/solid"
|
||||
import path from "path"
|
||||
@@ -809,8 +809,20 @@ export function Prompt(props: PromptProps) {
|
||||
return !!current
|
||||
})
|
||||
|
||||
const suggestion = createMemo(() => {
|
||||
if (!props.sessionID) return
|
||||
if (store.mode !== "normal") return
|
||||
if (store.prompt.input) return
|
||||
const current = status()
|
||||
if (current.type !== "idle") return
|
||||
const value = current.suggestion?.trim()
|
||||
if (!value) return
|
||||
return value
|
||||
})
|
||||
|
||||
const placeholderText = createMemo(() => {
|
||||
if (props.showPlaceholder === false) return undefined
|
||||
if (suggestion()) return suggestion()
|
||||
if (store.mode === "shell") {
|
||||
if (!shell().length) return undefined
|
||||
const example = shell()[store.placeholder % shell().length]
|
||||
@@ -898,6 +910,16 @@ export function Prompt(props: PromptProps) {
|
||||
e.preventDefault()
|
||||
return
|
||||
}
|
||||
if (!store.prompt.input && e.name === "right" && !e.ctrl && !e.meta && !e.shift && !e.super) {
|
||||
const value = suggestion()
|
||||
if (value) {
|
||||
input.setText(value)
|
||||
setStore("prompt", "input", value)
|
||||
input.gotoBufferEnd()
|
||||
e.preventDefault()
|
||||
return
|
||||
}
|
||||
}
|
||||
// Check clipboard for images before terminal-handled paste runs.
|
||||
// This helps terminals that forward Ctrl+V to the app; Windows
|
||||
// Terminal 1.25+ usually handles Ctrl+V before this path.
|
||||
|
||||
@@ -62,8 +62,8 @@
|
||||
"light": "frappeText"
|
||||
},
|
||||
"textMuted": {
|
||||
"dark": "frappeSubtext1",
|
||||
"light": "frappeSubtext1"
|
||||
"dark": "frappeOverlay2",
|
||||
"light": "frappeOverlay2"
|
||||
},
|
||||
"background": {
|
||||
"dark": "frappeBase",
|
||||
|
||||
@@ -62,8 +62,8 @@
|
||||
"light": "macText"
|
||||
},
|
||||
"textMuted": {
|
||||
"dark": "macSubtext1",
|
||||
"light": "macSubtext1"
|
||||
"dark": "macOverlay2",
|
||||
"light": "macOverlay2"
|
||||
},
|
||||
"background": {
|
||||
"dark": "macBase",
|
||||
|
||||
@@ -63,7 +63,7 @@
|
||||
"success": { "dark": "darkGreen", "light": "lightGreen" },
|
||||
"info": { "dark": "darkTeal", "light": "lightTeal" },
|
||||
"text": { "dark": "darkText", "light": "lightText" },
|
||||
"textMuted": { "dark": "darkSubtext1", "light": "lightSubtext1" },
|
||||
"textMuted": { "dark": "darkOverlay2", "light": "lightOverlay2" },
|
||||
"background": { "dark": "darkBase", "light": "lightBase" },
|
||||
"backgroundPanel": { "dark": "darkMantle", "light": "lightMantle" },
|
||||
"backgroundElement": { "dark": "darkCrust", "light": "lightCrust" },
|
||||
|
||||
@@ -87,6 +87,11 @@ function fail(message: string, data: Record<string, unknown>) {
|
||||
console.error(`[tui.plugin] ${text}`, next)
|
||||
}
|
||||
|
||||
function warn(message: string, data: Record<string, unknown>) {
|
||||
log.warn(message, data)
|
||||
console.warn(`[tui.plugin] ${message}`, data)
|
||||
}
|
||||
|
||||
type CleanupResult = { type: "ok" } | { type: "error"; error: unknown } | { type: "timeout" }
|
||||
|
||||
function runCleanup(fn: () => unknown, ms: number): Promise<CleanupResult> {
|
||||
@@ -229,6 +234,15 @@ async function loadExternalPlugin(cfg: TuiConfig.PluginRecord, retry = false): P
|
||||
log.info("loading tui plugin", { path: plan.spec, retry })
|
||||
const resolved = await PluginLoader.resolve(plan, "tui")
|
||||
if (!resolved.ok) {
|
||||
if (resolved.stage === "missing") {
|
||||
warn("tui plugin has no entrypoint", {
|
||||
path: plan.spec,
|
||||
retry,
|
||||
message: resolved.message,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (resolved.stage === "install") {
|
||||
fail("failed to resolve tui plugin", { path: plan.spec, retry, error: resolved.error })
|
||||
return
|
||||
@@ -753,7 +767,6 @@ async function addPluginBySpec(state: RuntimeState | undefined, raw: string) {
|
||||
return [] as PluginLoad[]
|
||||
})
|
||||
if (!ready.length) {
|
||||
fail("failed to add tui plugin", { path: next })
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -824,7 +837,7 @@ async function installPluginBySpec(
|
||||
if (manifest.code === "manifest_no_targets") {
|
||||
return {
|
||||
ok: false,
|
||||
message: `"${spec}" does not declare supported targets in package.json`,
|
||||
message: `"${spec}" does not expose plugin entrypoints in package.json`,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -121,7 +121,10 @@ export namespace Config {
|
||||
const gitignore = path.join(dir, ".gitignore")
|
||||
const ignore = await Filesystem.exists(gitignore)
|
||||
if (!ignore) {
|
||||
await Filesystem.write(gitignore, ["node_modules", "package.json", "bun.lock", ".gitignore"].join("\n"))
|
||||
await Filesystem.write(
|
||||
gitignore,
|
||||
["node_modules", "package.json", "package-lock.json", "bun.lock", ".gitignore"].join("\n"),
|
||||
)
|
||||
}
|
||||
|
||||
// Bun can race cache writes on Windows when installs run in parallel across dirs.
|
||||
|
||||
@@ -71,6 +71,7 @@ export namespace Flag {
|
||||
export const OPENCODE_EXPERIMENTAL_PLAN_MODE = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE")
|
||||
export const OPENCODE_EXPERIMENTAL_WORKSPACES = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES")
|
||||
export const OPENCODE_EXPERIMENTAL_MARKDOWN = !falsy("OPENCODE_EXPERIMENTAL_MARKDOWN")
|
||||
export const OPENCODE_EXPERIMENTAL_NEXT_PROMPT = truthy("OPENCODE_EXPERIMENTAL_NEXT_PROMPT")
|
||||
export const OPENCODE_MODELS_URL = process.env["OPENCODE_MODELS_URL"]
|
||||
export const OPENCODE_MODELS_PATH = process.env["OPENCODE_MODELS_PATH"]
|
||||
export const OPENCODE_DISABLE_EMBEDDED_WEB_UI = truthy("OPENCODE_DISABLE_EMBEDDED_WEB_UI")
|
||||
|
||||
@@ -375,38 +375,6 @@ export async function CodexAuthPlugin(input: PluginInput): Promise<Hooks> {
|
||||
delete provider.models[modelId]
|
||||
}
|
||||
|
||||
if (!provider.models["gpt-5.3-codex"]) {
|
||||
const model = {
|
||||
id: ModelID.make("gpt-5.3-codex"),
|
||||
providerID: ProviderID.openai,
|
||||
api: {
|
||||
id: "gpt-5.3-codex",
|
||||
url: "https://chatgpt.com/backend-api/codex",
|
||||
npm: "@ai-sdk/openai",
|
||||
},
|
||||
name: "GPT-5.3 Codex",
|
||||
capabilities: {
|
||||
temperature: false,
|
||||
reasoning: true,
|
||||
attachment: true,
|
||||
toolcall: true,
|
||||
input: { text: true, audio: false, image: true, video: false, pdf: false },
|
||||
output: { text: true, audio: false, image: false, video: false, pdf: false },
|
||||
interleaved: false,
|
||||
},
|
||||
cost: { input: 0, output: 0, cache: { read: 0, write: 0 } },
|
||||
limit: { context: 400_000, input: 272_000, output: 128_000 },
|
||||
status: "active" as const,
|
||||
options: {},
|
||||
headers: {},
|
||||
release_date: "2026-02-05",
|
||||
variants: {} as Record<string, Record<string, any>>,
|
||||
family: "gpt-codex",
|
||||
}
|
||||
model.variants = ProviderTransform.variants(model)
|
||||
provider.models["gpt-5.3-codex"] = model
|
||||
}
|
||||
|
||||
// Zero out costs for Codex (included with ChatGPT subscription)
|
||||
for (const model of Object.values(provider.models)) {
|
||||
model.cost = {
|
||||
|
||||
@@ -157,6 +157,14 @@ export namespace Plugin {
|
||||
|
||||
const resolved = await PluginLoader.resolve(plan, "server")
|
||||
if (!resolved.ok) {
|
||||
if (resolved.stage === "missing") {
|
||||
log.warn("plugin has no server entrypoint", {
|
||||
path: plan.spec,
|
||||
message: resolved.message,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const cause =
|
||||
resolved.error instanceof Error ? (resolved.error.cause ?? resolved.error) : resolved.error
|
||||
const message = errorMessage(cause)
|
||||
|
||||
@@ -11,6 +11,7 @@ import { ConfigPaths } from "@/config/paths"
|
||||
import { Global } from "@/global"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { Flock } from "@/util/flock"
|
||||
import { isRecord } from "@/util/record"
|
||||
|
||||
import { parsePluginSpecifier, readPluginPackage, resolvePluginTarget } from "./shared"
|
||||
|
||||
@@ -101,28 +102,60 @@ function pluginList(data: unknown) {
|
||||
return item.plugin
|
||||
}
|
||||
|
||||
function parseTarget(item: unknown): Target | undefined {
|
||||
if (item === "server" || item === "tui") return { kind: item }
|
||||
if (!Array.isArray(item)) return
|
||||
if (item[0] !== "server" && item[0] !== "tui") return
|
||||
if (item.length < 2) return { kind: item[0] }
|
||||
const opt = item[1]
|
||||
if (!opt || typeof opt !== "object" || Array.isArray(opt)) return { kind: item[0] }
|
||||
return {
|
||||
kind: item[0],
|
||||
opts: opt,
|
||||
function exportValue(value: unknown): string | undefined {
|
||||
if (typeof value === "string") {
|
||||
const next = value.trim()
|
||||
if (next) return next
|
||||
return
|
||||
}
|
||||
if (!isRecord(value)) return
|
||||
for (const key of ["import", "default"]) {
|
||||
const next = value[key]
|
||||
if (typeof next !== "string") continue
|
||||
const hit = next.trim()
|
||||
if (!hit) continue
|
||||
return hit
|
||||
}
|
||||
}
|
||||
|
||||
function parseTargets(raw: unknown) {
|
||||
if (!Array.isArray(raw)) return []
|
||||
const map = new Map<Kind, Target>()
|
||||
for (const item of raw) {
|
||||
const hit = parseTarget(item)
|
||||
if (!hit) continue
|
||||
map.set(hit.kind, hit)
|
||||
function exportOptions(value: unknown): Record<string, unknown> | undefined {
|
||||
if (!isRecord(value)) return
|
||||
const config = value.config
|
||||
if (!isRecord(config)) return
|
||||
return config
|
||||
}
|
||||
|
||||
function exportTarget(pkg: Record<string, unknown>, kind: Kind) {
|
||||
const exports = pkg.exports
|
||||
if (!isRecord(exports)) return
|
||||
const value = exports[`./${kind}`]
|
||||
const entry = exportValue(value)
|
||||
if (!entry) return
|
||||
return {
|
||||
opts: exportOptions(value),
|
||||
}
|
||||
return [...map.values()]
|
||||
}
|
||||
|
||||
function hasMainTarget(pkg: Record<string, unknown>) {
|
||||
const main = pkg.main
|
||||
if (typeof main !== "string") return false
|
||||
return Boolean(main.trim())
|
||||
}
|
||||
|
||||
function packageTargets(pkg: Record<string, unknown>) {
|
||||
const targets: Target[] = []
|
||||
const server = exportTarget(pkg, "server")
|
||||
if (server) {
|
||||
targets.push({ kind: "server", opts: server.opts })
|
||||
} else if (hasMainTarget(pkg)) {
|
||||
targets.push({ kind: "server" })
|
||||
}
|
||||
|
||||
const tui = exportTarget(pkg, "tui")
|
||||
if (tui) {
|
||||
targets.push({ kind: "tui", opts: tui.opts })
|
||||
}
|
||||
return targets
|
||||
}
|
||||
|
||||
function patch(text: string, path: Array<string | number>, value: unknown, insert = false) {
|
||||
@@ -260,7 +293,7 @@ export async function readPluginManifest(target: string): Promise<ManifestResult
|
||||
}
|
||||
}
|
||||
|
||||
const targets = parseTargets(pkg.item.json["oc-plugin"])
|
||||
const targets = packageTargets(pkg.item.json)
|
||||
if (!targets.length) {
|
||||
return {
|
||||
ok: false,
|
||||
@@ -330,7 +363,7 @@ async function patchOne(dir: string, target: Target, spec: string, force: boolea
|
||||
}
|
||||
|
||||
const list = pluginList(data)
|
||||
const item = target.opts ? [spec, target.opts] : spec
|
||||
const item = target.opts ? ([spec, target.opts] as const) : spec
|
||||
const out = patchPluginList(text, list, spec, item, force)
|
||||
if (out.mode === "noop") {
|
||||
return {
|
||||
|
||||
@@ -43,7 +43,9 @@ export namespace PluginLoader {
|
||||
plan: Plan,
|
||||
kind: PluginKind,
|
||||
): Promise<
|
||||
{ ok: true; value: Resolved } | { ok: false; stage: "install" | "entry" | "compatibility"; error: unknown }
|
||||
| { ok: true; value: Resolved }
|
||||
| { ok: false; stage: "missing"; message: string }
|
||||
| { ok: false; stage: "install" | "entry" | "compatibility"; error: unknown }
|
||||
> {
|
||||
let target = ""
|
||||
try {
|
||||
@@ -77,8 +79,8 @@ export namespace PluginLoader {
|
||||
if (!base.entry) {
|
||||
return {
|
||||
ok: false,
|
||||
stage: "entry",
|
||||
error: new Error(`Plugin ${plan.spec} entry is empty`),
|
||||
stage: "missing",
|
||||
message: `Plugin ${plan.spec} does not expose a ${kind} entrypoint`,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ export type PluginEntry = {
|
||||
source: PluginSource
|
||||
target: string
|
||||
pkg?: PluginPackage
|
||||
entry: string
|
||||
entry?: string
|
||||
}
|
||||
|
||||
const INDEX_FILES = ["index.ts", "index.tsx", "index.js", "index.mjs", "index.cjs"]
|
||||
@@ -45,9 +45,9 @@ export function pluginSource(spec: string): PluginSource {
|
||||
}
|
||||
|
||||
function resolveExportPath(raw: string, dir: string) {
|
||||
if (raw.startsWith("./") || raw.startsWith("../")) return path.resolve(dir, raw)
|
||||
if (raw.startsWith("file://")) return fileURLToPath(raw)
|
||||
return raw
|
||||
if (path.isAbsolute(raw)) return raw
|
||||
return path.resolve(dir, raw)
|
||||
}
|
||||
|
||||
function extractExportValue(value: unknown): string | undefined {
|
||||
@@ -93,7 +93,7 @@ function resolvePackageEntrypoint(spec: string, kind: PluginKind, pkg: PluginPac
|
||||
|
||||
function targetPath(target: string) {
|
||||
if (target.startsWith("file://")) return fileURLToPath(target)
|
||||
if (path.isAbsolute(target) || /^[A-Za-z]:[\\/]/.test(target)) return target
|
||||
if (path.isAbsolute(target)) return target
|
||||
}
|
||||
|
||||
async function resolveDirectoryIndex(dir: string) {
|
||||
@@ -128,13 +128,8 @@ async function resolvePluginEntrypoint(spec: string, target: string, kind: Plugi
|
||||
if (index) return pathToFileURL(index).href
|
||||
}
|
||||
|
||||
if (source === "npm") {
|
||||
throw new TypeError(`Plugin ${spec} must define package.json exports["./tui"]`)
|
||||
}
|
||||
|
||||
if (dir) {
|
||||
throw new TypeError(`Plugin ${spec} must define package.json exports["./tui"] or include index file`)
|
||||
}
|
||||
if (source === "npm") return
|
||||
if (dir) return
|
||||
|
||||
return target
|
||||
}
|
||||
@@ -145,7 +140,7 @@ async function resolvePluginEntrypoint(spec: string, target: string, kind: Plugi
|
||||
if (index) return pathToFileURL(index).href
|
||||
}
|
||||
|
||||
throw new TypeError(`Plugin ${spec} must define package.json exports["./server"] or package.json main`)
|
||||
return
|
||||
}
|
||||
|
||||
return target
|
||||
@@ -189,7 +184,7 @@ export async function checkPluginCompatibility(target: string, opencodeVersion:
|
||||
|
||||
export async function resolvePluginTarget(spec: string, parsed = parsePluginSpecifier(spec)) {
|
||||
if (isPathPluginSpec(spec)) return resolvePathPluginTarget(spec)
|
||||
return BunProc.install(parsed.pkg, parsed.version)
|
||||
return BunProc.install(parsed.pkg, parsed.version, { ignoreScripts: true })
|
||||
}
|
||||
|
||||
export async function readPluginPackage(target: string): Promise<PluginPackage> {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -20,6 +20,7 @@ import { Plugin } from "../plugin"
|
||||
import PROMPT_PLAN from "../session/prompt/plan.txt"
|
||||
import BUILD_SWITCH from "../session/prompt/build-switch.txt"
|
||||
import MAX_STEPS from "../session/prompt/max-steps.txt"
|
||||
import PROMPT_SUGGEST_NEXT from "../session/prompt/suggest-next.txt"
|
||||
import { ToolRegistry } from "../tool/registry"
|
||||
import { Runner } from "@/effect/runner"
|
||||
import { MCP } from "../mcp"
|
||||
@@ -243,6 +244,77 @@ export namespace SessionPrompt {
|
||||
)
|
||||
})
|
||||
|
||||
const suggest = Effect.fn("SessionPrompt.suggest")(function* (input: {
|
||||
session: Session.Info
|
||||
sessionID: SessionID
|
||||
message: MessageV2.WithParts
|
||||
}) {
|
||||
if (input.session.parentID) return
|
||||
const message = input.message.info
|
||||
if (message.role !== "assistant") return
|
||||
if (message.error) return
|
||||
if (!message.finish) return
|
||||
if (["tool-calls", "unknown"].includes(message.finish)) return
|
||||
if ((yield* status.get(input.sessionID)).type !== "idle") return
|
||||
|
||||
const ag = yield* agents.get("title")
|
||||
if (!ag) return
|
||||
|
||||
const model = yield* Effect.promise(async () => {
|
||||
const small = await Provider.getSmallModel(message.providerID).catch(() => undefined)
|
||||
if (small) return small
|
||||
return Provider.getModel(message.providerID, message.modelID).catch(() => undefined)
|
||||
})
|
||||
if (!model) return
|
||||
|
||||
const msgs = yield* Effect.promise(() => MessageV2.filterCompacted(MessageV2.stream(input.sessionID)))
|
||||
const history = msgs.slice(-8)
|
||||
const real = (item: MessageV2.WithParts) =>
|
||||
item.info.role === "user" && !item.parts.every((part) => "synthetic" in part && part.synthetic)
|
||||
const parent = msgs.find((item) => item.info.id === message.parentID)
|
||||
const user = parent && real(parent) ? parent.info : msgs.findLast((item) => real(item))?.info
|
||||
if (!user || user.role !== "user") return
|
||||
|
||||
const text = yield* Effect.promise(async (signal) => {
|
||||
const result = await LLM.stream({
|
||||
agent: {
|
||||
...ag,
|
||||
name: "suggest-next",
|
||||
prompt: PROMPT_SUGGEST_NEXT,
|
||||
},
|
||||
user,
|
||||
system: [],
|
||||
small: true,
|
||||
tools: {},
|
||||
model,
|
||||
abort: signal,
|
||||
sessionID: input.sessionID,
|
||||
retries: 1,
|
||||
toolChoice: "none",
|
||||
messages: await MessageV2.toModelMessages(history, model),
|
||||
})
|
||||
return result.text
|
||||
})
|
||||
|
||||
const line = text
|
||||
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
|
||||
.split("\n")
|
||||
.map((item) => item.trim())
|
||||
.find((item) => item.length > 0)
|
||||
?.replace(/^["'`]+|["'`]+$/g, "")
|
||||
if (!line) return
|
||||
|
||||
const tag = line
|
||||
.toUpperCase()
|
||||
.replace(/[\s-]+/g, "_")
|
||||
.replace(/[^A-Z_]/g, "")
|
||||
if (tag === "NO_SUGGESTION") return
|
||||
|
||||
const suggestion = line.length > 240 ? line.slice(0, 237) + "..." : line
|
||||
if ((yield* status.get(input.sessionID)).type !== "idle") return
|
||||
yield* status.set(input.sessionID, { type: "idle", suggestion })
|
||||
})
|
||||
|
||||
const insertReminders = Effect.fn("SessionPrompt.insertReminders")(function* (input: {
|
||||
messages: MessageV2.WithParts[]
|
||||
agent: Agent.Info
|
||||
@@ -403,7 +475,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
|
||||
Effect.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const match = input.processor.partFromToolCall(options.toolCallId)
|
||||
if (!match || match.state.status !== "running") return
|
||||
if (!match || !["running", "pending"].includes(match.state.status)) return
|
||||
yield* sessions.updatePart({
|
||||
...match,
|
||||
state: {
|
||||
@@ -1313,7 +1385,15 @@ NOTE: At any point in time through this workflow you should feel free to ask the
|
||||
}
|
||||
|
||||
if (input.noReply === true) return message
|
||||
return yield* loop({ sessionID: input.sessionID })
|
||||
const result = yield* loop({ sessionID: input.sessionID })
|
||||
if (Flag.OPENCODE_EXPERIMENTAL_NEXT_PROMPT) {
|
||||
yield* suggest({
|
||||
session,
|
||||
sessionID: input.sessionID,
|
||||
message: result,
|
||||
}).pipe(Effect.ignore, Effect.forkIn(scope))
|
||||
}
|
||||
return result
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
114
packages/opencode/src/session/prompt/kimi.txt
Normal file
114
packages/opencode/src/session/prompt/kimi.txt
Normal file
@@ -0,0 +1,114 @@
|
||||
You are OpenCode, an interactive general AI agent running on a user's computer.
|
||||
|
||||
Your primary goal is to help users with software engineering tasks by taking action — use the tools available to you to make real changes on the user's system. You should also answer questions when asked. Always adhere strictly to the following system instructions and the user's requirements.
|
||||
|
||||
# Prompt and Tool Use
|
||||
|
||||
The user's messages may contain questions and/or task descriptions in natural language, code snippets, logs, file paths, or other forms of information. Read them, understand them and do what the user requested. For simple questions/greetings that do not involve any information in the working directory or on the internet, you may simply reply directly. For anything else, default to taking action with tools. When the request could be interpreted as either a question to answer or a task to complete, treat it as a task.
|
||||
|
||||
When handling the user's request, if it involves creating, modifying, or running code or files, you MUST use the appropriate tools to make actual changes — do not just describe the solution in text. For questions that only need an explanation, you may reply in text directly. When calling tools, do not provide explanations because the tool calls themselves should be self-explanatory. You MUST follow the description of each tool and its parameters when calling tools.
|
||||
|
||||
If the `task` tool is available, you can use it to delegate a focused subtask to a subagent instance. When delegating, provide a complete prompt with all necessary context because a newly created subagent does not automatically see your current context.
|
||||
|
||||
You have the capability to output any number of tool calls in a single response. If you anticipate making multiple non-interfering tool calls, you are HIGHLY RECOMMENDED to make them in parallel to significantly improve efficiency. This is very important to your performance.
|
||||
|
||||
The results of the tool calls will be returned to you in a tool message. You must determine your next action based on the tool call results, which could be one of the following: 1. Continue working on the task, 2. Inform the user that the task is completed or has failed, or 3. Ask the user for more information.
|
||||
|
||||
Tool results and user messages may include `<system-reminder>` tags. These are authoritative system directives that you MUST follow. They bear no direct relation to the specific tool results or user messages in which they appear. Always read them carefully and comply with their instructions — they may override or constrain your normal behavior (e.g., restricting you to read-only actions during plan mode).
|
||||
|
||||
When responding to the user, you MUST use the SAME language as the user, unless explicitly instructed to do otherwise.
|
||||
|
||||
# General Guidelines for Coding
|
||||
|
||||
When building something from scratch, you should:
|
||||
|
||||
- Understand the user's requirements.
|
||||
- Ask the user for clarification if there is anything unclear.
|
||||
- Design the architecture and make a plan for the implementation.
|
||||
- Write the code in a modular and maintainable way.
|
||||
|
||||
Always use tools to implement your code changes:
|
||||
|
||||
- Use `write`/`edit` to create or modify source files. Code that only appears in your text response is NOT saved to the file system and will not take effect.
|
||||
- Use `bash` to run and test your code after writing it.
|
||||
- Iterate: if tests fail, read the error, fix the code with `write`/`edit`, and re-test with `bash`.
|
||||
|
||||
When working on an existing codebase, you should:
|
||||
|
||||
- Understand the codebase by reading it with tools (`read`, `glob`, `grep`) before making changes. Identify the ultimate goal and the most important criteria to achieve the goal.
|
||||
- For a bug fix, you typically need to check error logs or failed tests, scan over the codebase to find the root cause, and figure out a fix. If user mentioned any failed tests, you should make sure they pass after the changes.
|
||||
- For a feature, you typically need to design the architecture, and write the code in a modular and maintainable way, with minimal intrusions to existing code. Add new tests if the project already has tests.
|
||||
- For a code refactoring, you typically need to update all the places that call the code you are refactoring if the interface changes. DO NOT change any existing logic especially in tests, focus only on fixing any errors caused by the interface changes.
|
||||
- Make MINIMAL changes to achieve the goal. This is very important to your performance.
|
||||
- Follow the coding style of existing code in the project.
|
||||
|
||||
DO NOT run `git commit`, `git push`, `git reset`, `git rebase` and/or do any other git mutations unless explicitly asked to do so. Ask for confirmation each time when you need to do git mutations, even if the user has confirmed in earlier conversations.
|
||||
|
||||
# General Guidelines for Research and Data Processing
|
||||
|
||||
The user may ask you to research on certain topics, process or generate certain multimedia files. When doing such tasks, you must:
|
||||
|
||||
- Understand the user's requirements thoroughly, ask for clarification before you start if needed.
|
||||
- Make plans before doing deep or wide research, to ensure you are always on track.
|
||||
- Search on the Internet if possible, with carefully-designed search queries to improve efficiency and accuracy.
|
||||
- Use proper tools or shell commands or Python packages to process or generate images, videos, PDFs, docs, spreadsheets, presentations, or other multimedia files. Detect if there are already such tools in the environment. If you have to install third-party tools/packages, you MUST ensure that they are installed in a virtual/isolated environment.
|
||||
- Once you generate or edit any images, videos or other media files, try to read it again before proceed, to ensure that the content is as expected.
|
||||
- Avoid installing or deleting anything to/from outside of the current working directory. If you have to do so, ask the user for confirmation.
|
||||
|
||||
# Working Environment
|
||||
|
||||
## Operating System
|
||||
|
||||
The operating environment is not in a sandbox. Any actions you do will immediately affect the user's system. So you MUST be extremely cautious. Unless being explicitly instructed to do so, you should never access (read/write/execute) files outside of the working directory.
|
||||
|
||||
## Working Directory
|
||||
|
||||
The working directory should be considered as the project root if you are instructed to perform tasks on the project. Every file system operation will be relative to the working directory if you do not explicitly specify the absolute path. Tools may require absolute paths for some parameters, IF SO, YOU MUST use absolute paths for these parameters.
|
||||
|
||||
# Project Information
|
||||
|
||||
Markdown files named `AGENTS.md` usually contain the background, structure, coding styles, user preferences and other relevant information about the project. You should use this information to understand the project and the user's preferences. `AGENTS.md` files may exist at different locations in the project, but typically there is one in the project root.
|
||||
|
||||
> Why `AGENTS.md`?
|
||||
>
|
||||
> `README.md` files are for humans: quick starts, project descriptions, and contribution guidelines. `AGENTS.md` complements this by containing the extra, sometimes detailed context coding agents need: build steps, tests, and conventions that might clutter a README or aren’t relevant to human contributors.
|
||||
>
|
||||
> We intentionally kept it separate to:
|
||||
>
|
||||
> - Give agents a clear, predictable place for instructions.
|
||||
> - Keep `README`s concise and focused on human contributors.
|
||||
> - Provide precise, agent-focused guidance that complements existing `README` and docs.
|
||||
If the `AGENTS.md` is empty or insufficient, you may check `README`/`README.md` files or `AGENTS.md` files in subdirectories for more information about specific parts of the project.
|
||||
|
||||
If you modified any files/styles/structures/configurations/workflows/... mentioned in `AGENTS.md` files, you MUST update the corresponding `AGENTS.md` files to keep them up-to-date.
|
||||
|
||||
# Skills
|
||||
|
||||
Skills are reusable, composable capabilities that enhance your abilities. Each skill is a self-contained directory with a `SKILL.md` file that contains instructions, examples, and/or reference material.
|
||||
|
||||
## What are skills?
|
||||
|
||||
Skills are modular extensions that provide:
|
||||
|
||||
- Specialized knowledge: Domain-specific expertise (e.g., PDF processing, data analysis)
|
||||
- Workflow patterns: Best practices for common tasks
|
||||
- Tool integrations: Pre-configured tool chains for specific operations
|
||||
- Reference material: Documentation, templates, and examples
|
||||
|
||||
## How to use skills
|
||||
|
||||
Identify the skills that are likely to be useful for the tasks you are currently working on, use the `skill` tool to load a skill for detailed instructions, guidelines, scripts and more.
|
||||
|
||||
Only load skill details when needed to conserve the context window.
|
||||
|
||||
# Ultimate Reminders
|
||||
|
||||
At any time, you should be HELPFUL, CONCISE, and ACCURATE. Be thorough in your actions — test what you build, verify what you change — not in your explanations.
|
||||
|
||||
- Never diverge from the requirements and the goals of the task you work on. Stay on track.
|
||||
- Never give the user more than what they want.
|
||||
- Try your best to avoid any hallucination. Do fact checking before providing any factual information.
|
||||
- Think about the best approach, then take action decisively.
|
||||
- Do not give up too early.
|
||||
- ALWAYS, keep it stupidly simple. Do not overcomplicate things.
|
||||
- When the task requires creating or modifying files, always use tools to do so. Never treat displaying code in your response as a substitute for actually writing it to the file system.
|
||||
21
packages/opencode/src/session/prompt/suggest-next.txt
Normal file
21
packages/opencode/src/session/prompt/suggest-next.txt
Normal file
@@ -0,0 +1,21 @@
|
||||
You are generating a suggested next user message for the current conversation.
|
||||
|
||||
Goal:
|
||||
- Suggest a useful next step that keeps momentum.
|
||||
|
||||
Rules:
|
||||
- Output exactly one line.
|
||||
- Write as the user speaking to the assistant (for example: "Can you...", "Help me...", "Let's...").
|
||||
- Match the user's tone and language; keep it natural and human.
|
||||
- Prefer a concrete action over a broad question.
|
||||
- If the conversation is vague or small-talk, steer toward a practical starter request.
|
||||
- If there is no meaningful or appropriate next step to suggest, output exactly: NO_SUGGESTION
|
||||
- Avoid corporate or robotic phrasing.
|
||||
- Avoid asking multiple discovery questions in one sentence.
|
||||
- Do not include quotes, labels, markdown, or explanations.
|
||||
|
||||
Examples:
|
||||
- Greeting context -> "Can you scan this repo and suggest the best first task to tackle?"
|
||||
- Bug-fix context -> "Can you reproduce this bug and propose the smallest safe fix?"
|
||||
- Feature context -> "Let's implement this incrementally; start with the MVP version first."
|
||||
- Conversation is complete -> "NO_SUGGESTION"
|
||||
@@ -11,6 +11,7 @@ export namespace SessionStatus {
|
||||
.union([
|
||||
z.object({
|
||||
type: z.literal("idle"),
|
||||
suggestion: z.string().optional(),
|
||||
}),
|
||||
z.object({
|
||||
type: z.literal("retry"),
|
||||
|
||||
@@ -7,6 +7,7 @@ import PROMPT_DEFAULT from "./prompt/default.txt"
|
||||
import PROMPT_BEAST from "./prompt/beast.txt"
|
||||
import PROMPT_GEMINI from "./prompt/gemini.txt"
|
||||
import PROMPT_GPT from "./prompt/gpt.txt"
|
||||
import PROMPT_KIMI from "./prompt/kimi.txt"
|
||||
|
||||
import PROMPT_CODEX from "./prompt/codex.txt"
|
||||
import PROMPT_TRINITY from "./prompt/trinity.txt"
|
||||
@@ -28,6 +29,7 @@ export namespace SystemPrompt {
|
||||
if (model.api.id.includes("gemini-")) return [PROMPT_GEMINI]
|
||||
if (model.api.id.includes("claude")) return [PROMPT_ANTHROPIC]
|
||||
if (model.api.id.toLowerCase().includes("trinity")) return [PROMPT_TRINITY]
|
||||
if (model.api.id.toLowerCase().includes("kimi")) return [PROMPT_KIMI]
|
||||
return [PROMPT_DEFAULT]
|
||||
}
|
||||
|
||||
|
||||
@@ -1,19 +1,17 @@
|
||||
import { Log } from "../util/log"
|
||||
import path from "path"
|
||||
import fs from "fs/promises"
|
||||
import { Global } from "../global"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import { lazy } from "../util/lazy"
|
||||
import { Lock } from "../util/lock"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import z from "zod"
|
||||
import { Glob } from "../util/glob"
|
||||
import { git } from "@/util/git"
|
||||
import { AppFileSystem } from "@/filesystem"
|
||||
import { makeRuntime } from "@/effect/run-service"
|
||||
import { Effect, Exit, Layer, Option, RcMap, Schema, ServiceMap, TxReentrantLock } from "effect"
|
||||
|
||||
export namespace Storage {
|
||||
const log = Log.create({ service: "storage" })
|
||||
|
||||
type Migration = (dir: string) => Promise<void>
|
||||
type Migration = (dir: string, fs: AppFileSystem.Interface) => Effect.Effect<void, AppFileSystem.Error>
|
||||
|
||||
export const NotFoundError = NamedError.create(
|
||||
"NotFoundError",
|
||||
@@ -22,36 +20,101 @@ export namespace Storage {
|
||||
}),
|
||||
)
|
||||
|
||||
export type Error = AppFileSystem.Error | InstanceType<typeof NotFoundError>
|
||||
|
||||
const RootFile = Schema.Struct({
|
||||
path: Schema.optional(
|
||||
Schema.Struct({
|
||||
root: Schema.optional(Schema.String),
|
||||
}),
|
||||
),
|
||||
})
|
||||
|
||||
const SessionFile = Schema.Struct({
|
||||
id: Schema.String,
|
||||
})
|
||||
|
||||
const MessageFile = Schema.Struct({
|
||||
id: Schema.String,
|
||||
})
|
||||
|
||||
const DiffFile = Schema.Struct({
|
||||
additions: Schema.Number,
|
||||
deletions: Schema.Number,
|
||||
})
|
||||
|
||||
const SummaryFile = Schema.Struct({
|
||||
id: Schema.String,
|
||||
projectID: Schema.String,
|
||||
summary: Schema.Struct({ diffs: Schema.Array(DiffFile) }),
|
||||
})
|
||||
|
||||
const decodeRoot = Schema.decodeUnknownOption(RootFile)
|
||||
const decodeSession = Schema.decodeUnknownOption(SessionFile)
|
||||
const decodeMessage = Schema.decodeUnknownOption(MessageFile)
|
||||
const decodeSummary = Schema.decodeUnknownOption(SummaryFile)
|
||||
|
||||
export interface Interface {
|
||||
readonly remove: (key: string[]) => Effect.Effect<void, AppFileSystem.Error>
|
||||
readonly read: <T>(key: string[]) => Effect.Effect<T, Error>
|
||||
readonly update: <T>(key: string[], fn: (draft: T) => void) => Effect.Effect<T, Error>
|
||||
readonly write: <T>(key: string[], content: T) => Effect.Effect<void, AppFileSystem.Error>
|
||||
readonly list: (prefix: string[]) => Effect.Effect<string[][], AppFileSystem.Error>
|
||||
}
|
||||
|
||||
export class Service extends ServiceMap.Service<Service, Interface>()("@opencode/Storage") {}
|
||||
|
||||
function file(dir: string, key: string[]) {
|
||||
return path.join(dir, ...key) + ".json"
|
||||
}
|
||||
|
||||
function missing(err: unknown) {
|
||||
if (!err || typeof err !== "object") return false
|
||||
if ("code" in err && err.code === "ENOENT") return true
|
||||
if ("reason" in err && err.reason && typeof err.reason === "object" && "_tag" in err.reason) {
|
||||
return err.reason._tag === "NotFound"
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
function parseMigration(text: string) {
|
||||
const value = Number.parseInt(text, 10)
|
||||
return Number.isNaN(value) ? 0 : value
|
||||
}
|
||||
|
||||
const MIGRATIONS: Migration[] = [
|
||||
async (dir) => {
|
||||
Effect.fn("Storage.migration.1")(function* (dir: string, fs: AppFileSystem.Interface) {
|
||||
const project = path.resolve(dir, "../project")
|
||||
if (!(await Filesystem.isDir(project))) return
|
||||
const projectDirs = await Glob.scan("*", {
|
||||
if (!(yield* fs.isDir(project))) return
|
||||
const projectDirs = yield* fs.glob("*", {
|
||||
cwd: project,
|
||||
include: "all",
|
||||
})
|
||||
for (const projectDir of projectDirs) {
|
||||
const fullPath = path.join(project, projectDir)
|
||||
if (!(await Filesystem.isDir(fullPath))) continue
|
||||
const full = path.join(project, projectDir)
|
||||
if (!(yield* fs.isDir(full))) continue
|
||||
log.info(`migrating project ${projectDir}`)
|
||||
let projectID = projectDir
|
||||
const fullProjectDir = path.join(project, projectDir)
|
||||
let worktree = "/"
|
||||
|
||||
if (projectID !== "global") {
|
||||
for (const msgFile of await Glob.scan("storage/session/message/*/*.json", {
|
||||
cwd: path.join(project, projectDir),
|
||||
for (const msgFile of yield* fs.glob("storage/session/message/*/*.json", {
|
||||
cwd: full,
|
||||
absolute: true,
|
||||
})) {
|
||||
const json = await Filesystem.readJson<any>(msgFile)
|
||||
worktree = json.path?.root
|
||||
if (worktree) break
|
||||
const json = decodeRoot(yield* fs.readJson(msgFile), { onExcessProperty: "preserve" })
|
||||
const root = Option.isSome(json) ? json.value.path?.root : undefined
|
||||
if (!root) continue
|
||||
worktree = root
|
||||
break
|
||||
}
|
||||
if (!worktree) continue
|
||||
if (!(await Filesystem.isDir(worktree))) continue
|
||||
const result = await git(["rev-list", "--max-parents=0", "--all"], {
|
||||
cwd: worktree,
|
||||
})
|
||||
if (!(yield* fs.isDir(worktree))) continue
|
||||
const result = yield* Effect.promise(() =>
|
||||
git(["rev-list", "--max-parents=0", "--all"], {
|
||||
cwd: worktree,
|
||||
}),
|
||||
)
|
||||
const [id] = result
|
||||
.text()
|
||||
.split("\n")
|
||||
@@ -61,157 +124,230 @@ export namespace Storage {
|
||||
if (!id) continue
|
||||
projectID = id
|
||||
|
||||
await Filesystem.writeJson(path.join(dir, "project", projectID + ".json"), {
|
||||
id,
|
||||
vcs: "git",
|
||||
worktree,
|
||||
time: {
|
||||
created: Date.now(),
|
||||
initialized: Date.now(),
|
||||
},
|
||||
})
|
||||
yield* fs.writeWithDirs(
|
||||
path.join(dir, "project", projectID + ".json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
id,
|
||||
vcs: "git",
|
||||
worktree,
|
||||
time: {
|
||||
created: Date.now(),
|
||||
initialized: Date.now(),
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
),
|
||||
)
|
||||
|
||||
log.info(`migrating sessions for project ${projectID}`)
|
||||
for (const sessionFile of await Glob.scan("storage/session/info/*.json", {
|
||||
cwd: fullProjectDir,
|
||||
for (const sessionFile of yield* fs.glob("storage/session/info/*.json", {
|
||||
cwd: full,
|
||||
absolute: true,
|
||||
})) {
|
||||
const dest = path.join(dir, "session", projectID, path.basename(sessionFile))
|
||||
log.info("copying", {
|
||||
sessionFile,
|
||||
dest,
|
||||
})
|
||||
const session = await Filesystem.readJson<any>(sessionFile)
|
||||
await Filesystem.writeJson(dest, session)
|
||||
log.info(`migrating messages for session ${session.id}`)
|
||||
for (const msgFile of await Glob.scan(`storage/session/message/${session.id}/*.json`, {
|
||||
cwd: fullProjectDir,
|
||||
log.info("copying", { sessionFile, dest })
|
||||
const session = yield* fs.readJson(sessionFile)
|
||||
const info = decodeSession(session, { onExcessProperty: "preserve" })
|
||||
yield* fs.writeWithDirs(dest, JSON.stringify(session, null, 2))
|
||||
if (Option.isNone(info)) continue
|
||||
log.info(`migrating messages for session ${info.value.id}`)
|
||||
for (const msgFile of yield* fs.glob(`storage/session/message/${info.value.id}/*.json`, {
|
||||
cwd: full,
|
||||
absolute: true,
|
||||
})) {
|
||||
const dest = path.join(dir, "message", session.id, path.basename(msgFile))
|
||||
const next = path.join(dir, "message", info.value.id, path.basename(msgFile))
|
||||
log.info("copying", {
|
||||
msgFile,
|
||||
dest,
|
||||
dest: next,
|
||||
})
|
||||
const message = await Filesystem.readJson<any>(msgFile)
|
||||
await Filesystem.writeJson(dest, message)
|
||||
const message = yield* fs.readJson(msgFile)
|
||||
const item = decodeMessage(message, { onExcessProperty: "preserve" })
|
||||
yield* fs.writeWithDirs(next, JSON.stringify(message, null, 2))
|
||||
if (Option.isNone(item)) continue
|
||||
|
||||
log.info(`migrating parts for message ${message.id}`)
|
||||
for (const partFile of await Glob.scan(`storage/session/part/${session.id}/${message.id}/*.json`, {
|
||||
cwd: fullProjectDir,
|
||||
log.info(`migrating parts for message ${item.value.id}`)
|
||||
for (const partFile of yield* fs.glob(`storage/session/part/${info.value.id}/${item.value.id}/*.json`, {
|
||||
cwd: full,
|
||||
absolute: true,
|
||||
})) {
|
||||
const dest = path.join(dir, "part", message.id, path.basename(partFile))
|
||||
const part = await Filesystem.readJson(partFile)
|
||||
const out = path.join(dir, "part", item.value.id, path.basename(partFile))
|
||||
const part = yield* fs.readJson(partFile)
|
||||
log.info("copying", {
|
||||
partFile,
|
||||
dest,
|
||||
dest: out,
|
||||
})
|
||||
await Filesystem.writeJson(dest, part)
|
||||
yield* fs.writeWithDirs(out, JSON.stringify(part, null, 2))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
async (dir) => {
|
||||
for (const item of await Glob.scan("session/*/*.json", {
|
||||
}),
|
||||
Effect.fn("Storage.migration.2")(function* (dir: string, fs: AppFileSystem.Interface) {
|
||||
for (const item of yield* fs.glob("session/*/*.json", {
|
||||
cwd: dir,
|
||||
absolute: true,
|
||||
})) {
|
||||
const session = await Filesystem.readJson<any>(item)
|
||||
if (!session.projectID) continue
|
||||
if (!session.summary?.diffs) continue
|
||||
const { diffs } = session.summary
|
||||
await Filesystem.write(path.join(dir, "session_diff", session.id + ".json"), JSON.stringify(diffs))
|
||||
await Filesystem.writeJson(path.join(dir, "session", session.projectID, session.id + ".json"), {
|
||||
...session,
|
||||
summary: {
|
||||
additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
|
||||
deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
|
||||
},
|
||||
})
|
||||
const raw = yield* fs.readJson(item)
|
||||
const session = decodeSummary(raw, { onExcessProperty: "preserve" })
|
||||
if (Option.isNone(session)) continue
|
||||
const diffs = session.value.summary.diffs
|
||||
yield* fs.writeWithDirs(
|
||||
path.join(dir, "session_diff", session.value.id + ".json"),
|
||||
JSON.stringify(diffs, null, 2),
|
||||
)
|
||||
yield* fs.writeWithDirs(
|
||||
path.join(dir, "session", session.value.projectID, session.value.id + ".json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
...(raw as Record<string, unknown>),
|
||||
summary: {
|
||||
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
|
||||
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
),
|
||||
)
|
||||
}
|
||||
},
|
||||
}),
|
||||
]
|
||||
|
||||
const state = lazy(async () => {
|
||||
const dir = path.join(Global.Path.data, "storage")
|
||||
const migration = await Filesystem.readJson<string>(path.join(dir, "migration"))
|
||||
.then((x) => parseInt(x))
|
||||
.catch(() => 0)
|
||||
for (let index = migration; index < MIGRATIONS.length; index++) {
|
||||
log.info("running migration", { index })
|
||||
const migration = MIGRATIONS[index]
|
||||
await migration(dir).catch(() => log.error("failed to run migration", { index }))
|
||||
await Filesystem.write(path.join(dir, "migration"), (index + 1).toString())
|
||||
}
|
||||
return {
|
||||
dir,
|
||||
}
|
||||
})
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const locks = yield* RcMap.make({
|
||||
lookup: () => TxReentrantLock.make(),
|
||||
idleTimeToLive: 0,
|
||||
})
|
||||
const state = yield* Effect.cached(
|
||||
Effect.gen(function* () {
|
||||
const dir = path.join(Global.Path.data, "storage")
|
||||
const marker = path.join(dir, "migration")
|
||||
const migration = yield* fs.readFileString(marker).pipe(
|
||||
Effect.map(parseMigration),
|
||||
Effect.catchIf(missing, () => Effect.succeed(0)),
|
||||
Effect.orElseSucceed(() => 0),
|
||||
)
|
||||
for (let i = migration; i < MIGRATIONS.length; i++) {
|
||||
log.info("running migration", { index: i })
|
||||
const step = MIGRATIONS[i]!
|
||||
const exit = yield* Effect.exit(step(dir, fs))
|
||||
if (Exit.isFailure(exit)) {
|
||||
log.error("failed to run migration", { index: i, cause: exit.cause })
|
||||
break
|
||||
}
|
||||
yield* fs.writeWithDirs(marker, String(i + 1))
|
||||
}
|
||||
return { dir }
|
||||
}),
|
||||
)
|
||||
|
||||
const fail = (target: string): Effect.Effect<never, InstanceType<typeof NotFoundError>> =>
|
||||
Effect.fail(new NotFoundError({ message: `Resource not found: ${target}` }))
|
||||
|
||||
const wrap = <A>(target: string, body: Effect.Effect<A, AppFileSystem.Error>) =>
|
||||
body.pipe(Effect.catchIf(missing, () => fail(target)))
|
||||
|
||||
const writeJson = Effect.fnUntraced(function* (target: string, content: unknown) {
|
||||
yield* fs.writeWithDirs(target, JSON.stringify(content, null, 2))
|
||||
})
|
||||
|
||||
const withResolved = <A, E>(
|
||||
key: string[],
|
||||
fn: (target: string, rw: TxReentrantLock.TxReentrantLock) => Effect.Effect<A, E>,
|
||||
): Effect.Effect<A, E | AppFileSystem.Error> =>
|
||||
Effect.scoped(
|
||||
Effect.gen(function* () {
|
||||
const target = file((yield* state).dir, key)
|
||||
return yield* fn(target, yield* RcMap.get(locks, target))
|
||||
}),
|
||||
)
|
||||
|
||||
const remove: Interface["remove"] = Effect.fn("Storage.remove")(function* (key: string[]) {
|
||||
yield* withResolved(key, (target, rw) =>
|
||||
TxReentrantLock.withWriteLock(rw, fs.remove(target).pipe(Effect.catchIf(missing, () => Effect.void))),
|
||||
)
|
||||
})
|
||||
|
||||
const read: Interface["read"] = <T>(key: string[]) =>
|
||||
Effect.gen(function* () {
|
||||
const value = yield* withResolved(key, (target, rw) =>
|
||||
TxReentrantLock.withReadLock(rw, wrap(target, fs.readJson(target))),
|
||||
)
|
||||
return value as T
|
||||
})
|
||||
|
||||
const update: Interface["update"] = <T>(key: string[], fn: (draft: T) => void) =>
|
||||
Effect.gen(function* () {
|
||||
const value = yield* withResolved(key, (target, rw) =>
|
||||
TxReentrantLock.withWriteLock(
|
||||
rw,
|
||||
Effect.gen(function* () {
|
||||
const content = yield* wrap(target, fs.readJson(target))
|
||||
fn(content as T)
|
||||
yield* writeJson(target, content)
|
||||
return content
|
||||
}),
|
||||
),
|
||||
)
|
||||
return value as T
|
||||
})
|
||||
|
||||
const write: Interface["write"] = (key: string[], content: unknown) =>
|
||||
Effect.gen(function* () {
|
||||
yield* withResolved(key, (target, rw) => TxReentrantLock.withWriteLock(rw, writeJson(target, content)))
|
||||
})
|
||||
|
||||
const list: Interface["list"] = Effect.fn("Storage.list")(function* (prefix: string[]) {
|
||||
const dir = (yield* state).dir
|
||||
const cwd = path.join(dir, ...prefix)
|
||||
const result = yield* fs
|
||||
.glob("**/*", {
|
||||
cwd,
|
||||
include: "file",
|
||||
})
|
||||
.pipe(Effect.catch(() => Effect.succeed<string[]>([])))
|
||||
return result
|
||||
.map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)])
|
||||
.toSorted((a, b) => a.join("/").localeCompare(b.join("/")))
|
||||
})
|
||||
|
||||
return Service.of({
|
||||
remove,
|
||||
read,
|
||||
update,
|
||||
write,
|
||||
list,
|
||||
})
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
|
||||
|
||||
const { runPromise } = makeRuntime(Service, defaultLayer)
|
||||
|
||||
export async function remove(key: string[]) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
const target = path.join(dir, ...key) + ".json"
|
||||
return withErrorHandling(async () => {
|
||||
await fs.unlink(target).catch(() => {})
|
||||
})
|
||||
return runPromise((svc) => svc.remove(key))
|
||||
}
|
||||
|
||||
export async function read<T>(key: string[]) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
const target = path.join(dir, ...key) + ".json"
|
||||
return withErrorHandling(async () => {
|
||||
using _ = await Lock.read(target)
|
||||
const result = await Filesystem.readJson<T>(target)
|
||||
return result as T
|
||||
})
|
||||
return runPromise((svc) => svc.read<T>(key))
|
||||
}
|
||||
|
||||
export async function update<T>(key: string[], fn: (draft: T) => void) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
const target = path.join(dir, ...key) + ".json"
|
||||
return withErrorHandling(async () => {
|
||||
using _ = await Lock.write(target)
|
||||
const content = await Filesystem.readJson<T>(target)
|
||||
fn(content as T)
|
||||
await Filesystem.writeJson(target, content)
|
||||
return content
|
||||
})
|
||||
return runPromise((svc) => svc.update<T>(key, fn))
|
||||
}
|
||||
|
||||
export async function write<T>(key: string[], content: T) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
const target = path.join(dir, ...key) + ".json"
|
||||
return withErrorHandling(async () => {
|
||||
using _ = await Lock.write(target)
|
||||
await Filesystem.writeJson(target, content)
|
||||
})
|
||||
}
|
||||
|
||||
async function withErrorHandling<T>(body: () => Promise<T>) {
|
||||
return body().catch((e) => {
|
||||
if (!(e instanceof Error)) throw e
|
||||
const errnoException = e as NodeJS.ErrnoException
|
||||
if (errnoException.code === "ENOENT") {
|
||||
throw new NotFoundError({ message: `Resource not found: ${errnoException.path}` })
|
||||
}
|
||||
throw e
|
||||
})
|
||||
return runPromise((svc) => svc.write(key, content))
|
||||
}
|
||||
|
||||
export async function list(prefix: string[]) {
|
||||
const dir = await state().then((x) => x.dir)
|
||||
try {
|
||||
const result = await Glob.scan("**/*", {
|
||||
cwd: path.join(dir, ...prefix),
|
||||
include: "file",
|
||||
}).then((results) => results.map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)]))
|
||||
result.sort()
|
||||
return result
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
return runPromise((svc) => svc.list(prefix))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import { describe, expect, spyOn, test } from "bun:test"
|
||||
import fs from "fs/promises"
|
||||
import path from "path"
|
||||
import { BunProc } from "../src/bun"
|
||||
import { PackageRegistry } from "../src/bun/registry"
|
||||
import { Global } from "../src/global"
|
||||
import { Process } from "../src/util/process"
|
||||
|
||||
describe("BunProc registry configuration", () => {
|
||||
test("should not contain hardcoded registry parameters", async () => {
|
||||
@@ -51,3 +55,83 @@ describe("BunProc registry configuration", () => {
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe("BunProc install pinning", () => {
|
||||
test("uses pinned cache without touching registry", async () => {
|
||||
const pkg = `pin-test-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`
|
||||
const ver = "1.2.3"
|
||||
const mod = path.join(Global.Path.cache, "node_modules", pkg)
|
||||
const data = path.join(Global.Path.cache, "package.json")
|
||||
|
||||
await fs.mkdir(mod, { recursive: true })
|
||||
await Bun.write(path.join(mod, "package.json"), JSON.stringify({ name: pkg, version: ver }, null, 2))
|
||||
|
||||
const src = await fs.readFile(data, "utf8").catch(() => "")
|
||||
const json = src ? ((JSON.parse(src) as { dependencies?: Record<string, string> }) ?? {}) : {}
|
||||
const deps = json.dependencies ?? {}
|
||||
deps[pkg] = ver
|
||||
await Bun.write(data, JSON.stringify({ ...json, dependencies: deps }, null, 2))
|
||||
|
||||
const stale = spyOn(PackageRegistry, "isOutdated").mockImplementation(async () => {
|
||||
throw new Error("unexpected registry check")
|
||||
})
|
||||
const run = spyOn(Process, "run").mockImplementation(async () => {
|
||||
throw new Error("unexpected process.run")
|
||||
})
|
||||
|
||||
try {
|
||||
const out = await BunProc.install(pkg, ver)
|
||||
expect(out).toBe(mod)
|
||||
expect(stale).not.toHaveBeenCalled()
|
||||
expect(run).not.toHaveBeenCalled()
|
||||
} finally {
|
||||
stale.mockRestore()
|
||||
run.mockRestore()
|
||||
|
||||
await fs.rm(mod, { recursive: true, force: true })
|
||||
const end = await fs
|
||||
.readFile(data, "utf8")
|
||||
.then((item) => JSON.parse(item) as { dependencies?: Record<string, string> })
|
||||
.catch(() => undefined)
|
||||
if (end?.dependencies) {
|
||||
delete end.dependencies[pkg]
|
||||
await Bun.write(data, JSON.stringify(end, null, 2))
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test("passes --ignore-scripts when requested", async () => {
|
||||
const pkg = `ignore-test-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`
|
||||
const ver = "4.5.6"
|
||||
const mod = path.join(Global.Path.cache, "node_modules", pkg)
|
||||
const data = path.join(Global.Path.cache, "package.json")
|
||||
|
||||
const run = spyOn(Process, "run").mockImplementation(async () => ({
|
||||
code: 0,
|
||||
stdout: Buffer.alloc(0),
|
||||
stderr: Buffer.alloc(0),
|
||||
}))
|
||||
|
||||
try {
|
||||
await fs.rm(mod, { recursive: true, force: true })
|
||||
await BunProc.install(pkg, ver, { ignoreScripts: true })
|
||||
|
||||
expect(run).toHaveBeenCalled()
|
||||
const call = run.mock.calls[0]?.[0]
|
||||
expect(call).toContain("--ignore-scripts")
|
||||
expect(call).toContain(`${pkg}@${ver}`)
|
||||
} finally {
|
||||
run.mockRestore()
|
||||
await fs.rm(mod, { recursive: true, force: true })
|
||||
|
||||
const end = await fs
|
||||
.readFile(data, "utf8")
|
||||
.then((item) => JSON.parse(item) as { dependencies?: Record<string, string> })
|
||||
.catch(() => undefined)
|
||||
if (end?.dependencies) {
|
||||
delete end.dependencies[pkg]
|
||||
await Bun.write(data, JSON.stringify(end, null, 2))
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
@@ -21,8 +21,12 @@ test("installs plugin without loading it", async () => {
|
||||
{
|
||||
name: "demo-install-plugin",
|
||||
type: "module",
|
||||
main: "./install-plugin.ts",
|
||||
"oc-plugin": [["tui", { marker }]],
|
||||
exports: {
|
||||
"./tui": {
|
||||
import: "./install-plugin.ts",
|
||||
config: { marker },
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
@@ -46,7 +50,7 @@ test("installs plugin without loading it", async () => {
|
||||
})
|
||||
|
||||
process.env.OPENCODE_PLUGIN_META_FILE = path.join(tmp.path, "plugin-meta.json")
|
||||
let cfg: Awaited<ReturnType<typeof TuiConfig.get>> = {
|
||||
const cfg: Awaited<ReturnType<typeof TuiConfig.get>> = {
|
||||
plugin: [],
|
||||
plugin_records: undefined,
|
||||
}
|
||||
@@ -66,17 +70,6 @@ test("installs plugin without loading it", async () => {
|
||||
|
||||
try {
|
||||
await TuiPluginRuntime.init(api)
|
||||
cfg = {
|
||||
plugin: [[tmp.extra.spec, { marker: tmp.extra.marker }]],
|
||||
plugin_records: [
|
||||
{
|
||||
item: [tmp.extra.spec, { marker: tmp.extra.marker }],
|
||||
scope: "local",
|
||||
source: path.join(tmp.path, "tui.json"),
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const out = await TuiPluginRuntime.installPlugin(tmp.extra.spec)
|
||||
expect(out).toMatchObject({
|
||||
ok: true,
|
||||
|
||||
@@ -304,17 +304,23 @@ test("does not use npm package main for tui entry", async () => {
|
||||
const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue()
|
||||
const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path)
|
||||
const install = spyOn(BunProc, "install").mockResolvedValue(tmp.extra.mod)
|
||||
const warn = spyOn(console, "warn").mockImplementation(() => {})
|
||||
const error = spyOn(console, "error").mockImplementation(() => {})
|
||||
|
||||
try {
|
||||
await TuiPluginRuntime.init(createTuiPluginApi())
|
||||
await expect(fs.readFile(tmp.extra.marker, "utf8")).rejects.toThrow()
|
||||
expect(TuiPluginRuntime.list().some((item) => item.spec === tmp.extra.spec)).toBe(false)
|
||||
expect(error).not.toHaveBeenCalled()
|
||||
expect(warn.mock.calls.some((call) => String(call[0]).includes("tui plugin has no entrypoint"))).toBe(true)
|
||||
} finally {
|
||||
await TuiPluginRuntime.dispose()
|
||||
install.mockRestore()
|
||||
cwd.mockRestore()
|
||||
get.mockRestore()
|
||||
wait.mockRestore()
|
||||
warn.mockRestore()
|
||||
error.mockRestore()
|
||||
delete process.env.OPENCODE_PLUGIN_META_FILE
|
||||
}
|
||||
})
|
||||
|
||||
@@ -792,6 +792,7 @@ test("installs dependencies in writable OPENCODE_CONFIG_DIR", async () => {
|
||||
|
||||
expect(await Filesystem.exists(path.join(tmp.extra, "package.json"))).toBe(true)
|
||||
expect(await Filesystem.exists(path.join(tmp.extra, ".gitignore"))).toBe(true)
|
||||
expect(await Filesystem.readText(path.join(tmp.extra, ".gitignore"))).toContain("package-lock.json")
|
||||
} finally {
|
||||
online.mockRestore()
|
||||
run.mockRestore()
|
||||
|
||||
@@ -25,6 +25,11 @@ function run(msg: Msg) {
|
||||
|
||||
async function plugin(dir: string, kinds: Array<"server" | "tui">) {
|
||||
const p = path.join(dir, "plugin")
|
||||
const server = kinds.includes("server")
|
||||
const tui = kinds.includes("tui")
|
||||
const exports: Record<string, string> = {}
|
||||
if (server) exports["./server"] = "./server.js"
|
||||
if (tui) exports["./tui"] = "./tui.js"
|
||||
await fs.mkdir(p, { recursive: true })
|
||||
await Bun.write(
|
||||
path.join(p, "package.json"),
|
||||
@@ -32,7 +37,8 @@ async function plugin(dir: string, kinds: Array<"server" | "tui">) {
|
||||
{
|
||||
name: "acme",
|
||||
version: "1.0.0",
|
||||
"oc-plugin": kinds,
|
||||
...(server ? { main: "./server.js" } : {}),
|
||||
...(Object.keys(exports).length ? { exports } : {}),
|
||||
},
|
||||
null,
|
||||
2,
|
||||
|
||||
@@ -55,8 +55,34 @@ function ctxRoot(dir: string): PlugCtx {
|
||||
}
|
||||
}
|
||||
|
||||
async function plugin(dir: string, kinds?: unknown) {
|
||||
async function plugin(
|
||||
dir: string,
|
||||
kinds?: Array<"server" | "tui">,
|
||||
opts?: {
|
||||
server?: Record<string, unknown>
|
||||
tui?: Record<string, unknown>
|
||||
},
|
||||
) {
|
||||
const p = path.join(dir, "plugin")
|
||||
const server = kinds?.includes("server") ?? false
|
||||
const tui = kinds?.includes("tui") ?? false
|
||||
const exports: Record<string, unknown> = {}
|
||||
if (server) {
|
||||
exports["./server"] = opts?.server
|
||||
? {
|
||||
import: "./server.js",
|
||||
config: opts.server,
|
||||
}
|
||||
: "./server.js"
|
||||
}
|
||||
if (tui) {
|
||||
exports["./tui"] = opts?.tui
|
||||
? {
|
||||
import: "./tui.js",
|
||||
config: opts.tui,
|
||||
}
|
||||
: "./tui.js"
|
||||
}
|
||||
await fs.mkdir(p, { recursive: true })
|
||||
await Bun.write(
|
||||
path.join(p, "package.json"),
|
||||
@@ -64,7 +90,8 @@ async function plugin(dir: string, kinds?: unknown) {
|
||||
{
|
||||
name: "acme",
|
||||
version: "1.0.0",
|
||||
...(kinds === undefined ? {} : { "oc-plugin": kinds }),
|
||||
...(server ? { main: "./server.js" } : {}),
|
||||
...(Object.keys(exports).length ? { exports } : {}),
|
||||
},
|
||||
null,
|
||||
2,
|
||||
@@ -99,12 +126,12 @@ describe("plugin.install.task", () => {
|
||||
expect(tui.plugin).toEqual(["acme@1.2.3"])
|
||||
})
|
||||
|
||||
test("writes default options from tuple manifest targets", async () => {
|
||||
test("writes default options from exports config metadata", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
const target = await plugin(tmp.path, [
|
||||
["server", { custom: true, other: false }],
|
||||
["tui", { compact: true }],
|
||||
])
|
||||
const target = await plugin(tmp.path, ["server", "tui"], {
|
||||
server: { custom: true, other: false },
|
||||
tui: { compact: true },
|
||||
})
|
||||
const run = createPlugTask(
|
||||
{
|
||||
mod: "acme@1.2.3",
|
||||
|
||||
@@ -266,8 +266,8 @@ describe("plugin.loader.shared", () => {
|
||||
try {
|
||||
await load(tmp.path)
|
||||
|
||||
expect(install.mock.calls).toContainEqual(["acme-plugin", "latest"])
|
||||
expect(install.mock.calls).toContainEqual(["scope-plugin", "2.3.4"])
|
||||
expect(install.mock.calls).toContainEqual(["acme-plugin", "latest", { ignoreScripts: true }])
|
||||
expect(install.mock.calls).toContainEqual(["scope-plugin", "2.3.4", { ignoreScripts: true }])
|
||||
} finally {
|
||||
install.mockRestore()
|
||||
}
|
||||
@@ -331,6 +331,117 @@ describe("plugin.loader.shared", () => {
|
||||
}
|
||||
})
|
||||
|
||||
test("loads npm server plugin from package server export without leading dot", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
const mod = path.join(dir, "mods", "acme-plugin")
|
||||
const dist = path.join(mod, "dist")
|
||||
const mark = path.join(dir, "server-called.txt")
|
||||
await fs.mkdir(dist, { recursive: true })
|
||||
|
||||
await Bun.write(
|
||||
path.join(mod, "package.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
name: "acme-plugin",
|
||||
type: "module",
|
||||
exports: {
|
||||
".": "./index.js",
|
||||
"./server": "dist/server.js",
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
),
|
||||
)
|
||||
await Bun.write(path.join(mod, "index.js"), 'import "./main-throws.js"\nexport default {}\n')
|
||||
await Bun.write(path.join(mod, "main-throws.js"), 'throw new Error("main loaded")\n')
|
||||
await Bun.write(
|
||||
path.join(dist, "server.js"),
|
||||
[
|
||||
"export default {",
|
||||
" server: async () => {",
|
||||
` await Bun.write(${JSON.stringify(mark)}, "called")`,
|
||||
" return {}",
|
||||
" },",
|
||||
"}",
|
||||
"",
|
||||
].join("\n"),
|
||||
)
|
||||
|
||||
await Bun.write(path.join(dir, "opencode.json"), JSON.stringify({ plugin: ["acme-plugin@1.0.0"] }, null, 2))
|
||||
|
||||
return {
|
||||
mod,
|
||||
mark,
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
const install = spyOn(BunProc, "install").mockResolvedValue(tmp.extra.mod)
|
||||
|
||||
try {
|
||||
const errors = await errs(tmp.path)
|
||||
expect(errors).toHaveLength(0)
|
||||
expect(await Bun.file(tmp.extra.mark).text()).toBe("called")
|
||||
} finally {
|
||||
install.mockRestore()
|
||||
}
|
||||
})
|
||||
|
||||
test("loads npm server plugin from package main without leading dot", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
const mod = path.join(dir, "mods", "acme-plugin")
|
||||
const dist = path.join(mod, "dist")
|
||||
const mark = path.join(dir, "main-called.txt")
|
||||
await fs.mkdir(dist, { recursive: true })
|
||||
|
||||
await Bun.write(
|
||||
path.join(mod, "package.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
name: "acme-plugin",
|
||||
type: "module",
|
||||
main: "dist/index.js",
|
||||
},
|
||||
null,
|
||||
2,
|
||||
),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(dist, "index.js"),
|
||||
[
|
||||
"export default {",
|
||||
" server: async () => {",
|
||||
` await Bun.write(${JSON.stringify(mark)}, "called")`,
|
||||
" return {}",
|
||||
" },",
|
||||
"}",
|
||||
"",
|
||||
].join("\n"),
|
||||
)
|
||||
|
||||
await Bun.write(path.join(dir, "opencode.json"), JSON.stringify({ plugin: ["acme-plugin@1.0.0"] }, null, 2))
|
||||
|
||||
return {
|
||||
mod,
|
||||
mark,
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
const install = spyOn(BunProc, "install").mockResolvedValue(tmp.extra.mod)
|
||||
|
||||
try {
|
||||
const errors = await errs(tmp.path)
|
||||
expect(errors).toHaveLength(0)
|
||||
expect(await Bun.file(tmp.extra.mark).text()).toBe("called")
|
||||
} finally {
|
||||
install.mockRestore()
|
||||
}
|
||||
})
|
||||
|
||||
test("does not use npm package exports dot for server entry", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
@@ -376,7 +487,7 @@ describe("plugin.loader.shared", () => {
|
||||
.catch(() => false)
|
||||
|
||||
expect(called).toBe(false)
|
||||
expect(errors.some((x) => x.includes('exports["./server"]') && x.includes("package.json main"))).toBe(true)
|
||||
expect(errors).toHaveLength(0)
|
||||
} finally {
|
||||
install.mockRestore()
|
||||
}
|
||||
|
||||
@@ -13,6 +13,18 @@ afterEach(async () => {
|
||||
await Instance.disposeAll()
|
||||
})
|
||||
|
||||
async function withoutWatcher<T>(fn: () => Promise<T>) {
|
||||
if (process.platform !== "win32") return fn()
|
||||
const prev = process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER
|
||||
process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER = "true"
|
||||
try {
|
||||
return await fn()
|
||||
} finally {
|
||||
if (prev === undefined) delete process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER
|
||||
else process.env.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER = prev
|
||||
}
|
||||
}
|
||||
|
||||
async function fill(sessionID: SessionID, count: number, time = (i: number) => Date.now() + i) {
|
||||
const ids = [] as MessageID[]
|
||||
for (let i = 0; i < count; i++) {
|
||||
@@ -42,86 +54,94 @@ async function fill(sessionID: SessionID, count: number, time = (i: number) => D
|
||||
describe("session messages endpoint", () => {
|
||||
test("returns cursor headers for older pages", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const session = await Session.create({})
|
||||
const ids = await fill(session.id, 5)
|
||||
const app = Server.Default()
|
||||
await withoutWatcher(() =>
|
||||
Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const session = await Session.create({})
|
||||
const ids = await fill(session.id, 5)
|
||||
const app = Server.Default()
|
||||
|
||||
const a = await app.request(`/session/${session.id}/message?limit=2`)
|
||||
expect(a.status).toBe(200)
|
||||
const aBody = (await a.json()) as MessageV2.WithParts[]
|
||||
expect(aBody.map((item) => item.info.id)).toEqual(ids.slice(-2))
|
||||
const cursor = a.headers.get("x-next-cursor")
|
||||
expect(cursor).toBeTruthy()
|
||||
expect(a.headers.get("link")).toContain('rel="next"')
|
||||
const a = await app.request(`/session/${session.id}/message?limit=2`)
|
||||
expect(a.status).toBe(200)
|
||||
const aBody = (await a.json()) as MessageV2.WithParts[]
|
||||
expect(aBody.map((item) => item.info.id)).toEqual(ids.slice(-2))
|
||||
const cursor = a.headers.get("x-next-cursor")
|
||||
expect(cursor).toBeTruthy()
|
||||
expect(a.headers.get("link")).toContain('rel="next"')
|
||||
|
||||
const b = await app.request(`/session/${session.id}/message?limit=2&before=${encodeURIComponent(cursor!)}`)
|
||||
expect(b.status).toBe(200)
|
||||
const bBody = (await b.json()) as MessageV2.WithParts[]
|
||||
expect(bBody.map((item) => item.info.id)).toEqual(ids.slice(-4, -2))
|
||||
const b = await app.request(`/session/${session.id}/message?limit=2&before=${encodeURIComponent(cursor!)}`)
|
||||
expect(b.status).toBe(200)
|
||||
const bBody = (await b.json()) as MessageV2.WithParts[]
|
||||
expect(bBody.map((item) => item.info.id)).toEqual(ids.slice(-4, -2))
|
||||
|
||||
await Session.remove(session.id)
|
||||
},
|
||||
})
|
||||
await Session.remove(session.id)
|
||||
},
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
test("keeps full-history responses when limit is omitted", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const session = await Session.create({})
|
||||
const ids = await fill(session.id, 3)
|
||||
const app = Server.Default()
|
||||
await withoutWatcher(() =>
|
||||
Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const session = await Session.create({})
|
||||
const ids = await fill(session.id, 3)
|
||||
const app = Server.Default()
|
||||
|
||||
const res = await app.request(`/session/${session.id}/message`)
|
||||
expect(res.status).toBe(200)
|
||||
const body = (await res.json()) as MessageV2.WithParts[]
|
||||
expect(body.map((item) => item.info.id)).toEqual(ids)
|
||||
const res = await app.request(`/session/${session.id}/message`)
|
||||
expect(res.status).toBe(200)
|
||||
const body = (await res.json()) as MessageV2.WithParts[]
|
||||
expect(body.map((item) => item.info.id)).toEqual(ids)
|
||||
|
||||
await Session.remove(session.id)
|
||||
},
|
||||
})
|
||||
await Session.remove(session.id)
|
||||
},
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
test("rejects invalid cursors and missing sessions", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const session = await Session.create({})
|
||||
const app = Server.Default()
|
||||
await withoutWatcher(() =>
|
||||
Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const session = await Session.create({})
|
||||
const app = Server.Default()
|
||||
|
||||
const bad = await app.request(`/session/${session.id}/message?limit=2&before=bad`)
|
||||
expect(bad.status).toBe(400)
|
||||
const bad = await app.request(`/session/${session.id}/message?limit=2&before=bad`)
|
||||
expect(bad.status).toBe(400)
|
||||
|
||||
const miss = await app.request(`/session/ses_missing/message?limit=2`)
|
||||
expect(miss.status).toBe(404)
|
||||
const miss = await app.request(`/session/ses_missing/message?limit=2`)
|
||||
expect(miss.status).toBe(404)
|
||||
|
||||
await Session.remove(session.id)
|
||||
},
|
||||
})
|
||||
await Session.remove(session.id)
|
||||
},
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
test("does not truncate large legacy limit requests", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const session = await Session.create({})
|
||||
await fill(session.id, 520)
|
||||
const app = Server.Default()
|
||||
await withoutWatcher(() =>
|
||||
Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const session = await Session.create({})
|
||||
await fill(session.id, 520)
|
||||
const app = Server.Default()
|
||||
|
||||
const res = await app.request(`/session/${session.id}/message?limit=510`)
|
||||
expect(res.status).toBe(200)
|
||||
const body = (await res.json()) as MessageV2.WithParts[]
|
||||
expect(body).toHaveLength(510)
|
||||
const res = await app.request(`/session/${session.id}/message?limit=510`)
|
||||
expect(res.status).toBe(200)
|
||||
const body = (await res.json()) as MessageV2.WithParts[]
|
||||
expect(body).toHaveLength(510)
|
||||
|
||||
await Session.remove(session.id)
|
||||
},
|
||||
})
|
||||
await Session.remove(session.id)
|
||||
},
|
||||
}),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
295
packages/opencode/test/storage/storage.test.ts
Normal file
295
packages/opencode/test/storage/storage.test.ts
Normal file
@@ -0,0 +1,295 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import fs from "fs/promises"
|
||||
import path from "path"
|
||||
import { Effect, Layer, ManagedRuntime } from "effect"
|
||||
import { AppFileSystem } from "../../src/filesystem"
|
||||
import { Global } from "../../src/global"
|
||||
import { Storage } from "../../src/storage/storage"
|
||||
import { tmpdir } from "../fixture/fixture"
|
||||
|
||||
const dir = path.join(Global.Path.data, "storage")
|
||||
|
||||
async function withScope<T>(fn: (root: string[]) => Promise<T>) {
|
||||
const root = ["storage_test", crypto.randomUUID()]
|
||||
try {
|
||||
return await fn(root)
|
||||
} finally {
|
||||
await fs.rm(path.join(dir, ...root), { recursive: true, force: true })
|
||||
}
|
||||
}
|
||||
|
||||
function map(root: string, file: string) {
|
||||
if (file === Global.Path.data) return root
|
||||
if (file.startsWith(Global.Path.data + path.sep)) return path.join(root, path.relative(Global.Path.data, file))
|
||||
return file
|
||||
}
|
||||
|
||||
function layer(root: string) {
|
||||
return Layer.effect(
|
||||
AppFileSystem.Service,
|
||||
Effect.gen(function* () {
|
||||
const fs = yield* AppFileSystem.Service
|
||||
return AppFileSystem.Service.of({
|
||||
...fs,
|
||||
isDir: (file) => fs.isDir(map(root, file)),
|
||||
readJson: (file) => fs.readJson(map(root, file)),
|
||||
writeWithDirs: (file, content, mode) => fs.writeWithDirs(map(root, file), content, mode),
|
||||
readFileString: (file) => fs.readFileString(map(root, file)),
|
||||
remove: (file) => fs.remove(map(root, file)),
|
||||
glob: (pattern, options) =>
|
||||
fs.glob(pattern, options?.cwd ? { ...options, cwd: map(root, options.cwd) } : options),
|
||||
})
|
||||
}),
|
||||
).pipe(Layer.provide(AppFileSystem.defaultLayer))
|
||||
}
|
||||
|
||||
async function withStorage<T>(
|
||||
root: string,
|
||||
fn: (run: <A, E>(body: Effect.Effect<A, E, Storage.Service>) => Promise<A>) => Promise<T>,
|
||||
) {
|
||||
const rt = ManagedRuntime.make(Storage.layer.pipe(Layer.provide(layer(root))))
|
||||
try {
|
||||
return await fn((body) => rt.runPromise(body))
|
||||
} finally {
|
||||
await rt.dispose()
|
||||
}
|
||||
}
|
||||
|
||||
async function write(file: string, value: unknown) {
|
||||
await fs.mkdir(path.dirname(file), { recursive: true })
|
||||
await Bun.write(file, JSON.stringify(value, null, 2))
|
||||
}
|
||||
|
||||
async function text(file: string, value: string) {
|
||||
await fs.mkdir(path.dirname(file), { recursive: true })
|
||||
await Bun.write(file, value)
|
||||
}
|
||||
|
||||
async function exists(file: string) {
|
||||
return fs
|
||||
.stat(file)
|
||||
.then(() => true)
|
||||
.catch(() => false)
|
||||
}
|
||||
|
||||
describe("Storage", () => {
|
||||
test("round-trips JSON content", async () => {
|
||||
await withScope(async (root) => {
|
||||
const key = [...root, "session_diff", "roundtrip"]
|
||||
const value = [{ file: "a.ts", additions: 2, deletions: 1 }]
|
||||
|
||||
await Storage.write(key, value)
|
||||
|
||||
expect(await Storage.read<typeof value>(key)).toEqual(value)
|
||||
})
|
||||
})
|
||||
|
||||
test("maps missing reads to NotFoundError", async () => {
|
||||
await withScope(async (root) => {
|
||||
await expect(Storage.read([...root, "missing", "value"])).rejects.toMatchObject({ name: "NotFoundError" })
|
||||
})
|
||||
})
|
||||
|
||||
test("update on missing key throws NotFoundError", async () => {
|
||||
await withScope(async (root) => {
|
||||
await expect(
|
||||
Storage.update<{ value: number }>([...root, "missing", "key"], (draft) => {
|
||||
draft.value += 1
|
||||
}),
|
||||
).rejects.toMatchObject({ name: "NotFoundError" })
|
||||
})
|
||||
})
|
||||
|
||||
test("write overwrites existing value", async () => {
|
||||
await withScope(async (root) => {
|
||||
const key = [...root, "overwrite", "test"]
|
||||
await Storage.write<{ v: number }>(key, { v: 1 })
|
||||
await Storage.write<{ v: number }>(key, { v: 2 })
|
||||
|
||||
expect(await Storage.read<{ v: number }>(key)).toEqual({ v: 2 })
|
||||
})
|
||||
})
|
||||
|
||||
test("remove on missing key is a no-op", async () => {
|
||||
await withScope(async (root) => {
|
||||
await expect(Storage.remove([...root, "nonexistent", "key"])).resolves.toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
test("list on missing prefix returns empty", async () => {
|
||||
await withScope(async (root) => {
|
||||
expect(await Storage.list([...root, "nonexistent"])).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
test("serializes concurrent updates for the same key", async () => {
|
||||
await withScope(async (root) => {
|
||||
const key = [...root, "counter", "shared"]
|
||||
await Storage.write(key, { value: 0 })
|
||||
|
||||
await Promise.all(
|
||||
Array.from({ length: 25 }, () =>
|
||||
Storage.update<{ value: number }>(key, (draft) => {
|
||||
draft.value += 1
|
||||
}),
|
||||
),
|
||||
)
|
||||
|
||||
expect(await Storage.read<{ value: number }>(key)).toEqual({ value: 25 })
|
||||
})
|
||||
})
|
||||
|
||||
test("concurrent reads do not block each other", async () => {
|
||||
await withScope(async (root) => {
|
||||
const key = [...root, "concurrent", "reads"]
|
||||
await Storage.write(key, { ok: true })
|
||||
|
||||
const results = await Promise.all(Array.from({ length: 10 }, () => Storage.read(key)))
|
||||
|
||||
expect(results).toHaveLength(10)
|
||||
for (const r of results) expect(r).toEqual({ ok: true })
|
||||
})
|
||||
})
|
||||
|
||||
test("nested keys create deep paths", async () => {
|
||||
await withScope(async (root) => {
|
||||
const key = [...root, "a", "b", "c", "deep"]
|
||||
await Storage.write<{ nested: boolean }>(key, { nested: true })
|
||||
|
||||
expect(await Storage.read<{ nested: boolean }>(key)).toEqual({ nested: true })
|
||||
expect(await Storage.list([...root, "a"])).toEqual([key])
|
||||
})
|
||||
})
|
||||
|
||||
test("lists and removes stored entries", async () => {
|
||||
await withScope(async (root) => {
|
||||
const a = [...root, "list", "a"]
|
||||
const b = [...root, "list", "b"]
|
||||
const prefix = [...root, "list"]
|
||||
|
||||
await Storage.write(b, { value: 2 })
|
||||
await Storage.write(a, { value: 1 })
|
||||
|
||||
expect(await Storage.list(prefix)).toEqual([a, b])
|
||||
|
||||
await Storage.remove(a)
|
||||
|
||||
expect(await Storage.list(prefix)).toEqual([b])
|
||||
await expect(Storage.read(a)).rejects.toMatchObject({ name: "NotFoundError" })
|
||||
})
|
||||
})
|
||||
|
||||
test("migration 2 runs when marker contents are invalid", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
const storage = path.join(tmp.path, "storage")
|
||||
const diffs = [
|
||||
{ additions: 2, deletions: 1 },
|
||||
{ additions: 3, deletions: 4 },
|
||||
]
|
||||
|
||||
await text(path.join(storage, "migration"), "wat")
|
||||
await write(path.join(storage, "session", "proj_test", "ses_test.json"), {
|
||||
id: "ses_test",
|
||||
projectID: "proj_test",
|
||||
title: "legacy",
|
||||
summary: { diffs },
|
||||
})
|
||||
|
||||
await withStorage(tmp.path, async (run) => {
|
||||
expect(await run(Storage.Service.use((svc) => svc.list(["session_diff"])))).toEqual([
|
||||
["session_diff", "ses_test"],
|
||||
])
|
||||
expect(await run(Storage.Service.use((svc) => svc.read<typeof diffs>(["session_diff", "ses_test"])))).toEqual(
|
||||
diffs,
|
||||
)
|
||||
expect(
|
||||
await run(
|
||||
Storage.Service.use((svc) =>
|
||||
svc.read<{
|
||||
id: string
|
||||
projectID: string
|
||||
title: string
|
||||
summary: {
|
||||
additions: number
|
||||
deletions: number
|
||||
}
|
||||
}>(["session", "proj_test", "ses_test"]),
|
||||
),
|
||||
),
|
||||
).toEqual({
|
||||
id: "ses_test",
|
||||
projectID: "proj_test",
|
||||
title: "legacy",
|
||||
summary: {
|
||||
additions: 5,
|
||||
deletions: 5,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
expect(await Bun.file(path.join(storage, "migration")).text()).toBe("2")
|
||||
})
|
||||
|
||||
test("migration 1 tolerates malformed legacy records", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
const storage = path.join(tmp.path, "storage")
|
||||
const legacy = path.join(tmp.path, "project", "legacy")
|
||||
|
||||
await write(path.join(legacy, "storage", "session", "message", "probe", "0.json"), [])
|
||||
await write(path.join(legacy, "storage", "session", "message", "probe", "1.json"), {
|
||||
path: { root: tmp.path },
|
||||
})
|
||||
await write(path.join(legacy, "storage", "session", "info", "ses_legacy.json"), {
|
||||
id: "ses_legacy",
|
||||
title: "legacy",
|
||||
})
|
||||
await write(path.join(legacy, "storage", "session", "message", "ses_legacy", "msg_legacy.json"), {
|
||||
role: "user",
|
||||
text: "hello",
|
||||
})
|
||||
|
||||
await withStorage(tmp.path, async (run) => {
|
||||
const projects = await run(Storage.Service.use((svc) => svc.list(["project"])))
|
||||
expect(projects).toHaveLength(1)
|
||||
const project = projects[0]![1]
|
||||
|
||||
expect(await run(Storage.Service.use((svc) => svc.list(["session", project])))).toEqual([
|
||||
["session", project, "ses_legacy"],
|
||||
])
|
||||
expect(
|
||||
await run(
|
||||
Storage.Service.use((svc) => svc.read<{ id: string; title: string }>(["session", project, "ses_legacy"])),
|
||||
),
|
||||
).toEqual({
|
||||
id: "ses_legacy",
|
||||
title: "legacy",
|
||||
})
|
||||
expect(
|
||||
await run(
|
||||
Storage.Service.use((svc) =>
|
||||
svc.read<{ role: string; text: string }>(["message", "ses_legacy", "msg_legacy"]),
|
||||
),
|
||||
),
|
||||
).toEqual({
|
||||
role: "user",
|
||||
text: "hello",
|
||||
})
|
||||
})
|
||||
|
||||
expect(await Bun.file(path.join(storage, "migration")).text()).toBe("2")
|
||||
})
|
||||
|
||||
test("failed migrations do not advance the marker", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
const storage = path.join(tmp.path, "storage")
|
||||
const legacy = path.join(tmp.path, "project", "legacy")
|
||||
|
||||
await text(path.join(legacy, "storage", "session", "message", "probe", "0.json"), "{")
|
||||
|
||||
await withStorage(tmp.path, async (run) => {
|
||||
expect(await run(Storage.Service.use((svc) => svc.list(["project"])))).toEqual([])
|
||||
})
|
||||
|
||||
expect(await exists(path.join(storage, "migration"))).toBe(false)
|
||||
})
|
||||
})
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "@opencode-ai/plugin",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
@@ -21,8 +21,8 @@
|
||||
"zod": "catalog:"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@opentui/core": ">=0.1.92",
|
||||
"@opentui/solid": ">=0.1.92"
|
||||
"@opentui/core": ">=0.1.93",
|
||||
"@opentui/solid": ">=0.1.93"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@opentui/core": {
|
||||
@@ -33,8 +33,8 @@
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@opentui/core": "0.1.92",
|
||||
"@opentui/solid": "0.1.92",
|
||||
"@opentui/core": "0.1.93",
|
||||
"@opentui/solid": "0.1.93",
|
||||
"@tsconfig/node22": "catalog:",
|
||||
"@types/node": "catalog:",
|
||||
"typescript": "catalog:",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "@opencode-ai/sdk",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -126,6 +126,7 @@ export type EventPermissionReplied = {
|
||||
export type SessionStatus =
|
||||
| {
|
||||
type: "idle"
|
||||
suggestion?: string
|
||||
}
|
||||
| {
|
||||
type: "retry"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/slack",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/ui",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"exports": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/util",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "@opencode-ai/web",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"scripts": {
|
||||
"dev": "astro dev",
|
||||
"dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev",
|
||||
|
||||
@@ -1,249 +1,40 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import { $ } from "bun"
|
||||
import { rm } from "fs/promises"
|
||||
import path from "path"
|
||||
import { parseArgs } from "util"
|
||||
|
||||
type Release = {
|
||||
tag_name: string
|
||||
draft: boolean
|
||||
}
|
||||
const root = path.resolve(import.meta.dir, "..")
|
||||
const file = path.join(root, "UPCOMING_CHANGELOG.md")
|
||||
const { values, positionals } = parseArgs({
|
||||
args: Bun.argv.slice(2),
|
||||
options: {
|
||||
from: { type: "string", short: "f" },
|
||||
to: { type: "string", short: "t" },
|
||||
variant: { type: "string", default: "low" },
|
||||
quiet: { type: "boolean", default: false },
|
||||
print: { type: "boolean", default: false },
|
||||
help: { type: "boolean", short: "h", default: false },
|
||||
},
|
||||
allowPositionals: true,
|
||||
})
|
||||
const args = [...positionals]
|
||||
|
||||
type Commit = {
|
||||
hash: string
|
||||
author: string | null
|
||||
message: string
|
||||
areas: Set<string>
|
||||
}
|
||||
if (values.from) args.push("--from", values.from)
|
||||
if (values.to) args.push("--to", values.to)
|
||||
|
||||
type User = Map<string, Set<string>>
|
||||
type Diff = {
|
||||
sha: string
|
||||
login: string | null
|
||||
message: string
|
||||
}
|
||||
|
||||
const repo = process.env.GH_REPO ?? "anomalyco/opencode"
|
||||
const bot = ["actions-user", "opencode", "opencode-agent[bot]"]
|
||||
const team = [
|
||||
...(await Bun.file(new URL("../.github/TEAM_MEMBERS", import.meta.url))
|
||||
.text()
|
||||
.then((x) => x.split(/\r?\n/).map((x) => x.trim()))
|
||||
.then((x) => x.filter((x) => x && !x.startsWith("#")))),
|
||||
...bot,
|
||||
]
|
||||
const order = ["Core", "TUI", "Desktop", "SDK", "Extensions"] as const
|
||||
const sections = {
|
||||
core: "Core",
|
||||
tui: "TUI",
|
||||
app: "Desktop",
|
||||
tauri: "Desktop",
|
||||
sdk: "SDK",
|
||||
plugin: "SDK",
|
||||
"extensions/zed": "Extensions",
|
||||
"extensions/vscode": "Extensions",
|
||||
github: "Extensions",
|
||||
} as const
|
||||
|
||||
function ref(input: string) {
|
||||
if (input === "HEAD") return input
|
||||
if (input.startsWith("v")) return input
|
||||
if (input.match(/^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/)) return `v${input}`
|
||||
return input
|
||||
}
|
||||
|
||||
async function latest() {
|
||||
const data = await $`gh api "/repos/${repo}/releases?per_page=100"`.json()
|
||||
const release = (data as Release[]).find((item) => !item.draft)
|
||||
if (!release) throw new Error("No releases found")
|
||||
return release.tag_name.replace(/^v/, "")
|
||||
}
|
||||
|
||||
async function diff(base: string, head: string) {
|
||||
const list: Diff[] = []
|
||||
for (let page = 1; ; page++) {
|
||||
const text =
|
||||
await $`gh api "/repos/${repo}/compare/${base}...${head}?per_page=100&page=${page}" --jq '.commits[] | {sha: .sha, login: .author.login, message: .commit.message}'`.text()
|
||||
const batch = text
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((line) => JSON.parse(line) as Diff)
|
||||
if (batch.length === 0) break
|
||||
list.push(...batch)
|
||||
if (batch.length < 100) break
|
||||
}
|
||||
return list
|
||||
}
|
||||
|
||||
function section(areas: Set<string>) {
|
||||
const priority = ["core", "tui", "app", "tauri", "sdk", "plugin", "extensions/zed", "extensions/vscode", "github"]
|
||||
for (const area of priority) {
|
||||
if (areas.has(area)) return sections[area as keyof typeof sections]
|
||||
}
|
||||
return "Core"
|
||||
}
|
||||
|
||||
function reverted(commits: Commit[]) {
|
||||
const seen = new Map<string, Commit>()
|
||||
|
||||
for (const commit of commits) {
|
||||
const match = commit.message.match(/^Revert "(.+)"$/)
|
||||
if (match) {
|
||||
const msg = match[1]!
|
||||
if (seen.has(msg)) seen.delete(msg)
|
||||
else seen.set(commit.message, commit)
|
||||
continue
|
||||
}
|
||||
|
||||
const revert = `Revert "${commit.message}"`
|
||||
if (seen.has(revert)) {
|
||||
seen.delete(revert)
|
||||
continue
|
||||
}
|
||||
|
||||
seen.set(commit.message, commit)
|
||||
}
|
||||
|
||||
return [...seen.values()]
|
||||
}
|
||||
|
||||
async function commits(from: string, to: string) {
|
||||
const base = ref(from)
|
||||
const head = ref(to)
|
||||
|
||||
const data = new Map<string, { login: string | null; message: string }>()
|
||||
for (const item of await diff(base, head)) {
|
||||
data.set(item.sha, { login: item.login, message: item.message.split("\n")[0] ?? "" })
|
||||
}
|
||||
|
||||
const log =
|
||||
await $`git log ${base}..${head} --format=%H -- packages/opencode packages/sdk packages/plugin packages/desktop packages/app sdks/vscode packages/extensions github`.text()
|
||||
|
||||
const list: Commit[] = []
|
||||
for (const hash of log.split("\n").filter(Boolean)) {
|
||||
const item = data.get(hash)
|
||||
if (!item) continue
|
||||
if (item.message.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
|
||||
|
||||
const diff = await $`git diff-tree --no-commit-id --name-only -r ${hash}`.text()
|
||||
const areas = new Set<string>()
|
||||
|
||||
for (const file of diff.split("\n").filter(Boolean)) {
|
||||
if (file.startsWith("packages/opencode/src/cli/cmd/")) areas.add("tui")
|
||||
else if (file.startsWith("packages/opencode/")) areas.add("core")
|
||||
else if (file.startsWith("packages/desktop/src-tauri/")) areas.add("tauri")
|
||||
else if (file.startsWith("packages/desktop/") || file.startsWith("packages/app/")) areas.add("app")
|
||||
else if (file.startsWith("packages/sdk/") || file.startsWith("packages/plugin/")) areas.add("sdk")
|
||||
else if (file.startsWith("packages/extensions/")) areas.add("extensions/zed")
|
||||
else if (file.startsWith("sdks/vscode/") || file.startsWith("github/")) areas.add("extensions/vscode")
|
||||
}
|
||||
|
||||
if (areas.size === 0) continue
|
||||
|
||||
list.push({
|
||||
hash: hash.slice(0, 7),
|
||||
author: item.login,
|
||||
message: item.message,
|
||||
areas,
|
||||
})
|
||||
}
|
||||
|
||||
return reverted(list)
|
||||
}
|
||||
|
||||
async function contributors(from: string, to: string) {
|
||||
const base = ref(from)
|
||||
const head = ref(to)
|
||||
|
||||
const users: User = new Map()
|
||||
for (const item of await diff(base, head)) {
|
||||
const title = item.message.split("\n")[0] ?? ""
|
||||
if (!item.login || team.includes(item.login)) continue
|
||||
if (title.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
|
||||
if (!users.has(item.login)) users.set(item.login, new Set())
|
||||
users.get(item.login)!.add(title)
|
||||
}
|
||||
|
||||
return users
|
||||
}
|
||||
|
||||
async function published(to: string) {
|
||||
if (to === "HEAD") return
|
||||
const body = await $`gh release view ${ref(to)} --repo ${repo} --json body --jq .body`.text().catch(() => "")
|
||||
if (!body) return
|
||||
|
||||
const lines = body.split(/\r?\n/)
|
||||
const start = lines.findIndex((line) => line.startsWith("**Thank you to "))
|
||||
if (start < 0) return
|
||||
return lines.slice(start).join("\n").trim()
|
||||
}
|
||||
|
||||
async function thanks(from: string, to: string, reuse: boolean) {
|
||||
const release = reuse ? await published(to) : undefined
|
||||
if (release) return release.split(/\r?\n/)
|
||||
|
||||
const users = await contributors(from, to)
|
||||
if (users.size === 0) return []
|
||||
|
||||
const lines = [`**Thank you to ${users.size} community contributor${users.size > 1 ? "s" : ""}:**`]
|
||||
for (const [name, commits] of users) {
|
||||
lines.push(`- @${name}:`)
|
||||
for (const commit of commits) lines.push(` - ${commit}`)
|
||||
}
|
||||
return lines
|
||||
}
|
||||
|
||||
function format(from: string, to: string, list: Commit[], thanks: string[]) {
|
||||
const grouped = new Map<string, string[]>()
|
||||
for (const title of order) grouped.set(title, [])
|
||||
|
||||
for (const commit of list) {
|
||||
const title = section(commit.areas)
|
||||
const attr = commit.author && !team.includes(commit.author) ? ` (@${commit.author})` : ""
|
||||
grouped.get(title)!.push(`- \`${commit.hash}\` ${commit.message}${attr}`)
|
||||
}
|
||||
|
||||
const lines = [`Last release: ${ref(from)}`, `Target ref: ${to}`, ""]
|
||||
|
||||
if (list.length === 0) {
|
||||
lines.push("No notable changes.")
|
||||
}
|
||||
|
||||
for (const title of order) {
|
||||
const entries = grouped.get(title)
|
||||
if (!entries || entries.length === 0) continue
|
||||
lines.push(`## ${title}`)
|
||||
lines.push(...entries)
|
||||
lines.push("")
|
||||
}
|
||||
|
||||
if (thanks.length > 0) {
|
||||
if (lines.at(-1) !== "") lines.push("")
|
||||
lines.push("## Community Contributors Input")
|
||||
lines.push("")
|
||||
lines.push(...thanks)
|
||||
}
|
||||
|
||||
if (lines.at(-1) === "") lines.pop()
|
||||
return lines.join("\n")
|
||||
}
|
||||
|
||||
if (import.meta.main) {
|
||||
const { values } = parseArgs({
|
||||
args: Bun.argv.slice(2),
|
||||
options: {
|
||||
from: { type: "string", short: "f" },
|
||||
to: { type: "string", short: "t", default: "HEAD" },
|
||||
help: { type: "boolean", short: "h", default: false },
|
||||
},
|
||||
})
|
||||
|
||||
if (values.help) {
|
||||
console.log(`
|
||||
if (values.help) {
|
||||
console.log(`
|
||||
Usage: bun script/changelog.ts [options]
|
||||
|
||||
Generates UPCOMING_CHANGELOG.md by running the opencode changelog command.
|
||||
|
||||
Options:
|
||||
-f, --from <version> Starting version (default: latest non-draft GitHub release)
|
||||
-t, --to <ref> Ending ref (default: HEAD)
|
||||
--variant <name> Thinking variant for opencode run (default: low)
|
||||
--quiet Suppress opencode command output unless it fails
|
||||
--print Print the generated UPCOMING_CHANGELOG.md after success
|
||||
-h, --help Show this help message
|
||||
|
||||
Examples:
|
||||
@@ -251,11 +42,35 @@ Examples:
|
||||
bun script/changelog.ts --from 1.0.200
|
||||
bun script/changelog.ts -f 1.0.200 -t 1.0.205
|
||||
`)
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const to = values.to!
|
||||
const from = values.from ?? (await latest())
|
||||
const list = await commits(from, to)
|
||||
console.log(format(from, to, list, await thanks(from, to, !values.from)))
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
await rm(file, { force: true })
|
||||
|
||||
const quiet = values.quiet
|
||||
const cmd = ["opencode", "run"]
|
||||
cmd.push("--variant", values.variant)
|
||||
cmd.push("--command", "changelog", "--", ...args)
|
||||
|
||||
const proc = Bun.spawn(cmd, {
|
||||
cwd: root,
|
||||
stdin: "inherit",
|
||||
stdout: quiet ? "pipe" : "inherit",
|
||||
stderr: quiet ? "pipe" : "inherit",
|
||||
})
|
||||
|
||||
const [out, err] = quiet
|
||||
? await Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()])
|
||||
: ["", ""]
|
||||
const code = await proc.exited
|
||||
if (code === 0) {
|
||||
if (values.print) process.stdout.write(await Bun.file(file).text())
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
if (quiet) {
|
||||
if (out) process.stdout.write(out)
|
||||
if (err) process.stderr.write(err)
|
||||
}
|
||||
|
||||
process.exit(code)
|
||||
|
||||
261
script/raw-changelog.ts
Normal file
261
script/raw-changelog.ts
Normal file
@@ -0,0 +1,261 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import { $ } from "bun"
|
||||
import { parseArgs } from "util"
|
||||
|
||||
// Subset of the GitHub release payload returned by `gh api /repos/{repo}/releases`.
type Release = {
  tag_name: string
  draft: boolean
}

// One commit in the release range, annotated with the package areas it touched.
type Commit = {
  hash: string
  author: string | null
  message: string
  areas: Set<string>
}

// Maps a GitHub login to the set of commit titles that user authored.
type User = Map<string, Set<string>>
// One entry from the GitHub compare endpoint (`/repos/{repo}/compare/base...head`).
type Diff = {
  sha: string
  login: string | null
  message: string
}

// Target repository; overridable (e.g. for forks) via GH_REPO.
const repo = process.env.GH_REPO ?? "anomalyco/opencode"
// Bot accounts that must never be credited as community contributors.
const bot = ["actions-user", "opencode", "opencode-agent[bot]"]
// Team logins: .github/TEAM_MEMBERS with blank lines and "#" comments stripped, plus bots.
const team = [
  ...(await Bun.file(new URL("../.github/TEAM_MEMBERS", import.meta.url))
    .text()
    .then((x) => x.split(/\r?\n/).map((x) => x.trim()))
    .then((x) => x.filter((x) => x && !x.startsWith("#")))),
  ...bot,
]
// Changelog section headings, in output order.
const order = ["Core", "TUI", "Desktop", "SDK", "Extensions"] as const
// Maps a commit "area" tag (derived from touched file paths) to its section heading.
const sections = {
  core: "Core",
  tui: "TUI",
  app: "Desktop",
  tauri: "Desktop",
  sdk: "SDK",
  plugin: "SDK",
  "extensions/zed": "Extensions",
  "extensions/vscode": "Extensions",
  github: "Extensions",
} as const
|
||||
|
||||
function ref(input: string) {
|
||||
if (input === "HEAD") return input
|
||||
if (input.startsWith("v")) return input
|
||||
if (input.match(/^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/)) return `v${input}`
|
||||
return input
|
||||
}
|
||||
|
||||
async function latest() {
|
||||
const data = await $`gh api "/repos/${repo}/releases?per_page=100"`.json()
|
||||
const release = (data as Release[]).find((item) => !item.draft)
|
||||
if (!release) throw new Error("No releases found")
|
||||
return release.tag_name.replace(/^v/, "")
|
||||
}
|
||||
|
||||
async function diff(base: string, head: string) {
|
||||
const list: Diff[] = []
|
||||
for (let page = 1; ; page++) {
|
||||
const text =
|
||||
await $`gh api "/repos/${repo}/compare/${base}...${head}?per_page=100&page=${page}" --jq '.commits[] | {sha: .sha, login: .author.login, message: .commit.message}'`.text()
|
||||
const batch = text
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((line) => JSON.parse(line) as Diff)
|
||||
if (batch.length === 0) break
|
||||
list.push(...batch)
|
||||
if (batch.length < 100) break
|
||||
}
|
||||
return list
|
||||
}
|
||||
|
||||
function section(areas: Set<string>) {
|
||||
const priority = ["core", "tui", "app", "tauri", "sdk", "plugin", "extensions/zed", "extensions/vscode", "github"]
|
||||
for (const area of priority) {
|
||||
if (areas.has(area)) return sections[area as keyof typeof sections]
|
||||
}
|
||||
return "Core"
|
||||
}
|
||||
|
||||
function reverted(commits: Commit[]) {
|
||||
const seen = new Map<string, Commit>()
|
||||
|
||||
for (const commit of commits) {
|
||||
const match = commit.message.match(/^Revert "(.+)"$/)
|
||||
if (match) {
|
||||
const msg = match[1]!
|
||||
if (seen.has(msg)) seen.delete(msg)
|
||||
else seen.set(commit.message, commit)
|
||||
continue
|
||||
}
|
||||
|
||||
const revert = `Revert "${commit.message}"`
|
||||
if (seen.has(revert)) {
|
||||
seen.delete(revert)
|
||||
continue
|
||||
}
|
||||
|
||||
seen.set(commit.message, commit)
|
||||
}
|
||||
|
||||
return [...seen.values()]
|
||||
}
|
||||
|
||||
/**
 * Builds the filtered commit list for the release range `from..to`.
 *
 * Commit metadata (author login, message title) comes from the GitHub compare
 * API, while membership and touched files come from the local git history
 * restricted to the release-relevant package paths. Housekeeping commits and
 * commits touching none of the classified paths are dropped, then revert
 * pairs are cancelled via reverted().
 */
async function commits(from: string, to: string) {
  const base = ref(from)
  const head = ref(to)

  // sha -> { login, first line of commit message }, keyed for the git log below.
  const data = new Map<string, { login: string | null; message: string }>()
  for (const item of await diff(base, head)) {
    data.set(item.sha, { login: item.login, message: item.message.split("\n")[0] ?? "" })
  }

  // Local git log limited to the paths that ship in a release.
  const log =
    await $`git log ${base}..${head} --format=%H -- packages/opencode packages/sdk packages/plugin packages/desktop packages/app sdks/vscode packages/extensions github`.text()

  const list: Commit[] = []
  for (const hash of log.split("\n").filter(Boolean)) {
    const item = data.get(hash)
    // Skip commits git saw but the GitHub compare range did not.
    if (!item) continue
    // Housekeeping prefixes are excluded from the changelog.
    if (item.message.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue

    // Classify by touched files; per file, the first matching prefix wins.
    // (This local `diff` string shadows the diff() helper inside the loop only.)
    const diff = await $`git diff-tree --no-commit-id --name-only -r ${hash}`.text()
    const areas = new Set<string>()

    for (const file of diff.split("\n").filter(Boolean)) {
      if (file.startsWith("packages/opencode/src/cli/cmd/")) areas.add("tui")
      else if (file.startsWith("packages/opencode/")) areas.add("core")
      else if (file.startsWith("packages/desktop/src-tauri/")) areas.add("tauri")
      else if (file.startsWith("packages/desktop/") || file.startsWith("packages/app/")) areas.add("app")
      else if (file.startsWith("packages/sdk/") || file.startsWith("packages/plugin/")) areas.add("sdk")
      else if (file.startsWith("packages/extensions/")) areas.add("extensions/zed")
      else if (file.startsWith("sdks/vscode/") || file.startsWith("github/")) areas.add("extensions/vscode")
    }

    // Commits that only touched unclassified paths are not release-relevant.
    if (areas.size === 0) continue

    list.push({
      hash: hash.slice(0, 7), // short hash for changelog display
      author: item.login,
      message: item.message,
      areas,
    })
  }

  return reverted(list)
}
|
||||
|
||||
async function contributors(from: string, to: string) {
|
||||
const base = ref(from)
|
||||
const head = ref(to)
|
||||
|
||||
const users: User = new Map()
|
||||
for (const item of await diff(base, head)) {
|
||||
const title = item.message.split("\n")[0] ?? ""
|
||||
if (!item.login || team.includes(item.login)) continue
|
||||
if (title.match(/^(ignore:|test:|chore:|ci:|release:)/i)) continue
|
||||
if (!users.has(item.login)) users.set(item.login, new Set())
|
||||
users.get(item.login)!.add(title)
|
||||
}
|
||||
|
||||
return users
|
||||
}
|
||||
|
||||
async function published(to: string) {
|
||||
if (to === "HEAD") return
|
||||
const body = await $`gh release view ${ref(to)} --repo ${repo} --json body --jq .body`.text().catch(() => "")
|
||||
if (!body) return
|
||||
|
||||
const lines = body.split(/\r?\n/)
|
||||
const start = lines.findIndex((line) => line.startsWith("**Thank you to "))
|
||||
if (start < 0) return
|
||||
return lines.slice(start).join("\n").trim()
|
||||
}
|
||||
|
||||
async function thanks(from: string, to: string, reuse: boolean) {
|
||||
const release = reuse ? await published(to) : undefined
|
||||
if (release) return release.split(/\r?\n/)
|
||||
|
||||
const users = await contributors(from, to)
|
||||
if (users.size === 0) return []
|
||||
|
||||
const lines = [`**Thank you to ${users.size} community contributor${users.size > 1 ? "s" : ""}:**`]
|
||||
for (const [name, commits] of users) {
|
||||
lines.push(`- @${name}:`)
|
||||
for (const commit of commits) lines.push(` - ${commit}`)
|
||||
}
|
||||
return lines
|
||||
}
|
||||
|
||||
function format(from: string, to: string, list: Commit[], thanks: string[]) {
|
||||
const grouped = new Map<string, string[]>()
|
||||
for (const title of order) grouped.set(title, [])
|
||||
|
||||
for (const commit of list) {
|
||||
const title = section(commit.areas)
|
||||
const attr = commit.author && !team.includes(commit.author) ? ` (@${commit.author})` : ""
|
||||
grouped.get(title)!.push(`- \`${commit.hash}\` ${commit.message}${attr}`)
|
||||
}
|
||||
|
||||
const lines = [`Last release: ${ref(from)}`, `Target ref: ${to}`, ""]
|
||||
|
||||
if (list.length === 0) {
|
||||
lines.push("No notable changes.")
|
||||
}
|
||||
|
||||
for (const title of order) {
|
||||
const entries = grouped.get(title)
|
||||
if (!entries || entries.length === 0) continue
|
||||
lines.push(`## ${title}`)
|
||||
lines.push(...entries)
|
||||
lines.push("")
|
||||
}
|
||||
|
||||
if (thanks.length > 0) {
|
||||
if (lines.at(-1) !== "") lines.push("")
|
||||
lines.push("## Community Contributors Input")
|
||||
lines.push("")
|
||||
lines.push(...thanks)
|
||||
}
|
||||
|
||||
if (lines.at(-1) === "") lines.pop()
|
||||
return lines.join("\n")
|
||||
}
|
||||
|
||||
if (import.meta.main) {
  // CLI entry point: print the raw grouped changelog for from..to to stdout.
  const { values } = parseArgs({
    args: Bun.argv.slice(2),
    options: {
      from: { type: "string", short: "f" },
      to: { type: "string", short: "t", default: "HEAD" },
      help: { type: "boolean", short: "h", default: false },
    },
  })

  if (values.help) {
    // NOTE(review): exact indentation of this help text could not be recovered
    // from the mangled source — verify column alignment against the original.
    console.log(`
Usage: bun script/raw-changelog.ts [options]

Options:
  -f, --from <version>  Starting version (default: latest non-draft GitHub release)
  -t, --to <ref>        Ending ref (default: HEAD)
  -h, --help            Show this help message

Examples:
  bun script/raw-changelog.ts
  bun script/raw-changelog.ts --from 1.0.200
  bun script/raw-changelog.ts -f 1.0.200 -t 1.0.205
`)
    process.exit(0)
  }

  const to = values.to!
  // Default the range start to the latest published (non-draft) release.
  const from = values.from ?? (await latest())
  const list = await commits(from, to)
  // Reuse the published thank-you section only when no explicit --from was given.
  console.log(format(from, to, list, await thanks(from, to, !values.from)))
}
|
||||
@@ -7,7 +7,7 @@ const output = [`version=${Script.version}`]
|
||||
|
||||
if (!Script.preview) {
|
||||
const sha = process.env.GITHUB_SHA ?? (await $`git rev-parse HEAD`.text()).trim()
|
||||
await $`opencode run --command changelog -- --to ${sha}`.cwd(process.cwd())
|
||||
await $`bun script/changelog.ts --to ${sha}`.cwd(process.cwd())
|
||||
const file = `${process.cwd()}/UPCOMING_CHANGELOG.md`
|
||||
const body = await Bun.file(file)
|
||||
.text()
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "opencode",
|
||||
"displayName": "opencode",
|
||||
"description": "opencode for VS Code",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.10",
|
||||
"publisher": "sst-dev",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
Reference in New Issue
Block a user