Mirror of https://github.com/anomalyco/opencode.git, synced 2026-02-09 18:34:21 +00:00

Compare commits: fix-tool-o ... apply-patc (90 commits)
Commits in this comparison (90, listed by SHA):

ac3d0cb5a3, 06d69ab609, c2cc486c7d, 8a6b8e5339, cfd6a7ae96, 4173ee0e0b, 22b5d7e570, f1ec28176f, ab78a46396, 2ed18ea1fe,
40eddce435, 78f8cc9418, 58f7da6e9f, 5a199b04cb, eb968a6651, a813fcb41c, a58d1be822, 07dc8d8ce4, d377246491, 7030f49a74,
c4e4f2a058, 2729705594, ea13b6e8aa, 85ab9798c6, 33290c54cd, 5d613a038d, db78a59f03, 7c3eeeb0fa, e8357a87b0, 06c543e938,
759ce8fb8e, 38847e13bb, e0c6459faa, 80b278ddab, ef7ef6538e, d23c21023a, dfa2a9f225, 6f78a71fa7, f8f1f46a4f, ab705dacfa,
d1b93616f7, 69215d456c, 54e52896a4, b18fb16e9c, 1250486ddf, d645e8bbe1, cad415872e, e8746ddb1d, 80020ade2e, 08ef97b162,
1aedb265dd, 5c13b209aa, 43a9c50389, 55224d64a2, c325aa1142, 6e020ef9ef, aca1eb6b5b, 3d095e7fe7, 632f20558a, f96c4badd8,
cbe1c81470, c25155586c, 08b94a6890, 8cddc9ea55, 578239e0d0, 626fa1462b, 968239bb76, 8c24879246, 9127055ae7, f5a6a4af7f,
6e00348bd7, 95f7403daf, 14d1e20287, b8e2895dfc, 6e028ec2dc, 8e0ddd1ac9, da78b758d4, 360765c591, db0078bf17, 98578d3a7b,
bc3616d9c6, 71306cbd1f, 0866034946, 2ccaa10e79, e92d5b592c, 00ec29dae6, 438916de5f, 8d4a67324e, 0d683eaa8e, 8fd1b92e6e
.github/workflows/nix-desktop.yml (vendored, 4 lines changed)

@@ -9,6 +9,7 @@ on:
      - "nix/**"
      - "packages/app/**"
      - "packages/desktop/**"
      - ".github/workflows/nix-desktop.yml"
  pull_request:
    paths:
      - "flake.nix"
@@ -16,6 +17,7 @@ on:
      - "nix/**"
      - "packages/app/**"
      - "packages/desktop/**"
      - ".github/workflows/nix-desktop.yml"
  workflow_dispatch:

jobs:
@@ -26,7 +28,7 @@ jobs:
        os:
          - blacksmith-4vcpu-ubuntu-2404
          - blacksmith-4vcpu-ubuntu-2404-arm
          - macos-15
          - macos-15-intel
          - macos-latest
    runs-on: ${{ matrix.os }}
    timeout-minutes: 60
.github/workflows/update-nix-hashes.yml (vendored, 202 lines changed)

@@ -10,11 +10,13 @@ on:
      - "bun.lock"
      - "package.json"
      - "packages/*/package.json"
      - ".github/workflows/update-nix-hashes.yml"
  pull_request:
    paths:
      - "bun.lock"
      - "package.json"
      - "packages/*/package.json"
      - ".github/workflows/update-nix-hashes.yml"

jobs:
  update-flake:
@@ -25,7 +27,7 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        uses: actions/checkout@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          fetch-depth: 0
@@ -43,9 +45,9 @@ jobs:
      - name: Update ${{ env.TITLE }}
        run: |
          set -euo pipefail
          echo "📦 Updating $TITLE..."
          echo "Updating $TITLE..."
          nix flake update
          echo "✅ $TITLE updated successfully"
          echo "$TITLE updated successfully"

      - name: Commit ${{ env.TITLE }} changes
        env:
@@ -53,7 +55,7 @@ jobs:
        run: |
          set -euo pipefail

          echo "🔍 Checking for changes in tracked files..."
          echo "Checking for changes in tracked files..."

          summarize() {
            local status="$1"
@@ -71,29 +73,29 @@ jobs:
          FILES=(flake.lock flake.nix)
          STATUS="$(git status --short -- "${FILES[@]}" || true)"
          if [ -z "$STATUS" ]; then
            echo "✅ No changes detected."
            echo "No changes detected."
            summarize "no changes"
            exit 0
          fi

          echo "📝 Changes detected:"
          echo "Changes detected:"
          echo "$STATUS"
          echo "🔗 Staging files..."
          echo "Staging files..."
          git add "${FILES[@]}"
          echo "💾 Committing changes..."
          echo "Committing changes..."
          git commit -m "Update $TITLE"
          echo "✅ Changes committed"
          echo "Changes committed"

          BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
          echo "🌳 Pulling latest from branch: $BRANCH"
          git pull --rebase origin "$BRANCH"
          echo "🚀 Pushing changes to branch: $BRANCH"
          echo "Pulling latest from branch: $BRANCH"
          git pull --rebase --autostash origin "$BRANCH"
          echo "Pushing changes to branch: $BRANCH"
          git push origin HEAD:"$BRANCH"
          echo "✅ Changes pushed successfully"
          echo "Changes pushed successfully"

          summarize "committed $(git rev-parse --short HEAD)"

  update-node-modules-hash:
  compute-node-modules-hash:
    needs: update-flake
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
    strategy:
@@ -111,11 +113,10 @@ jobs:
    runs-on: ${{ matrix.host }}
    env:
      SYSTEM: ${{ matrix.system }}
      TITLE: node_modules hash (${{ matrix.system }})

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        uses: actions/checkout@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          fetch-depth: 0
@@ -125,6 +126,104 @@ jobs:
      - name: Setup Nix
        uses: nixbuild/nix-quick-install-action@v34

      - name: Compute node_modules hash
        run: |
          set -euo pipefail

          DUMMY="sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
          HASH_FILE="nix/hashes.json"
          OUTPUT_FILE="hash-${SYSTEM}.txt"

          export NIX_KEEP_OUTPUTS=1
          export NIX_KEEP_DERIVATIONS=1

          BUILD_LOG=$(mktemp)
          TMP_JSON=$(mktemp)
          trap 'rm -f "$BUILD_LOG" "$TMP_JSON"' EXIT

          if [ ! -f "$HASH_FILE" ]; then
            mkdir -p "$(dirname "$HASH_FILE")"
            echo '{"nodeModules":{}}' > "$HASH_FILE"
          fi

          # Set dummy hash to force nix to rebuild and reveal correct hash
          jq --arg system "$SYSTEM" --arg value "$DUMMY" \
            '.nodeModules = (.nodeModules // {}) | .nodeModules[$system] = $value' "$HASH_FILE" > "$TMP_JSON"
          mv "$TMP_JSON" "$HASH_FILE"

          MODULES_ATTR=".#packages.${SYSTEM}.default.node_modules"
          DRV_PATH="$(nix eval --raw "${MODULES_ATTR}.drvPath")"

          echo "Building node_modules for ${SYSTEM} to discover correct hash..."
          echo "Attempting to realize derivation: ${DRV_PATH}"
          REALISE_OUT=$(nix-store --realise "$DRV_PATH" --keep-failed 2>&1 | tee "$BUILD_LOG" || true)

          BUILD_PATH=$(echo "$REALISE_OUT" | grep "^/nix/store/" | head -n1 || true)
          CORRECT_HASH=""

          if [ -n "$BUILD_PATH" ] && [ -d "$BUILD_PATH" ]; then
            echo "Realized node_modules output: $BUILD_PATH"
            CORRECT_HASH=$(nix hash path --sri "$BUILD_PATH" 2>/dev/null || true)
          fi

          # Try to extract hash from build log
          if [ -z "$CORRECT_HASH" ]; then
            CORRECT_HASH="$(grep -E 'got:\s+sha256-[A-Za-z0-9+/=]+' "$BUILD_LOG" | awk '{print $2}' | head -n1 || true)"
          fi

          if [ -z "$CORRECT_HASH" ]; then
            CORRECT_HASH="$(grep -A2 'hash mismatch' "$BUILD_LOG" | grep 'got:' | awk '{print $2}' | sed 's/sha256:/sha256-/' || true)"
          fi

          # Try to hash from kept failed build directory
          if [ -z "$CORRECT_HASH" ]; then
            KEPT_DIR=$(grep -oE "build directory.*'[^']+'" "$BUILD_LOG" | grep -oE "'/[^']+'" | tr -d "'" | head -n1 || true)
            if [ -z "$KEPT_DIR" ]; then
              KEPT_DIR=$(grep -oE '/nix/var/nix/builds/[^ ]+' "$BUILD_LOG" | head -n1 || true)
            fi

            if [ -n "$KEPT_DIR" ] && [ -d "$KEPT_DIR" ]; then
              HASH_PATH="$KEPT_DIR"
              [ -d "$KEPT_DIR/build" ] && HASH_PATH="$KEPT_DIR/build"

              if [ -d "$HASH_PATH/node_modules" ]; then
                CORRECT_HASH=$(nix hash path --sri "$HASH_PATH" 2>/dev/null || true)
              fi
            fi
          fi

          if [ -z "$CORRECT_HASH" ]; then
            echo "Failed to determine correct node_modules hash for ${SYSTEM}."
            cat "$BUILD_LOG"
            exit 1
          fi

          echo "$CORRECT_HASH" > "$OUTPUT_FILE"
          echo "Hash for ${SYSTEM}: $CORRECT_HASH"

      - name: Upload hash artifact
        uses: actions/upload-artifact@v6
        with:
          name: hash-${{ matrix.system }}
          path: hash-${{ matrix.system }}.txt
          retention-days: 1

  commit-node-modules-hashes:
    needs: compute-node-modules-hash
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
    runs-on: blacksmith-4vcpu-ubuntu-2404
    env:
      TITLE: node_modules hashes

    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          fetch-depth: 0
          ref: ${{ github.head_ref || github.ref_name }}
          repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}

      - name: Configure git
        run: |
          git config --global user.email "action@github.com"
@@ -135,14 +234,57 @@ jobs:
          TARGET_BRANCH: ${{ github.head_ref || github.ref_name }}
        run: |
          BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
          git pull origin "$BRANCH"
          git pull --rebase --autostash origin "$BRANCH"

      - name: Update ${{ env.TITLE }}
      - name: Download all hash artifacts
        uses: actions/download-artifact@v7
        with:
          pattern: hash-*
          merge-multiple: true

      - name: Merge hashes into hashes.json
        run: |
          set -euo pipefail
          echo "🔄 Updating $TITLE..."
          nix/scripts/update-hashes.sh
          echo "✅ $TITLE updated successfully"

          HASH_FILE="nix/hashes.json"

          if [ ! -f "$HASH_FILE" ]; then
            mkdir -p "$(dirname "$HASH_FILE")"
            echo '{"nodeModules":{}}' > "$HASH_FILE"
          fi

          echo "Merging hashes into ${HASH_FILE}..."

          shopt -s nullglob
          files=(hash-*.txt)
          if [ ${#files[@]} -eq 0 ]; then
            echo "No hash files found, nothing to update"
            exit 0
          fi

          EXPECTED_SYSTEMS="x86_64-linux aarch64-linux x86_64-darwin aarch64-darwin"
          for sys in $EXPECTED_SYSTEMS; do
            if [ ! -f "hash-${sys}.txt" ]; then
              echo "WARNING: Missing hash file for $sys"
            fi
          done

          for f in "${files[@]}"; do
            system="${f#hash-}"
            system="${system%.txt}"
            hash=$(cat "$f")
            if [ -z "$hash" ]; then
              echo "WARNING: Empty hash for $system, skipping"
              continue
            fi
            echo " $system: $hash"
            jq --arg sys "$system" --arg h "$hash" \
              '.nodeModules = (.nodeModules // {}) | .nodeModules[$sys] = $h' "$HASH_FILE" > "${HASH_FILE}.tmp"
            mv "${HASH_FILE}.tmp" "$HASH_FILE"
          done

          echo "All hashes merged:"
          cat "$HASH_FILE"

      - name: Commit ${{ env.TITLE }} changes
        env:
@@ -150,7 +292,8 @@ jobs:
        run: |
          set -euo pipefail

          echo "🔍 Checking for changes in tracked files..."
          HASH_FILE="nix/hashes.json"
          echo "Checking for changes..."

          summarize() {
            local status="$1"
@@ -166,27 +309,22 @@ jobs:
            echo "" >> "$GITHUB_STEP_SUMMARY"
          }

          FILES=(nix/hashes.json)
          FILES=("$HASH_FILE")
          STATUS="$(git status --short -- "${FILES[@]}" || true)"
          if [ -z "$STATUS" ]; then
            echo "✅ No changes detected."
            echo "No changes detected."
            summarize "no changes"
            exit 0
          fi

          echo "📝 Changes detected:"
          echo "Changes detected:"
          echo "$STATUS"
          echo "🔗 Staging files..."
          git add "${FILES[@]}"
          echo "💾 Committing changes..."
          git commit -m "Update $TITLE"
          echo "✅ Changes committed"

          BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
          echo "🌳 Pulling latest from branch: $BRANCH"
          git pull --rebase origin "$BRANCH"
          echo "🚀 Pushing changes to branch: $BRANCH"
          git pull --rebase --autostash origin "$BRANCH"
          git push origin HEAD:"$BRANCH"
          echo "✅ Changes pushed successfully"
          echo "Changes pushed successfully"

          summarize "committed $(git rev-parse --short HEAD)"
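The compute step above relies on a standard trick for Nix fixed-output derivations: write a deliberately wrong placeholder hash, let the build fail, and recover the real hash from the mismatch report or the realized output. A minimal sketch of that loop, assuming the same nix/hashes.json layout and flake attribute the workflow uses (a condensed illustration, not a drop-in replacement for the step):

#!/usr/bin/env bash
# Sketch of the hash-discovery loop used by the "Compute node_modules hash" step above.
set -euo pipefail

SYSTEM="${SYSTEM:-x86_64-linux}"
DUMMY="sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
HASH_FILE="nix/hashes.json"

# 1. Plant a known-wrong hash so the fixed-output node_modules build cannot succeed.
jq --arg s "$SYSTEM" --arg v "$DUMMY" \
  '.nodeModules = (.nodeModules // {}) | .nodeModules[$s] = $v' "$HASH_FILE" > "${HASH_FILE}.tmp"
mv "${HASH_FILE}.tmp" "$HASH_FILE"

# 2. Realise the derivation and keep the log; the command is expected to fail.
drv="$(nix eval --raw ".#packages.${SYSTEM}.default.node_modules.drvPath")"
log="$(nix-store --realise "$drv" 2>&1 || true)"

# 3. Pull the hash Nix actually computed out of the "got: sha256-..." mismatch line.
correct="$(grep -E 'got:[[:space:]]+sha256-[A-Za-z0-9+/=]+' <<<"$log" | awk '{print $2}' | head -n1)"
echo "node_modules hash for ${SYSTEM}: ${correct}"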
.gitignore (vendored, 1 line changed)

@@ -20,6 +20,7 @@ opencode.json
a.out
target
.scripts
.direnv/

# Local dev files
opencode-dev
STATS.md (1 line changed)

@@ -202,3 +202,4 @@
| 2026-01-13 | 3,297,078 (+243,484) | 1,595,062 (+41,391) | 4,892,140 (+284,875) |
| 2026-01-14 | 3,568,928 (+271,850) | 1,645,362 (+50,300) | 5,214,290 (+322,150) |
| 2026-01-16 | 4,121,550 (+552,622) | 1,754,418 (+109,056) | 5,875,968 (+661,678) |
| 2026-01-17 | 4,389,558 (+268,008) | 1,805,315 (+50,897) | 6,194,873 (+318,905) |
bun.lock (51 lines changed)
@@ -22,7 +22,7 @@
|
||||
},
|
||||
"packages/app": {
|
||||
"name": "@opencode-ai/app",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
@@ -70,7 +70,7 @@
|
||||
},
|
||||
"packages/console/app": {
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@cloudflare/vite-plugin": "1.15.2",
|
||||
"@ibm/plex": "6.4.1",
|
||||
@@ -104,7 +104,7 @@
|
||||
},
|
||||
"packages/console/core": {
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-sts": "3.782.0",
|
||||
"@jsx-email/render": "1.1.1",
|
||||
@@ -131,7 +131,7 @@
|
||||
},
|
||||
"packages/console/function": {
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@ai-sdk/anthropic": "2.0.0",
|
||||
"@ai-sdk/openai": "2.0.2",
|
||||
@@ -155,7 +155,7 @@
|
||||
},
|
||||
"packages/console/mail": {
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
@@ -179,7 +179,7 @@
|
||||
},
|
||||
"packages/desktop": {
|
||||
"name": "@opencode-ai/desktop",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@opencode-ai/app": "workspace:*",
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
@@ -208,7 +208,7 @@
|
||||
},
|
||||
"packages/enterprise": {
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
"@opencode-ai/util": "workspace:*",
|
||||
@@ -237,7 +237,7 @@
|
||||
},
|
||||
"packages/function": {
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@octokit/auth-app": "8.0.1",
|
||||
"@octokit/rest": "catalog:",
|
||||
@@ -253,7 +253,7 @@
|
||||
},
|
||||
"packages/opencode": {
|
||||
"name": "opencode",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"bin": {
|
||||
"opencode": "./bin/opencode",
|
||||
},
|
||||
@@ -293,8 +293,8 @@
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@opencode-ai/util": "workspace:*",
|
||||
"@openrouter/ai-sdk-provider": "1.5.2",
|
||||
"@opentui/core": "0.1.73",
|
||||
"@opentui/solid": "0.1.73",
|
||||
"@opentui/core": "0.1.74",
|
||||
"@opentui/solid": "0.1.74",
|
||||
"@parcel/watcher": "2.5.1",
|
||||
"@pierre/diffs": "catalog:",
|
||||
"@solid-primitives/event-bus": "1.1.2",
|
||||
@@ -357,7 +357,7 @@
|
||||
},
|
||||
"packages/plugin": {
|
||||
"name": "@opencode-ai/plugin",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"zod": "catalog:",
|
||||
@@ -377,7 +377,7 @@
|
||||
},
|
||||
"packages/sdk/js": {
|
||||
"name": "@opencode-ai/sdk",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"devDependencies": {
|
||||
"@hey-api/openapi-ts": "0.90.4",
|
||||
"@tsconfig/node22": "catalog:",
|
||||
@@ -388,7 +388,7 @@
|
||||
},
|
||||
"packages/slack": {
|
||||
"name": "@opencode-ai/slack",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@slack/bolt": "^3.17.1",
|
||||
@@ -401,7 +401,7 @@
|
||||
},
|
||||
"packages/ui": {
|
||||
"name": "@opencode-ai/ui",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
@@ -424,6 +424,7 @@
|
||||
"shiki": "catalog:",
|
||||
"solid-js": "catalog:",
|
||||
"solid-list": "catalog:",
|
||||
"strip-ansi": "7.1.2",
|
||||
"virtua": "catalog:",
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -441,7 +442,7 @@
|
||||
},
|
||||
"packages/util": {
|
||||
"name": "@opencode-ai/util",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"zod": "catalog:",
|
||||
},
|
||||
@@ -452,7 +453,7 @@
|
||||
},
|
||||
"packages/web": {
|
||||
"name": "@opencode-ai/web",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@astrojs/cloudflare": "12.6.3",
|
||||
"@astrojs/markdown-remark": "6.3.1",
|
||||
@@ -1218,21 +1219,21 @@
|
||||
|
||||
"@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="],
|
||||
|
||||
"@opentui/core": ["@opentui/core@0.1.73", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.73", "@opentui/core-darwin-x64": "0.1.73", "@opentui/core-linux-arm64": "0.1.73", "@opentui/core-linux-x64": "0.1.73", "@opentui/core-win32-arm64": "0.1.73", "@opentui/core-win32-x64": "0.1.73", "bun-webgpu": "0.1.4", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-1OqLlArzUh3QjrYXGro5WKNgoCcacGJaaFvwOHg5lAOoSigFQRiqEUEEJLbSo3pyV8u7XEdC3M0rOP6K+oThzw=="],
|
||||
"@opentui/core": ["@opentui/core@0.1.74", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.74", "@opentui/core-darwin-x64": "0.1.74", "@opentui/core-linux-arm64": "0.1.74", "@opentui/core-linux-x64": "0.1.74", "@opentui/core-win32-arm64": "0.1.74", "@opentui/core-win32-x64": "0.1.74", "bun-webgpu": "0.1.4", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-g4W16ymv12JdgZ+9B4t7mpIICvzWy2+eHERfmDf80ALduOQCUedKQdULcBFhVCYUXIkDRtIy6CID5thMAah3FA=="],
|
||||
|
||||
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.73", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Xnc8S6kGIVcdwqqTq6jk50UVe1QtOXp+B0v4iH85iNW1Ljf198OoA7RcVA+edFb6o01PVwnhIIPtpkB/A4710w=="],
|
||||
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.74", "", { "os": "darwin", "cpu": "arm64" }, "sha512-rfmlDLtm/u17CnuhJgCxPeYMvOST+A2MOdVOk46IurtHO849bdYqK6iudKNlFRs1FOrymgSKF9GlWBHAOKeRjg=="],
|
||||
|
||||
"@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.73", "", { "os": "darwin", "cpu": "x64" }, "sha512-RlgxQxu+kxsCZzeXRnpYrqbrpxbG8M/lnDf4sTPWmhXUiuDvY5BdB4YiBY5bv8eNdJ1j9HiMLtx6ZxElEviidA=="],
|
||||
"@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.74", "", { "os": "darwin", "cpu": "x64" }, "sha512-WAD8orsDV0ZdW/5GwjOOB4FY96772xbkz+rcV7WRzEFUVaqoBaC04IuqYzS9d5s+cjkbT5Cpj47hrVYkkVQKng=="],
|
||||
|
||||
"@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.73", "", { "os": "linux", "cpu": "arm64" }, "sha512-9I88BdZMB3qtDPtDzFTg1EEt6sAGFSpOEmIIMB3MhqZqoq9+WSEyJZxM0/kff5vt4RJnqG7vz4fKMVRwNrUPGA=="],
|
||||
"@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.74", "", { "os": "linux", "cpu": "arm64" }, "sha512-lgmHzrzLy4e+rgBS+lhtsMLLgIMLbtLNMm6EzVPyYVDlLDGjM7+ulXMem7AtpaRrWrUUl4REiG9BoQUsCFDwYA=="],
|
||||
|
||||
"@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.73", "", { "os": "linux", "cpu": "x64" }, "sha512-50cGZkCh/i3nzijsjUnkmtWJtnJ6l9WpdIwSJsO2Id7nZdzupT1b6AkgGZdOgNl23MHXpAitmb+MhEAjAimCRA=="],
|
||||
"@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.74", "", { "os": "linux", "cpu": "x64" }, "sha512-8Mn2WbdBQ29xCThuPZezjDhd1N3+fXwKkGvCBOdTI0le6h2A/vCNbfUVjwfr/EGZSRXxCG+Yapol34BAULGpOA=="],
|
||||
|
||||
"@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.73", "", { "os": "win32", "cpu": "arm64" }, "sha512-mFiEeoiim5cmi6qu8CDfeecl9ivuMilfby/GnqTsr9G8e52qfT6nWF2m9Nevh9ebhXK+D/VnVhJIbObc0WIchA=="],
|
||||
"@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.74", "", { "os": "win32", "cpu": "arm64" }, "sha512-dvYUXz03avnI6ZluyLp00HPmR0UT/IE/6QS97XBsgJlUTtpnbKkBtB5jD1NHwWkElaRj1Qv2QP36ngFoJqbl9g=="],
|
||||
|
||||
"@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.73", "", { "os": "win32", "cpu": "x64" }, "sha512-vzWHUi2vgwImuyxl+hlmK0aeCbnwozeuicIcHJE0orPOwp2PAKyR9WO330szAvfIO5ZPbNkjWfh6xIYnASM0lQ=="],
|
||||
"@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.74", "", { "os": "win32", "cpu": "x64" }, "sha512-3wfWXaAKOIlDQz6ZZIESf2M+YGZ7uFHijjTEM8w/STRlLw8Y6+QyGYi1myHSM4d6RSO+/s2EMDxvjDf899W9vQ=="],
|
||||
|
||||
"@opentui/solid": ["@opentui/solid@0.1.73", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.73", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.9", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.9" } }, "sha512-FBSTiuWl+hHqFxmrJfC93cbJ0PJ4QoFbvRFuD6Gzrea5rH+G7BidjyI8YZuCcNnriDuIYaXTJdvBqe15lgKR1A=="],
|
||||
"@opentui/solid": ["@opentui/solid@0.1.74", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.74", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.9", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.9" } }, "sha512-Vz82cI8T9YeJjGsVg4ULp6ral4N+xyt1j9A6Tbu3aaQgEKiB74LW03EXREehfjPr1irOFxtKfWPbx5NKH0Upag=="],
|
||||
|
||||
"@oslojs/asn1": ["@oslojs/asn1@1.0.0", "", { "dependencies": { "@oslojs/binary": "1.0.0" } }, "sha512-zw/wn0sj0j0QKbIXfIlnEcTviaCzYOY3V5rAyjR6YtOByFtJiT574+8p9Wlach0lZH9fddD4yb9laEAIl4vXQA=="],
|
||||
|
||||
|
||||
flake.lock (generated, 6 lines changed)

@@ -2,11 +2,11 @@
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1768395095,
        "narHash": "sha256-ZhuYJbwbZT32QA95tSkXd9zXHcdZj90EzHpEXBMabaw=",
        "lastModified": 1768456270,
        "narHash": "sha256-NgaL2CCiUR6nsqUIY4yxkzz07iQUlUCany44CFv+OxY=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "13868c071cc73a5e9f610c47d7bb08e5da64fdd5",
        "rev": "f4606b01b39e09065df37905a2133905246db9ed",
        "type": "github"
      },
      "original": {
flake.nix (28 lines changed)

@@ -7,6 +7,7 @@

  outputs =
    {
      self,
      nixpkgs,
      ...
    }:
@@ -107,33 +108,10 @@
        };
      in
      {
        default = opencodePkg;
        default = self.packages.${system}.opencode;
        opencode = opencodePkg;
        desktop = desktopPkg;
      }
    );

    apps = forEachSystem (
      system:
      let
        pkgs = pkgsFor system;
      in
      {
        opencode-dev = {
          type = "app";
          meta = {
            description = "Nix devshell shell for OpenCode";
            runtimeInputs = [ pkgs.bun ];
          };
          program = "${
            pkgs.writeShellApplication {
              name = "opencode-dev";
              text = ''
                exec bun run dev "$@"
              '';
            }
          }/bin/opencode-dev";
        };
      }
    );
  };
}
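After this change, `default` simply aliases the `opencode` package through `self`, and the separate `opencode-dev` app entry goes away. For orientation, the remaining flake outputs would typically be built like this from a local checkout (illustrative commands assuming standard flakes-enabled Nix; they are not taken from the repository docs):

# Build the CLI package; `.#default` now resolves to the same derivation.
nix build .#opencode

# Build the Tauri desktop package defined alongside it.
nix build .#desktop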
@@ -15,6 +15,8 @@
  cargo,
  rustc,
  makeBinaryWrapper,
  copyDesktopItems,
  makeDesktopItem,
  nodejs,
  jq,
}:
@@ -57,12 +59,28 @@ rustPlatform.buildRustPackage rec {
    pkg-config
    bun
    makeBinaryWrapper
    copyDesktopItems
    cargo
    rustc
    nodejs
    jq
  ];

  # based on packages/desktop/src-tauri/release/appstream.metainfo.xml
  desktopItems = lib.optionals stdenv.isLinux [
    (makeDesktopItem {
      name = "ai.opencode.opencode";
      desktopName = "OpenCode";
      comment = "Open source AI coding agent";
      exec = "opencode-desktop";
      icon = "opencode";
      terminal = false;
      type = "Application";
      categories = [ "Development" "IDE" ];
      startupWMClass = "opencode";
    })
  ];

  buildInputs = [
    openssl
  ]
@@ -121,6 +139,10 @@ rustPlatform.buildRustPackage rec {
  # It looks for them in the location specified in tauri.conf.json.

  postInstall = lib.optionalString stdenv.isLinux ''
    # Install icon
    mkdir -p $out/share/icons/hicolor/128x128/apps
    cp ../../../packages/desktop/src-tauri/icons/prod/128x128.png $out/share/icons/hicolor/128x128/apps/opencode.png

    # Wrap the binary to ensure it finds the libraries
    wrapProgram $out/bin/opencode-desktop \
      --prefix LD_LIBRARY_PATH : ${
@@ -1,8 +1,8 @@
{
  "nodeModules": {
    "x86_64-linux": "sha256-07XxcHLuToM4QfWVyaPLACxjPZ93ZM7gtpX2o08Lp18=",
    "aarch64-linux": "sha256-E6lyYFApS1cw3jE7ISx5QZxDDJ9V3HU0ICYFdY+aIBw=",
    "aarch64-darwin": "sha256-U2UvE70nM0OI0VhIku8qnX+ptPbA+Q/y1BGXbFMcyt4=",
    "x86_64-darwin": "sha256-grPR/YBqYPEUBks4nQKYe6/9f+9N0Fk9l2L9J6ylWkc="
    "x86_64-linux": "sha256-4zchRpxzvHnPMcwumgL9yaX0deIXS5IGPp131eYsSvg=",
    "aarch64-linux": "sha256-3/BSRsl5pI0Iz3qAFZxIkOehFLZ2Ox9UsbdDHYzqlVg=",
    "aarch64-darwin": "sha256-86d/G1q6xiHSSlm+/irXoKLb/yLQbV348uuSrBV70+Q=",
    "x86_64-darwin": "sha256-WYaP44PWRGtoG1DIuUJUH4DvuaCuFhlJZ9fPzGsiIfE="
  }
}
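Both the new workflow step above and the helper script removed below update this file with a single jq assignment, where the `.nodeModules // {}` guard keeps the write working even when the key is missing. A hypothetical invocation with a placeholder hash (not a real value):

# Writes or overwrites one per-system entry in nix/hashes.json; the hash here is a placeholder.
jq --arg sys "x86_64-linux" --arg h "sha256-EXAMPLEEXAMPLEEXAMPLEEXAMPLEEXAMPLEEXAMPLEE=" \
  '.nodeModules = (.nodeModules // {}) | .nodeModules[$sys] = $h' nix/hashes.json > nix/hashes.json.tmp \
  && mv nix/hashes.json.tmp nix/hashes.json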
@@ -1,119 +0,0 @@
#!/usr/bin/env bash

set -euo pipefail

DUMMY="sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
SYSTEM=${SYSTEM:-x86_64-linux}
DEFAULT_HASH_FILE=${MODULES_HASH_FILE:-nix/hashes.json}
HASH_FILE=${HASH_FILE:-$DEFAULT_HASH_FILE}

if [ ! -f "$HASH_FILE" ]; then
  cat >"$HASH_FILE" <<EOF
{
  "nodeModules": {}
}
EOF
fi

if git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
  if ! git ls-files --error-unmatch "$HASH_FILE" >/dev/null 2>&1; then
    git add -N "$HASH_FILE" >/dev/null 2>&1 || true
  fi
fi

export DUMMY
export NIX_KEEP_OUTPUTS=1
export NIX_KEEP_DERIVATIONS=1

cleanup() {
  rm -f "${JSON_OUTPUT:-}" "${BUILD_LOG:-}" "${TMP_EXPR:-}"
}

trap cleanup EXIT

write_node_modules_hash() {
  local value="$1"
  local system="${2:-$SYSTEM}"
  local temp
  temp=$(mktemp)

  if jq -e '.nodeModules | type == "object"' "$HASH_FILE" >/dev/null 2>&1; then
    jq --arg system "$system" --arg value "$value" '.nodeModules[$system] = $value' "$HASH_FILE" >"$temp"
  else
    jq --arg system "$system" --arg value "$value" '.nodeModules = {($system): $value}' "$HASH_FILE" >"$temp"
  fi

  mv "$temp" "$HASH_FILE"
}

TARGET="packages.${SYSTEM}.default"
MODULES_ATTR=".#packages.${SYSTEM}.default.node_modules"
CORRECT_HASH=""

DRV_PATH="$(nix eval --raw "${MODULES_ATTR}.drvPath")"

echo "Setting dummy node_modules outputHash for ${SYSTEM}..."
write_node_modules_hash "$DUMMY"

BUILD_LOG=$(mktemp)
JSON_OUTPUT=$(mktemp)

echo "Building node_modules for ${SYSTEM} to discover correct outputHash..."
echo "Attempting to realize derivation: ${DRV_PATH}"
REALISE_OUT=$(nix-store --realise "$DRV_PATH" --keep-failed 2>&1 | tee "$BUILD_LOG" || true)

BUILD_PATH=$(echo "$REALISE_OUT" | grep "^/nix/store/" | head -n1 || true)
if [ -n "$BUILD_PATH" ] && [ -d "$BUILD_PATH" ]; then
  echo "Realized node_modules output: $BUILD_PATH"
  CORRECT_HASH=$(nix hash path --sri "$BUILD_PATH" 2>/dev/null || true)
fi

if [ -z "$CORRECT_HASH" ]; then
  CORRECT_HASH="$(grep -E 'got:\s+sha256-[A-Za-z0-9+/=]+' "$BUILD_LOG" | awk '{print $2}' | head -n1 || true)"

  if [ -z "$CORRECT_HASH" ]; then
    CORRECT_HASH="$(grep -A2 'hash mismatch' "$BUILD_LOG" | grep 'got:' | awk '{print $2}' | sed 's/sha256:/sha256-/' || true)"
  fi

  if [ -z "$CORRECT_HASH" ]; then
    echo "Searching for kept failed build directory..."
    KEPT_DIR=$(grep -oE "build directory.*'[^']+'" "$BUILD_LOG" | grep -oE "'/[^']+'" | tr -d "'" | head -n1)

    if [ -z "$KEPT_DIR" ]; then
      KEPT_DIR=$(grep -oE '/nix/var/nix/builds/[^ ]+' "$BUILD_LOG" | head -n1)
    fi

    if [ -n "$KEPT_DIR" ] && [ -d "$KEPT_DIR" ]; then
      echo "Found kept build directory: $KEPT_DIR"
      if [ -d "$KEPT_DIR/build" ]; then
        HASH_PATH="$KEPT_DIR/build"
      else
        HASH_PATH="$KEPT_DIR"
      fi

      echo "Attempting to hash: $HASH_PATH"
      ls -la "$HASH_PATH" || true

      if [ -d "$HASH_PATH/node_modules" ]; then
        CORRECT_HASH=$(nix hash path --sri "$HASH_PATH" 2>/dev/null || true)
        echo "Computed hash from kept build: $CORRECT_HASH"
      fi
    fi
  fi
fi

if [ -z "$CORRECT_HASH" ]; then
  echo "Failed to determine correct node_modules hash for ${SYSTEM}."
  echo "Build log:"
  cat "$BUILD_LOG"
  exit 1
fi

write_node_modules_hash "$CORRECT_HASH"

jq -e --arg system "$SYSTEM" --arg hash "$CORRECT_HASH" '.nodeModules[$system] == $hash' "$HASH_FILE" >/dev/null

echo "node_modules hash updated for ${SYSTEM}: $CORRECT_HASH"

rm -f "$BUILD_LOG"
unset BUILD_LOG
@@ -1,6 +1,6 @@
{
  "name": "@opencode-ai/app",
  "version": "1.1.23",
  "version": "1.1.25",
  "description": "",
  "type": "module",
  "exports": {
@@ -1,6 +1,7 @@
|
||||
import { useDialog } from "@opencode-ai/ui/context/dialog"
|
||||
import { Dialog } from "@opencode-ai/ui/dialog"
|
||||
import { FileIcon } from "@opencode-ai/ui/file-icon"
|
||||
import { Keybind } from "@opencode-ai/ui/keybind"
|
||||
import { List } from "@opencode-ai/ui/list"
|
||||
import { getDirectory, getFilename } from "@opencode-ai/util/path"
|
||||
import { useParams } from "@solidjs/router"
|
||||
@@ -133,14 +134,14 @@ export function DialogSelectFile() {
|
||||
})
|
||||
|
||||
return (
|
||||
<Dialog title="Search">
|
||||
<Dialog class="pt-3 pb-0 !max-h-[480px]">
|
||||
<List
|
||||
search={{ placeholder: "Search files and commands", autofocus: true }}
|
||||
search={{ placeholder: "Search files and commands", autofocus: true, hideIcon: true, class: "pl-3 pr-2 !mb-0" }}
|
||||
emptyMessage="No results found"
|
||||
items={items}
|
||||
key={(item) => item.id}
|
||||
filterKeys={["title", "description", "category"]}
|
||||
groupBy={(item) => (grouped() ? item.category : "")}
|
||||
groupBy={(item) => item.category}
|
||||
onMove={handleMove}
|
||||
onSelect={handleSelect}
|
||||
>
|
||||
@@ -161,7 +162,7 @@ export function DialogSelectFile() {
|
||||
</div>
|
||||
}
|
||||
>
|
||||
<div class="w-full flex items-center justify-between gap-4">
|
||||
<div class="w-full flex items-center justify-between gap-4 pl-1">
|
||||
<div class="flex items-center gap-2 min-w-0">
|
||||
<span class="text-14-regular text-text-strong whitespace-nowrap">{item.title}</span>
|
||||
<Show when={item.description}>
|
||||
@@ -169,7 +170,7 @@ export function DialogSelectFile() {
|
||||
</Show>
|
||||
</div>
|
||||
<Show when={item.keybind}>
|
||||
<span class="text-12-regular text-text-subtle shrink-0">{formatKeybind(item.keybind ?? "")}</span>
|
||||
<Keybind class="rounded-[4px]">{formatKeybind(item.keybind ?? "")}</Keybind>
|
||||
</Show>
|
||||
</div>
|
||||
</Show>
|
||||
|
||||
@@ -16,6 +16,7 @@ import { Button } from "@opencode-ai/ui/button"
|
||||
import { Tooltip, TooltipKeybind } from "@opencode-ai/ui/tooltip"
|
||||
import { Popover } from "@opencode-ai/ui/popover"
|
||||
import { TextField } from "@opencode-ai/ui/text-field"
|
||||
import { Keybind } from "@opencode-ai/ui/keybind"
|
||||
|
||||
export function SessionHeader() {
|
||||
const globalSDK = useGlobalSDK()
|
||||
@@ -59,21 +60,12 @@ export function SessionHeader() {
|
||||
>
|
||||
<div class="flex items-center gap-2">
|
||||
<Icon name="magnifying-glass" size="normal" class="icon-base" />
|
||||
<span class="flex-1 min-w-0 text-14-regular text-text-weak truncate" style={{ "line-height": 1 }}>
|
||||
<span class="flex-1 min-w-0 text-14-regular text-text-weak truncate h-3.5 flex items-center overflow-visible">
|
||||
Search {name()}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<Show when={hotkey()}>
|
||||
{(keybind) => (
|
||||
<span
|
||||
class="shrink-0 flex items-center justify-center h-5 px-2 rounded-[2px] bg-surface-base text-12-medium text-text-weak"
|
||||
style={{ "box-shadow": "var(--shadow-xxs-border)" }}
|
||||
>
|
||||
{keybind()}
|
||||
</span>
|
||||
)}
|
||||
</Show>
|
||||
<Show when={hotkey()}>{(keybind) => <Keybind>{keybind()}</Keybind>}</Show>
|
||||
</button>
|
||||
</Portal>
|
||||
)}
|
||||
|
||||
@@ -14,8 +14,8 @@ export function SortableTerminalTab(props: { terminal: LocalPTY }): JSX.Element
|
||||
<Tabs.Trigger
|
||||
value={props.terminal.id}
|
||||
closeButton={
|
||||
terminal.tabs().length > 1 && (
|
||||
<IconButton icon="close" variant="ghost" onClick={() => terminal.closeTab(props.terminal.tabId)} />
|
||||
terminal.all().length > 1 && (
|
||||
<IconButton icon="close" variant="ghost" onClick={() => terminal.close(props.terminal.id)} />
|
||||
)
|
||||
}
|
||||
>
|
||||
|
||||
@@ -1,322 +0,0 @@
|
||||
import { For, Show, createMemo, createSignal, onCleanup } from "solid-js"
|
||||
import { Terminal } from "./terminal"
|
||||
import { useTerminal, type Panel } from "@/context/terminal"
|
||||
import { IconButton } from "@opencode-ai/ui/icon-button"
|
||||
|
||||
export interface TerminalSplitProps {
|
||||
tabId: string
|
||||
}
|
||||
|
||||
function computeLayout(
|
||||
panels: Record<string, Panel>,
|
||||
panelId: string,
|
||||
bounds: { top: number; left: number; width: number; height: number },
|
||||
): Map<string, { top: number; left: number; width: number; height: number }> {
|
||||
const result = new Map<string, { top: number; left: number; width: number; height: number }>()
|
||||
const panel = panels[panelId]
|
||||
if (!panel) return result
|
||||
|
||||
if (panel.ptyId) {
|
||||
result.set(panel.ptyId, bounds)
|
||||
} else if (panel.children && panel.children.length === 2) {
|
||||
const [leftId, rightId] = panel.children
|
||||
const sizes = panel.sizes ?? [50, 50]
|
||||
|
||||
if (panel.direction === "horizontal") {
|
||||
const topHeight = (bounds.height * sizes[0]) / 100
|
||||
const topBounds = { ...bounds, height: topHeight }
|
||||
const bottomBounds = { ...bounds, top: bounds.top + topHeight, height: bounds.height - topHeight }
|
||||
for (const [k, v] of computeLayout(panels, leftId, topBounds)) result.set(k, v)
|
||||
for (const [k, v] of computeLayout(panels, rightId, bottomBounds)) result.set(k, v)
|
||||
} else {
|
||||
const leftWidth = (bounds.width * sizes[0]) / 100
|
||||
const leftBounds = { ...bounds, width: leftWidth }
|
||||
const rightBounds = { ...bounds, left: bounds.left + leftWidth, width: bounds.width - leftWidth }
|
||||
for (const [k, v] of computeLayout(panels, leftId, leftBounds)) result.set(k, v)
|
||||
for (const [k, v] of computeLayout(panels, rightId, rightBounds)) result.set(k, v)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
function findPanelForPty(panels: Record<string, Panel>, ptyId: string): string | undefined {
|
||||
for (const [id, panel] of Object.entries(panels)) {
|
||||
if (panel.ptyId === ptyId) return id
|
||||
}
|
||||
}
|
||||
|
||||
export function TerminalSplit(props: TerminalSplitProps) {
|
||||
const terminal = useTerminal()
|
||||
const pane = createMemo(() => terminal.pane(props.tabId))
|
||||
const terminals = createMemo(() => terminal.all().filter((t) => t.tabId === props.tabId))
|
||||
const [containerFocused, setContainerFocused] = createSignal(true)
|
||||
|
||||
const layout = createMemo(() => {
|
||||
const p = pane()
|
||||
if (!p) {
|
||||
const single = terminals()[0]
|
||||
if (!single) return new Map()
|
||||
return new Map([[single.id, { top: 0, left: 0, width: 100, height: 100 }]])
|
||||
}
|
||||
return computeLayout(p.panels, p.root, { top: 0, left: 0, width: 100, height: 100 })
|
||||
})
|
||||
|
||||
const focused = createMemo(() => {
|
||||
const p = pane()
|
||||
if (!p) return props.tabId
|
||||
const focusedPanel = p.panels[p.focused ?? ""]
|
||||
return focusedPanel?.ptyId ?? props.tabId
|
||||
})
|
||||
|
||||
const handleFocus = (ptyId: string) => {
|
||||
const p = pane()
|
||||
if (!p) return
|
||||
const panelId = findPanelForPty(p.panels, ptyId)
|
||||
if (panelId) terminal.focus(props.tabId, panelId)
|
||||
}
|
||||
|
||||
const handleClose = (ptyId: string) => {
|
||||
const pty = terminal.all().find((t) => t.id === ptyId)
|
||||
if (!pty) return
|
||||
|
||||
const p = pane()
|
||||
if (!p) {
|
||||
if (pty.tabId === props.tabId) {
|
||||
terminal.closeTab(props.tabId)
|
||||
}
|
||||
return
|
||||
}
|
||||
const panelId = findPanelForPty(p.panels, ptyId)
|
||||
if (panelId) terminal.closeSplit(props.tabId, panelId)
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
class="relative size-full"
|
||||
data-terminal-split-container
|
||||
onFocusIn={() => setContainerFocused(true)}
|
||||
onFocusOut={(e) => {
|
||||
const related = e.relatedTarget as Node | null
|
||||
if (!related || !e.currentTarget.contains(related)) {
|
||||
setContainerFocused(false)
|
||||
}
|
||||
}}
|
||||
>
|
||||
<For each={terminals()}>
|
||||
{(pty) => {
|
||||
const bounds = createMemo(() => layout().get(pty.id) ?? { top: 0, left: 0, width: 100, height: 100 })
|
||||
const isFocused = createMemo(() => focused() === pty.id)
|
||||
const hasSplits = createMemo(() => !!pane())
|
||||
|
||||
return (
|
||||
<div
|
||||
class="absolute flex flex-col min-h-0"
|
||||
classList={{
|
||||
"ring-1 ring-inset ring-border-strong-base": containerFocused() && isFocused(),
|
||||
"border-l border-border-weak-base": bounds().left > 0,
|
||||
"border-t border-border-weak-base": bounds().top > 0,
|
||||
}}
|
||||
style={{
|
||||
top: `${bounds().top}%`,
|
||||
left: `${bounds().left}%`,
|
||||
width: `${bounds().width}%`,
|
||||
height: `${bounds().height}%`,
|
||||
}}
|
||||
onClick={() => handleFocus(pty.id)}
|
||||
>
|
||||
<Show when={pane()}>
|
||||
<div class="absolute top-1 right-1 z-10 opacity-0 hover:opacity-100 transition-opacity">
|
||||
<IconButton
|
||||
icon="close"
|
||||
variant="ghost"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
handleClose(pty.id)
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</Show>
|
||||
<div
|
||||
class="flex-1 min-h-0"
|
||||
classList={{ "opacity-50": !containerFocused() || (hasSplits() && !isFocused()) }}
|
||||
>
|
||||
<Terminal
|
||||
pty={pty}
|
||||
focused={isFocused()}
|
||||
onCleanup={terminal.update}
|
||||
onConnectError={() => terminal.clone(pty.id)}
|
||||
onExit={() => handleClose(pty.id)}
|
||||
class="size-full"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}}
|
||||
</For>
|
||||
<ResizeHandles tabId={props.tabId} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
function ResizeHandles(props: { tabId: string }) {
|
||||
const terminal = useTerminal()
|
||||
const pane = createMemo(() => terminal.pane(props.tabId))
|
||||
|
||||
const splits = createMemo(() => {
|
||||
const p = pane()
|
||||
if (!p) return []
|
||||
return Object.values(p.panels).filter((panel) => panel.children && panel.children.length === 2)
|
||||
})
|
||||
|
||||
return <For each={splits()}>{(panel) => <ResizeHandle tabId={props.tabId} panelId={panel.id} />}</For>
|
||||
}
|
||||
|
||||
function ResizeHandle(props: { tabId: string; panelId: string }) {
|
||||
const terminal = useTerminal()
|
||||
const pane = createMemo(() => terminal.pane(props.tabId))
|
||||
const panel = createMemo(() => pane()?.panels[props.panelId])
|
||||
|
||||
let cleanup: VoidFunction | undefined
|
||||
|
||||
onCleanup(() => cleanup?.())
|
||||
|
||||
const position = createMemo(() => {
|
||||
const p = pane()
|
||||
if (!p) return null
|
||||
const pan = panel()
|
||||
if (!pan?.children || pan.children.length !== 2) return null
|
||||
|
||||
const bounds = computePanelBounds(p.panels, p.root, props.panelId, {
|
||||
top: 0,
|
||||
left: 0,
|
||||
width: 100,
|
||||
height: 100,
|
||||
})
|
||||
if (!bounds) return null
|
||||
|
||||
const sizes = pan.sizes ?? [50, 50]
|
||||
|
||||
if (pan.direction === "horizontal") {
|
||||
return {
|
||||
horizontal: true,
|
||||
top: bounds.top + (bounds.height * sizes[0]) / 100,
|
||||
left: bounds.left,
|
||||
size: bounds.width,
|
||||
}
|
||||
}
|
||||
return {
|
||||
horizontal: false,
|
||||
top: bounds.top,
|
||||
left: bounds.left + (bounds.width * sizes[0]) / 100,
|
||||
size: bounds.height,
|
||||
}
|
||||
})
|
||||
|
||||
const handleMouseDown = (e: MouseEvent) => {
|
||||
e.preventDefault()
|
||||
|
||||
const pos = position()
|
||||
if (!pos) return
|
||||
|
||||
const container = (e.target as HTMLElement).closest("[data-terminal-split-container]") as HTMLElement
|
||||
if (!container) return
|
||||
|
||||
const rect = container.getBoundingClientRect()
|
||||
const pan = panel()
|
||||
if (!pan) return
|
||||
|
||||
const p = pane()
|
||||
if (!p) return
|
||||
const panelBounds = computePanelBounds(p.panels, p.root, props.panelId, {
|
||||
top: 0,
|
||||
left: 0,
|
||||
width: 100,
|
||||
height: 100,
|
||||
})
|
||||
if (!panelBounds) return
|
||||
|
||||
const handleMouseMove = (e: MouseEvent) => {
|
||||
if (pan.direction === "horizontal") {
|
||||
const totalPx = (rect.height * panelBounds.height) / 100
|
||||
const topPx = (rect.height * panelBounds.top) / 100
|
||||
const posPx = e.clientY - rect.top - topPx
|
||||
const percent = Math.max(10, Math.min(90, (posPx / totalPx) * 100))
|
||||
terminal.resizeSplit(props.tabId, props.panelId, [percent, 100 - percent])
|
||||
} else {
|
||||
const totalPx = (rect.width * panelBounds.width) / 100
|
||||
const leftPx = (rect.width * panelBounds.left) / 100
|
||||
const posPx = e.clientX - rect.left - leftPx
|
||||
const percent = Math.max(10, Math.min(90, (posPx / totalPx) * 100))
|
||||
terminal.resizeSplit(props.tabId, props.panelId, [percent, 100 - percent])
|
||||
}
|
||||
}
|
||||
|
||||
const handleMouseUp = () => {
|
||||
document.removeEventListener("mousemove", handleMouseMove)
|
||||
document.removeEventListener("mouseup", handleMouseUp)
|
||||
cleanup = undefined
|
||||
}
|
||||
|
||||
cleanup = handleMouseUp
|
||||
document.addEventListener("mousemove", handleMouseMove)
|
||||
document.addEventListener("mouseup", handleMouseUp)
|
||||
}
|
||||
|
||||
return (
|
||||
<Show when={position()}>
|
||||
{(pos) => (
|
||||
<div
|
||||
data-component="resize-handle"
|
||||
data-direction={pos().horizontal ? "vertical" : "horizontal"}
|
||||
class="absolute"
|
||||
style={{
|
||||
top: `${pos().top}%`,
|
||||
left: `${pos().left}%`,
|
||||
width: pos().horizontal ? `${pos().size}%` : "8px",
|
||||
height: pos().horizontal ? "8px" : `${pos().size}%`,
|
||||
transform: pos().horizontal ? "translateY(-50%)" : "translateX(-50%)",
|
||||
cursor: pos().horizontal ? "row-resize" : "col-resize",
|
||||
}}
|
||||
onMouseDown={handleMouseDown}
|
||||
/>
|
||||
)}
|
||||
</Show>
|
||||
)
|
||||
}
|
||||
|
||||
function computePanelBounds(
|
||||
panels: Record<string, Panel>,
|
||||
currentId: string,
|
||||
targetId: string,
|
||||
bounds: { top: number; left: number; width: number; height: number },
|
||||
): { top: number; left: number; width: number; height: number } | null {
|
||||
if (currentId === targetId) return bounds
|
||||
|
||||
const panel = panels[currentId]
|
||||
if (!panel?.children || panel.children.length !== 2) return null
|
||||
|
||||
const [leftId, rightId] = panel.children
|
||||
const sizes = panel.sizes ?? [50, 50]
|
||||
const horizontal = panel.direction === "horizontal"
|
||||
|
||||
if (horizontal) {
|
||||
const topHeight = (bounds.height * sizes[0]) / 100
|
||||
const bottomHeight = bounds.height - topHeight
|
||||
const topBounds = { ...bounds, height: topHeight }
|
||||
const bottomBounds = { ...bounds, top: bounds.top + topHeight, height: bottomHeight }
|
||||
return (
|
||||
computePanelBounds(panels, leftId, targetId, topBounds) ??
|
||||
computePanelBounds(panels, rightId, targetId, bottomBounds)
|
||||
)
|
||||
}
|
||||
|
||||
const leftWidth = (bounds.width * sizes[0]) / 100
|
||||
const rightWidth = bounds.width - leftWidth
|
||||
const leftBounds = { ...bounds, width: leftWidth }
|
||||
const rightBounds = { ...bounds, left: bounds.left + leftWidth, width: rightWidth }
|
||||
return (
|
||||
computePanelBounds(panels, leftId, targetId, leftBounds) ??
|
||||
computePanelBounds(panels, rightId, targetId, rightBounds)
|
||||
)
|
||||
}
|
||||
@@ -7,11 +7,9 @@ import { resolveThemeVariant, useTheme, withAlpha, type HexColor } from "@openco
|
||||
|
||||
export interface TerminalProps extends ComponentProps<"div"> {
|
||||
pty: LocalPTY
|
||||
focused?: boolean
|
||||
onSubmit?: () => void
|
||||
onCleanup?: (pty: LocalPTY) => void
|
||||
onConnectError?: (error: unknown) => void
|
||||
onExit?: () => void
|
||||
}
|
||||
|
||||
type TerminalColors = {
|
||||
@@ -40,7 +38,7 @@ export const Terminal = (props: TerminalProps) => {
|
||||
const sdk = useSDK()
|
||||
const theme = useTheme()
|
||||
let container!: HTMLDivElement
|
||||
const [local, others] = splitProps(props, ["pty", "focused", "class", "classList", "onConnectError"])
|
||||
const [local, others] = splitProps(props, ["pty", "class", "classList", "onConnectError"])
|
||||
let ws: WebSocket | undefined
|
||||
let term: Term | undefined
|
||||
let ghostty: Ghostty
|
||||
@@ -51,7 +49,6 @@ export const Terminal = (props: TerminalProps) => {
|
||||
let handleTextareaBlur: () => void
|
||||
let reconnect: number | undefined
|
||||
let disposed = false
|
||||
let cleaning = false
|
||||
|
||||
const getTerminalColors = (): TerminalColors => {
|
||||
const mode = theme.mode()
|
||||
@@ -91,11 +88,6 @@ export const Terminal = (props: TerminalProps) => {
|
||||
t.focus()
|
||||
setTimeout(() => t.textarea?.focus(), 0)
|
||||
}
|
||||
|
||||
createEffect(() => {
|
||||
if (local.focused) focusTerminal()
|
||||
})
|
||||
|
||||
const handlePointerDown = () => {
|
||||
const activeElement = document.activeElement
|
||||
if (activeElement instanceof HTMLElement && activeElement !== container) {
|
||||
@@ -174,11 +166,6 @@ export const Terminal = (props: TerminalProps) => {
|
||||
return true
|
||||
}
|
||||
|
||||
// allow cmd+d and cmd+shift+d for terminal splitting
|
||||
if (event.metaKey && key === "d") {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
})
|
||||
|
||||
@@ -244,6 +231,7 @@ export const Terminal = (props: TerminalProps) => {
|
||||
// console.log("Scroll position:", ydisp)
|
||||
// })
|
||||
socket.addEventListener("open", () => {
|
||||
console.log("WebSocket connected")
|
||||
sdk.client.pty
|
||||
.update({
|
||||
ptyID: local.pty.id,
|
||||
@@ -262,9 +250,7 @@ export const Terminal = (props: TerminalProps) => {
|
||||
props.onConnectError?.(error)
|
||||
})
|
||||
socket.addEventListener("close", () => {
|
||||
if (!cleaning) {
|
||||
props.onExit?.()
|
||||
}
|
||||
console.log("WebSocket disconnected")
|
||||
})
|
||||
})
|
||||
|
||||
@@ -288,7 +274,6 @@ export const Terminal = (props: TerminalProps) => {
|
||||
})
|
||||
}
|
||||
|
||||
cleaning = true
|
||||
ws?.close()
|
||||
t?.dispose()
|
||||
})
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { createMemo, createSignal, onCleanup, onMount, type Accessor } from "solid-js"
|
||||
import { createSimpleContext } from "@opencode-ai/ui/context"
|
||||
import { useDialog } from "@opencode-ai/ui/context/dialog"
|
||||
|
||||
const IS_MAC = typeof navigator === "object" && /(Mac|iPod|iPhone|iPad)/.test(navigator.platform)
|
||||
|
||||
@@ -104,7 +105,15 @@ export function formatKeybind(config: string): string {
|
||||
if (kb.meta) parts.push(IS_MAC ? "⌘" : "Meta")
|
||||
|
||||
if (kb.key) {
|
||||
const displayKey = kb.key.length === 1 ? kb.key.toUpperCase() : kb.key.charAt(0).toUpperCase() + kb.key.slice(1)
|
||||
const arrows: Record<string, string> = {
|
||||
arrowup: "↑",
|
||||
arrowdown: "↓",
|
||||
arrowleft: "←",
|
||||
arrowright: "→",
|
||||
}
|
||||
const displayKey =
|
||||
arrows[kb.key.toLowerCase()] ??
|
||||
(kb.key.length === 1 ? kb.key.toUpperCase() : kb.key.charAt(0).toUpperCase() + kb.key.slice(1))
|
||||
parts.push(displayKey)
|
||||
}
|
||||
|
||||
@@ -114,6 +123,7 @@ export function formatKeybind(config: string): string {
|
||||
export const { use: useCommand, provider: CommandProvider } = createSimpleContext({
|
||||
name: "Command",
|
||||
init: () => {
|
||||
const dialog = useDialog()
|
||||
const [registrations, setRegistrations] = createSignal<Accessor<CommandOption[]>[]>([])
|
||||
const [suspendCount, setSuspendCount] = createSignal(0)
|
||||
|
||||
@@ -157,7 +167,7 @@ export const { use: useCommand, provider: CommandProvider } = createSimpleContex
|
||||
}
|
||||
|
||||
const handleKeyDown = (event: KeyboardEvent) => {
|
||||
if (suspended()) return
|
||||
if (suspended() || dialog.active) return
|
||||
|
||||
const paletteKeybinds = parseKeybind("mod+shift+p")
|
||||
if (matchKeybind(paletteKeybinds, event)) {
|
||||
|
||||
@@ -110,6 +110,7 @@ function createGlobalSync() {
|
||||
})
|
||||
|
||||
const children: Record<string, [Store<State>, SetStoreFunction<State>]> = {}
|
||||
|
||||
function child(directory: string) {
|
||||
if (!directory) console.error("No directory provided")
|
||||
if (!children[directory]) {
|
||||
@@ -122,29 +123,33 @@ function createGlobalSync() {
|
||||
if (!cache) throw new Error("Failed to create persisted cache")
|
||||
vcsCache.set(directory, { store: cache[0], setStore: cache[1], ready: cache[3] })
|
||||
|
||||
children[directory] = createStore<State>({
|
||||
project: "",
|
||||
provider: { all: [], connected: [], default: {} },
|
||||
config: {},
|
||||
path: { state: "", config: "", worktree: "", directory: "", home: "" },
|
||||
status: "loading" as const,
|
||||
agent: [],
|
||||
command: [],
|
||||
session: [],
|
||||
sessionTotal: 0,
|
||||
session_status: {},
|
||||
session_diff: {},
|
||||
todo: {},
|
||||
permission: {},
|
||||
question: {},
|
||||
mcp: {},
|
||||
lsp: [],
|
||||
vcs: cache[0].value,
|
||||
limit: 5,
|
||||
message: {},
|
||||
part: {},
|
||||
})
|
||||
bootstrapInstance(directory)
|
||||
const init = () => {
|
||||
children[directory] = createStore<State>({
|
||||
project: "",
|
||||
provider: { all: [], connected: [], default: {} },
|
||||
config: {},
|
||||
path: { state: "", config: "", worktree: "", directory: "", home: "" },
|
||||
status: "loading" as const,
|
||||
agent: [],
|
||||
command: [],
|
||||
session: [],
|
||||
sessionTotal: 0,
|
||||
session_status: {},
|
||||
session_diff: {},
|
||||
todo: {},
|
||||
permission: {},
|
||||
question: {},
|
||||
mcp: {},
|
||||
lsp: [],
|
||||
vcs: cache[0].value,
|
||||
limit: 5,
|
||||
message: {},
|
||||
part: {},
|
||||
})
|
||||
bootstrapInstance(directory)
|
||||
}
|
||||
|
||||
runWithOwner(owner, init)
|
||||
}
|
||||
const childStore = children[directory]
|
||||
if (!childStore) throw new Error("Failed to create store")
|
||||
@@ -346,6 +351,23 @@ function createGlobalSync() {
|
||||
bootstrapInstance(directory)
|
||||
break
|
||||
}
|
||||
case "session.created": {
|
||||
const result = Binary.search(store.session, event.properties.info.id, (s) => s.id)
|
||||
if (result.found) {
|
||||
setStore("session", result.index, reconcile(event.properties.info))
|
||||
break
|
||||
}
|
||||
setStore(
|
||||
"session",
|
||||
produce((draft) => {
|
||||
draft.splice(result.index, 0, event.properties.info)
|
||||
}),
|
||||
)
|
||||
if (!event.properties.info.parentID) {
|
||||
setStore("sessionTotal", store.sessionTotal + 1)
|
||||
}
|
||||
break
|
||||
}
|
||||
case "session.updated": {
|
||||
const result = Binary.search(store.session, event.properties.info.id, (s) => s.id)
|
||||
if (event.properties.info.time.archived) {
|
||||
@@ -357,6 +379,8 @@ function createGlobalSync() {
|
||||
}),
|
||||
)
|
||||
}
|
||||
if (event.properties.info.parentID) break
|
||||
setStore("sessionTotal", (value) => Math.max(0, value - 1))
|
||||
break
|
||||
}
|
||||
if (result.found) {
|
||||
|
||||
@@ -36,6 +36,7 @@ export const { use: useServer, provider: ServerProvider } = createSimpleContext(
|
||||
createStore({
|
||||
list: [] as string[],
|
||||
projects: {} as Record<string, StoredProject[]>,
|
||||
lastProject: {} as Record<string, string>,
|
||||
}),
|
||||
)
|
||||
|
||||
@@ -197,6 +198,16 @@ export const { use: useServer, provider: ServerProvider } = createSimpleContext(
|
||||
result.splice(toIndex, 0, item)
|
||||
setStore("projects", key, result)
|
||||
},
|
||||
last() {
|
||||
const key = origin()
|
||||
if (!key) return
|
||||
return store.lastProject[key]
|
||||
},
|
||||
touch(directory: string) {
|
||||
const key = origin()
|
||||
if (!key) return
|
||||
setStore("lastProject", key, directory)
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -9,31 +9,12 @@ export type LocalPTY = {
|
||||
id: string
|
||||
title: string
|
||||
titleNumber: number
|
||||
tabId: string
|
||||
rows?: number
|
||||
cols?: number
|
||||
buffer?: string
|
||||
scrollY?: number
|
||||
}
|
||||
|
||||
export type SplitDirection = "horizontal" | "vertical"
|
||||
|
||||
export type Panel = {
|
||||
id: string
|
||||
parentId?: string
|
||||
ptyId?: string
|
||||
direction?: SplitDirection
|
||||
children?: [string, string]
|
||||
sizes?: [number, number]
|
||||
}
|
||||
|
||||
export type TabPane = {
|
||||
id: string
|
||||
root: string
|
||||
panels: Record<string, Panel>
|
||||
focused?: string
|
||||
}
|
||||
|
||||
const WORKSPACE_KEY = "__workspace__"
|
||||
const MAX_TERMINAL_SESSIONS = 20
|
||||
|
||||
@@ -44,10 +25,6 @@ type TerminalCacheEntry = {
|
||||
dispose: VoidFunction
|
||||
}
|
||||
|
||||
function generateId() {
|
||||
return Math.random().toString(36).slice(2, 10)
|
||||
}
|
||||
|
||||
function createTerminalSession(sdk: ReturnType<typeof useSDK>, dir: string, id: string | undefined) {
|
||||
const legacy = `${dir}/terminal${id ? "/" + id : ""}.v1`
|
||||
|
||||
@@ -56,102 +33,47 @@ function createTerminalSession(sdk: ReturnType<typeof useSDK>, dir: string, id:
|
||||
createStore<{
|
||||
active?: string
|
||||
all: LocalPTY[]
|
||||
panes: Record<string, TabPane>
|
||||
}>({
|
||||
all: [],
|
||||
panes: {},
|
||||
}),
|
||||
)
|
||||
|
||||
const getNextTitleNumber = () => {
|
||||
const existing = new Set(store.all.filter((p) => p.tabId === p.id).map((pty) => pty.titleNumber))
|
||||
let next = 1
|
||||
while (existing.has(next)) next++
|
||||
return next
|
||||
}
|
||||
|
||||
const createPty = async (tabId?: string): Promise<LocalPTY | undefined> => {
|
||||
const tab = tabId ? store.all.find((p) => p.id === tabId) : undefined
|
||||
const num = tab?.titleNumber ?? getNextTitleNumber()
|
||||
const title = tab?.title ?? `Terminal ${num}`
|
||||
const pty = await sdk.client.pty.create({ title }).catch((e) => {
|
||||
console.error("Failed to create terminal", e)
|
||||
return undefined
|
||||
})
|
||||
if (!pty?.data?.id) return undefined
|
||||
return {
|
||||
id: pty.data.id,
|
||||
title,
|
||||
titleNumber: num,
|
||||
tabId: tabId ?? pty.data.id,
|
||||
}
|
||||
}
|
||||
|
||||
const getAllPtyIds = (pane: TabPane, panelId: string): string[] => {
|
||||
const panel = pane.panels[panelId]
|
||||
if (!panel) return []
|
||||
if (panel.ptyId) return [panel.ptyId]
|
||||
if (panel.children && panel.children.length === 2) {
|
||||
return [...getAllPtyIds(pane, panel.children[0]), ...getAllPtyIds(pane, panel.children[1])]
|
||||
}
|
||||
return []
|
||||
}
|
||||
|
||||
const getFirstLeaf = (pane: TabPane, panelId: string): string | undefined => {
|
||||
const panel = pane.panels[panelId]
|
||||
if (!panel) return undefined
|
||||
if (panel.ptyId) return panelId
|
||||
if (panel.children?.[0]) return getFirstLeaf(pane, panel.children[0])
|
||||
return undefined
|
||||
}
|
||||
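Given that tree shape, `getAllPtyIds` collects every terminal underneath a panel and `getFirstLeaf` finds the leftmost leaf to refocus after a close. Against the example pane sketched after the type definitions above (ids illustrative), the expected behavior is:

```ts
console.log(getAllPtyIds(pane, "root")) // ["pty-left", "pty-right"]
console.log(getAllPtyIds(pane, "b")) // ["pty-right"]
console.log(getFirstLeaf(pane, "root")) // "a", the leftmost leaf panel id
console.log(getFirstLeaf(pane, "missing")) // undefined
```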
|
||||
const migrate = (terminals: LocalPTY[]) =>
|
||||
terminals.map((p) => ((p as { tabId?: string }).tabId ? p : { ...p, tabId: p.id }))
|
||||
|
||||
const tabCache = new Map<string, LocalPTY>()
|
||||
const tabs = createMemo(() => {
|
||||
const migrated = migrate(store.all)
|
||||
const seen = new Set<string>()
|
||||
const result: LocalPTY[] = []
|
||||
for (const p of migrated) {
|
||||
if (!seen.has(p.tabId)) {
|
||||
seen.add(p.tabId)
|
||||
const cached = tabCache.get(p.tabId)
|
||||
if (cached) {
|
||||
cached.title = p.title
|
||||
cached.titleNumber = p.titleNumber
|
||||
result.push(cached)
|
||||
} else {
|
||||
const tab = { ...p, id: p.tabId }
|
||||
tabCache.set(p.tabId, tab)
|
||||
result.push(tab)
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const key of tabCache.keys()) {
|
||||
if (!seen.has(key)) tabCache.delete(key)
|
||||
}
|
||||
return result
|
||||
})
|
||||
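The `tabs` memo above derives one entry per `tabId` and reuses cached tab objects so their identity stays stable across store updates, which keeps keyed rendering from tearing down tab content when unrelated fields change. A reduced sketch of the dedup-with-cache idea on plain data (the `LocalPTY` type comes from the code above; the cache itself is the same `Map` pattern):

```ts
// Keep at most one entry per tabId, reusing previously created objects.
const cache = new Map<string, LocalPTY>()

function dedupeTabs(all: LocalPTY[]): LocalPTY[] {
  const seen = new Set<string>()
  const result: LocalPTY[] = []
  for (const p of all) {
    if (seen.has(p.tabId)) continue
    seen.add(p.tabId)
    const existing = cache.get(p.tabId)
    if (existing) {
      existing.title = p.title
      existing.titleNumber = p.titleNumber
      result.push(existing)
    } else {
      const tab = { ...p, id: p.tabId }
      cache.set(p.tabId, tab)
      result.push(tab)
    }
  }
  for (const key of cache.keys()) if (!seen.has(key)) cache.delete(key)
  return result
}
```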
const all = createMemo(() => migrate(store.all))
|
||||
|
||||
return {
|
||||
ready,
|
||||
tabs,
|
||||
all,
|
||||
active: () => store.active,
|
||||
panes: () => store.panes,
|
||||
pane: (tabId: string) => store.panes[tabId],
|
||||
panel: (tabId: string, panelId: string) => store.panes[tabId]?.panels[panelId],
|
||||
focused: (tabId: string) => store.panes[tabId]?.focused,
|
||||
all: createMemo(() => Object.values(store.all)),
|
||||
active: createMemo(() => store.active),
|
||||
new() {
|
||||
const existingTitleNumbers = new Set(
|
||||
store.all.map((pty) => {
|
||||
const match = pty.titleNumber
|
||||
return match
|
||||
}),
|
||||
)
|
||||
|
||||
async new() {
|
||||
const pty = await createPty()
|
||||
if (!pty) return
|
||||
setStore("all", [...store.all, pty])
|
||||
setStore("active", pty.tabId)
|
||||
let nextNumber = 1
|
||||
while (existingTitleNumbers.has(nextNumber)) {
|
||||
nextNumber++
|
||||
}
|
||||
|
||||
sdk.client.pty
|
||||
.create({ title: `Terminal ${nextNumber}` })
|
||||
.then((pty) => {
|
||||
const id = pty.data?.id
|
||||
if (!id) return
|
||||
setStore("all", [
|
||||
...store.all,
|
||||
{
|
||||
id,
|
||||
title: pty.data?.title ?? "Terminal",
|
||||
titleNumber: nextNumber,
|
||||
},
|
||||
])
|
||||
setStore("active", id)
|
||||
})
|
||||
.catch((e) => {
|
||||
console.error("Failed to create terminal", e)
|
||||
})
|
||||
},
|
||||
|
||||
update(pty: Partial<LocalPTY> & { id: string }) {
|
||||
setStore("all", (x) => x.map((x) => (x.id === pty.id ? { ...x, ...pty } : x)))
|
||||
sdk.client.pty
|
||||
@@ -164,82 +86,46 @@ function createTerminalSession(sdk: ReturnType<typeof useSDK>, dir: string, id:
|
||||
console.error("Failed to update terminal", e)
|
||||
})
|
||||
},
|
||||
|
||||
async clone(id: string) {
|
||||
const index = store.all.findIndex((x) => x.id === id)
|
||||
const pty = store.all[index]
|
||||
if (!pty) return
|
||||
const clone = await sdk.client.pty.create({ title: pty.title }).catch((e) => {
|
||||
console.error("Failed to clone terminal", e)
|
||||
return undefined
|
||||
})
|
||||
const clone = await sdk.client.pty
|
||||
.create({
|
||||
title: pty.title,
|
||||
})
|
||||
.catch((e) => {
|
||||
console.error("Failed to clone terminal", e)
|
||||
return undefined
|
||||
})
|
||||
if (!clone?.data) return
|
||||
setStore("all", index, { ...pty, ...clone.data })
|
||||
if (store.active === pty.tabId) {
|
||||
setStore("active", pty.tabId)
|
||||
setStore("all", index, {
|
||||
...pty,
|
||||
...clone.data,
|
||||
})
|
||||
if (store.active === pty.id) {
|
||||
setStore("active", clone.data.id)
|
||||
}
|
||||
},
|
||||
|
||||
open(id: string) {
|
||||
setStore("active", id)
|
||||
},
|
||||
|
||||
async close(id: string) {
|
||||
const pty = store.all.find((x) => x.id === id)
|
||||
if (!pty) return
|
||||
|
||||
const pane = store.panes[pty.tabId]
|
||||
if (pane) {
|
||||
const panelId = Object.keys(pane.panels).find((key) => pane.panels[key].ptyId === id)
|
||||
if (panelId) {
|
||||
await this.closeSplit(pty.tabId, panelId)
|
||||
return
|
||||
batch(() => {
|
||||
setStore(
|
||||
"all",
|
||||
store.all.filter((x) => x.id !== id),
|
||||
)
|
||||
if (store.active === id) {
|
||||
const index = store.all.findIndex((f) => f.id === id)
|
||||
const previous = store.all[Math.max(0, index - 1)]
|
||||
setStore("active", previous?.id)
|
||||
}
|
||||
}
|
||||
|
||||
if (store.active === pty.tabId) {
|
||||
const remaining = store.all.filter((p) => p.tabId === p.id && p.id !== id)
|
||||
setStore("active", remaining[0]?.tabId)
|
||||
}
|
||||
|
||||
setStore(
|
||||
"all",
|
||||
store.all.filter((x) => x.id !== id),
|
||||
)
|
||||
|
||||
})
|
||||
await sdk.client.pty.remove({ ptyID: id }).catch((e) => {
|
||||
console.error("Failed to close terminal", e)
|
||||
})
|
||||
},
|
||||
|
||||
async closeTab(tabId: string) {
|
||||
const pane = store.panes[tabId]
|
||||
const terminalsInTab = store.all.filter((p) => p.tabId === tabId)
|
||||
const ptyIds = pane ? getAllPtyIds(pane, pane.root) : terminalsInTab.map((p) => p.id)
|
||||
|
||||
const remainingTabs = store.all.filter((p) => p.tabId !== tabId)
|
||||
const uniqueTabIds = [...new Set(remainingTabs.map((p) => p.tabId))]
|
||||
|
||||
setStore(
|
||||
"all",
|
||||
store.all.filter((x) => !ptyIds.includes(x.id)),
|
||||
)
|
||||
setStore(
|
||||
"panes",
|
||||
produce((panes) => {
|
||||
delete panes[tabId]
|
||||
}),
|
||||
)
|
||||
if (store.active === tabId) {
|
||||
setStore("active", uniqueTabIds[0])
|
||||
}
|
||||
for (const ptyId of ptyIds) {
|
||||
await sdk.client.pty.remove({ ptyID: ptyId }).catch((e) => {
|
||||
console.error("Failed to close terminal", e)
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
move(id: string, to: number) {
|
||||
const index = store.all.findIndex((f) => f.id === id)
|
||||
if (index === -1) return
|
||||
@@ -250,159 +136,6 @@ function createTerminalSession(sdk: ReturnType<typeof useSDK>, dir: string, id:
|
||||
}),
|
||||
)
|
||||
},
|
||||
|
||||
async split(tabId: string, direction: SplitDirection) {
|
||||
const pane = store.panes[tabId]
|
||||
const newPty = await createPty(tabId)
|
||||
if (!newPty) return
|
||||
|
||||
setStore("all", [...store.all, newPty])
|
||||
|
||||
if (!pane) {
|
||||
const rootId = generateId()
|
||||
const leftId = generateId()
|
||||
const rightId = generateId()
|
||||
|
||||
setStore("panes", tabId, {
|
||||
id: tabId,
|
||||
root: rootId,
|
||||
panels: {
|
||||
[rootId]: {
|
||||
id: rootId,
|
||||
direction,
|
||||
children: [leftId, rightId],
|
||||
sizes: [50, 50],
|
||||
},
|
||||
[leftId]: {
|
||||
id: leftId,
|
||||
parentId: rootId,
|
||||
ptyId: tabId,
|
||||
},
|
||||
[rightId]: {
|
||||
id: rightId,
|
||||
parentId: rootId,
|
||||
ptyId: newPty.id,
|
||||
},
|
||||
},
|
||||
focused: rightId,
|
||||
})
|
||||
} else {
|
||||
const focusedPanelId = pane.focused
|
||||
if (!focusedPanelId) return
|
||||
|
||||
const focusedPanel = pane.panels[focusedPanelId]
|
||||
if (!focusedPanel?.ptyId) return
|
||||
|
||||
const oldPtyId = focusedPanel.ptyId
|
||||
const newSplitId = generateId()
|
||||
const newTerminalId = generateId()
|
||||
|
||||
setStore("panes", tabId, "panels", newSplitId, {
|
||||
id: newSplitId,
|
||||
parentId: focusedPanelId,
|
||||
ptyId: oldPtyId,
|
||||
})
|
||||
setStore("panes", tabId, "panels", newTerminalId, {
|
||||
id: newTerminalId,
|
||||
parentId: focusedPanelId,
|
||||
ptyId: newPty.id,
|
||||
})
|
||||
setStore("panes", tabId, "panels", focusedPanelId, "ptyId", undefined)
|
||||
setStore("panes", tabId, "panels", focusedPanelId, "direction", direction)
|
||||
setStore("panes", tabId, "panels", focusedPanelId, "children", [newSplitId, newTerminalId])
|
||||
setStore("panes", tabId, "panels", focusedPanelId, "sizes", [50, 50])
|
||||
setStore("panes", tabId, "focused", newTerminalId)
|
||||
}
|
||||
},
|
||||
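`split` above covers two cases: the first split of a plain tab creates a fresh root with two leaves, and a later split turns the focused leaf into an internal node whose children are the old terminal and the new one. The second case is essentially a pure tree transform; the function below is a hedged standalone sketch of it, not the store-wired version:

```ts
// Pure sketch: the focused leaf becomes an internal node, its pty moves into a
// new left child, and the freshly created pty lands in the right child.
function splitLeaf(
  pane: TabPane,
  focusedPanelId: string,
  newPtyId: string,
  direction: SplitDirection,
  makeId: () => string,
): TabPane | undefined {
  const focused = pane.panels[focusedPanelId]
  if (!focused?.ptyId) return undefined

  const leftId = makeId()
  const rightId = makeId()
  return {
    ...pane,
    focused: rightId,
    panels: {
      ...pane.panels,
      [leftId]: { id: leftId, parentId: focusedPanelId, ptyId: focused.ptyId },
      [rightId]: { id: rightId, parentId: focusedPanelId, ptyId: newPtyId },
      [focusedPanelId]: {
        id: focusedPanelId,
        parentId: focused.parentId,
        direction,
        children: [leftId, rightId],
        sizes: [50, 50],
      },
    },
  }
}
```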
|
||||
focus(tabId: string, panelId: string) {
|
||||
if (store.panes[tabId]) {
|
||||
setStore("panes", tabId, "focused", panelId)
|
||||
}
|
||||
},
|
||||
|
||||
async closeSplit(tabId: string, panelId: string) {
|
||||
const pane = store.panes[tabId]
|
||||
if (!pane) return
|
||||
|
||||
const panel = pane.panels[panelId]
|
||||
if (!panel) return
|
||||
|
||||
const ptyId = panel.ptyId
|
||||
if (!ptyId) return
|
||||
|
||||
if (!panel.parentId) {
|
||||
await this.closeTab(tabId)
|
||||
return
|
||||
}
|
||||
|
||||
const parentPanel = pane.panels[panel.parentId]
|
||||
if (!parentPanel?.children || parentPanel.children.length !== 2) return
|
||||
|
||||
const siblingId = parentPanel.children[0] === panelId ? parentPanel.children[1] : parentPanel.children[0]
|
||||
const sibling = pane.panels[siblingId]
|
||||
if (!sibling) return
|
||||
|
||||
const newFocused = sibling.ptyId ? panel.parentId! : (getFirstLeaf(pane, sibling.children![0]) ?? panel.parentId!)
|
||||
|
||||
batch(() => {
|
||||
setStore(
|
||||
"panes",
|
||||
tabId,
|
||||
"panels",
|
||||
produce((panels) => {
|
||||
const parent = panels[panel.parentId!]
|
||||
if (!parent) return
|
||||
|
||||
if (sibling.ptyId) {
|
||||
parent.ptyId = sibling.ptyId
|
||||
parent.direction = undefined
|
||||
parent.children = undefined
|
||||
parent.sizes = undefined
|
||||
} else if (sibling.children && sibling.children.length === 2) {
|
||||
parent.ptyId = undefined
|
||||
parent.direction = sibling.direction
|
||||
parent.children = sibling.children
|
||||
parent.sizes = sibling.sizes
|
||||
panels[sibling.children[0]].parentId = panel.parentId!
|
||||
panels[sibling.children[1]].parentId = panel.parentId!
|
||||
}
|
||||
|
||||
delete panels[panelId]
|
||||
delete panels[siblingId]
|
||||
}),
|
||||
)
|
||||
|
||||
setStore("panes", tabId, "focused", newFocused)
|
||||
|
||||
setStore(
|
||||
"all",
|
||||
store.all.filter((x) => x.id !== ptyId),
|
||||
)
|
||||
})
|
||||
|
||||
const remainingPanels = Object.values(store.panes[tabId]?.panels ?? {})
|
||||
const shouldCleanupPane = remainingPanels.length === 1 && remainingPanels[0]?.ptyId
|
||||
|
||||
if (shouldCleanupPane) {
|
||||
setStore(
|
||||
"panes",
|
||||
produce((panes) => {
|
||||
delete panes[tabId]
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
await sdk.client.pty.remove({ ptyID: ptyId }).catch((e) => {
|
||||
console.error("Failed to close terminal", e)
|
||||
})
|
||||
},
|
||||
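`closeSplit` above removes a leaf and promotes its sibling into the parent slot: if the sibling is itself a leaf, the parent becomes a leaf holding the sibling's pty; if the sibling is a split, the parent adopts its direction, children, and sizes, and the grandchildren are re-parented. A compact sketch of that promotion on a plain `panels` record (store wiring and the pty removal call are left out):

```ts
// Promote the sibling of a closed leaf into the parent panel.
function promoteSibling(panels: Record<string, Panel>, closedId: string, siblingId: string, parentId: string) {
  const sibling = panels[siblingId]
  const parent = panels[parentId]
  if (!sibling || !parent) return

  if (sibling.ptyId) {
    // Sibling is a leaf: the parent turns back into a leaf holding that pty.
    parent.ptyId = sibling.ptyId
    parent.direction = undefined
    parent.children = undefined
    parent.sizes = undefined
  } else if (sibling.children && sibling.children.length === 2) {
    // Sibling is a split: the parent adopts it and the grandchildren re-parent.
    parent.ptyId = undefined
    parent.direction = sibling.direction
    parent.children = sibling.children
    parent.sizes = sibling.sizes
    panels[sibling.children[0]].parentId = parentId
    panels[sibling.children[1]].parentId = parentId
  }

  delete panels[closedId]
  delete panels[siblingId]
}
```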
|
||||
resizeSplit(tabId: string, panelId: string, sizes: [number, number]) {
|
||||
if (store.panes[tabId]?.panels[panelId]) {
|
||||
setStore("panes", tabId, "panels", panelId, "sizes", sizes)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -456,25 +189,14 @@ export const { use: useTerminal, provider: TerminalProvider } = createSimpleCont
|
||||
|
||||
return {
|
||||
ready: () => session().ready(),
|
||||
tabs: () => session().tabs(),
|
||||
all: () => session().all(),
|
||||
active: () => session().active(),
|
||||
panes: () => session().panes(),
|
||||
pane: (tabId: string) => session().pane(tabId),
|
||||
panel: (tabId: string, panelId: string) => session().panel(tabId, panelId),
|
||||
focused: (tabId: string) => session().focused(tabId),
|
||||
new: () => session().new(),
|
||||
update: (pty: Partial<LocalPTY> & { id: string }) => session().update(pty),
|
||||
clone: (id: string) => session().clone(id),
|
||||
open: (id: string) => session().open(id),
|
||||
close: (id: string) => session().close(id),
|
||||
closeTab: (tabId: string) => session().closeTab(tabId),
|
||||
move: (id: string, to: number) => session().move(id, to),
|
||||
split: (tabId: string, direction: SplitDirection) => session().split(tabId, direction),
|
||||
focus: (tabId: string, panelId: string) => session().focus(tabId, panelId),
|
||||
closeSplit: (tabId: string, panelId: string) => session().closeSplit(tabId, panelId),
|
||||
resizeSplit: (tabId: string, panelId: string, sizes: [number, number]) =>
|
||||
session().resizeSplit(tabId, panelId, sizes),
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
@@ -9,16 +9,3 @@
|
||||
*[data-tauri-drag-region] {
|
||||
app-region: drag;
|
||||
}
|
||||
|
||||
/* Terminal split resize handles */
|
||||
[data-terminal-split-container] [data-component="resize-handle"] {
|
||||
inset: unset;
|
||||
|
||||
&[data-direction="horizontal"] {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
&[data-direction="vertical"] {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { useGlobalSync } from "@/context/global-sync"
|
||||
import { createMemo, For, Match, Show, Switch } from "solid-js"
|
||||
import { Button } from "@opencode-ai/ui/button"
|
||||
import { Logo } from "@opencode-ai/ui/logo"
|
||||
@@ -12,6 +11,7 @@ import { useDialog } from "@opencode-ai/ui/context/dialog"
|
||||
import { DialogSelectDirectory } from "@/components/dialog-select-directory"
|
||||
import { DialogSelectServer } from "@/components/dialog-select-server"
|
||||
import { useServer } from "@/context/server"
|
||||
import { useGlobalSync } from "@/context/global-sync"
|
||||
|
||||
export default function Home() {
|
||||
const sync = useGlobalSync()
|
||||
@@ -24,6 +24,7 @@ export default function Home() {
|
||||
|
||||
function openProject(directory: string) {
|
||||
layout.projects.open(directory)
|
||||
server.projects.touch(directory)
|
||||
navigate(`/${base64Encode(directory)}`)
|
||||
}
|
||||
|
||||
|
||||
@@ -5,12 +5,14 @@ import {
|
||||
createSignal,
|
||||
For,
|
||||
Match,
|
||||
on,
|
||||
onCleanup,
|
||||
onMount,
|
||||
ParentProps,
|
||||
Show,
|
||||
Switch,
|
||||
untrack,
|
||||
type Accessor,
|
||||
type JSX,
|
||||
} from "solid-js"
|
||||
import { A, useNavigate, useParams } from "@solidjs/router"
|
||||
@@ -23,6 +25,7 @@ import { ResizeHandle } from "@opencode-ai/ui/resize-handle"
|
||||
import { Button } from "@opencode-ai/ui/button"
|
||||
import { Icon } from "@opencode-ai/ui/icon"
|
||||
import { IconButton } from "@opencode-ai/ui/icon-button"
|
||||
import { InlineInput } from "@opencode-ai/ui/inline-input"
|
||||
import { Tooltip, TooltipKeybind } from "@opencode-ai/ui/tooltip"
|
||||
import { HoverCard } from "@opencode-ai/ui/hover-card"
|
||||
import { DropdownMenu } from "@opencode-ai/ui/dropdown-menu"
|
||||
@@ -70,6 +73,7 @@ export default function Layout(props: ParentProps) {
|
||||
activeProject: undefined as string | undefined,
|
||||
activeWorkspace: undefined as string | undefined,
|
||||
workspaceOrder: {} as Record<string, string[]>,
|
||||
workspaceName: {} as Record<string, string>,
|
||||
workspaceExpanded: {} as Record<string, boolean>,
|
||||
}),
|
||||
)
|
||||
@@ -84,6 +88,7 @@ export default function Layout(props: ParentProps) {
|
||||
onCleanup(() => xlQuery.removeEventListener("change", handleViewportChange))
|
||||
|
||||
const params = useParams()
|
||||
const [autoselect, setAutoselect] = createSignal(!params.dir)
|
||||
const globalSDK = useGlobalSDK()
|
||||
const globalSync = useGlobalSync()
|
||||
const layout = useLayout()
|
||||
@@ -97,6 +102,7 @@ export default function Layout(props: ParentProps) {
|
||||
const dialog = useDialog()
|
||||
const command = useCommand()
|
||||
const theme = useTheme()
|
||||
const initialDir = params.dir
|
||||
const availableThemeEntries = createMemo(() => Object.entries(theme.themes()))
|
||||
const colorSchemeOrder: ColorScheme[] = ["system", "light", "dark"]
|
||||
const colorSchemeLabel: Record<ColorScheme, string> = {
|
||||
@@ -105,6 +111,104 @@ export default function Layout(props: ParentProps) {
|
||||
dark: "Dark",
|
||||
}
|
||||
|
||||
const [editor, setEditor] = createStore({
|
||||
active: "" as string,
|
||||
value: "",
|
||||
})
|
||||
const editorRef = { current: undefined as HTMLInputElement | undefined }
|
||||
|
||||
const editorOpen = (id: string) => editor.active === id
|
||||
const editorValue = () => editor.value
|
||||
|
||||
const openEditor = (id: string, value: string) => {
|
||||
if (!id) return
|
||||
setEditor({ active: id, value })
|
||||
queueMicrotask(() => editorRef.current?.focus())
|
||||
}
|
||||
|
||||
const closeEditor = () => setEditor({ active: "", value: "" })
|
||||
|
||||
const saveEditor = (callback: (next: string) => void) => {
|
||||
const next = editor.value.trim()
|
||||
if (!next) {
|
||||
closeEditor()
|
||||
return
|
||||
}
|
||||
closeEditor()
|
||||
callback(next)
|
||||
}
|
||||
|
||||
const editorKeyDown = (event: KeyboardEvent, callback: (next: string) => void) => {
|
||||
if (event.key === "Enter") {
|
||||
event.preventDefault()
|
||||
saveEditor(callback)
|
||||
return
|
||||
}
|
||||
if (event.key === "Escape") {
|
||||
event.preventDefault()
|
||||
closeEditor()
|
||||
}
|
||||
}
|
||||
|
||||
const InlineEditor = (props: {
|
||||
id: string
|
||||
value: Accessor<string>
|
||||
onSave: (next: string) => void
|
||||
class?: string
|
||||
displayClass?: string
|
||||
editing?: boolean
|
||||
stopPropagation?: boolean
|
||||
openOnDblClick?: boolean
|
||||
}) => {
|
||||
const isEditing = () => props.editing ?? editorOpen(props.id)
|
||||
const stopEvents = () => props.stopPropagation ?? false
|
||||
const allowDblClick = () => props.openOnDblClick ?? true
|
||||
const stopPropagation = (event: Event) => {
|
||||
if (!stopEvents()) return
|
||||
event.stopPropagation()
|
||||
}
|
||||
const handleDblClick = (event: MouseEvent) => {
|
||||
if (!allowDblClick()) return
|
||||
stopPropagation(event)
|
||||
openEditor(props.id, props.value())
|
||||
}
|
||||
|
||||
return (
|
||||
<Show
|
||||
when={isEditing()}
|
||||
fallback={
|
||||
<span
|
||||
class={props.displayClass ?? props.class}
|
||||
onDblClick={handleDblClick}
|
||||
onPointerDown={stopPropagation}
|
||||
onMouseDown={stopPropagation}
|
||||
onClick={stopPropagation}
|
||||
onTouchStart={stopPropagation}
|
||||
>
|
||||
{props.value()}
|
||||
</span>
|
||||
}
|
||||
>
|
||||
<InlineInput
|
||||
ref={(el) => {
|
||||
editorRef.current = el
|
||||
}}
|
||||
value={editorValue()}
|
||||
class={props.class}
|
||||
onInput={(event) => setEditor("value", event.currentTarget.value)}
|
||||
onKeyDown={(event) => editorKeyDown(event, props.onSave)}
|
||||
onBlur={() => closeEditor()}
|
||||
onPointerDown={stopPropagation}
|
||||
onClick={stopPropagation}
|
||||
onDblClick={stopPropagation}
|
||||
onMouseDown={stopPropagation}
|
||||
onMouseUp={stopPropagation}
|
||||
onTouchStart={stopPropagation}
|
||||
/>
|
||||
</Show>
|
||||
)
|
||||
}
|
||||
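The `InlineEditor` above is backed by a single shared editor store: only one id can be in edit mode at a time, `openEditor` focuses the input on the next microtask, and Enter/Escape commit or cancel. A minimal sketch of that shared state, keyed the same way the ids are built later in this diff ("session:<id>", "project:<id>", "workspace:<dir>"); this is an illustration of the pattern, not the component's full implementation:

```ts
import { createStore } from "solid-js/store"

// One inline editor for the whole sidebar; whichever id was opened last wins.
const [editor, setEditor] = createStore({ active: "", value: "" })

export const editorOpen = (id: string) => editor.active === id
export const openEditor = (id: string, value: string) => setEditor({ active: id, value })
export const closeEditor = () => setEditor({ active: "", value: "" })
```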
|
||||
function cycleTheme(direction = 1) {
|
||||
const ids = availableThemeEntries().map(([id]) => id)
|
||||
if (ids.length === 0) return
|
||||
@@ -275,12 +379,21 @@ export default function Layout(props: ParentProps) {
|
||||
return bUpdated - aUpdated
|
||||
}
|
||||
|
||||
function scrollToSession(sessionId: string) {
|
||||
const [scrollSessionKey, setScrollSessionKey] = createSignal<string | undefined>(undefined)
|
||||
|
||||
function scrollToSession(sessionId: string, sessionKey: string) {
|
||||
if (!scrollContainerRef) return
|
||||
if (scrollSessionKey() === sessionKey) return
|
||||
const element = scrollContainerRef.querySelector(`[data-session-id="${sessionId}"]`)
|
||||
if (element) {
|
||||
element.scrollIntoView({ block: "nearest", behavior: "smooth" })
|
||||
if (!element) return
|
||||
const containerRect = scrollContainerRef.getBoundingClientRect()
|
||||
const elementRect = element.getBoundingClientRect()
|
||||
if (elementRect.top >= containerRect.top && elementRect.bottom <= containerRect.bottom) {
|
||||
setScrollSessionKey(sessionKey)
|
||||
return
|
||||
}
|
||||
setScrollSessionKey(sessionKey)
|
||||
element.scrollIntoView({ block: "nearest", behavior: "smooth" })
|
||||
}
|
||||
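The reworked `scrollToSession` takes a `sessionKey` (directory plus session id) so repeated effects for the same session do not keep re-scrolling, and it skips the scroll entirely when the row is already fully visible inside the container. The visibility test reduces to a rectangle comparison; a standalone sketch:

```ts
// True when the element is fully visible inside its scroll container.
function fullyVisible(container: HTMLElement, element: Element): boolean {
  const containerRect = container.getBoundingClientRect()
  const elementRect = element.getBoundingClientRect()
  return elementRect.top >= containerRect.top && elementRect.bottom <= containerRect.bottom
}
```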
|
||||
const currentProject = createMemo(() => {
|
||||
@@ -289,6 +402,49 @@ export default function Layout(props: ParentProps) {
|
||||
return layout.projects.list().find((p) => p.worktree === directory || p.sandboxes?.includes(directory))
|
||||
})
|
||||
|
||||
createEffect(
|
||||
on(
|
||||
() => ({ ready: pageReady(), project: currentProject() }),
|
||||
(value) => {
|
||||
if (!value.ready) return
|
||||
const project = value.project
|
||||
if (!project) return
|
||||
const last = server.projects.last()
|
||||
if (last === project.worktree) return
|
||||
server.projects.touch(project.worktree)
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
|
||||
createEffect(
|
||||
on(
|
||||
() => ({ ready: pageReady(), layoutReady: layoutReady(), dir: params.dir, list: layout.projects.list() }),
|
||||
(value) => {
|
||||
if (!value.ready) return
|
||||
if (!value.layoutReady) return
|
||||
if (!autoselect()) return
|
||||
if (initialDir) return
|
||||
if (value.dir) return
|
||||
if (value.list.length === 0) return
|
||||
|
||||
const last = server.projects.last()
|
||||
const next = value.list.find((project) => project.worktree === last) ?? value.list[0]
|
||||
if (!next) return
|
||||
setAutoselect(false)
|
||||
openProject(next.worktree, false)
|
||||
navigateToProject(next.worktree)
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
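Both effects above use Solid's `on` with `{ defer: true }`, which restricts tracking to the listed sources and skips the very first run, so `server.projects.touch` and the autoselect navigation fire on changes rather than on mount. A minimal sketch of the shape; `pageReady` and `params` stand in for the accessors defined in the surrounding component:

```ts
import { createEffect, on } from "solid-js"

// Runs only when the tracked sources change after the initial render, never on mount.
createEffect(
  on(
    () => ({ ready: pageReady(), dir: params.dir }),
    (value) => {
      if (!value.ready) return
      // ...react to the change here
    },
    { defer: true },
  ),
)
```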
|
||||
const workspaceName = (directory: string) => store.workspaceName[directory]
|
||||
const workspaceLabel = (directory: string, branch?: string) =>
|
||||
workspaceName(directory) ?? branch ?? getFilename(directory)
|
||||
|
||||
const isWorkspaceEditing = () => editor.active.startsWith("workspace:")
|
||||
|
||||
const workspaceSetting = createMemo(() => {
|
||||
const project = currentProject()
|
||||
if (!project) return false
|
||||
@@ -325,9 +481,12 @@ export default function Layout(props: ParentProps) {
|
||||
createEffect(() => {
|
||||
if (!pageReady()) return
|
||||
if (!layoutReady()) return
|
||||
const projects = layout.projects.list()
|
||||
for (const [directory, expanded] of Object.entries(store.workspaceExpanded)) {
|
||||
if (layout.sidebar.workspaces(directory)()) continue
|
||||
if (!expanded) continue
|
||||
const project = projects.find((item) => item.worktree === directory || item.sandboxes?.includes(directory))
|
||||
if (!project) continue
|
||||
if (layout.sidebar.workspaces(project.worktree)()) continue
|
||||
setStore("workspaceExpanded", directory, false)
|
||||
}
|
||||
})
|
||||
@@ -342,7 +501,7 @@ export default function Layout(props: ParentProps) {
|
||||
const [dirStore] = globalSync.child(dir)
|
||||
const dirSessions = dirStore.session
|
||||
.filter((session) => session.directory === dirStore.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions)
|
||||
result.push(...dirSessions)
|
||||
}
|
||||
@@ -351,7 +510,7 @@ export default function Layout(props: ParentProps) {
|
||||
const [projectStore] = globalSync.child(project.worktree)
|
||||
return projectStore.session
|
||||
.filter((session) => session.directory === projectStore.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions)
|
||||
})
|
||||
|
||||
@@ -533,7 +692,7 @@ export default function Layout(props: ParentProps) {
|
||||
})
|
||||
}
|
||||
navigateToSession(session)
|
||||
queueMicrotask(() => scrollToSession(session.id))
|
||||
queueMicrotask(() => scrollToSession(session.id, `${session.directory}:${session.id}`))
|
||||
}
|
||||
|
||||
async function archiveSession(session: Session) {
|
||||
@@ -671,6 +830,7 @@ export default function Layout(props: ParentProps) {
|
||||
|
||||
function navigateToProject(directory: string | undefined) {
|
||||
if (!directory) return
|
||||
server.projects.touch(directory)
|
||||
const lastSession = store.lastSession[directory]
|
||||
navigate(`/${base64Encode(directory)}${lastSession ? `/session/${lastSession}` : ""}`)
|
||||
layout.mobileSidebar.hide()
|
||||
@@ -687,6 +847,31 @@ export default function Layout(props: ParentProps) {
|
||||
if (navigate) navigateToProject(directory)
|
||||
}
|
||||
|
||||
const displayName = (project: LocalProject) => project.name || getFilename(project.worktree)
|
||||
|
||||
async function renameProject(project: LocalProject, next: string) {
|
||||
if (!project.id) return
|
||||
const current = displayName(project)
|
||||
if (next === current) return
|
||||
const name = next === getFilename(project.worktree) ? "" : next
|
||||
await globalSDK.client.project.update({ projectID: project.id, name })
|
||||
}
|
||||
|
||||
async function renameSession(session: Session, next: string) {
|
||||
if (next === session.title) return
|
||||
await globalSDK.client.session.update({
|
||||
directory: session.directory,
|
||||
sessionID: session.id,
|
||||
title: next,
|
||||
})
|
||||
}
|
||||
|
||||
const renameWorkspace = (directory: string, next: string) => {
|
||||
const current = workspaceName(directory) ?? getFilename(directory)
|
||||
if (current === next) return
|
||||
setStore("workspaceName", directory, next)
|
||||
}
|
||||
|
||||
function closeProject(directory: string) {
|
||||
const index = layout.projects.list().findIndex((x) => x.worktree === directory)
|
||||
const next = layout.projects.list()[index + 1]
|
||||
@@ -721,16 +906,26 @@ export default function Layout(props: ParentProps) {
|
||||
}
|
||||
}
|
||||
|
||||
createEffect(() => {
|
||||
if (!pageReady()) return
|
||||
if (!params.dir || !params.id) return
|
||||
const directory = base64Decode(params.dir)
|
||||
const id = params.id
|
||||
setStore("lastSession", directory, id)
|
||||
notification.session.markViewed(id)
|
||||
untrack(() => setStore("workspaceExpanded", directory, (value) => value ?? true))
|
||||
requestAnimationFrame(() => scrollToSession(id))
|
||||
})
|
||||
createEffect(
|
||||
on(
|
||||
() => ({ ready: pageReady(), dir: params.dir, id: params.id }),
|
||||
(value) => {
|
||||
if (!value.ready) return
|
||||
const dir = value.dir
|
||||
const id = value.id
|
||||
if (!dir || !id) return
|
||||
const directory = base64Decode(dir)
|
||||
setStore("lastSession", directory, id)
|
||||
notification.session.markViewed(id)
|
||||
const expanded = untrack(() => store.workspaceExpanded[directory])
|
||||
if (expanded === false) {
|
||||
setStore("workspaceExpanded", directory, true)
|
||||
}
|
||||
requestAnimationFrame(() => scrollToSession(id, `${directory}:${id}`))
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
|
||||
createEffect(() => {
|
||||
const project = currentProject()
|
||||
@@ -747,15 +942,6 @@ export default function Layout(props: ParentProps) {
|
||||
globalSync.project.loadSessions(project.worktree)
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
if (isLargeViewport()) {
|
||||
const sidebarWidth = layout.sidebar.opened() ? layout.sidebar.width() : 64
|
||||
document.documentElement.style.setProperty("--dialog-left-margin", `${sidebarWidth}px`)
|
||||
return
|
||||
}
|
||||
document.documentElement.style.setProperty("--dialog-left-margin", "0px")
|
||||
})
|
||||
|
||||
function getDraggableId(event: unknown): string | undefined {
|
||||
if (typeof event !== "object" || event === null) return undefined
|
||||
if (!("draggable" in event)) return undefined
|
||||
@@ -930,9 +1116,14 @@ export default function Layout(props: ParentProps) {
|
||||
</Match>
|
||||
</Switch>
|
||||
</div>
|
||||
<span class="text-14-regular text-text-strong grow-1 min-w-0 overflow-hidden text-ellipsis truncate">
|
||||
{props.session.title}
|
||||
</span>
|
||||
<InlineEditor
|
||||
id={`session:${props.session.id}`}
|
||||
value={() => props.session.title}
|
||||
onSave={(next) => renameSession(props.session, next)}
|
||||
class="text-14-regular text-text-strong grow-1 min-w-0 overflow-hidden text-ellipsis truncate"
|
||||
displayClass="text-14-regular text-text-strong grow-1 min-w-0 overflow-hidden text-ellipsis truncate"
|
||||
stopPropagation
|
||||
/>
|
||||
<Show when={props.session.summary}>
|
||||
{(summary) => (
|
||||
<div class="group-hover/session:hidden group-active/session:hidden group-focus-within/session:hidden">
|
||||
@@ -970,116 +1161,6 @@ export default function Layout(props: ParentProps) {
|
||||
)
|
||||
}
|
||||
|
||||
const SortableProject = (props: { project: LocalProject; mobile?: boolean }): JSX.Element => {
|
||||
const sortable = createSortable(props.project.worktree)
|
||||
const selected = createMemo(() => {
|
||||
const current = params.dir ? base64Decode(params.dir) : ""
|
||||
return props.project.worktree === current || props.project.sandboxes?.includes(current)
|
||||
})
|
||||
|
||||
const workspaces = createMemo(() => workspaceIds(props.project).slice(0, 2))
|
||||
const workspaceEnabled = createMemo(() => layout.sidebar.workspaces(props.project.worktree)())
|
||||
const label = (directory: string) => {
|
||||
const [data] = globalSync.child(directory)
|
||||
const kind = directory === props.project.worktree ? "local" : "sandbox"
|
||||
const name = data.vcs?.branch ?? getFilename(directory)
|
||||
return `${kind} : ${name}`
|
||||
}
|
||||
|
||||
const sessions = (directory: string) => {
|
||||
const [data] = globalSync.child(directory)
|
||||
return data.session
|
||||
.filter((session) => session.directory === data.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.toSorted(sortSessions)
|
||||
.slice(0, 2)
|
||||
}
|
||||
|
||||
const projectSessions = () => {
|
||||
const [data] = globalSync.child(props.project.worktree)
|
||||
return data.session
|
||||
.filter((session) => session.directory === data.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.toSorted(sortSessions)
|
||||
.slice(0, 2)
|
||||
}
|
||||
|
||||
const trigger = (
|
||||
<button
|
||||
type="button"
|
||||
classList={{
|
||||
"flex items-center justify-center size-10 p-1 rounded-lg overflow-hidden transition-colors cursor-default": true,
|
||||
"bg-transparent border-2 border-icon-strong-base hover:bg-surface-base-hover": selected(),
|
||||
"bg-transparent border border-transparent hover:bg-surface-base-hover hover:border-border-weak-base":
|
||||
!selected(),
|
||||
}}
|
||||
onClick={() => navigateToProject(props.project.worktree)}
|
||||
>
|
||||
<ProjectIcon project={props.project} notify />
|
||||
</button>
|
||||
)
|
||||
|
||||
return (
|
||||
// @ts-ignore
|
||||
<div use:sortable classList={{ "opacity-30": sortable.isActiveDraggable }}>
|
||||
<HoverCard openDelay={0} closeDelay={0} placement="right-start" gutter={8} trigger={trigger}>
|
||||
<div class="-m-3 flex flex-col w-72">
|
||||
<div class="px-3 py-2 text-12-medium text-text-weak">Recent sessions</div>
|
||||
<div class="px-2 pb-2 flex flex-col gap-2">
|
||||
<Show
|
||||
when={workspaceEnabled()}
|
||||
fallback={
|
||||
<For each={projectSessions()}>
|
||||
{(session) => (
|
||||
<SessionItem
|
||||
session={session}
|
||||
slug={base64Encode(props.project.worktree)}
|
||||
dense
|
||||
mobile={props.mobile}
|
||||
/>
|
||||
)}
|
||||
</For>
|
||||
}
|
||||
>
|
||||
<For each={workspaces()}>
|
||||
{(directory) => (
|
||||
<div class="flex flex-col gap-1">
|
||||
<div class="px-2 py-0.5 flex items-center gap-1 min-w-0">
|
||||
<div class="shrink-0 size-6 flex items-center justify-center">
|
||||
<Icon name="branch" size="small" class="text-icon-base" />
|
||||
</div>
|
||||
<span class="truncate text-14-medium text-text-base">{label(directory)}</span>
|
||||
</div>
|
||||
<For each={sessions(directory)}>
|
||||
{(session) => (
|
||||
<SessionItem session={session} slug={base64Encode(directory)} dense mobile={props.mobile} />
|
||||
)}
|
||||
</For>
|
||||
</div>
|
||||
)}
|
||||
</For>
|
||||
</Show>
|
||||
</div>
|
||||
<Show when={!selected()}>
|
||||
<div class="px-2 py-2 border-t border-border-weak-base">
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="flex w-full text-left justify-start text-text-base px-2 hover:bg-transparent active:bg-transparent"
|
||||
onClick={() => {
|
||||
layout.sidebar.open()
|
||||
navigateToProject(props.project.worktree)
|
||||
}}
|
||||
>
|
||||
View all sessions
|
||||
</Button>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
</HoverCard>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const ProjectDragOverlay = (): JSX.Element => {
|
||||
const project = createMemo(() => layout.projects.list().find((p) => p.worktree === store.activeProject))
|
||||
return (
|
||||
@@ -1102,7 +1183,7 @@ export default function Layout(props: ParentProps) {
|
||||
|
||||
const [workspaceStore] = globalSync.child(directory)
|
||||
const kind = directory === project.worktree ? "local" : "sandbox"
|
||||
const name = workspaceStore.vcs?.branch ?? getFilename(directory)
|
||||
const name = workspaceLabel(directory, workspaceStore.vcs?.branch)
|
||||
return `${kind} : ${name}`
|
||||
})
|
||||
|
||||
@@ -1122,14 +1203,13 @@ export default function Layout(props: ParentProps) {
|
||||
const sessions = createMemo(() =>
|
||||
workspaceStore.session
|
||||
.filter((session) => session.directory === workspaceStore.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions),
|
||||
)
|
||||
const local = createMemo(() => props.directory === props.project.worktree)
|
||||
const title = createMemo(() => {
|
||||
const kind = local() ? "local" : "sandbox"
|
||||
const workspaceValue = createMemo(() => {
|
||||
const name = workspaceStore.vcs?.branch ?? getFilename(props.directory)
|
||||
return `${kind} : ${name}`
|
||||
return workspaceName(props.directory) ?? name
|
||||
})
|
||||
const open = createMemo(() => store.workspaceExpanded[props.directory] ?? true)
|
||||
const loading = createMemo(() => open() && workspaceStore.status !== "complete" && sessions().length === 0)
|
||||
@@ -1140,23 +1220,50 @@ export default function Layout(props: ParentProps) {
|
||||
await globalSync.project.loadSessions(props.directory)
|
||||
}
|
||||
|
||||
const workspaceEditActive = createMemo(() => editorOpen(`workspace:${props.directory}`))
|
||||
|
||||
const openWrapper = (value: boolean) => {
|
||||
setStore("workspaceExpanded", props.directory, value)
|
||||
if (value) return
|
||||
if (editorOpen(`workspace:${props.directory}`)) closeEditor()
|
||||
}
|
||||
|
||||
return (
|
||||
// @ts-ignore
|
||||
<div use:sortable classList={{ "opacity-30": sortable.isActiveDraggable }}>
|
||||
<Collapsible
|
||||
variant="ghost"
|
||||
open={open()}
|
||||
class="shrink-0"
|
||||
onOpenChange={(value) => setStore("workspaceExpanded", props.directory, value)}
|
||||
>
|
||||
<Collapsible variant="ghost" open={open()} class="shrink-0" onOpenChange={openWrapper}>
|
||||
<div class="px-2 py-1">
|
||||
<div class="group/trigger relative">
|
||||
<Collapsible.Trigger class="flex items-center justify-between w-full pl-2 pr-2 py-1.5 rounded-md hover:bg-surface-raised-base-hover transition-all group-hover/trigger:pr-16 group-focus-within/trigger:pr-16">
|
||||
<div class="flex items-center gap-1 min-w-0">
|
||||
<Collapsible.Trigger class="flex items-center justify-between w-full pl-2 pr-16 py-1.5 rounded-md hover:bg-surface-raised-base-hover">
|
||||
<div class="flex items-center gap-1 min-w-0 flex-1">
|
||||
<div class="flex items-center justify-center shrink-0 size-6">
|
||||
<Icon name="branch" size="small" />
|
||||
</div>
|
||||
<span class="truncate text-14-medium text-text-base">{title()}</span>
|
||||
<span class="text-14-medium text-text-base shrink-0">{local() ? "local" : "sandbox"} :</span>
|
||||
<Show
|
||||
when={!local()}
|
||||
fallback={
|
||||
<span class="text-14-medium text-text-base min-w-0 truncate">
|
||||
{workspaceStore.vcs?.branch ?? getFilename(props.directory)}
|
||||
</span>
|
||||
}
|
||||
>
|
||||
<InlineEditor
|
||||
id={`workspace:${props.directory}`}
|
||||
value={workspaceValue}
|
||||
onSave={(next) => {
|
||||
const trimmed = next.trim()
|
||||
if (!trimmed) return
|
||||
renameWorkspace(props.directory, trimmed)
|
||||
setEditor("value", workspaceValue())
|
||||
}}
|
||||
class="text-14-medium text-text-base min-w-0 truncate"
|
||||
displayClass="text-14-medium text-text-base min-w-0 truncate"
|
||||
editing={workspaceEditActive()}
|
||||
stopPropagation={false}
|
||||
openOnDblClick={false}
|
||||
/>
|
||||
</Show>
|
||||
<Icon
|
||||
name={open() ? "chevron-down" : "chevron-right"}
|
||||
size="small"
|
||||
@@ -1222,13 +1329,123 @@ export default function Layout(props: ParentProps) {
|
||||
)
|
||||
}
|
||||
|
||||
const SortableProject = (props: { project: LocalProject; mobile?: boolean }): JSX.Element => {
|
||||
const sortable = createSortable(props.project.worktree)
|
||||
const selected = createMemo(() => {
|
||||
const current = params.dir ? base64Decode(params.dir) : ""
|
||||
return props.project.worktree === current || props.project.sandboxes?.includes(current)
|
||||
})
|
||||
|
||||
const workspaces = createMemo(() => workspaceIds(props.project).slice(0, 2))
|
||||
const workspaceEnabled = createMemo(() => layout.sidebar.workspaces(props.project.worktree)())
|
||||
const label = (directory: string) => {
|
||||
const [data] = globalSync.child(directory)
|
||||
const kind = directory === props.project.worktree ? "local" : "sandbox"
|
||||
const name = workspaceLabel(directory, data.vcs?.branch)
|
||||
return `${kind} : ${name}`
|
||||
}
|
||||
|
||||
const sessions = (directory: string) => {
|
||||
const [data] = globalSync.child(directory)
|
||||
return data.session
|
||||
.filter((session) => session.directory === data.path.directory)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions)
|
||||
.slice(0, 2)
|
||||
}
|
||||
|
||||
const projectSessions = () => {
|
||||
const [data] = globalSync.child(props.project.worktree)
|
||||
return data.session
|
||||
.filter((session) => session.directory === data.path.directory)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions)
|
||||
.slice(0, 2)
|
||||
}
|
||||
|
||||
const trigger = (
|
||||
<button
|
||||
type="button"
|
||||
classList={{
|
||||
"flex items-center justify-center size-10 p-1 rounded-lg overflow-hidden transition-colors cursor-default": true,
|
||||
"bg-transparent border-2 border-icon-strong-base hover:bg-surface-base-hover": selected(),
|
||||
"bg-transparent border border-transparent hover:bg-surface-base-hover hover:border-border-weak-base":
|
||||
!selected(),
|
||||
}}
|
||||
onClick={() => navigateToProject(props.project.worktree)}
|
||||
>
|
||||
<ProjectIcon project={props.project} notify />
|
||||
</button>
|
||||
)
|
||||
|
||||
return (
|
||||
// @ts-ignore
|
||||
<div use:sortable classList={{ "opacity-30": sortable.isActiveDraggable }}>
|
||||
<HoverCard openDelay={0} closeDelay={0} placement="right-start" gutter={6} trigger={trigger}>
|
||||
<div class="-m-3 flex flex-col w-72">
|
||||
<div class="px-3 py-2 text-12-medium text-text-weak">Recent sessions</div>
|
||||
<div class="px-2 pb-2 flex flex-col gap-2">
|
||||
<Show
|
||||
when={workspaceEnabled()}
|
||||
fallback={
|
||||
<For each={projectSessions()}>
|
||||
{(session) => (
|
||||
<SessionItem
|
||||
session={session}
|
||||
slug={base64Encode(props.project.worktree)}
|
||||
dense
|
||||
mobile={props.mobile}
|
||||
/>
|
||||
)}
|
||||
</For>
|
||||
}
|
||||
>
|
||||
<For each={workspaces()}>
|
||||
{(directory) => (
|
||||
<div class="flex flex-col gap-1">
|
||||
<div class="px-2 py-0.5 flex items-center gap-1 min-w-0">
|
||||
<div class="shrink-0 size-6 flex items-center justify-center">
|
||||
<Icon name="branch" size="small" class="text-icon-base" />
|
||||
</div>
|
||||
<span class="truncate text-14-medium text-text-base">{label(directory)}</span>
|
||||
</div>
|
||||
<For each={sessions(directory)}>
|
||||
{(session) => (
|
||||
<SessionItem session={session} slug={base64Encode(directory)} dense mobile={props.mobile} />
|
||||
)}
|
||||
</For>
|
||||
</div>
|
||||
)}
|
||||
</For>
|
||||
</Show>
|
||||
</div>
|
||||
<Show when={!selected()}>
|
||||
<div class="px-2 py-2 border-t border-border-weak-base">
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="flex w-full text-left justify-start text-text-base px-2 hover:bg-transparent active:bg-transparent"
|
||||
onClick={() => {
|
||||
layout.sidebar.open()
|
||||
navigateToProject(props.project.worktree)
|
||||
}}
|
||||
>
|
||||
View all sessions
|
||||
</Button>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
</HoverCard>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const LocalWorkspace = (props: { project: LocalProject; mobile?: boolean }): JSX.Element => {
|
||||
const [workspaceStore, setWorkspaceStore] = globalSync.child(props.project.worktree)
|
||||
const slug = createMemo(() => base64Encode(props.project.worktree))
|
||||
const sessions = createMemo(() =>
|
||||
workspaceStore.session
|
||||
.filter((session) => session.directory === workspaceStore.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions),
|
||||
)
|
||||
const loading = createMemo(() => workspaceStore.status !== "complete" && sessions().length === 0)
|
||||
@@ -1244,6 +1461,7 @@ export default function Layout(props: ParentProps) {
|
||||
if (!props.mobile) scrollContainerRef = el
|
||||
}}
|
||||
class="size-full flex flex-col py-2 overflow-y-auto no-scrollbar"
|
||||
style={{ "overflow-anchor": "none" }}
|
||||
>
|
||||
<nav class="flex flex-col gap-1 px-2">
|
||||
<Show when={loading()}>
|
||||
@@ -1282,6 +1500,7 @@ export default function Layout(props: ParentProps) {
|
||||
if (!current) return ""
|
||||
return current.name || getFilename(current.worktree)
|
||||
})
|
||||
const projectId = createMemo(() => project()?.id ?? "")
|
||||
const workspaces = createMemo(() => workspaceIds(project()))
|
||||
|
||||
const errorMessage = (err: unknown) => {
|
||||
@@ -1382,13 +1601,22 @@ export default function Layout(props: ParentProps) {
|
||||
<div class="shrink-0 px-2 py-1">
|
||||
<div class="group/project flex items-start justify-between gap-2 p-2 pr-1">
|
||||
<div class="flex flex-col min-w-0">
|
||||
<span class="text-16-medium text-text-strong truncate">{projectName()}</span>
|
||||
<InlineEditor
|
||||
id={`project:${projectId()}`}
|
||||
value={projectName}
|
||||
onSave={(next) => project() && renameProject(project()!, next)}
|
||||
class="text-16-medium text-text-strong truncate"
|
||||
displayClass="text-16-medium text-text-strong truncate"
|
||||
stopPropagation
|
||||
/>
|
||||
|
||||
<Tooltip placement="right" value={project()?.worktree} class="shrink-0">
|
||||
<span class="text-12-regular text-text-base truncate">
|
||||
{project()?.worktree.replace(homedir(), "~")}
|
||||
</span>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<DropdownMenu>
|
||||
<DropdownMenu.Trigger
|
||||
as={IconButton}
|
||||
@@ -1445,7 +1673,7 @@ export default function Layout(props: ParentProps) {
|
||||
New workspace
|
||||
</Button>
|
||||
</div>
|
||||
<div class="flex-1 min-h-0">
|
||||
<div class="relative flex-1 min-h-0">
|
||||
<DragDropProvider
|
||||
onDragStart={handleWorkspaceDragStart}
|
||||
onDragEnd={handleWorkspaceDragEnd}
|
||||
@@ -1459,6 +1687,7 @@ export default function Layout(props: ParentProps) {
|
||||
if (!sidebarProps.mobile) scrollContainerRef = el
|
||||
}}
|
||||
class="size-full flex flex-col py-2 gap-4 overflow-y-auto no-scrollbar"
|
||||
style={{ "overflow-anchor": "none" }}
|
||||
>
|
||||
<SortableProvider ids={workspaces()}>
|
||||
<For each={workspaces()}>
|
||||
|
||||
@@ -26,7 +26,6 @@ import { useSync } from "@/context/sync"
|
||||
import { useTerminal, type LocalPTY } from "@/context/terminal"
|
||||
import { useLayout } from "@/context/layout"
|
||||
import { Terminal } from "@/components/terminal"
|
||||
import { TerminalSplit } from "@/components/terminal-split"
|
||||
import { checksum, base64Encode, base64Decode } from "@opencode-ai/util/encode"
|
||||
import { useDialog } from "@opencode-ai/ui/context/dialog"
|
||||
import { DialogSelectFile } from "@/components/dialog-select-file"
|
||||
@@ -171,7 +170,6 @@ export default function Page() {
|
||||
const sessionKey = createMemo(() => `${params.dir}${params.id ? "/" + params.id : ""}`)
|
||||
const tabs = createMemo(() => layout.tabs(sessionKey()))
|
||||
const view = createMemo(() => layout.view(sessionKey()))
|
||||
const activeTerminal = createMemo(() => terminal.active())
|
||||
|
||||
if (import.meta.env.DEV) {
|
||||
createEffect(
|
||||
@@ -382,7 +380,7 @@ export default function Page() {
|
||||
createEffect(() => {
|
||||
if (!view().terminal.opened()) return
|
||||
if (!terminal.ready()) return
|
||||
if (terminal.tabs().length !== 0) return
|
||||
if (terminal.all().length !== 0) return
|
||||
terminal.new()
|
||||
})
|
||||
|
||||
@@ -421,7 +419,6 @@ export default function Page() {
|
||||
{
|
||||
id: "session.new",
|
||||
title: "New session",
|
||||
description: "Create a new session",
|
||||
category: "Session",
|
||||
keybind: "mod+shift+s",
|
||||
slash: "new",
|
||||
@@ -439,7 +436,7 @@ export default function Page() {
|
||||
{
|
||||
id: "terminal.toggle",
|
||||
title: "Toggle terminal",
|
||||
description: "Show or hide the terminal",
|
||||
description: "",
|
||||
category: "View",
|
||||
keybind: "ctrl+`",
|
||||
slash: "terminal",
|
||||
@@ -448,7 +445,7 @@ export default function Page() {
|
||||
{
|
||||
id: "review.toggle",
|
||||
title: "Toggle review",
|
||||
description: "Show or hide the review panel",
|
||||
description: "",
|
||||
category: "View",
|
||||
keybind: "mod+shift+r",
|
||||
onSelect: () => view().reviewPanel.toggle(),
|
||||
@@ -461,30 +458,6 @@ export default function Page() {
|
||||
keybind: "ctrl+shift+`",
|
||||
onSelect: () => terminal.new(),
|
||||
},
|
||||
{
|
||||
id: "terminal.split.vertical",
|
||||
title: "Split terminal right",
|
||||
description: "Split the current terminal vertically",
|
||||
category: "Terminal",
|
||||
keybind: "mod+d",
|
||||
disabled: !terminal.active(),
|
||||
onSelect: () => {
|
||||
const active = terminal.active()
|
||||
if (active) terminal.split(active, "vertical")
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "terminal.split.horizontal",
|
||||
title: "Split terminal down",
|
||||
description: "Split the current terminal horizontally",
|
||||
category: "Terminal",
|
||||
keybind: "mod+shift+d",
|
||||
disabled: !terminal.active(),
|
||||
onSelect: () => {
|
||||
const active = terminal.active()
|
||||
if (active) terminal.split(active, "horizontal")
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "steps.toggle",
|
||||
title: "Toggle steps",
|
||||
@@ -733,7 +706,7 @@ export default function Page() {
|
||||
const handleTerminalDragOver = (event: DragEvent) => {
|
||||
const { draggable, droppable } = event
|
||||
if (draggable && droppable) {
|
||||
const terminals = terminal.tabs()
|
||||
const terminals = terminal.all()
|
||||
const fromIndex = terminals.findIndex((t: LocalPTY) => t.id === draggable.id.toString())
|
||||
const toIndex = terminals.findIndex((t: LocalPTY) => t.id === droppable.id.toString())
|
||||
if (fromIndex !== -1 && toIndex !== -1 && fromIndex !== toIndex) {
|
||||
@@ -1035,7 +1008,7 @@ export default function Page() {
|
||||
|
||||
createEffect(() => {
|
||||
if (!terminal.ready()) return
|
||||
handoff.terminals = terminal.tabs().map((t) => t.title)
|
||||
handoff.terminals = terminal.all().map((t) => t.title)
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
@@ -1692,10 +1665,10 @@ export default function Page() {
|
||||
>
|
||||
<DragDropSensors />
|
||||
<ConstrainDragYAxis />
|
||||
<Tabs variant="alt" value={activeTerminal()} onChange={terminal.open}>
|
||||
<Tabs variant="alt" value={terminal.active()} onChange={terminal.open}>
|
||||
<Tabs.List class="h-10">
|
||||
<SortableProvider ids={terminal.tabs().map((t: LocalPTY) => t.id)}>
|
||||
<For each={terminal.tabs()}>{(pty) => <SortableTerminalTab terminal={pty} />}</For>
|
||||
<SortableProvider ids={terminal.all().map((t: LocalPTY) => t.id)}>
|
||||
<For each={terminal.all()}>{(pty) => <SortableTerminalTab terminal={pty} />}</For>
|
||||
</SortableProvider>
|
||||
<div class="h-full flex items-center justify-center">
|
||||
<TooltipKeybind
|
||||
@@ -1707,10 +1680,10 @@ export default function Page() {
|
||||
</TooltipKeybind>
|
||||
</div>
|
||||
</Tabs.List>
|
||||
<For each={terminal.tabs()}>
|
||||
<For each={terminal.all()}>
|
||||
{(pty) => (
|
||||
<Tabs.Content value={pty.id} class="h-[calc(100%-2.5rem)]">
|
||||
<TerminalSplit tabId={pty.id} />
|
||||
<Tabs.Content value={pty.id}>
|
||||
<Terminal pty={pty} onCleanup={terminal.update} onConnectError={() => terminal.clone(pty.id)} />
|
||||
</Tabs.Content>
|
||||
)}
|
||||
</For>
|
||||
@@ -1718,7 +1691,7 @@ export default function Page() {
|
||||
<DragOverlay>
|
||||
<Show when={store.activeTerminalDraggable}>
|
||||
{(draggedId) => {
|
||||
const pty = createMemo(() => terminal.tabs().find((t: LocalPTY) => t.id === draggedId()))
|
||||
const pty = createMemo(() => terminal.all().find((t: LocalPTY) => t.id === draggedId()))
|
||||
return (
|
||||
<Show when={pty()}>
|
||||
{(t) => (
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -183,7 +183,12 @@ export async function POST(input: APIEvent) {
|
||||
.set({
|
||||
customerID,
|
||||
subscriptionID,
|
||||
subscriptionCouponID: couponID,
|
||||
subscription: {
|
||||
status: "subscribed",
|
||||
coupon: couponID,
|
||||
seats: 1,
|
||||
plan: "200",
|
||||
},
|
||||
paymentMethodID: paymentMethod.id,
|
||||
paymentMethodLast4: paymentMethod.card?.last4 ?? null,
|
||||
paymentMethodType: paymentMethod.type,
|
||||
@@ -408,7 +413,7 @@ export async function POST(input: APIEvent) {
|
||||
await Database.transaction(async (tx) => {
|
||||
await tx
|
||||
.update(BillingTable)
|
||||
.set({ subscriptionID: null, subscriptionCouponID: null })
|
||||
.set({ subscriptionID: null, subscription: null })
|
||||
.where(eq(BillingTable.workspaceID, workspaceID))
|
||||
|
||||
await tx.delete(SubscriptionTable).where(eq(SubscriptionTable.workspaceID, workspaceID))
|
||||
|
||||
@@ -65,7 +65,6 @@ export const anthropicHelper: ProviderHelper = ({ reqModel, providerModel }) =>
|
||||
buffer = newBuffer
|
||||
|
||||
const messages = []
|
||||
|
||||
while (buffer.length >= 4) {
|
||||
// first 4 bytes are the total length (big-endian)
|
||||
const totalLength = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength).getUint32(0, false)
|
||||
@@ -121,7 +120,9 @@ export const anthropicHelper: ProviderHelper = ({ reqModel, providerModel }) =>
|
||||
|
||||
const parsedDataResult = JSON.parse(data)
|
||||
delete parsedDataResult.p
|
||||
const bytes = atob(parsedDataResult.bytes)
|
||||
const binary = atob(parsedDataResult.bytes)
|
||||
const uint8 = Uint8Array.from(binary, (c) => c.charCodeAt(0))
|
||||
const bytes = decoder.decode(uint8)
|
||||
const eventName = JSON.parse(bytes).type
|
||||
messages.push([`event: ${eventName}`, "\n", `data: ${bytes}`, "\n\n"].join(""))
|
||||
}
|
||||
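The change above fixes how the base64 `bytes` field of each length-prefixed event-stream frame is decoded: `atob` yields a binary string, so the payload has to pass through a `Uint8Array` and a `TextDecoder` before it can be parsed as UTF-8 JSON; feeding the raw `atob` output straight to `JSON.parse` corrupts any multi-byte character. A standalone sketch of the corrected decode, with `decoder` playing the role of the `TextDecoder` used in the surrounding code:

```ts
// Decode a base64-encoded UTF-8 JSON payload (e.g. the "bytes" field of an
// event-stream frame) without corrupting multi-byte characters.
const decoder = new TextDecoder()

function decodeEventPayload(base64: string): { type: string; text: string } {
  const binary = atob(base64) // binary string: one char per byte
  const uint8 = Uint8Array.from(binary, (c) => c.charCodeAt(0))
  const text = decoder.decode(uint8) // proper UTF-8 decode
  return { type: JSON.parse(text).type, text }
}
```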
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE `billing` ADD `subscription` json;
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE `billing` DROP COLUMN `subscription_coupon_id`;
|
||||
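These two migrations back the billing change earlier in the diff: the separate `subscription_coupon_id` column goes away and a single `subscription` JSON column takes over. From the values written by the webhook handler and the scripts below, the stored shape appears to be roughly the following; this is inferred from the diff, not taken from a schema file:

```ts
// Inferred shape of the `billing.subscription` JSON column (null when unsubscribed).
type BillingSubscription = {
  status: "subscribed"
  coupon: string | null
  seats: number
  plan: string // e.g. "200"
} | null
```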
1242  packages/console/core/migrations/meta/0053_snapshot.json  (new file; diff suppressed because it is too large)
1235  packages/console/core/migrations/meta/0054_snapshot.json  (new file; diff suppressed because it is too large)
@@ -372,6 +372,20 @@
|
||||
"when": 1768343920467,
|
||||
"tag": "0052_aromatic_agent_zero",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 53,
|
||||
"version": "5",
|
||||
"when": 1768599366758,
|
||||
"tag": "0053_gigantic_hardball",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 54,
|
||||
"version": "5",
|
||||
"when": 1768603665356,
|
||||
"tag": "0054_numerous_annihilus",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
112  packages/console/core/script/black-gift.ts  (new file)
@@ -0,0 +1,112 @@
|
||||
import { Billing } from "../src/billing.js"
|
||||
import { and, Database, eq, isNull, sql } from "../src/drizzle/index.js"
import { UserTable } from "../src/schema/user.sql.js"
import { BillingTable, PaymentTable, SubscriptionTable } from "../src/schema/billing.sql.js"
import { Identifier } from "../src/identifier.js"
import { centsToMicroCents } from "../src/util/price.js"
import { AuthTable } from "../src/schema/auth.sql.js"

const plan = "200"
const workspaceID = process.argv[2]
const seats = parseInt(process.argv[3])

console.log(`Gifting ${seats} seats of Black to workspace ${workspaceID}`)

if (!workspaceID || !seats) throw new Error("Usage: bun foo.ts <workspaceID> <seats>")

// Get workspace user
const users = await Database.use((tx) =>
  tx
    .select({
      id: UserTable.id,
      role: UserTable.role,
      email: AuthTable.subject,
    })
    .from(UserTable)
    .leftJoin(AuthTable, and(eq(AuthTable.accountID, UserTable.accountID), eq(AuthTable.provider, "email")))
    .where(and(eq(UserTable.workspaceID, workspaceID), isNull(UserTable.timeDeleted))),
)
if (users.length === 0) throw new Error(`Error: No users found in workspace ${workspaceID}`)
if (users.length !== seats)
  throw new Error(`Error: Workspace ${workspaceID} has ${users.length} users, expected ${seats}`)
const adminUser = users.find((user) => user.role === "admin")
if (!adminUser) throw new Error(`Error: No admin user found in workspace ${workspaceID}`)
if (!adminUser.email) throw new Error(`Error: Admin user ${adminUser.id} has no email`)

// Get Billing
const billing = await Database.use((tx) =>
  tx
    .select({
      customerID: BillingTable.customerID,
      subscriptionID: BillingTable.subscriptionID,
    })
    .from(BillingTable)
    .where(eq(BillingTable.workspaceID, workspaceID))
    .then((rows) => rows[0]),
)
if (!billing) throw new Error(`Error: Workspace ${workspaceID} has no billing record`)
if (billing.subscriptionID) throw new Error(`Error: Workspace ${workspaceID} already has a subscription`)

// Look up the Stripe customer by email
const customerID =
  billing.customerID ??
  (await (() =>
    Billing.stripe()
      .customers.create({
        email: adminUser.email,
        metadata: {
          workspaceID,
        },
      })
      .then((customer) => customer.id))())
console.log(`Customer ID: ${customerID}`)

const couponID = "JAIr0Pe1"
const subscription = await Billing.stripe().subscriptions.create({
  customer: customerID!,
  items: [
    {
      price: `price_1SmfyI2StuRr0lbXovxJNeZn`,
      discounts: [{ coupon: couponID }],
      quantity: 2,
    },
  ],
})
console.log(`Subscription ID: ${subscription.id}`)

await Database.transaction(async (tx) => {
  // Set customer id, subscription id, and payment method on workspace billing
  await tx
    .update(BillingTable)
    .set({
      customerID,
      subscriptionID: subscription.id,
      subscription: { status: "subscribed", coupon: couponID, seats, plan },
    })
    .where(eq(BillingTable.workspaceID, workspaceID))

  // Create a row in subscription table
  for (const user of users) {
    await tx.insert(SubscriptionTable).values({
      workspaceID,
      id: Identifier.create("subscription"),
      userID: user.id,
    })
  }
  //
  // // Create a row in payments table
  // await tx.insert(PaymentTable).values({
  //   workspaceID,
  //   id: Identifier.create("payment"),
  //   amount: centsToMicroCents(amountInCents),
  //   customerID,
  //   invoiceID,
  //   paymentID,
  //   enrichment: {
  //     type: "subscription",
  //     couponID,
  //   },
  // })
})

console.log(`done`)
@@ -12,7 +12,7 @@ const email = process.argv[3]
|
||||
console.log(`Onboarding workspace ${workspaceID} for email ${email}`)
|
||||
|
||||
if (!workspaceID || !email) {
|
||||
console.error("Usage: bun onboard-zen-black.ts <workspaceID> <email>")
|
||||
console.error("Usage: bun foo.ts <workspaceID> <email>")
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
@@ -50,7 +50,7 @@ const existingSubscription = await Database.use((tx) =>
|
||||
tx
|
||||
.select({ workspaceID: BillingTable.workspaceID })
|
||||
.from(BillingTable)
|
||||
.where(eq(BillingTable.subscriptionID, subscriptionID))
|
||||
.where(sql`JSON_EXTRACT(${BillingTable.subscription}, '$.id') = ${subscriptionID}`)
|
||||
.then((rows) => rows[0]),
|
||||
)
|
||||
if (existingSubscription) {
|
||||
@@ -128,10 +128,15 @@ await Database.transaction(async (tx) => {
|
||||
.set({
|
||||
customerID,
|
||||
subscriptionID,
|
||||
subscriptionCouponID: couponID,
|
||||
paymentMethodID,
|
||||
paymentMethodLast4,
|
||||
paymentMethodType,
|
||||
subscription: {
|
||||
status: "subscribed",
|
||||
coupon: couponID,
|
||||
seats: 1,
|
||||
plan: "200",
|
||||
},
|
||||
})
|
||||
.where(eq(BillingTable.workspaceID, workspaceID))
|
||||
|
||||
@@ -18,7 +18,7 @@ const fromBilling = await Database.use((tx) =>
|
||||
.select({
|
||||
customerID: BillingTable.customerID,
|
||||
subscriptionID: BillingTable.subscriptionID,
|
||||
subscriptionCouponID: BillingTable.subscriptionCouponID,
|
||||
subscription: BillingTable.subscription,
|
||||
paymentMethodID: BillingTable.paymentMethodID,
|
||||
paymentMethodType: BillingTable.paymentMethodType,
|
||||
paymentMethodLast4: BillingTable.paymentMethodLast4,
|
||||
@@ -119,7 +119,7 @@ await Database.transaction(async (tx) => {
|
||||
.set({
|
||||
customerID: fromPrevPayment.customerID,
|
||||
subscriptionID: null,
|
||||
subscriptionCouponID: null,
|
||||
subscription: null,
|
||||
paymentMethodID: fromPrevPaymentMethods.data[0].id,
|
||||
paymentMethodLast4: fromPrevPaymentMethods.data[0].card?.last4 ?? null,
|
||||
paymentMethodType: fromPrevPaymentMethods.data[0].type,
|
||||
@@ -131,7 +131,7 @@ await Database.transaction(async (tx) => {
|
||||
.set({
|
||||
customerID: fromBilling.customerID,
|
||||
subscriptionID: fromBilling.subscriptionID,
|
||||
subscriptionCouponID: fromBilling.subscriptionCouponID,
|
||||
subscription: fromBilling.subscription,
|
||||
paymentMethodID: fromBilling.paymentMethodID,
|
||||
paymentMethodLast4: fromBilling.paymentMethodLast4,
|
||||
paymentMethodType: fromBilling.paymentMethodType,
|
||||
|
||||
@@ -55,8 +55,9 @@ if (identifier.startsWith("wrk_")) {
|
||||
),
|
||||
)
|
||||
|
||||
// Get all payments for these workspaces
|
||||
await Promise.all(users.map((u: { workspaceID: string }) => printWorkspace(u.workspaceID)))
|
||||
for (const user of users) {
|
||||
await printWorkspace(user.workspaceID)
|
||||
}
|
||||
}
|
||||
|
||||
async function printWorkspace(workspaceID: string) {
|
||||
@@ -114,11 +115,11 @@ async function printWorkspace(workspaceID: string) {
|
||||
balance: BillingTable.balance,
|
||||
customerID: BillingTable.customerID,
|
||||
reload: BillingTable.reload,
|
||||
subscriptionID: BillingTable.subscriptionID,
|
||||
subscription: {
|
||||
id: BillingTable.subscriptionID,
|
||||
couponID: BillingTable.subscriptionCouponID,
|
||||
plan: BillingTable.subscriptionPlan,
|
||||
booked: BillingTable.timeSubscriptionBooked,
|
||||
enrichment: BillingTable.subscription,
|
||||
},
|
||||
})
|
||||
.from(BillingTable)
|
||||
@@ -128,8 +129,13 @@ async function printWorkspace(workspaceID: string) {
|
||||
rows.map((row) => ({
|
||||
...row,
|
||||
balance: `$${(row.balance / 100000000).toFixed(2)}`,
|
||||
subscription: row.subscription.id
|
||||
? `Subscribed ${row.subscription.couponID ? `(coupon: ${row.subscription.couponID}) ` : ""}`
|
||||
subscription: row.subscriptionID
|
||||
? [
|
||||
`Black ${row.subscription.enrichment!.plan}`,
|
||||
row.subscription.enrichment!.seats > 1 ? `X ${row.subscription.enrichment!.seats} seats` : "",
|
||||
row.subscription.enrichment!.coupon ? `(coupon: ${row.subscription.enrichment!.coupon})` : "",
|
||||
`(ref: ${row.subscriptionID})`,
|
||||
].join(" ")
|
||||
: row.subscription.booked
|
||||
? `Waitlist ${row.subscription.plan} plan`
|
||||
: undefined,
|
||||
|
||||
@@ -21,8 +21,13 @@ export const BillingTable = mysqlTable(
    reloadError: varchar("reload_error", { length: 255 }),
    timeReloadError: utc("time_reload_error"),
    timeReloadLockedTill: utc("time_reload_locked_till"),
    subscription: json("subscription").$type<{
      status: "subscribed"
      coupon?: string
      seats: number
      plan: "20" | "100" | "200"
    }>(),
    subscriptionID: varchar("subscription_id", { length: 28 }),
    subscriptionCouponID: varchar("subscription_coupon_id", { length: 28 }),
    subscriptionPlan: mysqlEnum("subscription_plan", ["20", "100", "200"] as const),
    timeSubscriptionBooked: utc("time_subscription_booked"),
  },
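The new subscription column is a typed json() field, so writes and reads share the shape declared in .$type<…>(). A minimal sketch, reusing the helpers imported by the gifting script at the top of this diff (the workspace id and values below are made up for illustration):

import { Database, eq } from "../src/drizzle/index.js"
import { BillingTable } from "../src/schema/billing.sql.js"

const workspaceID = "wrk_example" // illustrative

// Write: the object must match the $type<> declared on the json() column.
await Database.transaction(async (tx) => {
  await tx
    .update(BillingTable)
    .set({ subscription: { status: "subscribed", coupon: "JAIr0Pe1", seats: 3, plan: "200" } })
    .where(eq(BillingTable.workspaceID, workspaceID))
})

// Read: the value comes back in that same typed shape (or null).
const row = await Database.use((tx) =>
  tx
    .select({ subscription: BillingTable.subscription })
    .from(BillingTable)
    .where(eq(BillingTable.workspaceID, workspaceID))
    .then((rows) => rows[0]),
)
console.log(row?.subscription?.plan) // "200"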
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@opencode-ai/desktop",
|
||||
"private": true,
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -26,6 +26,18 @@ if (import.meta.env.DEV && !(root instanceof HTMLElement)) {
  )
}

const isWindows = ostype() === "windows"
if (isWindows) {
  const originalGetComputedStyle = window.getComputedStyle
  window.getComputedStyle = ((elt: Element, pseudoElt?: string | null) => {
    if (!(elt instanceof Element)) {
      // WebView2 can call into Floating UI with non-elements; fall back to a safe element.
      return originalGetComputedStyle(document.documentElement, pseudoElt ?? undefined)
    }
    return originalGetComputedStyle(elt, pseudoElt ?? undefined)
  }) as typeof window.getComputedStyle
}

let update: Update | null = null

const createPlatform = (password: Accessor<string | null>): Platform => ({
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
id = "opencode"
|
||||
name = "OpenCode"
|
||||
description = "The open source coding agent."
|
||||
version = "1.1.23"
|
||||
version = "1.1.25"
|
||||
schema_version = 1
|
||||
authors = ["Anomaly"]
|
||||
repository = "https://github.com/anomalyco/opencode"
|
||||
@@ -11,26 +11,26 @@ name = "OpenCode"
|
||||
icon = "./icons/opencode.svg"
|
||||
|
||||
[agent_servers.opencode.targets.darwin-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-darwin-arm64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-darwin-arm64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.darwin-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-darwin-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-darwin-x64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-linux-arm64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-linux-arm64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-linux-x64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-linux-x64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.windows-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-windows-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-windows-x64.zip"
|
||||
cmd = "./opencode.exe"
|
||||
args = ["acp"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"name": "opencode",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
@@ -82,8 +82,8 @@
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@opencode-ai/util": "workspace:*",
|
||||
"@openrouter/ai-sdk-provider": "1.5.2",
|
||||
"@opentui/core": "0.1.73",
|
||||
"@opentui/solid": "0.1.73",
|
||||
"@opentui/core": "0.1.74",
|
||||
"@opentui/solid": "0.1.74",
|
||||
"@parcel/watcher": "2.5.1",
|
||||
"@pierre/diffs": "catalog:",
|
||||
"@solid-primitives/event-bus": "1.1.2",
|
||||
|
||||
@@ -2,6 +2,7 @@ import z from "zod"
import { Global } from "../global"
import { Log } from "../util/log"
import path from "path"
import { Filesystem } from "../util/filesystem"
import { NamedError } from "@opencode-ai/util/error"
import { readableStreamToText } from "bun"
import { createRequire } from "module"
@@ -71,7 +72,10 @@ export namespace BunProc {
      await Bun.write(pkgjson.name!, JSON.stringify(result, null, 2))
      return result
    })
    if (parsed.dependencies[pkg] === version) return mod
    const dependencies = parsed.dependencies ?? {}
    if (!parsed.dependencies) parsed.dependencies = dependencies
    const modExists = await Filesystem.exists(mod)
    if (dependencies[pkg] === version && modExists) return mod

    const proxied = !!(
      process.env.HTTP_PROXY ||
|
||||
@@ -70,8 +70,8 @@ export const AgentCommand = cmd({
|
||||
})
|
||||
|
||||
async function getAvailableTools(agent: Agent.Info) {
|
||||
const providerID = agent.model?.providerID ?? (await Provider.defaultModel()).providerID
|
||||
return ToolRegistry.tools(providerID, agent)
|
||||
const model = agent.model ?? (await Provider.defaultModel())
|
||||
return ToolRegistry.tools(model, agent)
|
||||
}
|
||||
|
||||
async function resolveTools(agent: Agent.Info, availableTools: Awaited<ReturnType<typeof getAvailableTools>>) {
|
||||
|
||||
@@ -6,7 +6,6 @@ import * as prompts from "@clack/prompts"
|
||||
import { UI } from "../ui"
|
||||
import { MCP } from "../../mcp"
|
||||
import { McpAuth } from "../../mcp/auth"
|
||||
import { McpOAuthCallback } from "../../mcp/oauth-callback"
|
||||
import { McpOAuthProvider } from "../../mcp/oauth-provider"
|
||||
import { Config } from "../../config/config"
|
||||
import { Instance } from "../../project/instance"
|
||||
@@ -683,10 +682,6 @@ export const McpDebugCommand = cmd({
|
||||
|
||||
// Try to discover OAuth metadata
|
||||
const oauthConfig = typeof serverConfig.oauth === "object" ? serverConfig.oauth : undefined
|
||||
|
||||
// Start callback server
|
||||
await McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)
|
||||
|
||||
const authProvider = new McpOAuthProvider(
|
||||
serverName,
|
||||
serverConfig.url,
|
||||
@@ -694,7 +689,6 @@ export const McpDebugCommand = cmd({
|
||||
clientId: oauthConfig?.clientId,
|
||||
clientSecret: oauthConfig?.clientSecret,
|
||||
scope: oauthConfig?.scope,
|
||||
redirectUri: oauthConfig?.redirectUri,
|
||||
},
|
||||
{
|
||||
onRedirect: async () => {},
|
||||
|
||||
@@ -200,11 +200,6 @@ function App() {
|
||||
renderer.console.onCopySelection = async (text: string) => {
|
||||
if (!text || text.length === 0) return
|
||||
|
||||
const base64 = Buffer.from(text).toString("base64")
|
||||
const osc52 = `\x1b]52;c;${base64}\x07`
|
||||
const finalOsc52 = process.env["TMUX"] ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
|
||||
// @ts-expect-error writeOut is not in type definitions
|
||||
renderer.writeOut(finalOsc52)
|
||||
await Clipboard.copy(text)
|
||||
.then(() => toast.show({ message: "Copied to clipboard", variant: "info" }))
|
||||
.catch(toast.error)
|
||||
@@ -627,11 +622,6 @@ function App() {
|
||||
}
|
||||
const text = renderer.getSelection()?.getSelectedText()
|
||||
if (text && text.length > 0) {
|
||||
const base64 = Buffer.from(text).toString("base64")
|
||||
const osc52 = `\x1b]52;c;${base64}\x07`
|
||||
const finalOsc52 = process.env["TMUX"] ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
|
||||
/* @ts-expect-error */
|
||||
renderer.writeOut(finalOsc52)
|
||||
await Clipboard.copy(text)
|
||||
.then(() => toast.show({ message: "Copied to clipboard", variant: "info" }))
|
||||
.catch(toast.error)
|
||||
|
||||
@@ -145,9 +145,9 @@ export function Prompt(props: PromptProps) {
|
||||
const isPrimaryAgent = local.agent.list().some((x) => x.name === msg.agent)
|
||||
if (msg.agent && isPrimaryAgent) {
|
||||
local.agent.set(msg.agent)
|
||||
if (msg.model) local.model.set(msg.model)
|
||||
if (msg.variant) local.model.variant.set(msg.variant)
|
||||
}
|
||||
if (msg.model) local.model.set(msg.model)
|
||||
if (msg.variant) local.model.variant.set(msg.variant)
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ import { TodoWriteTool } from "@/tool/todo"
|
||||
import type { GrepTool } from "@/tool/grep"
|
||||
import type { ListTool } from "@/tool/ls"
|
||||
import type { EditTool } from "@/tool/edit"
|
||||
import type { PatchTool } from "@/tool/patch"
|
||||
import type { ApplyPatchTool } from "@/tool/apply_patch"
|
||||
import type { WebFetchTool } from "@/tool/webfetch"
|
||||
import type { TaskTool } from "@/tool/task"
|
||||
import type { QuestionTool } from "@/tool/question"
|
||||
@@ -697,11 +697,6 @@ export function Session() {
|
||||
return
|
||||
}
|
||||
|
||||
const base64 = Buffer.from(text).toString("base64")
|
||||
const osc52 = `\x1b]52;c;${base64}\x07`
|
||||
const finalOsc52 = process.env["TMUX"] ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
|
||||
/* @ts-expect-error */
|
||||
renderer.writeOut(finalOsc52)
|
||||
Clipboard.copy(text)
|
||||
.then(() => toast.show({ message: "Message copied to clipboard!", variant: "success" }))
|
||||
.catch(() => toast.show({ message: "Failed to copy to clipboard", variant: "error" }))
|
||||
@@ -1390,8 +1385,8 @@ function ToolPart(props: { last: boolean; part: ToolPart; message: AssistantMess
|
||||
<Match when={props.part.tool === "task"}>
|
||||
<Task {...toolprops} />
|
||||
</Match>
|
||||
<Match when={props.part.tool === "patch"}>
|
||||
<Patch {...toolprops} />
|
||||
<Match when={props.part.tool === "apply_patch"}>
|
||||
<ApplyPatch {...toolprops} />
|
||||
</Match>
|
||||
<Match when={props.part.tool === "todowrite"}>
|
||||
<TodoWrite {...toolprops} />
|
||||
@@ -1840,20 +1835,74 @@ function Edit(props: ToolProps<typeof EditTool>) {
|
||||
)
|
||||
}
|
||||
|
||||
function Patch(props: ToolProps<typeof PatchTool>) {
|
||||
const { theme } = useTheme()
|
||||
function ApplyPatch(props: ToolProps<typeof ApplyPatchTool>) {
|
||||
const ctx = use()
|
||||
const { theme, syntax } = useTheme()
|
||||
|
||||
const files = createMemo(() => props.metadata.files ?? [])
|
||||
|
||||
const view = createMemo(() => {
|
||||
const diffStyle = ctx.sync.data.config.tui?.diff_style
|
||||
if (diffStyle === "stacked") return "unified"
|
||||
return ctx.width > 120 ? "split" : "unified"
|
||||
})
|
||||
|
||||
function Diff(p: { diff: string; filePath: string }) {
|
||||
return (
|
||||
<box paddingLeft={1}>
|
||||
<diff
|
||||
diff={p.diff}
|
||||
view={view()}
|
||||
filetype={filetype(p.filePath)}
|
||||
syntaxStyle={syntax()}
|
||||
showLineNumbers={true}
|
||||
width="100%"
|
||||
wrapMode={ctx.diffWrapMode()}
|
||||
fg={theme.text}
|
||||
addedBg={theme.diffAddedBg}
|
||||
removedBg={theme.diffRemovedBg}
|
||||
contextBg={theme.diffContextBg}
|
||||
addedSignColor={theme.diffHighlightAdded}
|
||||
removedSignColor={theme.diffHighlightRemoved}
|
||||
lineNumberFg={theme.diffLineNumber}
|
||||
lineNumberBg={theme.diffContextBg}
|
||||
addedLineNumberBg={theme.diffAddedLineNumberBg}
|
||||
removedLineNumberBg={theme.diffRemovedLineNumberBg}
|
||||
/>
|
||||
</box>
|
||||
)
|
||||
}
|
||||
|
||||
function title(file: { type: string; relativePath: string; filePath: string; deletions: number }) {
|
||||
if (file.type === "delete") return "# Deleted " + file.relativePath
|
||||
if (file.type === "add") return "# Created " + file.relativePath
|
||||
if (file.type === "move") return "# Moved " + normalizePath(file.filePath) + " → " + file.relativePath
|
||||
return "← Patched " + file.relativePath
|
||||
}
|
||||
|
||||
return (
|
||||
<Switch>
|
||||
<Match when={props.output !== undefined}>
|
||||
<BlockTool title="# Patch" part={props.part}>
|
||||
<box>
|
||||
<text fg={theme.text}>{props.output?.trim()}</text>
|
||||
</box>
|
||||
</BlockTool>
|
||||
<Match when={files().length > 0}>
|
||||
<For each={files()}>
|
||||
{(file) => (
|
||||
<BlockTool title={title(file)} part={props.part}>
|
||||
<Show
|
||||
when={file.type !== "delete"}
|
||||
fallback={
|
||||
<text fg={theme.diffRemoved}>
|
||||
-{file.deletions} line{file.deletions !== 1 ? "s" : ""}
|
||||
</text>
|
||||
}
|
||||
>
|
||||
<Diff diff={file.diff} filePath={file.filePath} />
|
||||
</Show>
|
||||
</BlockTool>
|
||||
)}
|
||||
</For>
|
||||
</Match>
|
||||
<Match when={true}>
|
||||
<InlineTool icon="%" pending="Preparing patch..." complete={false} part={props.part}>
|
||||
Patch
|
||||
<InlineTool icon="%" pending="Preparing apply_patch..." complete={false} part={props.part}>
|
||||
apply_patch
|
||||
</InlineTool>
|
||||
</Match>
|
||||
</Switch>
|
||||
|
||||
@@ -161,6 +161,8 @@ export function DialogSelect<T>(props: DialogSelectProps<T>) {
|
||||
if (evt.name === "down" || (evt.ctrl && evt.name === "n")) move(1)
|
||||
if (evt.name === "pageup") move(-10)
|
||||
if (evt.name === "pagedown") move(10)
|
||||
if (evt.name === "home") moveTo(0)
|
||||
if (evt.name === "end") moveTo(flat().length - 1)
|
||||
if (evt.name === "return") {
|
||||
const option = selected()
|
||||
if (option) {
|
||||
|
||||
@@ -141,11 +141,6 @@ export function DialogProvider(props: ParentProps) {
|
||||
onMouseUp={async () => {
|
||||
const text = renderer.getSelection()?.getSelectedText()
|
||||
if (text && text.length > 0) {
|
||||
const base64 = Buffer.from(text).toString("base64")
|
||||
const osc52 = `\x1b]52;c;${base64}\x07`
|
||||
const finalOsc52 = process.env["TMUX"] ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
|
||||
/* @ts-expect-error */
|
||||
renderer.writeOut(finalOsc52)
|
||||
await Clipboard.copy(text)
|
||||
.then(() => toast.show({ message: "Copied to clipboard", variant: "info" }))
|
||||
.catch(toast.error)
|
||||
|
||||
@@ -5,6 +5,21 @@ import { lazy } from "../../../../util/lazy.js"
import { tmpdir } from "os"
import path from "path"

/**
 * Writes text to clipboard via OSC 52 escape sequence.
 * This allows clipboard operations to work over SSH by having
 * the terminal emulator handle the clipboard locally.
 */
function writeOsc52(text: string): void {
  if (!process.stdout.isTTY) return
  const base64 = Buffer.from(text).toString("base64")
  const osc52 = `\x1b]52;c;${base64}\x07`
  // tmux and screen require DCS passthrough wrapping
  const passthrough = process.env["TMUX"] || process.env["STY"]
  const sequence = passthrough ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
  process.stdout.write(sequence)
}

export namespace Clipboard {
  export interface Content {
    data: string
@@ -123,6 +138,7 @@ export namespace Clipboard {
  })

  export async function copy(text: string): Promise<void> {
    writeOsc52(text)
    await getCopyMethod()(text)
  }
}
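A small sketch (not part of the diff) of the bytes writeOsc52 ends up emitting, which makes the tmux/screen passthrough above easier to follow:

// Illustrative only: what writeOsc52("hi") writes to stdout.
const base64 = Buffer.from("hi").toString("base64") // "aGk="
const osc52 = `\x1b]52;c;${base64}\x07` // ESC ] 52 ; c ; aGk= BEL
// Inside tmux or screen the sequence is wrapped in a DCS passthrough so the
// outer terminal, not the multiplexer, handles the clipboard write:
const wrapped = `\x1bPtmux;\x1b${osc52}\x1b\\`
console.log(JSON.stringify(process.env["TMUX"] ? wrapped : osc52))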
|
||||
@@ -60,7 +60,11 @@ export const WebCommand = cmd({
|
||||
}
|
||||
|
||||
if (opts.mdns) {
|
||||
UI.println(UI.Style.TEXT_INFO_BOLD + " mDNS: ", UI.Style.TEXT_NORMAL, "opencode.local")
|
||||
UI.println(
|
||||
UI.Style.TEXT_INFO_BOLD + " mDNS: ",
|
||||
UI.Style.TEXT_NORMAL,
|
||||
`opencode.local:${server.port}`,
|
||||
)
|
||||
}
|
||||
|
||||
// Open localhost in browser
|
||||
|
||||
@@ -435,10 +435,6 @@ export namespace Config {
|
||||
.describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."),
|
||||
clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"),
|
||||
scope: z.string().optional().describe("OAuth scopes to request during authorization"),
|
||||
redirectUri: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."),
|
||||
})
|
||||
.strict()
|
||||
.meta({
|
||||
|
||||
@@ -308,8 +308,6 @@ export namespace MCP {
|
||||
let authProvider: McpOAuthProvider | undefined
|
||||
|
||||
if (!oauthDisabled) {
|
||||
await McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)
|
||||
|
||||
authProvider = new McpOAuthProvider(
|
||||
key,
|
||||
mcp.url,
|
||||
@@ -317,7 +315,6 @@ export namespace MCP {
|
||||
clientId: oauthConfig?.clientId,
|
||||
clientSecret: oauthConfig?.clientSecret,
|
||||
scope: oauthConfig?.scope,
|
||||
redirectUri: oauthConfig?.redirectUri,
|
||||
},
|
||||
{
|
||||
onRedirect: async (url) => {
|
||||
@@ -347,7 +344,6 @@ export namespace MCP {
|
||||
|
||||
let lastError: Error | undefined
|
||||
const connectTimeout = mcp.timeout ?? DEFAULT_TIMEOUT
|
||||
|
||||
for (const { name, transport } of transports) {
|
||||
try {
|
||||
const client = new Client({
|
||||
@@ -574,8 +570,7 @@ export namespace MCP {
|
||||
|
||||
for (const [clientName, client] of Object.entries(clientsSnapshot)) {
|
||||
// Only include tools from connected MCPs (skip disabled ones)
|
||||
const clientStatus = s.status[clientName]?.status
|
||||
if (clientStatus !== "connected") {
|
||||
if (s.status[clientName]?.status !== "connected") {
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -725,10 +720,8 @@ export namespace MCP {
|
||||
throw new Error(`MCP server ${mcpName} has OAuth explicitly disabled`)
|
||||
}
|
||||
|
||||
// OAuth config is optional - if not provided, we'll use auto-discovery
|
||||
const oauthConfig = typeof mcpConfig.oauth === "object" ? mcpConfig.oauth : undefined
|
||||
|
||||
await McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)
|
||||
// Start the callback server
|
||||
await McpOAuthCallback.ensureRunning()
|
||||
|
||||
// Generate and store a cryptographically secure state parameter BEFORE creating the provider
|
||||
// The SDK will call provider.state() to read this value
|
||||
@@ -738,6 +731,8 @@ export namespace MCP {
|
||||
await McpAuth.updateOAuthState(mcpName, oauthState)
|
||||
|
||||
// Create a new auth provider for this flow
|
||||
// OAuth config is optional - if not provided, we'll use auto-discovery
|
||||
const oauthConfig = typeof mcpConfig.oauth === "object" ? mcpConfig.oauth : undefined
|
||||
let capturedUrl: URL | undefined
|
||||
const authProvider = new McpOAuthProvider(
|
||||
mcpName,
|
||||
@@ -746,7 +741,6 @@ export namespace MCP {
|
||||
clientId: oauthConfig?.clientId,
|
||||
clientSecret: oauthConfig?.clientSecret,
|
||||
scope: oauthConfig?.scope,
|
||||
redirectUri: oauthConfig?.redirectUri,
|
||||
},
|
||||
{
|
||||
onRedirect: async (url) => {
|
||||
@@ -775,7 +769,6 @@ export namespace MCP {
|
||||
pendingOAuthTransports.set(mcpName, transport)
|
||||
return { authorizationUrl: capturedUrl.toString() }
|
||||
}
|
||||
|
||||
throw error
|
||||
}
|
||||
}
|
||||
@@ -785,9 +778,9 @@ export namespace MCP {
|
||||
* Opens the browser and waits for callback.
|
||||
*/
|
||||
export async function authenticate(mcpName: string): Promise<Status> {
|
||||
const result = await startAuth(mcpName)
|
||||
const { authorizationUrl } = await startAuth(mcpName)
|
||||
|
||||
if (!result.authorizationUrl) {
|
||||
if (!authorizationUrl) {
|
||||
// Already authenticated
|
||||
const s = await state()
|
||||
return s.status[mcpName] ?? { status: "connected" }
|
||||
@@ -801,9 +794,9 @@ export namespace MCP {
|
||||
|
||||
// The SDK has already added the state parameter to the authorization URL
|
||||
// We just need to open the browser
|
||||
log.info("opening browser for oauth", { mcpName, url: result.authorizationUrl, state: oauthState })
|
||||
log.info("opening browser for oauth", { mcpName, url: authorizationUrl, state: oauthState })
|
||||
try {
|
||||
const subprocess = await open(result.authorizationUrl)
|
||||
const subprocess = await open(authorizationUrl)
|
||||
// The open package spawns a detached process and returns immediately.
|
||||
// We need to listen for errors which fire asynchronously:
|
||||
// - "error" event: command not found (ENOENT)
|
||||
@@ -826,7 +819,7 @@ export namespace MCP {
|
||||
// Browser opening failed (e.g., in remote/headless sessions like SSH, devcontainers)
|
||||
// Emit event so CLI can display the URL for manual opening
|
||||
log.warn("failed to open browser, user must open URL manually", { mcpName, error })
|
||||
Bus.publish(BrowserOpenFailed, { mcpName, url: result.authorizationUrl })
|
||||
Bus.publish(BrowserOpenFailed, { mcpName, url: authorizationUrl })
|
||||
}
|
||||
|
||||
// Wait for callback using the OAuth state parameter
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
import { Log } from "../util/log"
|
||||
import { OAUTH_CALLBACK_PORT, OAUTH_CALLBACK_PATH, parseRedirectUri } from "./oauth-provider"
|
||||
import { OAUTH_CALLBACK_PORT, OAUTH_CALLBACK_PATH } from "./oauth-provider"
|
||||
|
||||
const log = Log.create({ service: "mcp.oauth-callback" })
|
||||
|
||||
// Current callback server configuration (may differ from defaults if custom redirectUri is used)
|
||||
let currentPort = OAUTH_CALLBACK_PORT
|
||||
let currentPath = OAUTH_CALLBACK_PATH
|
||||
|
||||
const HTML_SUCCESS = `<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
@@ -60,33 +56,21 @@ export namespace McpOAuthCallback {
|
||||
|
||||
const CALLBACK_TIMEOUT_MS = 5 * 60 * 1000 // 5 minutes
|
||||
|
||||
export async function ensureRunning(redirectUri?: string): Promise<void> {
|
||||
// Parse the redirect URI to get port and path (uses defaults if not provided)
|
||||
const { port, path } = parseRedirectUri(redirectUri)
|
||||
|
||||
// If server is running on a different port/path, stop it first
|
||||
if (server && (currentPort !== port || currentPath !== path)) {
|
||||
log.info("stopping oauth callback server to reconfigure", { oldPort: currentPort, newPort: port })
|
||||
await stop()
|
||||
}
|
||||
|
||||
export async function ensureRunning(): Promise<void> {
|
||||
if (server) return
|
||||
|
||||
const running = await isPortInUse(port)
|
||||
const running = await isPortInUse()
|
||||
if (running) {
|
||||
log.info("oauth callback server already running on another instance", { port })
|
||||
log.info("oauth callback server already running on another instance", { port: OAUTH_CALLBACK_PORT })
|
||||
return
|
||||
}
|
||||
|
||||
currentPort = port
|
||||
currentPath = path
|
||||
|
||||
server = Bun.serve({
|
||||
port: currentPort,
|
||||
port: OAUTH_CALLBACK_PORT,
|
||||
fetch(req) {
|
||||
const url = new URL(req.url)
|
||||
|
||||
if (url.pathname !== currentPath) {
|
||||
if (url.pathname !== OAUTH_CALLBACK_PATH) {
|
||||
return new Response("Not found", { status: 404 })
|
||||
}
|
||||
|
||||
@@ -149,7 +133,7 @@ export namespace McpOAuthCallback {
|
||||
},
|
||||
})
|
||||
|
||||
log.info("oauth callback server started", { port: currentPort, path: currentPath })
|
||||
log.info("oauth callback server started", { port: OAUTH_CALLBACK_PORT })
|
||||
}
|
||||
|
||||
export function waitForCallback(oauthState: string): Promise<string> {
|
||||
@@ -174,11 +158,11 @@ export namespace McpOAuthCallback {
|
||||
}
|
||||
}
|
||||
|
||||
export async function isPortInUse(port: number = OAUTH_CALLBACK_PORT): Promise<boolean> {
|
||||
export async function isPortInUse(): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
Bun.connect({
|
||||
hostname: "127.0.0.1",
|
||||
port,
|
||||
port: OAUTH_CALLBACK_PORT,
|
||||
socket: {
|
||||
open(socket) {
|
||||
socket.end()
|
||||
|
||||
@@ -17,7 +17,6 @@ export interface McpOAuthConfig {
|
||||
clientId?: string
|
||||
clientSecret?: string
|
||||
scope?: string
|
||||
redirectUri?: string
|
||||
}
|
||||
|
||||
export interface McpOAuthCallbacks {
|
||||
@@ -33,10 +32,6 @@ export class McpOAuthProvider implements OAuthClientProvider {
|
||||
) {}
|
||||
|
||||
get redirectUrl(): string {
|
||||
// Use configured redirectUri if provided, otherwise use OpenCode defaults
|
||||
if (this.config.redirectUri) {
|
||||
return this.config.redirectUri
|
||||
}
|
||||
return `http://127.0.0.1:${OAUTH_CALLBACK_PORT}${OAUTH_CALLBACK_PATH}`
|
||||
}
|
||||
|
||||
@@ -157,22 +152,3 @@ export class McpOAuthProvider implements OAuthClientProvider {
|
||||
}
|
||||
|
||||
export { OAUTH_CALLBACK_PORT, OAUTH_CALLBACK_PATH }
|
||||
|
||||
/**
|
||||
* Parse a redirect URI to extract port and path for the callback server.
|
||||
* Returns defaults if the URI can't be parsed.
|
||||
*/
|
||||
export function parseRedirectUri(redirectUri?: string): { port: number; path: string } {
|
||||
if (!redirectUri) {
|
||||
return { port: OAUTH_CALLBACK_PORT, path: OAUTH_CALLBACK_PATH }
|
||||
}
|
||||
|
||||
try {
|
||||
const url = new URL(redirectUri)
|
||||
const port = url.port ? parseInt(url.port, 10) : url.protocol === "https:" ? 443 : 80
|
||||
const path = url.pathname || OAUTH_CALLBACK_PATH
|
||||
return { port, path }
|
||||
} catch {
|
||||
return { port: OAUTH_CALLBACK_PORT, path: OAUTH_CALLBACK_PATH }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -177,8 +177,18 @@ export namespace Patch {
    return { content, nextIdx: i }
  }

  function stripHeredoc(input: string): string {
    // Match heredoc patterns like: cat <<'EOF'\n...\nEOF or <<EOF\n...\nEOF
    const heredocMatch = input.match(/^(?:cat\s+)?<<['"]?(\w+)['"]?\s*\n([\s\S]*?)\n\1\s*$/)
    if (heredocMatch) {
      return heredocMatch[2]
    }
    return input
  }

  export function parsePatch(patchText: string): { hunks: Hunk[] } {
    const lines = patchText.split("\n")
    const cleaned = stripHeredoc(patchText.trim())
    const lines = cleaned.split("\n")
    const hunks: Hunk[] = []
    let i = 0
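For context, a sketch of the kind of input stripHeredoc is meant to unwrap (the wrapper and body below are invented; the real input is whatever the model emitted):

// Illustrative only: a patch body that arrived wrapped in a shell heredoc.
const wrapped = "cat <<'EOF'\n<patch body here>\nEOF"
// stripHeredoc(wrapped.trim()) returns just "<patch body here>", so parsePatch
// never sees the cat <<'EOF' / EOF wrapper. Input without a heredoc wrapper is
// returned unchanged.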
|
||||
@@ -363,7 +373,7 @@ export namespace Patch {
      // Try to match old lines in the file
      let pattern = chunk.old_lines
      let newSlice = chunk.new_lines
      let found = seekSequence(originalLines, pattern, lineIndex)
      let found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file)

      // Retry without trailing empty line if not found
      if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") {
@@ -371,7 +381,7 @@ export namespace Patch {
        if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") {
          newSlice = newSlice.slice(0, -1)
        }
        found = seekSequence(originalLines, pattern, lineIndex)
        found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file)
      }

      if (found !== -1) {
@@ -407,28 +417,75 @@ export namespace Patch {
    return result
  }

  function seekSequence(lines: string[], pattern: string[], startIndex: number): number {
    if (pattern.length === 0) return -1
  // Normalize Unicode punctuation to ASCII equivalents (like Rust's normalize_unicode)
  function normalizeUnicode(str: string): string {
    return str
      .replace(/[\u2018\u2019\u201A\u201B]/g, "'") // single quotes
      .replace(/[\u201C\u201D\u201E\u201F]/g, '"') // double quotes
      .replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, "-") // dashes
      .replace(/\u2026/g, "...") // ellipsis
      .replace(/\u00A0/g, " ") // non-breaking space
  }

    // Simple substring search implementation
  type Comparator = (a: string, b: string) => boolean

  function tryMatch(lines: string[], pattern: string[], startIndex: number, compare: Comparator, eof: boolean): number {
    // If EOF anchor, try matching from end of file first
    if (eof) {
      const fromEnd = lines.length - pattern.length
      if (fromEnd >= startIndex) {
        let matches = true
        for (let j = 0; j < pattern.length; j++) {
          if (!compare(lines[fromEnd + j], pattern[j])) {
            matches = false
            break
          }
        }
        if (matches) return fromEnd
      }
    }

    // Forward search from startIndex
    for (let i = startIndex; i <= lines.length - pattern.length; i++) {
      let matches = true

      for (let j = 0; j < pattern.length; j++) {
        if (lines[i + j] !== pattern[j]) {
        if (!compare(lines[i + j], pattern[j])) {
          matches = false
          break
        }
      }

      if (matches) {
        return i
      }
      if (matches) return i
    }

    return -1
  }

  function seekSequence(lines: string[], pattern: string[], startIndex: number, eof = false): number {
    if (pattern.length === 0) return -1

    // Pass 1: exact match
    const exact = tryMatch(lines, pattern, startIndex, (a, b) => a === b, eof)
    if (exact !== -1) return exact

    // Pass 2: rstrip (trim trailing whitespace)
    const rstrip = tryMatch(lines, pattern, startIndex, (a, b) => a.trimEnd() === b.trimEnd(), eof)
    if (rstrip !== -1) return rstrip

    // Pass 3: trim (both ends)
    const trim = tryMatch(lines, pattern, startIndex, (a, b) => a.trim() === b.trim(), eof)
    if (trim !== -1) return trim

    // Pass 4: normalized (Unicode punctuation to ASCII)
    const normalized = tryMatch(
      lines,
      pattern,
      startIndex,
      (a, b) => normalizeUnicode(a.trim()) === normalizeUnicode(b.trim()),
      eof,
    )
    return normalized
  }
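A sketch (inputs invented) of how the four passes recover progressively fuzzier matches when anchoring a hunk:

// Illustrative only: file content with curly quotes, an em dash and trailing spaces.
const file = ["function greet() {", '  return \u201Chello \u2014 world\u201D  ', "}"]
// The hunk's context line uses plain ASCII punctuation and no trailing spaces.
const pattern = ['  return "hello - world"']
// Pass 1 (exact) fails, pass 2 (trailing-whitespace-insensitive) and pass 3 (full trim)
// still fail because of the Unicode punctuation. Pass 4 compares
// normalizeUnicode(a.trim()) === normalizeUnicode(b.trim()), which maps the curly
// quotes and em dash to ASCII, so the hunk anchors at index 1 of the file.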
|
||||
  function generateUnifiedDiff(oldContent: string, newContent: string): string {
    const oldLines = oldContent.split("\n")
    const newLines = newContent.split("\n")
|
||||
@@ -3,6 +3,9 @@ import { Installation } from "@/installation"
import { iife } from "@/util/iife"

const CLIENT_ID = "Ov23li8tweQw6odWQebz"
// Add a small safety buffer when polling to avoid hitting the server
// slightly too early due to clock skew / timer drift.
const OAUTH_POLLING_SAFETY_MARGIN_MS = 3000 // 3 seconds

function normalizeDomain(url: string) {
  return url.replace(/^https?:\/\//, "").replace(/\/$/, "")
@@ -204,6 +207,7 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
        const data = (await response.json()) as {
          access_token?: string
          error?: string
          interval?: number
        }

        if (data.access_token) {
@@ -230,13 +234,29 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
        }

        if (data.error === "authorization_pending") {
          await new Promise((resolve) => setTimeout(resolve, deviceData.interval * 1000))
          await Bun.sleep(deviceData.interval * 1000 + OAUTH_POLLING_SAFETY_MARGIN_MS)
          continue
        }

        if (data.error === "slow_down") {
          // Based on the RFC spec, we must add 5 seconds to our current polling interval.
          // (See https://www.rfc-editor.org/rfc/rfc8628#section-3.5)
          let newInterval = (deviceData.interval + 5) * 1000

          // GitHub OAuth API may return the new interval in seconds in the response.
          // We should try to use that if provided with safety margin.
          const serverInterval = data.interval
          if (serverInterval && typeof serverInterval === "number" && serverInterval > 0) {
            newInterval = serverInterval * 1000
          }

          await Bun.sleep(newInterval + OAUTH_POLLING_SAFETY_MARGIN_MS)
          continue
        }

        if (data.error) return { type: "failed" as const }

        await new Promise((resolve) => setTimeout(resolve, deviceData.interval * 1000))
        await Bun.sleep(deviceData.interval * 1000 + OAUTH_POLLING_SAFETY_MARGIN_MS)
        continue
      }
    },
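A minimal sketch of the resulting backoff behaviour, assuming the same 3-second safety margin as above (the helper name is made up):

// Illustrative only: effective wait before the next device-flow token poll.
function nextPollDelayMs(intervalSec: number, error?: string, serverIntervalSec?: number): number {
  const SAFETY_MS = 3000
  if (error === "slow_down") {
    // RFC 8628 §3.5: add 5 seconds, unless the server supplied a new interval.
    const sec = serverIntervalSec && serverIntervalSec > 0 ? serverIntervalSec : intervalSec + 5
    return sec * 1000 + SAFETY_MS
  }
  return intervalSec * 1000 + SAFETY_MS // authorization_pending and normal polling
}

console.log(nextPollDelayMs(5)) // 8000
console.log(nextPollDelayMs(5, "slow_down")) // 13000
console.log(nextPollDelayMs(5, "slow_down", 15)) // 18000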
|
||||
@@ -999,6 +999,24 @@ export namespace Provider {
        opts.signal = combined
      }

      // Strip openai itemId metadata following what codex does
      // Codex uses #[serde(skip_serializing)] on id fields for all item types:
      // Message, Reasoning, FunctionCall, LocalShellCall, CustomToolCall, WebSearchCall
      // IDs are only re-attached for Azure with store=true
      if (model.api.npm === "@ai-sdk/openai" && opts.body && opts.method === "POST") {
        const body = JSON.parse(opts.body as string)
        const isAzure = model.providerID.includes("azure")
        const keepIds = isAzure && body.store === true
        if (!keepIds && Array.isArray(body.input)) {
          for (const item of body.input) {
            if ("id" in item) {
              delete item.id
            }
          }
          opts.body = JSON.stringify(body)
        }
      }

      return fetchFn(input, {
        ...opts,
        // @ts-ignore see here: https://github.com/oven-sh/bun/issues/16682
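A before/after sketch of the transformation (item shapes and ids are illustrative, not taken from the repo):

// Illustrative only: a request body as seen by the fetch wrapper.
const before = {
  store: false,
  input: [
    { type: "message", id: "msg_123", role: "user", content: "hi" },
    { type: "function_call", id: "fc_456", name: "read", arguments: "{}" },
  ],
}
// With store !== true (or a non-Azure provider) every `id` key is deleted before the
// request is re-serialized:
// { store: false, input: [ { type: "message", role: "user", content: "hi" },
//                          { type: "function_call", name: "read", arguments: "{}" } ] }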
|
||||
@@ -16,38 +16,33 @@ function mimeToModality(mime: string): Modality | undefined {
|
||||
}
|
||||
|
||||
export namespace ProviderTransform {
|
||||
// Maps npm package to the key the AI SDK expects for providerOptions
|
||||
function sdkKey(npm: string): string | undefined {
|
||||
switch (npm) {
|
||||
case "@ai-sdk/github-copilot":
|
||||
case "@ai-sdk/openai":
|
||||
case "@ai-sdk/azure":
|
||||
return "openai"
|
||||
case "@ai-sdk/amazon-bedrock":
|
||||
return "bedrock"
|
||||
case "@ai-sdk/anthropic":
|
||||
return "anthropic"
|
||||
case "@ai-sdk/google-vertex":
|
||||
case "@ai-sdk/google":
|
||||
return "google"
|
||||
case "@ai-sdk/gateway":
|
||||
return "gateway"
|
||||
case "@openrouter/ai-sdk-provider":
|
||||
return "openrouter"
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
function normalizeMessages(
|
||||
msgs: ModelMessage[],
|
||||
model: Provider.Model,
|
||||
options: Record<string, unknown>,
|
||||
): ModelMessage[] {
|
||||
// Strip openai itemId metadata following what codex does
|
||||
if (model.api.npm === "@ai-sdk/openai" || options.store === false) {
|
||||
msgs = msgs.map((msg) => {
|
||||
if (msg.providerOptions) {
|
||||
for (const options of Object.values(msg.providerOptions)) {
|
||||
if (options && typeof options === "object") {
|
||||
delete options["itemId"]
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!Array.isArray(msg.content)) {
|
||||
return msg
|
||||
}
|
||||
const content = msg.content.map((part) => {
|
||||
if (part.providerOptions) {
|
||||
for (const options of Object.values(part.providerOptions)) {
|
||||
if (options && typeof options === "object") {
|
||||
delete options["itemId"]
|
||||
}
|
||||
}
|
||||
}
|
||||
return part
|
||||
})
|
||||
return { ...msg, content } as typeof msg
|
||||
})
|
||||
}
|
||||
|
||||
// Anthropic rejects messages with empty content - filter out empty string messages
|
||||
// and remove empty text/reasoning parts from array content
|
||||
if (model.api.npm === "@ai-sdk/anthropic") {
|
||||
@@ -261,6 +256,28 @@ export namespace ProviderTransform {
|
||||
msgs = applyCaching(msgs, model.providerID)
|
||||
}
|
||||
|
||||
// Remap providerOptions keys from stored providerID to expected SDK key
|
||||
const key = sdkKey(model.api.npm)
|
||||
if (key && key !== model.providerID && model.api.npm !== "@ai-sdk/azure") {
|
||||
const remap = (opts: Record<string, any> | undefined) => {
|
||||
if (!opts) return opts
|
||||
if (!(model.providerID in opts)) return opts
|
||||
const result = { ...opts }
|
||||
result[key] = result[model.providerID]
|
||||
delete result[model.providerID]
|
||||
return result
|
||||
}
|
||||
|
||||
msgs = msgs.map((msg) => {
|
||||
if (!Array.isArray(msg.content)) return { ...msg, providerOptions: remap(msg.providerOptions) }
|
||||
return {
|
||||
...msg,
|
||||
providerOptions: remap(msg.providerOptions),
|
||||
content: msg.content.map((part) => ({ ...part, providerOptions: remap(part.providerOptions) })),
|
||||
} as typeof msg
|
||||
})
|
||||
}
|
||||
|
||||
return msgs
|
||||
}
|
||||
|
||||
@@ -578,39 +595,8 @@
  }

  export function providerOptions(model: Provider.Model, options: { [x: string]: any }) {
    switch (model.api.npm) {
      case "@ai-sdk/github-copilot":
      case "@ai-sdk/openai":
      case "@ai-sdk/azure":
        return {
          ["openai" as string]: options,
        }
      case "@ai-sdk/amazon-bedrock":
        return {
          ["bedrock" as string]: options,
        }
      case "@ai-sdk/anthropic":
        return {
          ["anthropic" as string]: options,
        }
      case "@ai-sdk/google-vertex":
      case "@ai-sdk/google":
        return {
          ["google" as string]: options,
        }
      case "@ai-sdk/gateway":
        return {
          ["gateway" as string]: options,
        }
      case "@openrouter/ai-sdk-provider":
        return {
          ["openrouter" as string]: options,
        }
      default:
        return {
          [model.providerID]: options,
        }
    }
    const key = sdkKey(model.api.npm) ?? model.providerID
    return { [key]: options }
  }
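A short sketch of what the consolidated helper produces (providerID and option payload are invented for illustration):

// Illustrative only.
// A model served through "@ai-sdk/anthropic" but configured under providerID "my-claude"
// gets its options keyed the way the AI SDK expects:
//   providerOptions(model, { cacheControl: true })  ->  { anthropic: { cacheControl: true } }
// For an npm package sdkKey() does not recognize, it falls back to the providerID:
//   providerOptions(model, { cacheControl: true })  ->  { "my-claude": { cacheControl: true } }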
|
||||
export function maxOutputTokens(
|
||||
|
||||
@@ -146,10 +146,6 @@ export namespace Pty {
|
||||
ptyProcess.onExit(({ exitCode }) => {
|
||||
log.info("session exited", { id, exitCode })
|
||||
session.info.status = "exited"
|
||||
for (const ws of session.subscribers) {
|
||||
ws.close()
|
||||
}
|
||||
session.subscribers.clear()
|
||||
Bus.publish(Event.Exited, { id, exitCode })
|
||||
state().delete(id)
|
||||
})
|
||||
|
||||
@@ -7,15 +7,17 @@ export namespace MDNS {
|
||||
let bonjour: Bonjour | undefined
|
||||
let currentPort: number | undefined
|
||||
|
||||
export function publish(port: number, name = "opencode") {
|
||||
export function publish(port: number) {
|
||||
if (currentPort === port) return
|
||||
if (bonjour) unpublish()
|
||||
|
||||
try {
|
||||
const name = `opencode-${port}`
|
||||
bonjour = new Bonjour()
|
||||
const service = bonjour.publish({
|
||||
name,
|
||||
type: "http",
|
||||
host: "opencode.local",
|
||||
port,
|
||||
txt: { path: "/" },
|
||||
})
|
||||
|
||||
@@ -1,79 +0,0 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator } from "hono-openapi"
|
||||
import { resolver } from "hono-openapi"
|
||||
import { Instance } from "../project/instance"
|
||||
import { Project } from "../project/project"
|
||||
import z from "zod"
|
||||
import { errors } from "./error"
|
||||
|
||||
export const ProjectRoute = new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List all projects",
|
||||
description: "Get a list of projects that have been opened with OpenCode.",
|
||||
operationId: "project.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of projects",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Project.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const projects = await Project.list()
|
||||
return c.json(projects)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/current",
|
||||
describeRoute({
|
||||
summary: "Get current project",
|
||||
description: "Retrieve the currently active project that OpenCode is working with.",
|
||||
operationId: "project.current",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Current project information",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Project.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(Instance.project)
|
||||
},
|
||||
)
|
||||
.patch(
|
||||
"/:projectID",
|
||||
describeRoute({
|
||||
summary: "Update project",
|
||||
description: "Update project properties such as name, icon and color.",
|
||||
operationId: "project.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Updated project information",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Project.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ projectID: z.string() })),
|
||||
validator("json", Project.update.schema.omit({ projectID: true })),
|
||||
async (c) => {
|
||||
const projectID = c.req.valid("param").projectID
|
||||
const body = c.req.valid("json")
|
||||
const project = await Project.update({ ...body, projectID })
|
||||
return c.json(project)
|
||||
},
|
||||
)
|
||||
@@ -1,95 +0,0 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator } from "hono-openapi"
|
||||
import { resolver } from "hono-openapi"
|
||||
import { Question } from "../question"
|
||||
import z from "zod"
|
||||
import { errors } from "./error"
|
||||
|
||||
export const QuestionRoute = new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List pending questions",
|
||||
description: "Get all pending question requests across all sessions.",
|
||||
operationId: "question.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of pending questions",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Question.Request.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const questions = await Question.list()
|
||||
return c.json(questions)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:requestID/reply",
|
||||
describeRoute({
|
||||
summary: "Reply to question request",
|
||||
description: "Provide answers to a question request from the AI assistant.",
|
||||
operationId: "question.reply",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Question answered successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
requestID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator("json", Question.Reply),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
const json = c.req.valid("json")
|
||||
await Question.reply({
|
||||
requestID: params.requestID,
|
||||
answers: json.answers,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:requestID/reject",
|
||||
describeRoute({
|
||||
summary: "Reject question request",
|
||||
description: "Reject a question request from the AI assistant.",
|
||||
operationId: "question.reject",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Question rejected successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
requestID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
await Question.reject(params.requestID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
92
packages/opencode/src/server/routes/config.ts
Normal file
92
packages/opencode/src/server/routes/config.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Config } from "../../config/config"
|
||||
import { Provider } from "../../provider/provider"
|
||||
import { mapValues } from "remeda"
|
||||
import { errors } from "../error"
|
||||
import { Log } from "../../util/log"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
const log = Log.create({ service: "server" })
|
||||
|
||||
export const ConfigRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Get configuration",
|
||||
description: "Retrieve the current OpenCode configuration settings and preferences.",
|
||||
operationId: "config.get",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Get config info",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Config.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await Config.get())
|
||||
},
|
||||
)
|
||||
.patch(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Update configuration",
|
||||
description: "Update OpenCode configuration settings and preferences.",
|
||||
operationId: "config.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully updated config",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Config.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", Config.Info),
|
||||
async (c) => {
|
||||
const config = c.req.valid("json")
|
||||
await Config.update(config)
|
||||
return c.json(config)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/providers",
|
||||
describeRoute({
|
||||
summary: "List config providers",
|
||||
description: "Get a list of all configured AI providers and their default models.",
|
||||
operationId: "config.providers",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of providers",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
providers: Provider.Info.array(),
|
||||
default: z.record(z.string(), z.string()),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
using _ = log.time("providers")
|
||||
const providers = await Provider.list().then((x) => mapValues(x, (item) => item))
|
||||
return c.json({
|
||||
providers: Object.values(providers),
|
||||
default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id),
|
||||
})
|
||||
},
|
||||
),
|
||||
)
|
||||
157
packages/opencode/src/server/routes/experimental.ts
Normal file
157
packages/opencode/src/server/routes/experimental.ts
Normal file
@@ -0,0 +1,157 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { ToolRegistry } from "../../tool/registry"
|
||||
import { Worktree } from "../../worktree"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { Project } from "../../project/project"
|
||||
import { MCP } from "../../mcp"
|
||||
import { zodToJsonSchema } from "zod-to-json-schema"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const ExperimentalRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/tool/ids",
|
||||
describeRoute({
|
||||
summary: "List tool IDs",
|
||||
description:
|
||||
"Get a list of all available tool IDs, including both built-in tools and dynamically registered tools.",
|
||||
operationId: "tool.ids",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Tool IDs",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.array(z.string()).meta({ ref: "ToolIDs" })),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await ToolRegistry.ids())
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/tool",
|
||||
describeRoute({
|
||||
summary: "List tools",
|
||||
description:
|
||||
"Get a list of available tools with their JSON schema parameters for a specific provider and model combination.",
|
||||
operationId: "tool.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Tools",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z
|
||||
.array(
|
||||
z
|
||||
.object({
|
||||
id: z.string(),
|
||||
description: z.string(),
|
||||
parameters: z.any(),
|
||||
})
|
||||
.meta({ ref: "ToolListItem" }),
|
||||
)
|
||||
.meta({ ref: "ToolList" }),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
provider: z.string(),
|
||||
model: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const { provider, model } = c.req.valid("query")
|
||||
const tools = await ToolRegistry.tools({ providerID: provider, modelID: model })
|
||||
return c.json(
|
||||
tools.map((t) => ({
|
||||
id: t.id,
|
||||
description: t.description,
|
||||
// Handle both Zod schemas and plain JSON schemas
|
||||
parameters: (t.parameters as any)?._def ? zodToJsonSchema(t.parameters as any) : t.parameters,
|
||||
})),
|
||||
)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/worktree",
|
||||
describeRoute({
|
||||
summary: "Create worktree",
|
||||
description: "Create a new git worktree for the current project.",
|
||||
operationId: "worktree.create",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Worktree created",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Worktree.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", Worktree.create.schema),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
const worktree = await Worktree.create(body)
|
||||
return c.json(worktree)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/worktree",
|
||||
describeRoute({
|
||||
summary: "List worktrees",
|
||||
description: "List all sandbox worktrees for the current project.",
|
||||
operationId: "worktree.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of worktree directories",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.array(z.string())),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const sandboxes = await Project.sandboxes(Instance.project.id)
|
||||
return c.json(sandboxes)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/resource",
|
||||
describeRoute({
|
||||
summary: "Get MCP resources",
|
||||
description: "Get all available MCP resources from connected servers. Optionally filter by name.",
|
||||
operationId: "experimental.resource.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP resources",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), MCP.Resource)),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await MCP.resources())
|
||||
},
|
||||
),
|
||||
)
|
||||
197
packages/opencode/src/server/routes/file.ts
Normal file
197
packages/opencode/src/server/routes/file.ts
Normal file
@@ -0,0 +1,197 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { File } from "../../file"
|
||||
import { Ripgrep } from "../../file/ripgrep"
|
||||
import { LSP } from "../../lsp"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const FileRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/find",
|
||||
describeRoute({
|
||||
summary: "Find text",
|
||||
description: "Search for text patterns across files in the project using ripgrep.",
|
||||
operationId: "find.text",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Matches",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Ripgrep.Match.shape.data.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
pattern: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const pattern = c.req.valid("query").pattern
|
||||
const result = await Ripgrep.search({
|
||||
cwd: Instance.directory,
|
||||
pattern,
|
||||
limit: 10,
|
||||
})
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/find/file",
|
||||
describeRoute({
|
||||
summary: "Find files",
|
||||
description: "Search for files or directories by name or pattern in the project directory.",
|
||||
operationId: "find.files",
|
||||
responses: {
|
||||
200: {
|
||||
description: "File paths",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.string().array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
query: z.string(),
|
||||
dirs: z.enum(["true", "false"]).optional(),
|
||||
type: z.enum(["file", "directory"]).optional(),
|
||||
limit: z.coerce.number().int().min(1).max(200).optional(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const query = c.req.valid("query").query
|
||||
const dirs = c.req.valid("query").dirs
|
||||
const type = c.req.valid("query").type
|
||||
const limit = c.req.valid("query").limit
|
||||
const results = await File.search({
|
||||
query,
|
||||
limit: limit ?? 10,
|
||||
dirs: dirs !== "false",
|
||||
type,
|
||||
})
|
||||
return c.json(results)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/find/symbol",
|
||||
describeRoute({
|
||||
summary: "Find symbols",
|
||||
description: "Search for workspace symbols like functions, classes, and variables using LSP.",
|
||||
operationId: "find.symbols",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Symbols",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(LSP.Symbol.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
query: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
/*
|
||||
const query = c.req.valid("query").query
|
||||
const result = await LSP.workspaceSymbol(query)
|
||||
return c.json(result)
|
||||
*/
|
||||
return c.json([])
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/file",
|
||||
describeRoute({
|
||||
summary: "List files",
|
||||
description: "List files and directories in a specified path.",
|
||||
operationId: "file.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Files and directories",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(File.Node.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const path = c.req.valid("query").path
|
||||
const content = await File.list(path)
|
||||
return c.json(content)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/file/content",
|
||||
describeRoute({
|
||||
summary: "Read file",
|
||||
description: "Read the content of a specified file.",
|
||||
operationId: "file.read",
|
||||
responses: {
|
||||
200: {
|
||||
description: "File content",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(File.Content),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const path = c.req.valid("query").path
|
||||
const content = await File.read(path)
|
||||
return c.json(content)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/file/status",
|
||||
describeRoute({
|
||||
summary: "Get file status",
|
||||
description: "Get the git status of all files in the project.",
|
||||
operationId: "file.status",
|
||||
responses: {
|
||||
200: {
|
||||
description: "File status",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(File.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const content = await File.status()
|
||||
return c.json(content)
|
||||
},
|
||||
),
|
||||
)
|
||||
135 packages/opencode/src/server/routes/global.ts Normal file
@@ -0,0 +1,135 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, resolver } from "hono-openapi"
|
||||
import { streamSSE } from "hono/streaming"
|
||||
import z from "zod"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { GlobalBus } from "@/bus/global"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { Installation } from "@/installation"
|
||||
import { Log } from "../../util/log"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
const log = Log.create({ service: "server" })
|
||||
|
||||
export const GlobalDisposedEvent = BusEvent.define("global.disposed", z.object({}))
|
||||
|
||||
export const GlobalRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/health",
|
||||
describeRoute({
|
||||
summary: "Get health",
|
||||
description: "Get health information about the OpenCode server.",
|
||||
operationId: "global.health",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Health information",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.object({ healthy: z.literal(true), version: z.string() })),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json({ healthy: true, version: Installation.VERSION })
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/event",
|
||||
describeRoute({
|
||||
summary: "Get global events",
|
||||
description: "Subscribe to global events from the OpenCode system using server-sent events.",
|
||||
operationId: "global.event",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Event stream",
|
||||
content: {
|
||||
"text/event-stream": {
|
||||
schema: resolver(
|
||||
z
|
||||
.object({
|
||||
directory: z.string(),
|
||||
payload: BusEvent.payloads(),
|
||||
})
|
||||
.meta({
|
||||
ref: "GlobalEvent",
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
log.info("global event connected")
|
||||
return streamSSE(c, async (stream) => {
|
||||
stream.writeSSE({
|
||||
data: JSON.stringify({
|
||||
payload: {
|
||||
type: "server.connected",
|
||||
properties: {},
|
||||
},
|
||||
}),
|
||||
})
|
||||
async function handler(event: any) {
|
||||
await stream.writeSSE({
|
||||
data: JSON.stringify(event),
|
||||
})
|
||||
}
|
||||
GlobalBus.on("event", handler)
|
||||
|
||||
// Send heartbeat every 30s to prevent WKWebView timeout (60s default)
|
||||
const heartbeat = setInterval(() => {
|
||||
stream.writeSSE({
|
||||
data: JSON.stringify({
|
||||
payload: {
|
||||
type: "server.heartbeat",
|
||||
properties: {},
|
||||
},
|
||||
}),
|
||||
})
|
||||
}, 30000)
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
stream.onAbort(() => {
|
||||
clearInterval(heartbeat)
|
||||
GlobalBus.off("event", handler)
|
||||
resolve()
|
||||
log.info("global event disconnected")
|
||||
})
|
||||
})
|
||||
})
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/dispose",
|
||||
describeRoute({
|
||||
summary: "Dispose instance",
|
||||
description: "Clean up and dispose all OpenCode instances, releasing all resources.",
|
||||
operationId: "global.dispose",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Global disposed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Instance.disposeAll()
|
||||
GlobalBus.emit("event", {
|
||||
directory: "global",
|
||||
payload: {
|
||||
type: GlobalDisposedEvent.type,
|
||||
properties: {},
|
||||
},
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
),
|
||||
)
|
||||
225 packages/opencode/src/server/routes/mcp.ts Normal file
@@ -0,0 +1,225 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { MCP } from "../../mcp"
|
||||
import { Config } from "../../config/config"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const McpRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Get MCP status",
|
||||
description: "Get the status of all Model Context Protocol (MCP) servers.",
|
||||
operationId: "mcp.status",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP server status",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), MCP.Status)),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await MCP.status())
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Add MCP server",
|
||||
description: "Dynamically add a new Model Context Protocol (MCP) server to the system.",
|
||||
operationId: "mcp.add",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP server added successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), MCP.Status)),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
name: z.string(),
|
||||
config: Config.Mcp,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const { name, config } = c.req.valid("json")
|
||||
const result = await MCP.add(name, config)
|
||||
return c.json(result.status)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/auth",
|
||||
describeRoute({
|
||||
summary: "Start MCP OAuth",
|
||||
description: "Start OAuth authentication flow for a Model Context Protocol (MCP) server.",
|
||||
operationId: "mcp.auth.start",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth flow started",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
authorizationUrl: z.string().describe("URL to open in browser for authorization"),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const name = c.req.param("name")
|
||||
const supportsOAuth = await MCP.supportsOAuth(name)
|
||||
if (!supportsOAuth) {
|
||||
return c.json({ error: `MCP server ${name} does not support OAuth` }, 400)
|
||||
}
|
||||
const result = await MCP.startAuth(name)
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/auth/callback",
|
||||
describeRoute({
|
||||
summary: "Complete MCP OAuth",
|
||||
description:
|
||||
"Complete OAuth authentication for a Model Context Protocol (MCP) server using the authorization code.",
|
||||
operationId: "mcp.auth.callback",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth authentication completed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MCP.Status),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
code: z.string().describe("Authorization code from OAuth callback"),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const name = c.req.param("name")
|
||||
const { code } = c.req.valid("json")
|
||||
const status = await MCP.finishAuth(name, code)
|
||||
return c.json(status)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/auth/authenticate",
|
||||
describeRoute({
|
||||
summary: "Authenticate MCP OAuth",
|
||||
description: "Start OAuth flow and wait for callback (opens browser)",
|
||||
operationId: "mcp.auth.authenticate",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth authentication completed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MCP.Status),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const name = c.req.param("name")
|
||||
const supportsOAuth = await MCP.supportsOAuth(name)
|
||||
if (!supportsOAuth) {
|
||||
return c.json({ error: `MCP server ${name} does not support OAuth` }, 400)
|
||||
}
|
||||
const status = await MCP.authenticate(name)
|
||||
return c.json(status)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:name/auth",
|
||||
describeRoute({
|
||||
summary: "Remove MCP OAuth",
|
||||
description: "Remove OAuth credentials for an MCP server",
|
||||
operationId: "mcp.auth.remove",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth credentials removed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.object({ success: z.literal(true) })),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(404),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const name = c.req.param("name")
|
||||
await MCP.removeAuth(name)
|
||||
return c.json({ success: true as const })
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/connect",
|
||||
describeRoute({
|
||||
description: "Connect an MCP server",
|
||||
operationId: "mcp.connect",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP server connected successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ name: z.string() })),
|
||||
async (c) => {
|
||||
const { name } = c.req.valid("param")
|
||||
await MCP.connect(name)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/disconnect",
|
||||
describeRoute({
|
||||
description: "Disconnect an MCP server",
|
||||
operationId: "mcp.disconnect",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP server disconnected successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ name: z.string() })),
|
||||
async (c) => {
|
||||
const { name } = c.req.valid("param")
|
||||
await MCP.disconnect(name)
|
||||
return c.json(true)
|
||||
},
|
||||
),
|
||||
)
|
||||
68 packages/opencode/src/server/routes/permission.ts Normal file
@@ -0,0 +1,68 @@
import { Hono } from "hono"
import { describeRoute, validator, resolver } from "hono-openapi"
import z from "zod"
import { PermissionNext } from "@/permission/next"
import { errors } from "../error"
import { lazy } from "../../util/lazy"

export const PermissionRoutes = lazy(() =>
  new Hono()
    .post(
      "/:requestID/reply",
      describeRoute({
        summary: "Respond to permission request",
        description: "Approve or deny a permission request from the AI assistant.",
        operationId: "permission.reply",
        responses: {
          200: {
            description: "Permission processed successfully",
            content: {
              "application/json": {
                schema: resolver(z.boolean()),
              },
            },
          },
          ...errors(400, 404),
        },
      }),
      validator(
        "param",
        z.object({
          requestID: z.string(),
        }),
      ),
      validator("json", z.object({ reply: PermissionNext.Reply, message: z.string().optional() })),
      async (c) => {
        const params = c.req.valid("param")
        const json = c.req.valid("json")
        await PermissionNext.reply({
          requestID: params.requestID,
          reply: json.reply,
          message: json.message,
        })
        return c.json(true)
      },
    )
    .get(
      "/",
      describeRoute({
        summary: "List pending permissions",
        description: "Get all pending permission requests across all sessions.",
        operationId: "permission.list",
        responses: {
          200: {
            description: "List of pending permissions",
            content: {
              "application/json": {
                schema: resolver(PermissionNext.Request.array()),
              },
            },
          },
        },
      }),
      async (c) => {
        const permissions = await PermissionNext.list()
        return c.json(permissions)
      },
    ),
)
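For orientation, a minimal client sketch of how the two permission routes above could be exercised. The base URL, the /permission mount prefix, the `id` field on the pending-request objects, and the "once" reply value are assumptions for illustration; only the route paths and body shapes come from the diff itself.

// Hypothetical client for the permission routes above (prefix, port, and reply value assumed).
const base = "http://localhost:4096/permission"

// GET / : list pending permission requests (the entry shape is assumed to expose an id).
const pending: Array<{ id: string }> = await (await fetch(base)).json()

// POST /:requestID/reply : approve the first pending request.
if (pending.length > 0) {
  await fetch(`${base}/${pending[0].id}/reply`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ reply: "once", message: "approved from script" }), // "once" is illustrative, not confirmed by PermissionNext.Reply
  })
}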
82 packages/opencode/src/server/routes/project.ts Normal file
@@ -0,0 +1,82 @@
import { Hono } from "hono"
import { describeRoute, validator } from "hono-openapi"
import { resolver } from "hono-openapi"
import { Instance } from "../../project/instance"
import { Project } from "../../project/project"
import z from "zod"
import { errors } from "../error"
import { lazy } from "../../util/lazy"

export const ProjectRoutes = lazy(() =>
  new Hono()
    .get(
      "/",
      describeRoute({
        summary: "List all projects",
        description: "Get a list of projects that have been opened with OpenCode.",
        operationId: "project.list",
        responses: {
          200: {
            description: "List of projects",
            content: {
              "application/json": {
                schema: resolver(Project.Info.array()),
              },
            },
          },
        },
      }),
      async (c) => {
        const projects = await Project.list()
        return c.json(projects)
      },
    )
    .get(
      "/current",
      describeRoute({
        summary: "Get current project",
        description: "Retrieve the currently active project that OpenCode is working with.",
        operationId: "project.current",
        responses: {
          200: {
            description: "Current project information",
            content: {
              "application/json": {
                schema: resolver(Project.Info),
              },
            },
          },
        },
      }),
      async (c) => {
        return c.json(Instance.project)
      },
    )
    .patch(
      "/:projectID",
      describeRoute({
        summary: "Update project",
        description: "Update project properties such as name, icon and color.",
        operationId: "project.update",
        responses: {
          200: {
            description: "Updated project information",
            content: {
              "application/json": {
                schema: resolver(Project.Info),
              },
            },
          },
          ...errors(400, 404),
        },
      }),
      validator("param", z.object({ projectID: z.string() })),
      validator("json", Project.update.schema.omit({ projectID: true })),
      async (c) => {
        const projectID = c.req.valid("param").projectID
        const body = c.req.valid("json")
        const project = await Project.update({ ...body, projectID })
        return c.json(project)
      },
    ),
)
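As a usage sketch, the project update route accepts a PATCH whose body is Project.update.schema without the projectID; per the route description the editable properties include name, icon and color, but the exact field names, the /project mount prefix, the port, and the example project ID below are assumptions.

// Hypothetical call to PATCH /:projectID (prefix, port, ID, and field name assumed).
await fetch("http://localhost:4096/project/prj_123", {
  method: "PATCH",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ name: "renamed-project" }), // field name inferred from the route description
})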
165 packages/opencode/src/server/routes/provider.ts Normal file
@@ -0,0 +1,165 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Config } from "../../config/config"
|
||||
import { Provider } from "../../provider/provider"
|
||||
import { ModelsDev } from "../../provider/models"
|
||||
import { ProviderAuth } from "../../provider/auth"
|
||||
import { mapValues } from "remeda"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const ProviderRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List providers",
|
||||
description: "Get a list of all available AI providers, including both available and connected ones.",
|
||||
operationId: "provider.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of providers",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
all: ModelsDev.Provider.array(),
|
||||
default: z.record(z.string(), z.string()),
|
||||
connected: z.array(z.string()),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const config = await Config.get()
|
||||
const disabled = new Set(config.disabled_providers ?? [])
|
||||
const enabled = config.enabled_providers ? new Set(config.enabled_providers) : undefined
|
||||
|
||||
const allProviders = await ModelsDev.get()
|
||||
const filteredProviders: Record<string, (typeof allProviders)[string]> = {}
|
||||
for (const [key, value] of Object.entries(allProviders)) {
|
||||
if ((enabled ? enabled.has(key) : true) && !disabled.has(key)) {
|
||||
filteredProviders[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
const connected = await Provider.list()
|
||||
const providers = Object.assign(
|
||||
mapValues(filteredProviders, (x) => Provider.fromModelsDevProvider(x)),
|
||||
connected,
|
||||
)
|
||||
return c.json({
|
||||
all: Object.values(providers),
|
||||
default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id),
|
||||
connected: Object.keys(connected),
|
||||
})
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/auth",
|
||||
describeRoute({
|
||||
summary: "Get provider auth methods",
|
||||
description: "Retrieve available authentication methods for all AI providers.",
|
||||
operationId: "provider.auth",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Provider auth methods",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), z.array(ProviderAuth.Method))),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await ProviderAuth.methods())
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:providerID/oauth/authorize",
|
||||
describeRoute({
|
||||
summary: "OAuth authorize",
|
||||
description: "Initiate OAuth authorization for a specific AI provider to get an authorization URL.",
|
||||
operationId: "provider.oauth.authorize",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Authorization URL and method",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(ProviderAuth.Authorization.optional()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
providerID: z.string().meta({ description: "Provider ID" }),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
method: z.number().meta({ description: "Auth method index" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const providerID = c.req.valid("param").providerID
|
||||
const { method } = c.req.valid("json")
|
||||
const result = await ProviderAuth.authorize({
|
||||
providerID,
|
||||
method,
|
||||
})
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:providerID/oauth/callback",
|
||||
describeRoute({
|
||||
summary: "OAuth callback",
|
||||
description: "Handle the OAuth callback from a provider after user authorization.",
|
||||
operationId: "provider.oauth.callback",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth callback processed successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
providerID: z.string().meta({ description: "Provider ID" }),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
method: z.number().meta({ description: "Auth method index" }),
|
||||
code: z.string().optional().meta({ description: "OAuth authorization code" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const providerID = c.req.valid("param").providerID
|
||||
const { method, code } = c.req.valid("json")
|
||||
await ProviderAuth.callback({
|
||||
providerID,
|
||||
method,
|
||||
code,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
),
|
||||
)
|
||||
169 packages/opencode/src/server/routes/pty.ts Normal file
@@ -0,0 +1,169 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import { upgradeWebSocket } from "hono/bun"
|
||||
import z from "zod"
|
||||
import { Pty } from "@/pty"
|
||||
import { Storage } from "../../storage/storage"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const PtyRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List PTY sessions",
|
||||
description: "Get a list of all active pseudo-terminal (PTY) sessions managed by OpenCode.",
|
||||
operationId: "pty.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of sessions",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Pty.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(Pty.list())
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Create PTY session",
|
||||
description: "Create a new pseudo-terminal (PTY) session for running shell commands and processes.",
|
||||
operationId: "pty.create",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Created session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Pty.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", Pty.CreateInput),
|
||||
async (c) => {
|
||||
const info = await Pty.create(c.req.valid("json"))
|
||||
return c.json(info)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:ptyID",
|
||||
describeRoute({
|
||||
summary: "Get PTY session",
|
||||
description: "Retrieve detailed information about a specific pseudo-terminal (PTY) session.",
|
||||
operationId: "pty.get",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Session info",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Pty.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(404),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ ptyID: z.string() })),
|
||||
async (c) => {
|
||||
const info = Pty.get(c.req.valid("param").ptyID)
|
||||
if (!info) {
|
||||
throw new Storage.NotFoundError({ message: "Session not found" })
|
||||
}
|
||||
return c.json(info)
|
||||
},
|
||||
)
|
||||
.put(
|
||||
"/:ptyID",
|
||||
describeRoute({
|
||||
summary: "Update PTY session",
|
||||
description: "Update properties of an existing pseudo-terminal (PTY) session.",
|
||||
operationId: "pty.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Updated session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Pty.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ ptyID: z.string() })),
|
||||
validator("json", Pty.UpdateInput),
|
||||
async (c) => {
|
||||
const info = await Pty.update(c.req.valid("param").ptyID, c.req.valid("json"))
|
||||
return c.json(info)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:ptyID",
|
||||
describeRoute({
|
||||
summary: "Remove PTY session",
|
||||
description: "Remove and terminate a specific pseudo-terminal (PTY) session.",
|
||||
operationId: "pty.remove",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Session removed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(404),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ ptyID: z.string() })),
|
||||
async (c) => {
|
||||
await Pty.remove(c.req.valid("param").ptyID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:ptyID/connect",
|
||||
describeRoute({
|
||||
summary: "Connect to PTY session",
|
||||
description: "Establish a WebSocket connection to interact with a pseudo-terminal (PTY) session in real-time.",
|
||||
operationId: "pty.connect",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Connected session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(404),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ ptyID: z.string() })),
|
||||
upgradeWebSocket((c) => {
|
||||
const id = c.req.param("ptyID")
|
||||
let handler: ReturnType<typeof Pty.connect>
|
||||
if (!Pty.get(id)) throw new Error("Session not found")
|
||||
return {
|
||||
onOpen(_event, ws) {
|
||||
handler = Pty.connect(id, ws)
|
||||
},
|
||||
onMessage(event) {
|
||||
handler?.onMessage(String(event.data))
|
||||
},
|
||||
onClose() {
|
||||
handler?.onClose()
|
||||
},
|
||||
}
|
||||
}),
|
||||
),
|
||||
)
|
||||
98 packages/opencode/src/server/routes/question.ts Normal file
@@ -0,0 +1,98 @@
import { Hono } from "hono"
import { describeRoute, validator } from "hono-openapi"
import { resolver } from "hono-openapi"
import { Question } from "../../question"
import z from "zod"
import { errors } from "../error"
import { lazy } from "../../util/lazy"

export const QuestionRoutes = lazy(() =>
  new Hono()
    .get(
      "/",
      describeRoute({
        summary: "List pending questions",
        description: "Get all pending question requests across all sessions.",
        operationId: "question.list",
        responses: {
          200: {
            description: "List of pending questions",
            content: {
              "application/json": {
                schema: resolver(Question.Request.array()),
              },
            },
          },
        },
      }),
      async (c) => {
        const questions = await Question.list()
        return c.json(questions)
      },
    )
    .post(
      "/:requestID/reply",
      describeRoute({
        summary: "Reply to question request",
        description: "Provide answers to a question request from the AI assistant.",
        operationId: "question.reply",
        responses: {
          200: {
            description: "Question answered successfully",
            content: {
              "application/json": {
                schema: resolver(z.boolean()),
              },
            },
          },
          ...errors(400, 404),
        },
      }),
      validator(
        "param",
        z.object({
          requestID: z.string(),
        }),
      ),
      validator("json", Question.Reply),
      async (c) => {
        const params = c.req.valid("param")
        const json = c.req.valid("json")
        await Question.reply({
          requestID: params.requestID,
          answers: json.answers,
        })
        return c.json(true)
      },
    )
    .post(
      "/:requestID/reject",
      describeRoute({
        summary: "Reject question request",
        description: "Reject a question request from the AI assistant.",
        operationId: "question.reject",
        responses: {
          200: {
            description: "Question rejected successfully",
            content: {
              "application/json": {
                schema: resolver(z.boolean()),
              },
            },
          },
          ...errors(400, 404),
        },
      }),
      validator(
        "param",
        z.object({
          requestID: z.string(),
        }),
      ),
      async (c) => {
        const params = c.req.valid("param")
        await Question.reject(params.requestID)
        return c.json(true)
      },
    ),
)
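A short sketch of driving the question routes above: list pending questions, then answer one. The handler reads an `answers` field from the JSON body, which is taken from the diff; the element type of that array, the /question mount prefix, the port, and the `id` field on the listed entries are assumptions.

// Hypothetical client for the question routes above (prefix, port, and answer shape assumed).
const qbase = "http://localhost:4096/question"
const questions: Array<{ id: string }> = await (await fetch(qbase)).json()
if (questions.length > 0) {
  // POST /:requestID/reply with the answers expected by Question.Reply
  await fetch(`${qbase}/${questions[0].id}/reply`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ answers: ["yes"] }), // element type assumed
  })
}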
935 packages/opencode/src/server/routes/session.ts Normal file
@@ -0,0 +1,935 @@
|
||||
import { Hono } from "hono"
|
||||
import { stream } from "hono/streaming"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Session } from "../../session"
|
||||
import { MessageV2 } from "../../session/message-v2"
|
||||
import { SessionPrompt } from "../../session/prompt"
|
||||
import { SessionCompaction } from "../../session/compaction"
|
||||
import { SessionRevert } from "../../session/revert"
|
||||
import { SessionStatus } from "@/session/status"
|
||||
import { SessionSummary } from "@/session/summary"
|
||||
import { Todo } from "../../session/todo"
|
||||
import { Agent } from "../../agent/agent"
|
||||
import { Snapshot } from "@/snapshot"
|
||||
import { Log } from "../../util/log"
|
||||
import { PermissionNext } from "@/permission/next"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
const log = Log.create({ service: "server" })
|
||||
|
||||
export const SessionRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List sessions",
|
||||
description: "Get a list of all OpenCode sessions, sorted by most recently updated.",
|
||||
operationId: "session.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of sessions",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
directory: z.string().optional().meta({ description: "Filter sessions by project directory" }),
|
||||
roots: z.coerce.boolean().optional().meta({ description: "Only return root sessions (no parentID)" }),
|
||||
start: z.coerce
|
||||
.number()
|
||||
.optional()
|
||||
.meta({ description: "Filter sessions updated on or after this timestamp (milliseconds since epoch)" }),
|
||||
search: z.string().optional().meta({ description: "Filter sessions by title (case-insensitive)" }),
|
||||
limit: z.coerce.number().optional().meta({ description: "Maximum number of sessions to return" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const query = c.req.valid("query")
|
||||
const term = query.search?.toLowerCase()
|
||||
const sessions: Session.Info[] = []
|
||||
for await (const session of Session.list()) {
|
||||
if (query.directory !== undefined && session.directory !== query.directory) continue
|
||||
if (query.roots && session.parentID) continue
|
||||
if (query.start !== undefined && session.time.updated < query.start) continue
|
||||
if (term !== undefined && !session.title.toLowerCase().includes(term)) continue
|
||||
sessions.push(session)
|
||||
if (query.limit !== undefined && sessions.length >= query.limit) break
|
||||
}
|
||||
return c.json(sessions)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/status",
|
||||
describeRoute({
|
||||
summary: "Get session status",
|
||||
description: "Retrieve the current status of all sessions, including active, idle, and completed states.",
|
||||
operationId: "session.status",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Get session status",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), SessionStatus.Info)),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const result = SessionStatus.list()
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID",
|
||||
describeRoute({
|
||||
summary: "Get session",
|
||||
description: "Retrieve detailed information about a specific OpenCode session.",
|
||||
tags: ["Session"],
|
||||
operationId: "session.get",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Get session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.get.schema,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
log.info("SEARCH", { url: c.req.url })
|
||||
const session = await Session.get(sessionID)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/children",
|
||||
describeRoute({
|
||||
summary: "Get session children",
|
||||
tags: ["Session"],
|
||||
description: "Retrieve all child sessions that were forked from the specified parent session.",
|
||||
operationId: "session.children",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of children",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.children.schema,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const session = await Session.children(sessionID)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/todo",
|
||||
describeRoute({
|
||||
summary: "Get session todos",
|
||||
description: "Retrieve the todo list associated with a specific session, showing tasks and action items.",
|
||||
operationId: "session.todo",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Todo list",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Todo.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const todos = await Todo.get(sessionID)
|
||||
return c.json(todos)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Create session",
|
||||
description: "Create a new OpenCode session for interacting with AI assistants and managing conversations.",
|
||||
operationId: "session.create",
|
||||
responses: {
|
||||
...errors(400),
|
||||
200: {
|
||||
description: "Successfully created session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("json", Session.create.schema.optional()),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json") ?? {}
|
||||
const session = await Session.create(body)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:sessionID",
|
||||
describeRoute({
|
||||
summary: "Delete session",
|
||||
description: "Delete a session and permanently remove all associated data, including messages and history.",
|
||||
operationId: "session.delete",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully deleted session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.remove.schema,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
await Session.remove(sessionID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.patch(
|
||||
"/:sessionID",
|
||||
describeRoute({
|
||||
summary: "Update session",
|
||||
description: "Update properties of an existing session, such as title or other metadata.",
|
||||
operationId: "session.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully updated session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
title: z.string().optional(),
|
||||
time: z
|
||||
.object({
|
||||
archived: z.number().optional(),
|
||||
})
|
||||
.optional(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const updates = c.req.valid("json")
|
||||
|
||||
const updatedSession = await Session.update(sessionID, (session) => {
|
||||
if (updates.title !== undefined) {
|
||||
session.title = updates.title
|
||||
}
|
||||
if (updates.time?.archived !== undefined) session.time.archived = updates.time.archived
|
||||
})
|
||||
|
||||
return c.json(updatedSession)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/init",
|
||||
describeRoute({
|
||||
summary: "Initialize session",
|
||||
description:
|
||||
"Analyze the current application and create an AGENTS.md file with project-specific agent configurations.",
|
||||
operationId: "session.init",
|
||||
responses: {
|
||||
200: {
|
||||
description: "200",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", Session.initialize.schema.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
await Session.initialize({ ...body, sessionID })
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/fork",
|
||||
describeRoute({
|
||||
summary: "Fork session",
|
||||
description: "Create a new session by forking an existing session at a specific message point.",
|
||||
operationId: "session.fork",
|
||||
responses: {
|
||||
200: {
|
||||
description: "200",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.fork.schema.shape.sessionID,
|
||||
}),
|
||||
),
|
||||
validator("json", Session.fork.schema.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const result = await Session.fork({ ...body, sessionID })
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/abort",
|
||||
describeRoute({
|
||||
summary: "Abort session",
|
||||
description: "Abort an active session and stop any ongoing AI processing or command execution.",
|
||||
operationId: "session.abort",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Aborted session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
SessionPrompt.cancel(c.req.valid("param").sessionID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/share",
|
||||
describeRoute({
|
||||
summary: "Share session",
|
||||
description: "Create a shareable link for a session, allowing others to view the conversation.",
|
||||
operationId: "session.share",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully shared session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
await Session.share(sessionID)
|
||||
const session = await Session.get(sessionID)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/diff",
|
||||
describeRoute({
|
||||
summary: "Get message diff",
|
||||
description: "Get the file changes (diff) that resulted from a specific user message in the session.",
|
||||
operationId: "session.diff",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully retrieved diff",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Snapshot.FileDiff.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: SessionSummary.diff.schema.shape.sessionID,
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
messageID: SessionSummary.diff.schema.shape.messageID,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const query = c.req.valid("query")
|
||||
const params = c.req.valid("param")
|
||||
const result = await SessionSummary.diff({
|
||||
sessionID: params.sessionID,
|
||||
messageID: query.messageID,
|
||||
})
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:sessionID/share",
|
||||
describeRoute({
|
||||
summary: "Unshare session",
|
||||
description: "Remove the shareable link for a session, making it private again.",
|
||||
operationId: "session.unshare",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully unshared session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.unshare.schema,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
await Session.unshare(sessionID)
|
||||
const session = await Session.get(sessionID)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/summarize",
|
||||
describeRoute({
|
||||
summary: "Summarize session",
|
||||
description: "Generate a concise summary of the session using AI compaction to preserve key information.",
|
||||
operationId: "session.summarize",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Summarized session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
providerID: z.string(),
|
||||
modelID: z.string(),
|
||||
auto: z.boolean().optional().default(false),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const session = await Session.get(sessionID)
|
||||
await SessionRevert.cleanup(session)
|
||||
const msgs = await Session.messages({ sessionID })
|
||||
let currentAgent = await Agent.defaultAgent()
|
||||
for (let i = msgs.length - 1; i >= 0; i--) {
|
||||
const info = msgs[i].info
|
||||
if (info.role === "user") {
|
||||
currentAgent = info.agent || (await Agent.defaultAgent())
|
||||
break
|
||||
}
|
||||
}
|
||||
await SessionCompaction.create({
|
||||
sessionID,
|
||||
agent: currentAgent,
|
||||
model: {
|
||||
providerID: body.providerID,
|
||||
modelID: body.modelID,
|
||||
},
|
||||
auto: body.auto,
|
||||
})
|
||||
await SessionPrompt.loop(sessionID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/message",
|
||||
describeRoute({
|
||||
summary: "Get session messages",
|
||||
description: "Retrieve all messages in a session, including user prompts and AI responses.",
|
||||
operationId: "session.messages",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of messages",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MessageV2.WithParts.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
limit: z.coerce.number().optional(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const query = c.req.valid("query")
|
||||
const messages = await Session.messages({
|
||||
sessionID: c.req.valid("param").sessionID,
|
||||
limit: query.limit,
|
||||
})
|
||||
return c.json(messages)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/message/:messageID",
|
||||
describeRoute({
|
||||
summary: "Get message",
|
||||
description: "Retrieve a specific message from a session by its message ID.",
|
||||
operationId: "session.message",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Message",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
info: MessageV2.Info,
|
||||
parts: MessageV2.Part.array(),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
messageID: z.string().meta({ description: "Message ID" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
const message = await MessageV2.get({
|
||||
sessionID: params.sessionID,
|
||||
messageID: params.messageID,
|
||||
})
|
||||
return c.json(message)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:sessionID/message/:messageID/part/:partID",
|
||||
describeRoute({
|
||||
description: "Delete a part from a message",
|
||||
operationId: "part.delete",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully deleted part",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
messageID: z.string().meta({ description: "Message ID" }),
|
||||
partID: z.string().meta({ description: "Part ID" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
await Session.removePart({
|
||||
sessionID: params.sessionID,
|
||||
messageID: params.messageID,
|
||||
partID: params.partID,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.patch(
|
||||
"/:sessionID/message/:messageID/part/:partID",
|
||||
describeRoute({
|
||||
description: "Update a part in a message",
|
||||
operationId: "part.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully updated part",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MessageV2.Part),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
messageID: z.string().meta({ description: "Message ID" }),
|
||||
partID: z.string().meta({ description: "Part ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", MessageV2.Part),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
const body = c.req.valid("json")
|
||||
if (body.id !== params.partID || body.messageID !== params.messageID || body.sessionID !== params.sessionID) {
|
||||
throw new Error(
|
||||
`Part mismatch: body.id='${body.id}' vs partID='${params.partID}', body.messageID='${body.messageID}' vs messageID='${params.messageID}', body.sessionID='${body.sessionID}' vs sessionID='${params.sessionID}'`,
|
||||
)
|
||||
}
|
||||
const part = await Session.updatePart(body)
|
||||
return c.json(part)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/message",
|
||||
describeRoute({
|
||||
summary: "Send message",
|
||||
description: "Create and send a new message to a session, streaming the AI response.",
|
||||
operationId: "session.prompt",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Created message",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
info: MessageV2.Assistant,
|
||||
parts: MessageV2.Part.array(),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionPrompt.PromptInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
c.status(200)
|
||||
c.header("Content-Type", "application/json")
|
||||
return stream(c, async (stream) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const msg = await SessionPrompt.prompt({ ...body, sessionID })
|
||||
stream.write(JSON.stringify(msg))
|
||||
})
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/prompt_async",
|
||||
describeRoute({
|
||||
summary: "Send async message",
|
||||
description:
|
||||
"Create and send a new message to a session asynchronously, starting the session if needed and returning immediately.",
|
||||
operationId: "session.prompt_async",
|
||||
responses: {
|
||||
204: {
|
||||
description: "Prompt accepted",
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionPrompt.PromptInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
c.status(204)
|
||||
c.header("Content-Type", "application/json")
|
||||
return stream(c, async () => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
SessionPrompt.prompt({ ...body, sessionID })
|
||||
})
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/command",
|
||||
describeRoute({
|
||||
summary: "Send command",
|
||||
description: "Send a new command to a session for execution by the AI assistant.",
|
||||
operationId: "session.command",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Created message",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
info: MessageV2.Assistant,
|
||||
parts: MessageV2.Part.array(),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionPrompt.CommandInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const msg = await SessionPrompt.command({ ...body, sessionID })
|
||||
return c.json(msg)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/shell",
|
||||
describeRoute({
|
||||
summary: "Run shell command",
|
||||
description: "Execute a shell command within the session context and return the AI's response.",
|
||||
operationId: "session.shell",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Created message",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MessageV2.Assistant),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionPrompt.ShellInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const msg = await SessionPrompt.shell({ ...body, sessionID })
|
||||
return c.json(msg)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/revert",
|
||||
describeRoute({
|
||||
summary: "Revert message",
|
||||
description: "Revert a specific message in a session, undoing its effects and restoring the previous state.",
|
||||
operationId: "session.revert",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Updated session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionRevert.RevertInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
log.info("revert", c.req.valid("json"))
|
||||
const session = await SessionRevert.revert({
|
||||
sessionID,
|
||||
...c.req.valid("json"),
|
||||
})
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/unrevert",
|
||||
describeRoute({
|
||||
summary: "Restore reverted messages",
|
||||
description: "Restore all previously reverted messages in a session.",
|
||||
operationId: "session.unrevert",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Updated session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const session = await SessionRevert.unrevert({ sessionID })
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/permissions/:permissionID",
|
||||
describeRoute({
|
||||
summary: "Respond to permission",
|
||||
deprecated: true,
|
||||
description: "Approve or deny a permission request from the AI assistant.",
|
||||
operationId: "permission.respond",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Permission processed successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
permissionID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator("json", z.object({ response: PermissionNext.Reply })),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
PermissionNext.reply({
|
||||
requestID: params.permissionID,
|
||||
reply: c.req.valid("json").response,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
),
|
||||
)
|
||||
377
packages/opencode/src/server/routes/tui.ts
Normal file
@@ -0,0 +1,377 @@
|
||||
import { Hono, type Context } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Bus } from "../../bus"
|
||||
import { Session } from "../../session"
|
||||
import { TuiEvent } from "@/cli/cmd/tui/event"
|
||||
import { AsyncQueue } from "../../util/queue"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
const TuiRequest = z.object({
|
||||
path: z.string(),
|
||||
body: z.any(),
|
||||
})
|
||||
|
||||
type TuiRequest = z.infer<typeof TuiRequest>
|
||||
|
||||
const request = new AsyncQueue<TuiRequest>()
|
||||
const response = new AsyncQueue<any>()
|
||||
|
||||
export async function callTui(ctx: Context) {
|
||||
const body = await ctx.req.json()
|
||||
request.push({
|
||||
path: ctx.req.path,
|
||||
body,
|
||||
})
|
||||
return response.next()
|
||||
}
|
||||
|
||||
const TuiControlRoutes = new Hono()
|
||||
.get(
|
||||
"/next",
|
||||
describeRoute({
|
||||
summary: "Get next TUI request",
|
||||
description: "Retrieve the next TUI (Terminal User Interface) request from the queue for processing.",
|
||||
operationId: "tui.control.next",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Next TUI request",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(TuiRequest),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const req = await request.next()
|
||||
return c.json(req)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/response",
|
||||
describeRoute({
|
||||
summary: "Submit TUI response",
|
||||
description: "Submit a response to the TUI request queue to complete a pending request.",
|
||||
operationId: "tui.control.response",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Response submitted successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("json", z.any()),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
response.push(body)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
|
||||
export const TuiRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.post(
|
||||
"/append-prompt",
|
||||
describeRoute({
|
||||
summary: "Append TUI prompt",
|
||||
description: "Append prompt to the TUI",
|
||||
operationId: "tui.appendPrompt",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Prompt processed successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", TuiEvent.PromptAppend.properties),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.PromptAppend, c.req.valid("json"))
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/open-help",
|
||||
describeRoute({
|
||||
summary: "Open help dialog",
|
||||
description: "Open the help dialog in the TUI to display user assistance information.",
|
||||
operationId: "tui.openHelp",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Help dialog opened successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "help.show",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/open-sessions",
|
||||
describeRoute({
|
||||
summary: "Open sessions dialog",
|
||||
description: "Open the session dialog",
|
||||
operationId: "tui.openSessions",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Session dialog opened successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "session.list",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/open-themes",
|
||||
describeRoute({
|
||||
summary: "Open themes dialog",
|
||||
description: "Open the theme dialog",
|
||||
operationId: "tui.openThemes",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Theme dialog opened successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "session.list",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/open-models",
|
||||
describeRoute({
|
||||
summary: "Open models dialog",
|
||||
description: "Open the model dialog",
|
||||
operationId: "tui.openModels",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Model dialog opened successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "model.list",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/submit-prompt",
|
||||
describeRoute({
|
||||
summary: "Submit TUI prompt",
|
||||
description: "Submit the prompt",
|
||||
operationId: "tui.submitPrompt",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Prompt submitted successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "prompt.submit",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/clear-prompt",
|
||||
describeRoute({
|
||||
summary: "Clear TUI prompt",
|
||||
description: "Clear the prompt",
|
||||
operationId: "tui.clearPrompt",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Prompt cleared successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "prompt.clear",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/execute-command",
|
||||
describeRoute({
|
||||
summary: "Execute TUI command",
|
||||
description: "Execute a TUI command (e.g. agent_cycle)",
|
||||
operationId: "tui.executeCommand",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Command executed successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", z.object({ command: z.string() })),
|
||||
async (c) => {
|
||||
const command = c.req.valid("json").command
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
// @ts-expect-error
|
||||
command: {
|
||||
session_new: "session.new",
|
||||
session_share: "session.share",
|
||||
session_interrupt: "session.interrupt",
|
||||
session_compact: "session.compact",
|
||||
messages_page_up: "session.page.up",
|
||||
messages_page_down: "session.page.down",
|
||||
messages_half_page_up: "session.half.page.up",
|
||||
messages_half_page_down: "session.half.page.down",
|
||||
messages_first: "session.first",
|
||||
messages_last: "session.last",
|
||||
agent_cycle: "agent.cycle",
|
||||
}[command],
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/show-toast",
|
||||
describeRoute({
|
||||
summary: "Show TUI toast",
|
||||
description: "Show a toast notification in the TUI",
|
||||
operationId: "tui.showToast",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Toast notification shown successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("json", TuiEvent.ToastShow.properties),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.ToastShow, c.req.valid("json"))
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/publish",
|
||||
describeRoute({
|
||||
summary: "Publish TUI event",
|
||||
description: "Publish a TUI event",
|
||||
operationId: "tui.publish",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Event published successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"json",
|
||||
z.union(
|
||||
Object.values(TuiEvent).map((def) => {
|
||||
return z
|
||||
.object({
|
||||
type: z.literal(def.type),
|
||||
properties: def.properties,
|
||||
})
|
||||
.meta({
|
||||
ref: "Event" + "." + def.type,
|
||||
})
|
||||
}),
|
||||
),
|
||||
),
|
||||
async (c) => {
|
||||
const evt = c.req.valid("json")
|
||||
await Bus.publish(Object.values(TuiEvent).find((def) => def.type === evt.type)!, evt.properties)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/select-session",
|
||||
describeRoute({
|
||||
summary: "Select session",
|
||||
description: "Navigate the TUI to display the specified session.",
|
||||
operationId: "tui.selectSession",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Session selected successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator("json", TuiEvent.SessionSelect.properties),
|
||||
async (c) => {
|
||||
const { sessionID } = c.req.valid("json")
|
||||
await Session.get(sessionID)
|
||||
await Bus.publish(TuiEvent.SessionSelect, { sessionID })
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.route("/control", TuiControlRoutes),
|
||||
)
|
||||
File diff suppressed because it is too large
@@ -1,71 +0,0 @@
|
||||
import { Hono, type Context } from "hono"
|
||||
import { describeRoute, resolver, validator } from "hono-openapi"
|
||||
import { z } from "zod"
|
||||
import { AsyncQueue } from "../util/queue"
|
||||
|
||||
const TuiRequest = z.object({
|
||||
path: z.string(),
|
||||
body: z.any(),
|
||||
})
|
||||
|
||||
type TuiRequest = z.infer<typeof TuiRequest>
|
||||
|
||||
const request = new AsyncQueue<TuiRequest>()
|
||||
const response = new AsyncQueue<any>()
|
||||
|
||||
export async function callTui(ctx: Context) {
|
||||
const body = await ctx.req.json()
|
||||
request.push({
|
||||
path: ctx.req.path,
|
||||
body,
|
||||
})
|
||||
return response.next()
|
||||
}
|
||||
|
||||
export const TuiRoute = new Hono()
|
||||
.get(
|
||||
"/next",
|
||||
describeRoute({
|
||||
summary: "Get next TUI request",
|
||||
description: "Retrieve the next TUI (Terminal User Interface) request from the queue for processing.",
|
||||
operationId: "tui.control.next",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Next TUI request",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(TuiRequest),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const req = await request.next()
|
||||
return c.json(req)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/response",
|
||||
describeRoute({
|
||||
summary: "Submit TUI response",
|
||||
description: "Submit a response to the TUI request queue to complete a pending request.",
|
||||
operationId: "tui.control.response",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Response submitted successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("json", z.any()),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
response.push(body)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
@@ -1,14 +1,7 @@
import { BusEvent } from "@/bus/bus-event"
import z from "zod"
import { NamedError } from "@opencode-ai/util/error"
import {
  APICallError,
  convertToModelMessages,
  LoadAPIKeyError,
  type ModelMessage,
  type ToolSet,
  type UIMessage,
} from "ai"
import { APICallError, convertToModelMessages, LoadAPIKeyError, type ModelMessage, type UIMessage } from "ai"
import { Identifier } from "../id/id"
import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot"
@@ -439,7 +432,7 @@ export namespace MessageV2 {
|
||||
})
|
||||
export type WithParts = z.infer<typeof WithParts>
|
||||
|
||||
export function toModelMessage(input: WithParts[], options?: { tools?: ToolSet }): ModelMessage[] {
|
||||
export function toModelMessage(input: WithParts[]): ModelMessage[] {
|
||||
const result: UIMessage[] = []
|
||||
|
||||
for (const msg of input) {
|
||||
@@ -510,14 +503,30 @@ export namespace MessageV2 {
|
||||
})
|
||||
if (part.type === "tool") {
|
||||
if (part.state.status === "completed") {
|
||||
if (part.state.attachments?.length) {
|
||||
result.push({
|
||||
id: Identifier.ascending("message"),
|
||||
role: "user",
|
||||
parts: [
|
||||
{
|
||||
type: "text",
|
||||
text: `The tool ${part.tool} returned the following attachments:`,
|
||||
},
|
||||
...part.state.attachments.map((attachment) => ({
|
||||
type: "file" as const,
|
||||
url: attachment.url,
|
||||
mediaType: attachment.mime,
|
||||
filename: attachment.filename,
|
||||
})),
|
||||
],
|
||||
})
|
||||
}
|
||||
assistantMessage.parts.push({
|
||||
type: ("tool-" + part.tool) as `tool-${string}`,
|
||||
state: "output-available",
|
||||
toolCallId: part.callID,
|
||||
input: part.state.input,
|
||||
output: part.state.time.compacted
|
||||
? { output: "[Old tool result content cleared]" }
|
||||
: { output: part.state.output, attachments: part.state.attachments },
|
||||
output: part.state.time.compacted ? "[Old tool result content cleared]" : part.state.output,
|
||||
callProviderMetadata: part.metadata,
|
||||
})
|
||||
}
|
||||
@@ -556,10 +565,7 @@ export namespace MessageV2 {
|
||||
}
|
||||
}
|
||||
|
||||
return convertToModelMessages(
|
||||
result.filter((msg) => msg.parts.some((part) => part.type !== "step-start")),
|
||||
{ tools: options?.tools },
|
||||
)
|
||||
return convertToModelMessages(result.filter((msg) => msg.parts.some((part) => part.type !== "step-start")))
|
||||
}
|
||||
|
||||
export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
|
||||
|
||||
@@ -597,7 +597,7 @@ export namespace SessionPrompt {
|
||||
sessionID,
|
||||
system: [...(await SystemPrompt.environment()), ...(await SystemPrompt.custom())],
|
||||
messages: [
|
||||
...MessageV2.toModelMessage(sessionMessages, { tools }),
|
||||
...MessageV2.toModelMessage(sessionMessages),
|
||||
...(isLastStep
|
||||
? [
|
||||
{
|
||||
@@ -685,7 +685,10 @@ export namespace SessionPrompt {
|
||||
},
|
||||
})
|
||||
|
||||
for (const item of await ToolRegistry.tools(input.model.providerID, input.agent)) {
|
||||
for (const item of await ToolRegistry.tools(
|
||||
{ modelID: input.model.api.id, providerID: input.model.providerID },
|
||||
input.agent,
|
||||
)) {
|
||||
const schema = ProviderTransform.schema(input.model, z.toJSONSchema(item.parameters))
|
||||
tools[item.id] = tool({
|
||||
id: item.id as any,
|
||||
@@ -718,22 +721,8 @@ export namespace SessionPrompt {
|
||||
},
|
||||
toModelOutput(result) {
|
||||
return {
|
||||
type: "content",
|
||||
value: [
|
||||
{
|
||||
type: "text",
|
||||
text: result.output,
|
||||
},
|
||||
...(result.attachments?.map((attachment: MessageV2.FilePart) => {
|
||||
const base64 = attachment.url.startsWith("data:") ? attachment.url.split(",", 2)[1] : attachment.url
|
||||
|
||||
return {
|
||||
type: "media",
|
||||
data: base64,
|
||||
mediaType: attachment.mime,
|
||||
}
|
||||
}) ?? []),
|
||||
],
|
||||
type: "text",
|
||||
value: result.output,
|
||||
}
|
||||
},
|
||||
})
|
||||
@@ -822,22 +811,8 @@ export namespace SessionPrompt {
|
||||
}
|
||||
item.toModelOutput = (result) => {
|
||||
return {
|
||||
type: "content",
|
||||
value: [
|
||||
{
|
||||
type: "text",
|
||||
text: result.output,
|
||||
},
|
||||
...(result.attachments?.map((attachment: MessageV2.FilePart) => {
|
||||
const base64 = attachment.url.startsWith("data:") ? attachment.url.split(",", 2)[1] : attachment.url
|
||||
|
||||
return {
|
||||
type: "media",
|
||||
data: base64,
|
||||
mediaType: attachment.mime,
|
||||
}
|
||||
}) ?? []),
|
||||
],
|
||||
type: "text",
|
||||
value: result.output,
|
||||
}
|
||||
}
|
||||
tools[key] = item
|
||||
|
||||
@@ -5,6 +5,7 @@ You are an interactive CLI tool that helps users with software engineering tasks
## Editing constraints
- Default to ASCII when editing or creating files. Only introduce non-ASCII or other Unicode characters when there is a clear justification and the file already uses them.
- Only add comments if they are necessary to make a non-obvious block easier to understand.
- Try to use apply_patch for single file edits, but it is fine to explore other options to make the edit if it does not work well. Do not use apply_patch for changes that are auto-generated (i.e. generating package.json or running a lint or format command like gofmt) or when scripting is more efficient (such as search and replacing a string across a codebase).

## Tool usage
- Prefer specialized tools over shell for file operations:
277
packages/opencode/src/tool/apply_patch.ts
Normal file
@@ -0,0 +1,277 @@
|
||||
import z from "zod"
|
||||
import * as path from "path"
|
||||
import * as fs from "fs/promises"
|
||||
import { Tool } from "./tool"
|
||||
import { FileTime } from "../file/time"
|
||||
import { Bus } from "../bus"
|
||||
import { FileWatcher } from "../file/watcher"
|
||||
import { Instance } from "../project/instance"
|
||||
import { Patch } from "../patch"
|
||||
import { createTwoFilesPatch, diffLines } from "diff"
|
||||
import { assertExternalDirectory } from "./external-directory"
|
||||
import { trimDiff } from "./edit"
|
||||
import { LSP } from "../lsp"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
|
||||
const PatchParams = z.object({
|
||||
patchText: z.string().describe("The full patch text that describes all changes to be made"),
|
||||
})
|
||||
|
||||
export const ApplyPatchTool = Tool.define("apply_patch", {
|
||||
description: "Use the `apply_patch` tool to edit files. This is a FREEFORM tool, so do not wrap the patch in JSON.",
|
||||
parameters: PatchParams,
|
||||
async execute(params, ctx) {
|
||||
if (!params.patchText) {
|
||||
throw new Error("patchText is required")
|
||||
}
|
||||
|
||||
// Parse the patch to get hunks
|
||||
let hunks: Patch.Hunk[]
|
||||
try {
|
||||
const parseResult = Patch.parsePatch(params.patchText)
|
||||
hunks = parseResult.hunks
|
||||
} catch (error) {
|
||||
throw new Error(`apply_patch verification failed: ${error}`)
|
||||
}
|
||||
|
||||
if (hunks.length === 0) {
|
||||
const normalized = params.patchText.replace(/\r\n/g, "\n").replace(/\r/g, "\n").trim()
|
||||
if (normalized === "*** Begin Patch\n*** End Patch") {
|
||||
throw new Error("patch rejected: empty patch")
|
||||
}
|
||||
throw new Error("apply_patch verification failed: no hunks found")
|
||||
}
|
||||
|
||||
// Validate file paths and check permissions
|
||||
const fileChanges: Array<{
|
||||
filePath: string
|
||||
oldContent: string
|
||||
newContent: string
|
||||
type: "add" | "update" | "delete" | "move"
|
||||
movePath?: string
|
||||
diff: string
|
||||
additions: number
|
||||
deletions: number
|
||||
}> = []
|
||||
|
||||
let totalDiff = ""
|
||||
|
||||
for (const hunk of hunks) {
|
||||
const filePath = path.resolve(Instance.directory, hunk.path)
|
||||
await assertExternalDirectory(ctx, filePath)
|
||||
|
||||
switch (hunk.type) {
|
||||
case "add": {
|
||||
const oldContent = ""
|
||||
const newContent =
|
||||
hunk.contents.length === 0 || hunk.contents.endsWith("\n") ? hunk.contents : `${hunk.contents}\n`
|
||||
const diff = trimDiff(createTwoFilesPatch(filePath, filePath, oldContent, newContent))
|
||||
|
||||
let additions = 0
|
||||
let deletions = 0
|
||||
for (const change of diffLines(oldContent, newContent)) {
|
||||
if (change.added) additions += change.count || 0
|
||||
if (change.removed) deletions += change.count || 0
|
||||
}
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: "add",
|
||||
diff,
|
||||
additions,
|
||||
deletions,
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
break
|
||||
}
|
||||
|
||||
case "update": {
|
||||
// Check if file exists for update
|
||||
const stats = await fs.stat(filePath).catch(() => null)
|
||||
if (!stats || stats.isDirectory()) {
|
||||
throw new Error(`apply_patch verification failed: Failed to read file to update: ${filePath}`)
|
||||
}
|
||||
|
||||
// Read file and update time tracking (like edit tool does)
|
||||
await FileTime.assert(ctx.sessionID, filePath)
|
||||
const oldContent = await fs.readFile(filePath, "utf-8")
|
||||
let newContent = oldContent
|
||||
|
||||
// Apply the update chunks to get new content
|
||||
try {
|
||||
const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks)
|
||||
newContent = fileUpdate.content
|
||||
} catch (error) {
|
||||
throw new Error(`apply_patch verification failed: ${error}`)
|
||||
}
|
||||
|
||||
const diff = trimDiff(createTwoFilesPatch(filePath, filePath, oldContent, newContent))
|
||||
|
||||
let additions = 0
|
||||
let deletions = 0
|
||||
for (const change of diffLines(oldContent, newContent)) {
|
||||
if (change.added) additions += change.count || 0
|
||||
if (change.removed) deletions += change.count || 0
|
||||
}
|
||||
|
||||
const movePath = hunk.move_path ? path.resolve(Instance.directory, hunk.move_path) : undefined
|
||||
await assertExternalDirectory(ctx, movePath)
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: hunk.move_path ? "move" : "update",
|
||||
movePath,
|
||||
diff,
|
||||
additions,
|
||||
deletions,
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
break
|
||||
}
|
||||
|
||||
case "delete": {
|
||||
const contentToDelete = await fs.readFile(filePath, "utf-8").catch((error) => {
|
||||
throw new Error(`apply_patch verification failed: ${error}`)
|
||||
})
|
||||
const deleteDiff = trimDiff(createTwoFilesPatch(filePath, filePath, contentToDelete, ""))
|
||||
|
||||
const deletions = contentToDelete.split("\n").length
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent: contentToDelete,
|
||||
newContent: "",
|
||||
type: "delete",
|
||||
diff: deleteDiff,
|
||||
additions: 0,
|
||||
deletions,
|
||||
})
|
||||
|
||||
totalDiff += deleteDiff + "\n"
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check permissions if needed
|
||||
await ctx.ask({
|
||||
permission: "edit",
|
||||
patterns: fileChanges.map((c) => path.relative(Instance.worktree, c.filePath)),
|
||||
always: ["*"],
|
||||
metadata: {
|
||||
diff: totalDiff,
|
||||
},
|
||||
})
|
||||
|
||||
// Apply the changes
|
||||
const changedFiles: string[] = []
|
||||
|
||||
for (const change of fileChanges) {
|
||||
switch (change.type) {
|
||||
case "add":
|
||||
// Create parent directories (recursive: true is safe on existing/root dirs)
|
||||
await fs.mkdir(path.dirname(change.filePath), { recursive: true })
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "update":
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "move":
|
||||
if (change.movePath) {
|
||||
// Create parent directories (recursive: true is safe on existing/root dirs)
|
||||
await fs.mkdir(path.dirname(change.movePath), { recursive: true })
|
||||
await fs.writeFile(change.movePath, change.newContent, "utf-8")
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.movePath)
|
||||
}
|
||||
break
|
||||
|
||||
case "delete":
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
}
|
||||
|
||||
// Update file time tracking
|
||||
FileTime.read(ctx.sessionID, change.filePath)
|
||||
if (change.movePath) {
|
||||
FileTime.read(ctx.sessionID, change.movePath)
|
||||
}
|
||||
}
|
||||
|
||||
// Publish file change events
|
||||
for (const filePath of changedFiles) {
|
||||
await Bus.publish(FileWatcher.Event.Updated, { file: filePath, event: "change" })
|
||||
}
|
||||
|
||||
// Notify LSP of file changes and collect diagnostics
|
||||
for (const change of fileChanges) {
|
||||
if (change.type === "delete") continue
|
||||
const target = change.movePath ?? change.filePath
|
||||
await LSP.touchFile(target, true)
|
||||
}
|
||||
const diagnostics = await LSP.diagnostics()
|
||||
|
||||
// Generate output summary
|
||||
const summaryLines = fileChanges.map((change) => {
|
||||
if (change.type === "add") {
|
||||
return `A ${path.relative(Instance.worktree, change.filePath)}`
|
||||
}
|
||||
if (change.type === "delete") {
|
||||
return `D ${path.relative(Instance.worktree, change.filePath)}`
|
||||
}
|
||||
const target = change.movePath ?? change.filePath
|
||||
return `M ${path.relative(Instance.worktree, target)}`
|
||||
})
|
||||
let output = `Success. Updated the following files:\n${summaryLines.join("\n")}`
|
||||
|
||||
// Report LSP errors for changed files
|
||||
const MAX_DIAGNOSTICS_PER_FILE = 20
|
||||
for (const change of fileChanges) {
|
||||
if (change.type === "delete") continue
|
||||
const target = change.movePath ?? change.filePath
|
||||
const normalized = Filesystem.normalizePath(target)
|
||||
const issues = diagnostics[normalized] ?? []
|
||||
const errors = issues.filter((item) => item.severity === 1)
|
||||
if (errors.length > 0) {
|
||||
const limited = errors.slice(0, MAX_DIAGNOSTICS_PER_FILE)
|
||||
const suffix =
|
||||
errors.length > MAX_DIAGNOSTICS_PER_FILE ? `\n... and ${errors.length - MAX_DIAGNOSTICS_PER_FILE} more` : ""
|
||||
output += `\n\nLSP errors detected in ${path.relative(Instance.worktree, target)}, please fix:\n<diagnostics file="${target}">\n${limited.map(LSP.Diagnostic.pretty).join("\n")}${suffix}\n</diagnostics>`
|
||||
}
|
||||
}
|
||||
|
||||
// Build per-file metadata for UI rendering
|
||||
const files = fileChanges.map((change) => ({
|
||||
filePath: change.filePath,
|
||||
relativePath: path.relative(Instance.worktree, change.movePath ?? change.filePath),
|
||||
type: change.type,
|
||||
diff: change.diff,
|
||||
before: change.oldContent,
|
||||
after: change.newContent,
|
||||
additions: change.additions,
|
||||
deletions: change.deletions,
|
||||
movePath: change.movePath,
|
||||
}))
|
||||
|
||||
return {
|
||||
title: output,
|
||||
metadata: {
|
||||
diff: totalDiff,
|
||||
files,
|
||||
diagnostics,
|
||||
},
|
||||
output,
|
||||
}
|
||||
},
|
||||
})
|
||||
1
packages/opencode/src/tool/apply_patch.txt
Normal file
@@ -0,0 +1 @@
Use the `apply_patch` tool to edit files. This is a FREEFORM tool, so do not wrap the patch in JSON.
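(For reference, the freeform envelope accepted here is the one exercised by apply_patch.test.ts later in this diff. A minimal TypeScript sketch of building such a payload follows; the file names and edits are purely illustrative, not taken from the repository.)

const patchText = [
  "*** Begin Patch",
  "*** Add File: notes/hello.txt",
  "+hello world",
  "*** Update File: src/example.ts",
  "@@",
  "-const value = 1",
  "+const value = 2",
  "*** Delete File: obsolete.txt",
  "*** End Patch",
].join("\n")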
@@ -37,7 +37,7 @@ export const BatchTool = Tool.define("batch", async () => {
  const discardedCalls = params.tool_calls.slice(10)

  const { ToolRegistry } = await import("./registry")
  const availableTools = await ToolRegistry.tools("")
  const availableTools = await ToolRegistry.tools({ modelID: "", providerID: "" })
  const toolMap = new Map(availableTools.map((t) => [t.id, t]))

  const executeCall = async (call: (typeof toolCalls)[0]) => {
@@ -1,201 +0,0 @@
|
||||
import z from "zod"
|
||||
import * as path from "path"
|
||||
import * as fs from "fs/promises"
|
||||
import { Tool } from "./tool"
|
||||
import { FileTime } from "../file/time"
|
||||
import { Bus } from "../bus"
|
||||
import { FileWatcher } from "../file/watcher"
|
||||
import { Instance } from "../project/instance"
|
||||
import { Patch } from "../patch"
|
||||
import { createTwoFilesPatch } from "diff"
|
||||
import { assertExternalDirectory } from "./external-directory"
|
||||
|
||||
const PatchParams = z.object({
|
||||
patchText: z.string().describe("The full patch text that describes all changes to be made"),
|
||||
})
|
||||
|
||||
export const PatchTool = Tool.define("patch", {
|
||||
description:
|
||||
"Apply a patch to modify multiple files. Supports adding, updating, and deleting files with context-aware changes.",
|
||||
parameters: PatchParams,
|
||||
async execute(params, ctx) {
|
||||
if (!params.patchText) {
|
||||
throw new Error("patchText is required")
|
||||
}
|
||||
|
||||
// Parse the patch to get hunks
|
||||
let hunks: Patch.Hunk[]
|
||||
try {
|
||||
const parseResult = Patch.parsePatch(params.patchText)
|
||||
hunks = parseResult.hunks
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to parse patch: ${error}`)
|
||||
}
|
||||
|
||||
if (hunks.length === 0) {
|
||||
throw new Error("No file changes found in patch")
|
||||
}
|
||||
|
||||
// Validate file paths and check permissions
|
||||
const fileChanges: Array<{
|
||||
filePath: string
|
||||
oldContent: string
|
||||
newContent: string
|
||||
type: "add" | "update" | "delete" | "move"
|
||||
movePath?: string
|
||||
}> = []
|
||||
|
||||
let totalDiff = ""
|
||||
|
||||
for (const hunk of hunks) {
|
||||
const filePath = path.resolve(Instance.directory, hunk.path)
|
||||
await assertExternalDirectory(ctx, filePath)
|
||||
|
||||
switch (hunk.type) {
|
||||
case "add":
|
||||
if (hunk.type === "add") {
|
||||
const oldContent = ""
|
||||
const newContent = hunk.contents
|
||||
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: "add",
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
}
|
||||
break
|
||||
|
||||
case "update":
|
||||
// Check if file exists for update
|
||||
const stats = await fs.stat(filePath).catch(() => null)
|
||||
if (!stats || stats.isDirectory()) {
|
||||
throw new Error(`File not found or is directory: ${filePath}`)
|
||||
}
|
||||
|
||||
// Read file and update time tracking (like edit tool does)
|
||||
await FileTime.assert(ctx.sessionID, filePath)
|
||||
const oldContent = await fs.readFile(filePath, "utf-8")
|
||||
let newContent = oldContent
|
||||
|
||||
// Apply the update chunks to get new content
|
||||
try {
|
||||
const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks)
|
||||
newContent = fileUpdate.content
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to apply update to ${filePath}: ${error}`)
|
||||
}
|
||||
|
||||
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
|
||||
|
||||
const movePath = hunk.move_path ? path.resolve(Instance.directory, hunk.move_path) : undefined
|
||||
await assertExternalDirectory(ctx, movePath)
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: hunk.move_path ? "move" : "update",
|
||||
movePath,
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
break
|
||||
|
||||
case "delete":
|
||||
// Check if file exists for deletion
|
||||
await FileTime.assert(ctx.sessionID, filePath)
|
||||
const contentToDelete = await fs.readFile(filePath, "utf-8")
|
||||
const deleteDiff = createTwoFilesPatch(filePath, filePath, contentToDelete, "")
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent: contentToDelete,
|
||||
newContent: "",
|
||||
type: "delete",
|
||||
})
|
||||
|
||||
totalDiff += deleteDiff + "\n"
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Check permissions if needed
|
||||
await ctx.ask({
|
||||
permission: "edit",
|
||||
patterns: fileChanges.map((c) => path.relative(Instance.worktree, c.filePath)),
|
||||
always: ["*"],
|
||||
metadata: {
|
||||
diff: totalDiff,
|
||||
},
|
||||
})
|
||||
|
||||
// Apply the changes
|
||||
const changedFiles: string[] = []
|
||||
|
||||
for (const change of fileChanges) {
|
||||
switch (change.type) {
|
||||
case "add":
|
||||
// Create parent directories
|
||||
const addDir = path.dirname(change.filePath)
|
||||
if (addDir !== "." && addDir !== "/") {
|
||||
await fs.mkdir(addDir, { recursive: true })
|
||||
}
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "update":
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "move":
|
||||
if (change.movePath) {
|
||||
// Create parent directories for destination
|
||||
const moveDir = path.dirname(change.movePath)
|
||||
if (moveDir !== "." && moveDir !== "/") {
|
||||
await fs.mkdir(moveDir, { recursive: true })
|
||||
}
|
||||
// Write to new location
|
||||
await fs.writeFile(change.movePath, change.newContent, "utf-8")
|
||||
// Remove original
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.movePath)
|
||||
}
|
||||
break
|
||||
|
||||
case "delete":
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
}
|
||||
|
||||
// Update file time tracking
|
||||
FileTime.read(ctx.sessionID, change.filePath)
|
||||
if (change.movePath) {
|
||||
FileTime.read(ctx.sessionID, change.movePath)
|
||||
}
|
||||
}
|
||||
|
||||
// Publish file change events
|
||||
for (const filePath of changedFiles) {
|
||||
await Bus.publish(FileWatcher.Event.Updated, { file: filePath, event: "change" })
|
||||
}
|
||||
|
||||
// Generate output summary
|
||||
const relativePaths = changedFiles.map((filePath) => path.relative(Instance.worktree, filePath))
|
||||
const summary = `${fileChanges.length} files changed`
|
||||
|
||||
return {
|
||||
title: summary,
|
||||
metadata: {
|
||||
diff: totalDiff,
|
||||
},
|
||||
output: `Patch applied successfully. ${summary}:\n${relativePaths.map((p) => ` ${p}`).join("\n")}`,
|
||||
}
|
||||
},
|
||||
})
|
||||
@@ -1 +0,0 @@
|
||||
do not use
|
||||
@@ -26,6 +26,7 @@ import { Log } from "@/util/log"
import { LspTool } from "./lsp"
import { Truncate } from "./truncation"
import { PlanExitTool, PlanEnterTool } from "./plan"
import { ApplyPatchTool } from "./apply_patch"

export namespace ToolRegistry {
  const log = Log.create({ service: "tool.registry" })
@@ -108,6 +109,7 @@ export namespace ToolRegistry {
      WebSearchTool,
      CodeSearchTool,
      SkillTool,
      ApplyPatchTool,
      ...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [LspTool] : []),
      ...(config.experimental?.batch_tool === true ? [BatchTool] : []),
      ...(Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli" ? [PlanExitTool, PlanEnterTool] : []),
@@ -119,15 +121,28 @@ export namespace ToolRegistry {
    return all().then((x) => x.map((t) => t.id))
  }

  export async function tools(providerID: string, agent?: Agent.Info) {
  export async function tools(
    model: {
      providerID: string
      modelID: string
    },
    agent?: Agent.Info,
  ) {
    const tools = await all()
    const result = await Promise.all(
      tools
        .filter((t) => {
          // Enable websearch/codesearch for zen users OR via enable flag
          if (t.id === "codesearch" || t.id === "websearch") {
            return providerID === "opencode" || Flag.OPENCODE_ENABLE_EXA
            return model.providerID === "opencode" || Flag.OPENCODE_ENABLE_EXA
          }

          // use apply tool in same format as codex
          const usePatch =
            model.modelID.includes("gpt-") && !model.modelID.includes("oss") && !model.modelID.includes("gpt-4")
          if (t.id === "apply_patch") return usePatch
          if (t.id === "edit" || t.id === "write") return !usePatch

          return true
        })
        .map(async (t) => {
@@ -1,75 +0,0 @@
|
||||
import { test, expect, describe, afterEach } from "bun:test"
|
||||
import { McpOAuthCallback } from "../../src/mcp/oauth-callback"
|
||||
import { parseRedirectUri } from "../../src/mcp/oauth-provider"
|
||||
|
||||
describe("McpOAuthCallback.ensureRunning", () => {
|
||||
afterEach(async () => {
|
||||
await McpOAuthCallback.stop()
|
||||
})
|
||||
|
||||
test("starts server with default config when no redirectUri provided", async () => {
|
||||
await McpOAuthCallback.ensureRunning()
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
})
|
||||
|
||||
test("starts server with custom redirectUri", async () => {
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18000/custom/callback")
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
})
|
||||
|
||||
test("is idempotent when called with same redirectUri", async () => {
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18001/callback")
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18001/callback")
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
})
|
||||
|
||||
test("restarts server when redirectUri changes", async () => {
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18002/path1")
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18003/path2")
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
})
|
||||
|
||||
test("isRunning returns false when not started", async () => {
|
||||
expect(McpOAuthCallback.isRunning()).toBe(false)
|
||||
})
|
||||
|
||||
test("isRunning returns false after stop", async () => {
|
||||
await McpOAuthCallback.ensureRunning()
|
||||
await McpOAuthCallback.stop()
|
||||
expect(McpOAuthCallback.isRunning()).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("parseRedirectUri", () => {
|
||||
test("returns defaults when no URI provided", () => {
|
||||
const result = parseRedirectUri()
|
||||
expect(result.port).toBe(19876)
|
||||
expect(result.path).toBe("/mcp/oauth/callback")
|
||||
})
|
||||
|
||||
test("parses port and path from URI", () => {
|
||||
const result = parseRedirectUri("http://127.0.0.1:8080/oauth/callback")
|
||||
expect(result.port).toBe(8080)
|
||||
expect(result.path).toBe("/oauth/callback")
|
||||
})
|
||||
|
||||
test("defaults to port 80 for http without explicit port", () => {
|
||||
const result = parseRedirectUri("http://127.0.0.1/callback")
|
||||
expect(result.port).toBe(80)
|
||||
expect(result.path).toBe("/callback")
|
||||
})
|
||||
|
||||
test("defaults to port 443 for https without explicit port", () => {
|
||||
const result = parseRedirectUri("https://127.0.0.1/callback")
|
||||
expect(result.port).toBe(443)
|
||||
expect(result.path).toBe("/callback")
|
||||
})
|
||||
|
||||
test("returns defaults for invalid URI", () => {
|
||||
const result = parseRedirectUri("not-a-valid-url")
|
||||
expect(result.port).toBe(19876)
|
||||
expect(result.path).toBe("/mcp/oauth/callback")
|
||||
})
|
||||
})
|
||||
@@ -649,7 +649,7 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
headers: {},
|
||||
} as any
|
||||
|
||||
test("strips itemId and reasoningEncryptedContent when store=false", () => {
|
||||
test("preserves itemId and reasoningEncryptedContent when store=false", () => {
|
||||
const msgs = [
|
||||
{
|
||||
role: "assistant",
|
||||
@@ -680,11 +680,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
const result = ProviderTransform.message(msgs, openaiModel, { store: false }) as any[]
|
||||
|
||||
expect(result).toHaveLength(1)
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[1].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("rs_123")
|
||||
expect(result[0].content[1].providerOptions?.openai?.itemId).toBe("msg_456")
|
||||
})
|
||||
|
||||
test("strips itemId and reasoningEncryptedContent when store=false even when not openai", () => {
|
||||
test("preserves itemId and reasoningEncryptedContent when store=false even when not openai", () => {
|
||||
const zenModel = {
|
||||
...openaiModel,
|
||||
providerID: "zen",
|
||||
@@ -719,11 +719,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
const result = ProviderTransform.message(msgs, zenModel, { store: false }) as any[]
|
||||
|
||||
expect(result).toHaveLength(1)
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[1].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("rs_123")
|
||||
expect(result[0].content[1].providerOptions?.openai?.itemId).toBe("msg_456")
|
||||
})
|
||||
|
||||
test("preserves other openai options when stripping itemId", () => {
|
||||
test("preserves other openai options including itemId", () => {
|
||||
const msgs = [
|
||||
{
|
||||
role: "assistant",
|
||||
@@ -744,11 +744,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
|
||||
const result = ProviderTransform.message(msgs, openaiModel, { store: false }) as any[]
|
||||
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
|
||||
expect(result[0].content[0].providerOptions?.openai?.otherOption).toBe("value")
|
||||
})
|
||||
|
||||
test("strips metadata for openai package even when store is true", () => {
|
||||
test("preserves metadata for openai package when store is true", () => {
|
||||
const msgs = [
|
||||
{
|
||||
role: "assistant",
|
||||
@@ -766,13 +766,13 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
},
|
||||
] as any[]
|
||||
|
||||
// openai package always strips itemId regardless of store value
|
||||
// openai package preserves itemId regardless of store value
|
||||
const result = ProviderTransform.message(msgs, openaiModel, { store: true }) as any[]
|
||||
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
|
||||
})
|
||||
|
||||
test("strips metadata for non-openai packages when store is false", () => {
|
||||
test("preserves metadata for non-openai packages when store is false", () => {
|
||||
const anthropicModel = {
|
||||
...openaiModel,
|
||||
providerID: "anthropic",
|
||||
@@ -799,13 +799,13 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
},
|
||||
] as any[]
|
||||
|
||||
// store=false triggers stripping even for non-openai packages
|
||||
// store=false preserves metadata for non-openai packages
|
||||
const result = ProviderTransform.message(msgs, anthropicModel, { store: false }) as any[]
|
||||
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
|
||||
})
|
||||
|
||||
test("strips metadata using providerID key when store is false", () => {
|
||||
test("preserves metadata using providerID key when store is false", () => {
|
||||
const opencodeModel = {
|
||||
...openaiModel,
|
||||
providerID: "opencode",
|
||||
@@ -835,11 +835,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
|
||||
const result = ProviderTransform.message(msgs, opencodeModel, { store: false }) as any[]
|
||||
|
||||
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBe("msg_123")
|
||||
expect(result[0].content[0].providerOptions?.opencode?.otherOption).toBe("value")
|
||||
})
|
||||
|
||||
test("strips itemId across all providerOptions keys", () => {
|
||||
test("preserves itemId across all providerOptions keys", () => {
|
||||
const opencodeModel = {
|
||||
...openaiModel,
|
||||
providerID: "opencode",
|
||||
@@ -873,12 +873,12 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
|
||||
const result = ProviderTransform.message(msgs, opencodeModel, { store: false }) as any[]
|
||||
|
||||
expect(result[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].providerOptions?.opencode?.itemId).toBeUndefined()
|
||||
expect(result[0].providerOptions?.extra?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.extra?.itemId).toBeUndefined()
|
||||
expect(result[0].providerOptions?.openai?.itemId).toBe("msg_root")
|
||||
expect(result[0].providerOptions?.opencode?.itemId).toBe("msg_opencode")
|
||||
expect(result[0].providerOptions?.extra?.itemId).toBe("msg_extra")
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_openai_part")
|
||||
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBe("msg_opencode_part")
|
||||
expect(result[0].content[0].providerOptions?.extra?.itemId).toBe("msg_extra_part")
|
||||
})
|
||||
|
||||
test("does not strip metadata for non-openai packages when store is not false", () => {
|
||||
@@ -914,6 +914,88 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
})
|
||||
})
|
||||
|
||||
describe("ProviderTransform.message - providerOptions key remapping", () => {
|
||||
const createModel = (providerID: string, npm: string) =>
|
||||
({
|
||||
id: `${providerID}/test-model`,
|
||||
providerID,
|
||||
api: {
|
||||
id: "test-model",
|
||||
url: "https://api.test.com",
|
||||
npm,
|
||||
},
|
||||
name: "Test Model",
|
||||
capabilities: {
|
||||
temperature: true,
|
||||
reasoning: false,
|
||||
attachment: true,
|
||||
toolcall: true,
|
||||
input: { text: true, audio: false, image: true, video: false, pdf: true },
|
||||
output: { text: true, audio: false, image: false, video: false, pdf: false },
|
||||
interleaved: false,
|
||||
},
|
||||
cost: { input: 0.001, output: 0.002, cache: { read: 0.0001, write: 0.0002 } },
|
||||
limit: { context: 128000, output: 8192 },
|
||||
status: "active",
|
||||
options: {},
|
||||
headers: {},
|
||||
}) as any
|
||||
|
||||
test("azure keeps 'azure' key and does not remap to 'openai'", () => {
|
||||
const model = createModel("azure", "@ai-sdk/azure")
|
||||
const msgs = [
|
||||
{
|
||||
role: "user",
|
||||
content: "Hello",
|
||||
providerOptions: {
|
||||
azure: { someOption: "value" },
|
||||
},
|
||||
},
|
||||
] as any[]
|
||||
|
||||
const result = ProviderTransform.message(msgs, model, {})
|
||||
|
||||
expect(result[0].providerOptions?.azure).toEqual({ someOption: "value" })
|
||||
expect(result[0].providerOptions?.openai).toBeUndefined()
|
||||
})
|
||||
|
||||
test("openai with github-copilot npm remaps providerID to 'openai'", () => {
|
||||
const model = createModel("github-copilot", "@ai-sdk/github-copilot")
|
||||
const msgs = [
|
||||
{
|
||||
role: "user",
|
||||
content: "Hello",
|
||||
providerOptions: {
|
||||
"github-copilot": { someOption: "value" },
|
||||
},
|
||||
},
|
||||
] as any[]
|
||||
|
||||
const result = ProviderTransform.message(msgs, model, {})
|
||||
|
||||
expect(result[0].providerOptions?.openai).toEqual({ someOption: "value" })
|
||||
expect(result[0].providerOptions?.["github-copilot"]).toBeUndefined()
|
||||
})
|
||||
|
||||
test("bedrock remaps providerID to 'bedrock' key", () => {
|
||||
const model = createModel("my-bedrock", "@ai-sdk/amazon-bedrock")
|
||||
const msgs = [
|
||||
{
|
||||
role: "user",
|
||||
content: "Hello",
|
||||
providerOptions: {
|
||||
"my-bedrock": { someOption: "value" },
|
||||
},
|
||||
},
|
||||
] as any[]
|
||||
|
||||
const result = ProviderTransform.message(msgs, model, {})
|
||||
|
||||
expect(result[0].providerOptions?.bedrock).toEqual({ someOption: "value" })
|
||||
expect(result[0].providerOptions?.["my-bedrock"]).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe("ProviderTransform.variants", () => {
|
||||
const createMockModel = (overrides: Partial<any> = {}): any => ({
|
||||
id: "test/test-model",
|
||||
|
||||
@@ -1,35 +1,8 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import { MessageV2 } from "../../src/session/message-v2"
|
||||
import type { ToolSet } from "ai"
|
||||
|
||||
const sessionID = "session"
|
||||
|
||||
// Mock tool that transforms output to content format with media support
|
||||
function createMockTools(): ToolSet {
|
||||
return {
|
||||
bash: {
|
||||
description: "mock bash tool",
|
||||
inputSchema: { type: "object", properties: {} } as any,
|
||||
toModelOutput(result: { output: string; attachments?: MessageV2.FilePart[] }) {
|
||||
return {
|
||||
type: "content" as const,
|
||||
value: [
|
||||
{ type: "text" as const, text: result.output },
|
||||
...(result.attachments?.map((attachment) => {
|
||||
const base64 = attachment.url.startsWith("data:") ? attachment.url.split(",", 2)[1] : attachment.url
|
||||
return {
|
||||
type: "media" as const,
|
||||
data: base64,
|
||||
mediaType: attachment.mime,
|
||||
}
|
||||
}) ?? []),
|
||||
],
|
||||
}
|
||||
},
|
||||
},
|
||||
} as ToolSet
|
||||
}
|
||||
|
||||
function userInfo(id: string): MessageV2.User {
|
||||
return {
|
||||
id,
|
||||
@@ -286,11 +259,23 @@ describe("session.message-v2.toModelMessage", () => {
|
||||
},
|
||||
]
|
||||
|
||||
expect(MessageV2.toModelMessage(input, { tools: createMockTools() })).toStrictEqual([
|
||||
expect(MessageV2.toModelMessage(input)).toStrictEqual([
|
||||
{
|
||||
role: "user",
|
||||
content: [{ type: "text", text: "run tool" }],
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: [
|
||||
{ type: "text", text: "The tool bash returned the following attachments:" },
|
||||
{
|
||||
type: "file",
|
||||
mediaType: "image/png",
|
||||
filename: "attachment.png",
|
||||
data: "https://example.com/attachment.png",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
role: "assistant",
|
||||
content: [
|
||||
@@ -312,13 +297,7 @@ describe("session.message-v2.toModelMessage", () => {
|
||||
type: "tool-result",
|
||||
toolCallId: "call-1",
|
||||
toolName: "bash",
|
||||
output: {
|
||||
type: "content",
|
||||
value: [
|
||||
{ type: "text", text: "ok" },
|
||||
{ type: "media", data: "https://example.com/attachment.png", mediaType: "image/png" },
|
||||
],
|
||||
},
|
||||
output: { type: "text", value: "ok" },
|
||||
providerOptions: { openai: { tool: "meta" } },
|
||||
},
|
||||
],
|
||||
@@ -362,7 +341,7 @@ describe("session.message-v2.toModelMessage", () => {
|
||||
},
|
||||
]
|
||||
|
||||
expect(MessageV2.toModelMessage(input, { tools: createMockTools() })).toStrictEqual([
|
||||
expect(MessageV2.toModelMessage(input)).toStrictEqual([
|
||||
{
|
||||
role: "user",
|
||||
content: [{ type: "text", text: "run tool" }],
|
||||
@@ -386,10 +365,7 @@ describe("session.message-v2.toModelMessage", () => {
|
||||
type: "tool-result",
|
||||
toolCallId: "call-1",
|
||||
toolName: "bash",
|
||||
output: {
|
||||
type: "content",
|
||||
value: [{ type: "text", text: "[Old tool result content cleared]" }],
|
||||
},
|
||||
output: { type: "text", value: "[Old tool result content cleared]" },
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
515
packages/opencode/test/tool/apply_patch.test.ts
Normal file
@@ -0,0 +1,515 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import path from "path"
|
||||
import * as fs from "fs/promises"
|
||||
import { ApplyPatchTool } from "../../src/tool/apply_patch"
|
||||
import { Instance } from "../../src/project/instance"
import { FileTime } from "../../src/file/time"
import { tmpdir } from "../fixture/fixture"

const baseCtx = {
  sessionID: "test",
  messageID: "",
  callID: "",
  agent: "build",
  abort: AbortSignal.any([]),
  metadata: () => {},
}

type AskInput = {
  permission: string
  patterns: string[]
  always: string[]
  metadata: { diff: string }
}

type ToolCtx = typeof baseCtx & {
  ask: (input: AskInput) => Promise<void>
}

const execute = async (params: { patchText: string }, ctx: ToolCtx) => {
  const tool = await ApplyPatchTool.init()
  return tool.execute(params, ctx)
}

const makeCtx = () => {
  const calls: AskInput[] = []
  const ctx: ToolCtx = {
    ...baseCtx,
    ask: async (input) => {
      calls.push(input)
    },
  }

  return { ctx, calls }
}

describe("tool.apply_patch freeform", () => {
  test("requires patchText", async () => {
    const { ctx } = makeCtx()
    await expect(execute({ patchText: "" }, ctx)).rejects.toThrow("patchText is required")
  })

  test("rejects invalid patch format", async () => {
    const { ctx } = makeCtx()
    await expect(execute({ patchText: "invalid patch" }, ctx)).rejects.toThrow("apply_patch verification failed")
  })

  test("rejects empty patch", async () => {
    const { ctx } = makeCtx()
    const emptyPatch = "*** Begin Patch\n*** End Patch"
    await expect(execute({ patchText: emptyPatch }, ctx)).rejects.toThrow("patch rejected: empty patch")
  })

  test("applies add/update/delete in one patch", async () => {
    await using fixture = await tmpdir()
    const { ctx, calls } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const modifyPath = path.join(fixture.path, "modify.txt")
        const deletePath = path.join(fixture.path, "delete.txt")
        await fs.writeFile(modifyPath, "line1\nline2\n", "utf-8")
        await fs.writeFile(deletePath, "obsolete\n", "utf-8")
        FileTime.read(ctx.sessionID, modifyPath)
        FileTime.read(ctx.sessionID, deletePath)

        const patchText =
          "*** Begin Patch\n*** Add File: nested/new.txt\n+created\n*** Delete File: delete.txt\n*** Update File: modify.txt\n@@\n-line2\n+changed\n*** End Patch"

        const result = await execute({ patchText }, ctx)

        expect(result.title).toContain("Success. Updated the following files")
        expect(result.output).toContain("Success. Updated the following files")
        expect(result.metadata.diff).toContain("Index:")
        expect(calls.length).toBe(1)

        const added = await fs.readFile(path.join(fixture.path, "nested", "new.txt"), "utf-8")
        expect(added).toBe("created\n")
        expect(await fs.readFile(modifyPath, "utf-8")).toBe("line1\nchanged\n")
        await expect(fs.readFile(deletePath, "utf-8")).rejects.toThrow()
      },
    })
  })

  test("applies multiple hunks to one file", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "multi.txt")
        await fs.writeFile(target, "line1\nline2\nline3\nline4\n", "utf-8")
        FileTime.read(ctx.sessionID, target)

        const patchText =
          "*** Begin Patch\n*** Update File: multi.txt\n@@\n-line2\n+changed2\n@@\n-line4\n+changed4\n*** End Patch"

        await execute({ patchText }, ctx)

        expect(await fs.readFile(target, "utf-8")).toBe("line1\nchanged2\nline3\nchanged4\n")
      },
    })
  })

  test("inserts lines with insert-only hunk", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "insert_only.txt")
        await fs.writeFile(target, "alpha\nomega\n", "utf-8")
        FileTime.read(ctx.sessionID, target)

        const patchText = "*** Begin Patch\n*** Update File: insert_only.txt\n@@\n alpha\n+beta\n omega\n*** End Patch"

        await execute({ patchText }, ctx)

        expect(await fs.readFile(target, "utf-8")).toBe("alpha\nbeta\nomega\n")
      },
    })
  })

  test("appends trailing newline on update", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "no_newline.txt")
        await fs.writeFile(target, "no newline at end", "utf-8")
        FileTime.read(ctx.sessionID, target)

        const patchText =
          "*** Begin Patch\n*** Update File: no_newline.txt\n@@\n-no newline at end\n+first line\n+second line\n*** End Patch"

        await execute({ patchText }, ctx)

        const contents = await fs.readFile(target, "utf-8")
        expect(contents.endsWith("\n")).toBe(true)
        expect(contents).toBe("first line\nsecond line\n")
      },
    })
  })

  test("moves file to a new directory", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const original = path.join(fixture.path, "old", "name.txt")
        await fs.mkdir(path.dirname(original), { recursive: true })
        await fs.writeFile(original, "old content\n", "utf-8")
        FileTime.read(ctx.sessionID, original)

        const patchText =
          "*** Begin Patch\n*** Update File: old/name.txt\n*** Move to: renamed/dir/name.txt\n@@\n-old content\n+new content\n*** End Patch"

        await execute({ patchText }, ctx)

        const moved = path.join(fixture.path, "renamed", "dir", "name.txt")
        await expect(fs.readFile(original, "utf-8")).rejects.toThrow()
        expect(await fs.readFile(moved, "utf-8")).toBe("new content\n")
      },
    })
  })

  test("moves file overwriting existing destination", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const original = path.join(fixture.path, "old", "name.txt")
        const destination = path.join(fixture.path, "renamed", "dir", "name.txt")
        await fs.mkdir(path.dirname(original), { recursive: true })
        await fs.mkdir(path.dirname(destination), { recursive: true })
        await fs.writeFile(original, "from\n", "utf-8")
        await fs.writeFile(destination, "existing\n", "utf-8")
        FileTime.read(ctx.sessionID, original)

        const patchText =
          "*** Begin Patch\n*** Update File: old/name.txt\n*** Move to: renamed/dir/name.txt\n@@\n-from\n+new\n*** End Patch"

        await execute({ patchText }, ctx)

        await expect(fs.readFile(original, "utf-8")).rejects.toThrow()
        expect(await fs.readFile(destination, "utf-8")).toBe("new\n")
      },
    })
  })

  test("adds file overwriting existing file", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "duplicate.txt")
        await fs.writeFile(target, "old content\n", "utf-8")

        const patchText = "*** Begin Patch\n*** Add File: duplicate.txt\n+new content\n*** End Patch"

        await execute({ patchText }, ctx)
        expect(await fs.readFile(target, "utf-8")).toBe("new content\n")
      },
    })
  })

  test("rejects update when target file is missing", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = "*** Begin Patch\n*** Update File: missing.txt\n@@\n-nope\n+better\n*** End Patch"

        await expect(execute({ patchText }, ctx)).rejects.toThrow(
          "apply_patch verification failed: Failed to read file to update",
        )
      },
    })
  })

  test("rejects delete when file is missing", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = "*** Begin Patch\n*** Delete File: missing.txt\n*** End Patch"

        await expect(execute({ patchText }, ctx)).rejects.toThrow()
      },
    })
  })

  test("rejects delete when target is a directory", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const dirPath = path.join(fixture.path, "dir")
        await fs.mkdir(dirPath)

        const patchText = "*** Begin Patch\n*** Delete File: dir\n*** End Patch"

        await expect(execute({ patchText }, ctx)).rejects.toThrow()
      },
    })
  })

  test("rejects invalid hunk header", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = "*** Begin Patch\n*** Frobnicate File: foo\n*** End Patch"

        await expect(execute({ patchText }, ctx)).rejects.toThrow("apply_patch verification failed")
      },
    })
  })

  test("rejects update with missing context", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "modify.txt")
        await fs.writeFile(target, "line1\nline2\n", "utf-8")
        FileTime.read(ctx.sessionID, target)

        const patchText = "*** Begin Patch\n*** Update File: modify.txt\n@@\n-missing\n+changed\n*** End Patch"

        await expect(execute({ patchText }, ctx)).rejects.toThrow("apply_patch verification failed")
        expect(await fs.readFile(target, "utf-8")).toBe("line1\nline2\n")
      },
    })
  })

  test("verification failure leaves no side effects", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText =
          "*** Begin Patch\n*** Add File: created.txt\n+hello\n*** Update File: missing.txt\n@@\n-old\n+new\n*** End Patch"

        await expect(execute({ patchText }, ctx)).rejects.toThrow()

        const createdPath = path.join(fixture.path, "created.txt")
        await expect(fs.readFile(createdPath, "utf-8")).rejects.toThrow()
      },
    })
  })

  test("supports end of file anchor", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "tail.txt")
        await fs.writeFile(target, "alpha\nlast\n", "utf-8")
        FileTime.read(ctx.sessionID, target)

        const patchText = "*** Begin Patch\n*** Update File: tail.txt\n@@\n-last\n+end\n*** End of File\n*** End Patch"

        await execute({ patchText }, ctx)
        expect(await fs.readFile(target, "utf-8")).toBe("alpha\nend\n")
      },
    })
  })

  test("rejects missing second chunk context", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "two_chunks.txt")
        await fs.writeFile(target, "a\nb\nc\nd\n", "utf-8")
        FileTime.read(ctx.sessionID, target)

        const patchText = "*** Begin Patch\n*** Update File: two_chunks.txt\n@@\n-b\n+B\n\n-d\n+D\n*** End Patch"

        await expect(execute({ patchText }, ctx)).rejects.toThrow()
        expect(await fs.readFile(target, "utf-8")).toBe("a\nb\nc\nd\n")
      },
    })
  })

  test("disambiguates change context with @@ header", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "multi_ctx.txt")
        await fs.writeFile(target, "fn a\nx=10\ny=2\nfn b\nx=10\ny=20\n", "utf-8")
        FileTime.read(ctx.sessionID, target)

        const patchText = "*** Begin Patch\n*** Update File: multi_ctx.txt\n@@ fn b\n-x=10\n+x=11\n*** End Patch"

        await execute({ patchText }, ctx)
        expect(await fs.readFile(target, "utf-8")).toBe("fn a\nx=10\ny=2\nfn b\nx=11\ny=20\n")
      },
    })
  })

  test("EOF anchor matches from end of file first", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "eof_anchor.txt")
        // File has duplicate "marker" lines - one in middle, one at end
        await fs.writeFile(target, "start\nmarker\nmiddle\nmarker\nend\n", "utf-8")
        FileTime.read(ctx.sessionID, target)

        // With EOF anchor, should match the LAST "marker" line, not the first
        const patchText =
          "*** Begin Patch\n*** Update File: eof_anchor.txt\n@@\n-marker\n-end\n+marker-changed\n+end\n*** End of File\n*** End Patch"

        await execute({ patchText }, ctx)
        // First marker unchanged, second marker changed
        expect(await fs.readFile(target, "utf-8")).toBe("start\nmarker\nmiddle\nmarker-changed\nend\n")
      },
    })
  })

  test("parses heredoc-wrapped patch", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = `cat <<'EOF'
*** Begin Patch
*** Add File: heredoc_test.txt
+heredoc content
*** End Patch
EOF`

        await execute({ patchText }, ctx)
        const content = await fs.readFile(path.join(fixture.path, "heredoc_test.txt"), "utf-8")
        expect(content).toBe("heredoc content\n")
      },
    })
  })

  test("parses heredoc-wrapped patch without cat", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = `<<EOF
*** Begin Patch
*** Add File: heredoc_no_cat.txt
+no cat prefix
*** End Patch
EOF`

        await execute({ patchText }, ctx)
        const content = await fs.readFile(path.join(fixture.path, "heredoc_no_cat.txt"), "utf-8")
        expect(content).toBe("no cat prefix\n")
      },
    })
  })

  test("matches with trailing whitespace differences", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "trailing_ws.txt")
        // File has trailing spaces on some lines
        await fs.writeFile(target, "line1 \nline2\nline3 \n", "utf-8")
        FileTime.read(ctx.sessionID, target)

        // Patch doesn't have trailing spaces - should still match via rstrip pass
        const patchText = "*** Begin Patch\n*** Update File: trailing_ws.txt\n@@\n-line2\n+changed\n*** End Patch"

        await execute({ patchText }, ctx)
        expect(await fs.readFile(target, "utf-8")).toBe("line1 \nchanged\nline3 \n")
      },
    })
  })

  test("matches with leading whitespace differences", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "leading_ws.txt")
        // File has leading spaces
        await fs.writeFile(target, " line1\nline2\n line3\n", "utf-8")
        FileTime.read(ctx.sessionID, target)

        // Patch without leading spaces - should match via trim pass
        const patchText = "*** Begin Patch\n*** Update File: leading_ws.txt\n@@\n-line2\n+changed\n*** End Patch"

        await execute({ patchText }, ctx)
        expect(await fs.readFile(target, "utf-8")).toBe(" line1\nchanged\n line3\n")
      },
    })
  })

  test("matches with Unicode punctuation differences", async () => {
    await using fixture = await tmpdir()
    const { ctx } = makeCtx()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const target = path.join(fixture.path, "unicode.txt")
        // File has fancy Unicode quotes (U+201C, U+201D) and em-dash (U+2014)
        const leftQuote = "\u201C"
        const rightQuote = "\u201D"
        const emDash = "\u2014"
        await fs.writeFile(target, `He said ${leftQuote}hello${rightQuote}\nsome${emDash}dash\nend\n`, "utf-8")
        FileTime.read(ctx.sessionID, target)

        // Patch uses ASCII equivalents - should match via normalized pass
        // The replacement uses ASCII quotes from the patch (not preserving Unicode)
        const patchText =
          '*** Begin Patch\n*** Update File: unicode.txt\n@@\n-He said "hello"\n+He said "hi"\n*** End Patch'

        await execute({ patchText }, ctx)
        // Result has ASCII quotes because that's what the patch specifies
        expect(await fs.readFile(target, "utf-8")).toBe(`He said "hi"\nsome${emDash}dash\nend\n`)
      },
    })
  })
})
@@ -1,261 +0,0 @@
import { describe, expect, test } from "bun:test"
import path from "path"
import { PatchTool } from "../../src/tool/patch"
import { Instance } from "../../src/project/instance"
import { tmpdir } from "../fixture/fixture"
import { PermissionNext } from "../../src/permission/next"
import * as fs from "fs/promises"

const ctx = {
  sessionID: "test",
  messageID: "",
  callID: "",
  agent: "build",
  abort: AbortSignal.any([]),
  metadata: () => {},
  ask: async () => {},
}

const patchTool = await PatchTool.init()

describe("tool.patch", () => {
  test("should validate required parameters", async () => {
    await Instance.provide({
      directory: "/tmp",
      fn: async () => {
        expect(patchTool.execute({ patchText: "" }, ctx)).rejects.toThrow("patchText is required")
      },
    })
  })

  test("should validate patch format", async () => {
    await Instance.provide({
      directory: "/tmp",
      fn: async () => {
        expect(patchTool.execute({ patchText: "invalid patch" }, ctx)).rejects.toThrow("Failed to parse patch")
      },
    })
  })

  test("should handle empty patch", async () => {
    await Instance.provide({
      directory: "/tmp",
      fn: async () => {
        const emptyPatch = `*** Begin Patch
*** End Patch`

        expect(patchTool.execute({ patchText: emptyPatch }, ctx)).rejects.toThrow("No file changes found in patch")
      },
    })
  })

  test.skip("should ask permission for files outside working directory", async () => {
    await Instance.provide({
      directory: "/tmp",
      fn: async () => {
        const maliciousPatch = `*** Begin Patch
*** Add File: /etc/passwd
+malicious content
*** End Patch`
        patchTool.execute({ patchText: maliciousPatch }, ctx)
        // TODO: this sucks
        await new Promise((resolve) => setTimeout(resolve, 1000))
        const pending = await PermissionNext.list()
        expect(pending.find((p) => p.sessionID === ctx.sessionID)).toBeDefined()
      },
    })
  })

  test("should handle simple add file operation", async () => {
    await using fixture = await tmpdir()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = `*** Begin Patch
*** Add File: test-file.txt
+Hello World
+This is a test file
*** End Patch`

        const result = await patchTool.execute({ patchText }, ctx)

        expect(result.title).toContain("files changed")
        expect(result.metadata.diff).toBeDefined()
        expect(result.output).toContain("Patch applied successfully")

        // Verify file was created
        const filePath = path.join(fixture.path, "test-file.txt")
        const content = await fs.readFile(filePath, "utf-8")
        expect(content).toBe("Hello World\nThis is a test file")
      },
    })
  })

  test("should handle file with context update", async () => {
    await using fixture = await tmpdir()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = `*** Begin Patch
*** Add File: config.js
+const API_KEY = "test-key"
+const DEBUG = false
+const VERSION = "1.0"
*** End Patch`

        const result = await patchTool.execute({ patchText }, ctx)

        expect(result.title).toContain("files changed")
        expect(result.metadata.diff).toBeDefined()
        expect(result.output).toContain("Patch applied successfully")

        // Verify file was created with correct content
        const filePath = path.join(fixture.path, "config.js")
        const content = await fs.readFile(filePath, "utf-8")
        expect(content).toBe('const API_KEY = "test-key"\nconst DEBUG = false\nconst VERSION = "1.0"')
      },
    })
  })

  test("should handle multiple file operations", async () => {
    await using fixture = await tmpdir()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = `*** Begin Patch
*** Add File: file1.txt
+Content of file 1
*** Add File: file2.txt
+Content of file 2
*** Add File: file3.txt
+Content of file 3
*** End Patch`

        const result = await patchTool.execute({ patchText }, ctx)

        expect(result.title).toContain("3 files changed")
        expect(result.metadata.diff).toBeDefined()
        expect(result.output).toContain("Patch applied successfully")

        // Verify all files were created
        for (let i = 1; i <= 3; i++) {
          const filePath = path.join(fixture.path, `file${i}.txt`)
          const content = await fs.readFile(filePath, "utf-8")
          expect(content).toBe(`Content of file ${i}`)
        }
      },
    })
  })

  test("should create parent directories when adding nested files", async () => {
    await using fixture = await tmpdir()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = `*** Begin Patch
*** Add File: deep/nested/file.txt
+Deep nested content
*** End Patch`

        const result = await patchTool.execute({ patchText }, ctx)

        expect(result.title).toContain("files changed")
        expect(result.output).toContain("Patch applied successfully")

        // Verify nested file was created
        const nestedPath = path.join(fixture.path, "deep", "nested", "file.txt")
        const exists = await fs
          .access(nestedPath)
          .then(() => true)
          .catch(() => false)
        expect(exists).toBe(true)

        const content = await fs.readFile(nestedPath, "utf-8")
        expect(content).toBe("Deep nested content")
      },
    })
  })

  test("should generate proper unified diff in metadata", async () => {
    await using fixture = await tmpdir()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        // First create a file with simple content
        const patchText1 = `*** Begin Patch
*** Add File: test.txt
+line 1
+line 2
+line 3
*** End Patch`

        await patchTool.execute({ patchText: patchText1 }, ctx)

        // Now create an update patch
        const patchText2 = `*** Begin Patch
*** Update File: test.txt
@@
 line 1
-line 2
+line 2 updated
 line 3
*** End Patch`

        const result = await patchTool.execute({ patchText: patchText2 }, ctx)

        expect(result.metadata.diff).toBeDefined()
        expect(result.metadata.diff).toContain("@@")
        expect(result.metadata.diff).toContain("-line 2")
        expect(result.metadata.diff).toContain("+line 2 updated")
      },
    })
  })

  test("should handle complex patch with multiple operations", async () => {
    await using fixture = await tmpdir()

    await Instance.provide({
      directory: fixture.path,
      fn: async () => {
        const patchText = `*** Begin Patch
*** Add File: new.txt
+This is a new file
+with multiple lines
*** Add File: existing.txt
+old content
+new line
+more content
*** Add File: config.json
+{
+ "version": "1.0",
+ "debug": true
+}
*** End Patch`

        const result = await patchTool.execute({ patchText }, ctx)

        expect(result.title).toContain("3 files changed")
        expect(result.metadata.diff).toBeDefined()
        expect(result.output).toContain("Patch applied successfully")

        // Verify all files were created
        const newPath = path.join(fixture.path, "new.txt")
        const newContent = await fs.readFile(newPath, "utf-8")
        expect(newContent).toBe("This is a new file\nwith multiple lines")

        const existingPath = path.join(fixture.path, "existing.txt")
        const existingContent = await fs.readFile(existingPath, "utf-8")
        expect(existingContent).toBe("old content\nnew line\nmore content")

        const configPath = path.join(fixture.path, "config.json")
        const configContent = await fs.readFile(configPath, "utf-8")
        expect(configContent).toBe('{\n "version": "1.0",\n "debug": true\n}')
      },
    })
  })
})
@@ -1,7 +1,7 @@
 {
   "$schema": "https://json.schemastore.org/package.json",
   "name": "@opencode-ai/plugin",
-  "version": "1.1.23",
+  "version": "1.1.25",
   "type": "module",
   "license": "MIT",
   "scripts": {
@@ -1,7 +1,7 @@
 {
   "$schema": "https://json.schemastore.org/package.json",
   "name": "@opencode-ai/sdk",
-  "version": "1.1.23",
+  "version": "1.1.25",
   "type": "module",
   "license": "MIT",
   "scripts": {
@@ -7,6 +7,7 @@ import type {
   AppAgentsResponses,
   AppLogErrors,
   AppLogResponses,
+  AppSkillsResponses,
   Auth as Auth3,
   AuthSetErrors,
   AuthSetResponses,
@@ -100,7 +101,6 @@ import type {
   SessionCreateResponses,
   SessionDeleteErrors,
   SessionDeleteResponses,
-  SessionDiffErrors,
   SessionDiffResponses,
   SessionForkResponses,
   SessionGetErrors,
@@ -653,48 +653,6 @@ export class Tool extends HeyApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
export class Instance extends HeyApiClient {
|
||||
/**
|
||||
* Dispose instance
|
||||
*
|
||||
* Clean up and dispose the current OpenCode instance, releasing all resources.
|
||||
*/
|
||||
public dispose<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).post<InstanceDisposeResponses, unknown, ThrowOnError>({
|
||||
url: "/instance/dispose",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Path extends HeyApiClient {
|
||||
/**
|
||||
* Get paths
|
||||
*
|
||||
* Retrieve the current working directory and related path information for the OpenCode instance.
|
||||
*/
|
||||
public get<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<PathGetResponses, unknown, ThrowOnError>({
|
||||
url: "/path",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Worktree extends HeyApiClient {
|
||||
/**
|
||||
* List worktrees
|
||||
@@ -751,27 +709,34 @@ export class Worktree extends HeyApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
export class Vcs extends HeyApiClient {
|
||||
export class Resource extends HeyApiClient {
|
||||
/**
|
||||
* Get VCS info
|
||||
* Get MCP resources
|
||||
*
|
||||
* Retrieve version control system (VCS) information for the current project, such as git branch.
|
||||
* Get all available MCP resources from connected servers. Optionally filter by name.
|
||||
*/
|
||||
public get<ThrowOnError extends boolean = false>(
|
||||
public list<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<VcsGetResponses, unknown, ThrowOnError>({
|
||||
url: "/vcs",
|
||||
return (options?.client ?? this.client).get<ExperimentalResourceListResponses, unknown, ThrowOnError>({
|
||||
url: "/experimental/resource",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Experimental extends HeyApiClient {
|
||||
private _resource?: Resource
|
||||
get resource(): Resource {
|
||||
return (this._resource ??= new Resource({ client: this.client }))
|
||||
}
|
||||
}
|
||||
|
||||
export class Session extends HeyApiClient {
|
||||
/**
|
||||
* List sessions
|
||||
@@ -1197,9 +1162,9 @@ export class Session extends HeyApiClient {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get session diff
|
||||
* Get message diff
|
||||
*
|
||||
* Get all file changes (diffs) made during this session.
|
||||
* Get the file changes (diff) that resulted from a specific user message in the session.
|
||||
*/
|
||||
public diff<ThrowOnError extends boolean = false>(
|
||||
parameters: {
|
||||
@@ -1221,7 +1186,7 @@ export class Session extends HeyApiClient {
|
||||
},
|
||||
],
|
||||
)
|
||||
return (options?.client ?? this.client).get<SessionDiffResponses, SessionDiffErrors, ThrowOnError>({
|
||||
return (options?.client ?? this.client).get<SessionDiffResponses, unknown, ThrowOnError>({
|
||||
url: "/session/{sessionID}/diff",
|
||||
...options,
|
||||
...params,
|
||||
@@ -1877,27 +1842,6 @@ export class Question extends HeyApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
export class Command extends HeyApiClient {
|
||||
/**
|
||||
* List commands
|
||||
*
|
||||
* Get a list of all available commands in the OpenCode system.
|
||||
*/
|
||||
public list<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<CommandListResponses, unknown, ThrowOnError>({
|
||||
url: "/command",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Oauth extends HeyApiClient {
|
||||
/**
|
||||
* OAuth authorize
|
||||
@@ -2208,70 +2152,6 @@ export class File extends HeyApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
export class App extends HeyApiClient {
|
||||
/**
|
||||
* Write log
|
||||
*
|
||||
* Write a log entry to the server logs with specified level and metadata.
|
||||
*/
|
||||
public log<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
service?: string
|
||||
level?: "debug" | "info" | "error" | "warn"
|
||||
message?: string
|
||||
extra?: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams(
|
||||
[parameters],
|
||||
[
|
||||
{
|
||||
args: [
|
||||
{ in: "query", key: "directory" },
|
||||
{ in: "body", key: "service" },
|
||||
{ in: "body", key: "level" },
|
||||
{ in: "body", key: "message" },
|
||||
{ in: "body", key: "extra" },
|
||||
],
|
||||
},
|
||||
],
|
||||
)
|
||||
return (options?.client ?? this.client).post<AppLogResponses, AppLogErrors, ThrowOnError>({
|
||||
url: "/log",
|
||||
...options,
|
||||
...params,
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
...options?.headers,
|
||||
...params.headers,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* List agents
|
||||
*
|
||||
* Get a list of all available AI agents in the OpenCode system.
|
||||
*/
|
||||
public agents<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<AppAgentsResponses, unknown, ThrowOnError>({
|
||||
url: "/agent",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Auth extends HeyApiClient {
|
||||
/**
|
||||
* Remove MCP OAuth
|
||||
@@ -2522,76 +2402,6 @@ export class Mcp extends HeyApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
export class Resource extends HeyApiClient {
|
||||
/**
|
||||
* Get MCP resources
|
||||
*
|
||||
* Get all available MCP resources from connected servers. Optionally filter by name.
|
||||
*/
|
||||
public list<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<ExperimentalResourceListResponses, unknown, ThrowOnError>({
|
||||
url: "/experimental/resource",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Experimental extends HeyApiClient {
|
||||
private _resource?: Resource
|
||||
get resource(): Resource {
|
||||
return (this._resource ??= new Resource({ client: this.client }))
|
||||
}
|
||||
}
|
||||
|
||||
export class Lsp extends HeyApiClient {
|
||||
/**
|
||||
* Get LSP status
|
||||
*
|
||||
* Get LSP server status
|
||||
*/
|
||||
public status<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<LspStatusResponses, unknown, ThrowOnError>({
|
||||
url: "/lsp",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Formatter extends HeyApiClient {
|
||||
/**
|
||||
* Get formatter status
|
||||
*
|
||||
* Get formatter status
|
||||
*/
|
||||
public status<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<FormatterStatusResponses, unknown, ThrowOnError>({
|
||||
url: "/formatter",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Control extends HeyApiClient {
|
||||
/**
|
||||
* Get next TUI request
|
||||
@@ -2930,6 +2740,215 @@ export class Tui extends HeyApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
export class Instance extends HeyApiClient {
|
||||
/**
|
||||
* Dispose instance
|
||||
*
|
||||
* Clean up and dispose the current OpenCode instance, releasing all resources.
|
||||
*/
|
||||
public dispose<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).post<InstanceDisposeResponses, unknown, ThrowOnError>({
|
||||
url: "/instance/dispose",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Path extends HeyApiClient {
|
||||
/**
|
||||
* Get paths
|
||||
*
|
||||
* Retrieve the current working directory and related path information for the OpenCode instance.
|
||||
*/
|
||||
public get<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<PathGetResponses, unknown, ThrowOnError>({
|
||||
url: "/path",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Vcs extends HeyApiClient {
|
||||
/**
|
||||
* Get VCS info
|
||||
*
|
||||
* Retrieve version control system (VCS) information for the current project, such as git branch.
|
||||
*/
|
||||
public get<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<VcsGetResponses, unknown, ThrowOnError>({
|
||||
url: "/vcs",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Command extends HeyApiClient {
|
||||
/**
|
||||
* List commands
|
||||
*
|
||||
* Get a list of all available commands in the OpenCode system.
|
||||
*/
|
||||
public list<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<CommandListResponses, unknown, ThrowOnError>({
|
||||
url: "/command",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class App extends HeyApiClient {
|
||||
/**
|
||||
* Write log
|
||||
*
|
||||
* Write a log entry to the server logs with specified level and metadata.
|
||||
*/
|
||||
public log<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
service?: string
|
||||
level?: "debug" | "info" | "error" | "warn"
|
||||
message?: string
|
||||
extra?: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams(
|
||||
[parameters],
|
||||
[
|
||||
{
|
||||
args: [
|
||||
{ in: "query", key: "directory" },
|
||||
{ in: "body", key: "service" },
|
||||
{ in: "body", key: "level" },
|
||||
{ in: "body", key: "message" },
|
||||
{ in: "body", key: "extra" },
|
||||
],
|
||||
},
|
||||
],
|
||||
)
|
||||
return (options?.client ?? this.client).post<AppLogResponses, AppLogErrors, ThrowOnError>({
|
||||
url: "/log",
|
||||
...options,
|
||||
...params,
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
...options?.headers,
|
||||
...params.headers,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* List agents
|
||||
*
|
||||
* Get a list of all available AI agents in the OpenCode system.
|
||||
*/
|
||||
public agents<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<AppAgentsResponses, unknown, ThrowOnError>({
|
||||
url: "/agent",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* List skills
|
||||
*
|
||||
* Get a list of all available skills in the OpenCode system.
|
||||
*/
|
||||
public skills<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<AppSkillsResponses, unknown, ThrowOnError>({
|
||||
url: "/skill",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Lsp extends HeyApiClient {
|
||||
/**
|
||||
* Get LSP status
|
||||
*
|
||||
* Get LSP server status
|
||||
*/
|
||||
public status<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<LspStatusResponses, unknown, ThrowOnError>({
|
||||
url: "/lsp",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Formatter extends HeyApiClient {
|
||||
/**
|
||||
* Get formatter status
|
||||
*
|
||||
* Get formatter status
|
||||
*/
|
||||
public status<ThrowOnError extends boolean = false>(
|
||||
parameters?: {
|
||||
directory?: string
|
||||
},
|
||||
options?: Options<never, ThrowOnError>,
|
||||
) {
|
||||
const params = buildClientParams([parameters], [{ args: [{ in: "query", key: "directory" }] }])
|
||||
return (options?.client ?? this.client).get<FormatterStatusResponses, unknown, ThrowOnError>({
|
||||
url: "/formatter",
|
||||
...options,
|
||||
...params,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export class Auth2 extends HeyApiClient {
|
||||
/**
|
||||
* Set auth credentials
|
||||
@@ -3023,24 +3042,14 @@ export class OpencodeClient extends HeyApiClient {
|
||||
return (this._tool ??= new Tool({ client: this.client }))
|
||||
}
|
||||
|
||||
private _instance?: Instance
|
||||
get instance(): Instance {
|
||||
return (this._instance ??= new Instance({ client: this.client }))
|
||||
}
|
||||
|
||||
private _path?: Path
|
||||
get path(): Path {
|
||||
return (this._path ??= new Path({ client: this.client }))
|
||||
}
|
||||
|
||||
private _worktree?: Worktree
|
||||
get worktree(): Worktree {
|
||||
return (this._worktree ??= new Worktree({ client: this.client }))
|
||||
}
|
||||
|
||||
private _vcs?: Vcs
|
||||
get vcs(): Vcs {
|
||||
return (this._vcs ??= new Vcs({ client: this.client }))
|
||||
private _experimental?: Experimental
|
||||
get experimental(): Experimental {
|
||||
return (this._experimental ??= new Experimental({ client: this.client }))
|
||||
}
|
||||
|
||||
private _session?: Session
|
||||
@@ -3063,11 +3072,6 @@ export class OpencodeClient extends HeyApiClient {
|
||||
return (this._question ??= new Question({ client: this.client }))
|
||||
}
|
||||
|
||||
private _command?: Command
|
||||
get command(): Command {
|
||||
return (this._command ??= new Command({ client: this.client }))
|
||||
}
|
||||
|
||||
private _provider?: Provider
|
||||
get provider(): Provider {
|
||||
return (this._provider ??= new Provider({ client: this.client }))
|
||||
@@ -3083,19 +3087,39 @@ export class OpencodeClient extends HeyApiClient {
|
||||
return (this._file ??= new File({ client: this.client }))
|
||||
}
|
||||
|
||||
private _app?: App
|
||||
get app(): App {
|
||||
return (this._app ??= new App({ client: this.client }))
|
||||
}
|
||||
|
||||
private _mcp?: Mcp
|
||||
get mcp(): Mcp {
|
||||
return (this._mcp ??= new Mcp({ client: this.client }))
|
||||
}
|
||||
|
||||
private _experimental?: Experimental
|
||||
get experimental(): Experimental {
|
||||
return (this._experimental ??= new Experimental({ client: this.client }))
|
||||
private _tui?: Tui
|
||||
get tui(): Tui {
|
||||
return (this._tui ??= new Tui({ client: this.client }))
|
||||
}
|
||||
|
||||
private _instance?: Instance
|
||||
get instance(): Instance {
|
||||
return (this._instance ??= new Instance({ client: this.client }))
|
||||
}
|
||||
|
||||
private _path?: Path
|
||||
get path(): Path {
|
||||
return (this._path ??= new Path({ client: this.client }))
|
||||
}
|
||||
|
||||
private _vcs?: Vcs
|
||||
get vcs(): Vcs {
|
||||
return (this._vcs ??= new Vcs({ client: this.client }))
|
||||
}
|
||||
|
||||
private _command?: Command
|
||||
get command(): Command {
|
||||
return (this._command ??= new Command({ client: this.client }))
|
||||
}
|
||||
|
||||
private _app?: App
|
||||
get app(): App {
|
||||
return (this._app ??= new App({ client: this.client }))
|
||||
}
|
||||
|
||||
private _lsp?: Lsp
|
||||
@@ -3108,11 +3132,6 @@ export class OpencodeClient extends HeyApiClient {
|
||||
return (this._formatter ??= new Formatter({ client: this.client }))
|
||||
}
|
||||
|
||||
private _tui?: Tui
|
||||
get tui(): Tui {
|
||||
return (this._tui ??= new Tui({ client: this.client }))
|
||||
}
|
||||
|
||||
private _auth?: Auth2
|
||||
get auth(): Auth2 {
|
||||
return (this._auth ??= new Auth2({ client: this.client }))
|
||||
|
||||
@@ -62,6 +62,13 @@ export type EventLspUpdated = {
|
||||
}
|
||||
}
|
||||
|
||||
export type EventFileEdited = {
|
||||
type: "file.edited"
|
||||
properties: {
|
||||
file: string
|
||||
}
|
||||
}
|
||||
|
||||
export type FileDiff = {
|
||||
file: string
|
||||
before: string
|
||||
@@ -599,13 +606,6 @@ export type EventSessionCompacted = {
|
||||
}
|
||||
}
|
||||
|
||||
export type EventFileEdited = {
|
||||
type: "file.edited"
|
||||
properties: {
|
||||
file: string
|
||||
}
|
||||
}
|
||||
|
||||
export type Todo = {
|
||||
/**
|
||||
* Brief description of the task
|
||||
@@ -843,15 +843,15 @@ export type EventPtyDeleted = {
|
||||
}
|
||||
}
|
||||
|
||||
export type EventServerConnected = {
|
||||
type: "server.connected"
|
||||
export type EventGlobalDisposed = {
|
||||
type: "global.disposed"
|
||||
properties: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
}
|
||||
|
||||
export type EventGlobalDisposed = {
|
||||
type: "global.disposed"
|
||||
export type EventServerConnected = {
|
||||
type: "server.connected"
|
||||
properties: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
@@ -864,6 +864,7 @@ export type Event =
|
||||
| EventServerInstanceDisposed
|
||||
| EventLspClientDiagnostics
|
||||
| EventLspUpdated
|
||||
| EventFileEdited
|
||||
| EventMessageUpdated
|
||||
| EventMessageRemoved
|
||||
| EventMessagePartUpdated
|
||||
@@ -876,7 +877,6 @@ export type Event =
|
||||
| EventQuestionReplied
|
||||
| EventQuestionRejected
|
||||
| EventSessionCompacted
|
||||
| EventFileEdited
|
||||
| EventTodoUpdated
|
||||
| EventTuiPromptAppend
|
||||
| EventTuiCommandExecute
|
||||
@@ -896,8 +896,8 @@ export type Event =
|
||||
| EventPtyUpdated
|
||||
| EventPtyExited
|
||||
| EventPtyDeleted
|
||||
| EventServerConnected
|
||||
| EventGlobalDisposed
|
||||
| EventServerConnected
|
||||
|
||||
export type GlobalEvent = {
|
||||
directory: string
|
||||
@@ -1530,10 +1530,6 @@ export type McpOAuthConfig = {
|
||||
* OAuth scopes to request during authorization
|
||||
*/
|
||||
scope?: string
|
||||
/**
|
||||
* OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback).
|
||||
*/
|
||||
redirectUri?: string
|
||||
}
|
||||
|
||||
export type McpRemoteConfig = {
|
||||
@@ -1796,98 +1792,6 @@ export type Config = {
|
||||
}
|
||||
}
|
||||
|
||||
export type ToolIds = Array<string>
|
||||
|
||||
export type ToolListItem = {
|
||||
id: string
|
||||
description: string
|
||||
parameters: unknown
|
||||
}
|
||||
|
||||
export type ToolList = Array<ToolListItem>
|
||||
|
||||
export type Path = {
|
||||
home: string
|
||||
state: string
|
||||
config: string
|
||||
worktree: string
|
||||
directory: string
|
||||
}
|
||||
|
||||
export type Worktree = {
|
||||
name: string
|
||||
branch: string
|
||||
directory: string
|
||||
}
|
||||
|
||||
export type WorktreeCreateInput = {
|
||||
name?: string
|
||||
startCommand?: string
|
||||
}
|
||||
|
||||
export type VcsInfo = {
|
||||
branch: string
|
||||
}
|
||||
|
||||
export type TextPartInput = {
|
||||
id?: string
|
||||
type: "text"
|
||||
text: string
|
||||
synthetic?: boolean
|
||||
ignored?: boolean
|
||||
time?: {
|
||||
start: number
|
||||
end?: number
|
||||
}
|
||||
metadata?: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
}
|
||||
|
||||
export type FilePartInput = {
|
||||
id?: string
|
||||
type: "file"
|
||||
mime: string
|
||||
filename?: string
|
||||
url: string
|
||||
source?: FilePartSource
|
||||
}
|
||||
|
||||
export type AgentPartInput = {
|
||||
id?: string
|
||||
type: "agent"
|
||||
name: string
|
||||
source?: {
|
||||
value: string
|
||||
start: number
|
||||
end: number
|
||||
}
|
||||
}
|
||||
|
||||
export type SubtaskPartInput = {
|
||||
id?: string
|
||||
type: "subtask"
|
||||
prompt: string
|
||||
description: string
|
||||
agent: string
|
||||
model?: {
|
||||
providerID: string
|
||||
modelID: string
|
||||
}
|
||||
command?: string
|
||||
}
|
||||
|
||||
export type Command = {
|
||||
name: string
|
||||
description?: string
|
||||
agent?: string
|
||||
model?: string
|
||||
mcp?: boolean
|
||||
template: string
|
||||
subtask?: boolean
|
||||
hints: Array<string>
|
||||
}
|
||||
|
||||
export type Model = {
|
||||
id: string
|
||||
providerID: string
|
||||
@@ -1973,6 +1877,83 @@ export type Provider = {
|
||||
}
|
||||
}
|
||||
|
||||
export type ToolIds = Array<string>
|
||||
|
||||
export type ToolListItem = {
|
||||
id: string
|
||||
description: string
|
||||
parameters: unknown
|
||||
}
|
||||
|
||||
export type ToolList = Array<ToolListItem>
|
||||
|
||||
export type Worktree = {
|
||||
name: string
|
||||
branch: string
|
||||
directory: string
|
||||
}
|
||||
|
||||
export type WorktreeCreateInput = {
|
||||
name?: string
|
||||
startCommand?: string
|
||||
}
|
||||
|
||||
export type McpResource = {
|
||||
name: string
|
||||
uri: string
|
||||
description?: string
|
||||
mimeType?: string
|
||||
client: string
|
||||
}
|
||||
|
||||
export type TextPartInput = {
|
||||
id?: string
|
||||
type: "text"
|
||||
text: string
|
||||
synthetic?: boolean
|
||||
ignored?: boolean
|
||||
time?: {
|
||||
start: number
|
||||
end?: number
|
||||
}
|
||||
metadata?: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
}
|
||||
|
||||
export type FilePartInput = {
|
||||
id?: string
|
||||
type: "file"
|
||||
mime: string
|
||||
filename?: string
|
||||
url: string
|
||||
source?: FilePartSource
|
||||
}
|
||||
|
||||
export type AgentPartInput = {
|
||||
id?: string
|
||||
type: "agent"
|
||||
name: string
|
||||
source?: {
|
||||
value: string
|
||||
start: number
|
||||
end: number
|
||||
}
|
||||
}
|
||||
|
||||
export type SubtaskPartInput = {
|
||||
id?: string
|
||||
type: "subtask"
|
||||
prompt: string
|
||||
description: string
|
||||
agent: string
|
||||
model?: {
|
||||
providerID: string
|
||||
modelID: string
|
||||
}
|
||||
command?: string
|
||||
}
|
||||
|
||||
export type ProviderAuthMethod = {
|
||||
type: "oauth" | "api"
|
||||
label: string
|
||||
@@ -2030,27 +2011,6 @@ export type File = {
|
||||
status: "added" | "deleted" | "modified"
|
||||
}
|
||||
|
||||
export type Agent = {
|
||||
name: string
|
||||
description?: string
|
||||
mode: "subagent" | "primary" | "all"
|
||||
native?: boolean
|
||||
hidden?: boolean
|
||||
topP?: number
|
||||
temperature?: number
|
||||
color?: string
|
||||
permission: PermissionRuleset
|
||||
model?: {
|
||||
modelID: string
|
||||
providerID: string
|
||||
}
|
||||
prompt?: string
|
||||
options: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
steps?: number
|
||||
}
|
||||
|
||||
export type McpStatusConnected = {
|
||||
status: "connected"
|
||||
}
|
||||
@@ -2080,12 +2040,48 @@ export type McpStatus =
|
||||
| McpStatusNeedsAuth
|
||||
| McpStatusNeedsClientRegistration
|
||||
|
||||
export type McpResource = {
|
||||
export type Path = {
|
||||
home: string
|
||||
state: string
|
||||
config: string
|
||||
worktree: string
|
||||
directory: string
|
||||
}
|
||||
|
||||
export type VcsInfo = {
|
||||
branch: string
|
||||
}
|
||||
|
||||
export type Command = {
|
||||
name: string
|
||||
uri: string
|
||||
description?: string
|
||||
mimeType?: string
|
||||
client: string
|
||||
agent?: string
|
||||
model?: string
|
||||
mcp?: boolean
|
||||
template: string
|
||||
subtask?: boolean
|
||||
hints: Array<string>
|
||||
}
|
||||
|
||||
export type Agent = {
|
||||
name: string
|
||||
description?: string
|
||||
mode: "subagent" | "primary" | "all"
|
||||
native?: boolean
|
||||
hidden?: boolean
|
||||
topP?: number
|
||||
temperature?: number
|
||||
color?: string
|
||||
permission: PermissionRuleset
|
||||
model?: {
|
||||
modelID: string
|
||||
providerID: string
|
||||
}
|
||||
prompt?: string
|
||||
options: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
steps?: number
|
||||
}
|
||||
|
||||
export type LspStatus = {
|
||||
@@ -2469,6 +2465,29 @@ export type ConfigUpdateResponses = {
|
||||
|
||||
export type ConfigUpdateResponse = ConfigUpdateResponses[keyof ConfigUpdateResponses]
|
||||
|
||||
export type ConfigProvidersData = {
|
||||
body?: never
|
||||
path?: never
|
||||
query?: {
|
||||
directory?: string
|
||||
}
|
||||
url: "/config/providers"
|
||||
}
|
||||
|
||||
export type ConfigProvidersResponses = {
|
||||
/**
|
||||
* List of providers
|
||||
*/
|
||||
200: {
|
||||
providers: Array<Provider>
|
||||
default: {
|
||||
[key: string]: string
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export type ConfigProvidersResponse = ConfigProvidersResponses[keyof ConfigProvidersResponses]
|
||||
|
||||
export type ToolIdsData = {
|
||||
body?: never
|
||||
path?: never
|
||||
@@ -2525,42 +2544,6 @@ export type ToolListResponses = {
|
||||
|
||||
export type ToolListResponse = ToolListResponses[keyof ToolListResponses]
|
||||
|
||||
export type InstanceDisposeData = {
|
||||
body?: never
|
||||
path?: never
|
||||
query?: {
|
||||
directory?: string
|
||||
}
|
||||
url: "/instance/dispose"
|
||||
}
|
||||
|
||||
export type InstanceDisposeResponses = {
|
||||
/**
|
||||
* Instance disposed
|
||||
*/
|
||||
200: boolean
|
||||
}
|
||||
|
||||
export type InstanceDisposeResponse = InstanceDisposeResponses[keyof InstanceDisposeResponses]
|
||||
|
||||
export type PathGetData = {
|
||||
body?: never
|
||||
path?: never
|
||||
query?: {
|
||||
directory?: string
|
||||
}
|
||||
url: "/path"
|
||||
}
|
||||
|
||||
export type PathGetResponses = {
|
||||
/**
|
||||
* Path
|
||||
*/
|
||||
200: Path
|
||||
}
|
||||
|
||||
export type PathGetResponse = PathGetResponses[keyof PathGetResponses]
|
||||
|
||||
export type WorktreeListData = {
|
||||
body?: never
|
||||
path?: never
|
||||
@@ -2606,23 +2589,26 @@ export type WorktreeCreateResponses = {
|
||||
|
||||
export type WorktreeCreateResponse = WorktreeCreateResponses[keyof WorktreeCreateResponses]
|
||||
|
||||
export type VcsGetData = {
|
||||
export type ExperimentalResourceListData = {
|
||||
body?: never
|
||||
path?: never
|
||||
query?: {
|
||||
directory?: string
|
||||
}
|
||||
url: "/vcs"
|
||||
url: "/experimental/resource"
|
||||
}
|
||||
|
||||
export type VcsGetResponses = {
|
||||
export type ExperimentalResourceListResponses = {
|
||||
/**
|
||||
* VCS info
|
||||
* MCP resources
|
||||
*/
|
||||
200: VcsInfo
|
||||
200: {
|
||||
[key: string]: McpResource
|
||||
}
|
||||
}
|
||||
|
||||
export type VcsGetResponse = VcsGetResponses[keyof VcsGetResponses]
|
||||
export type ExperimentalResourceListResponse =
|
||||
ExperimentalResourceListResponses[keyof ExperimentalResourceListResponses]
|
||||
|
||||
export type SessionListData = {
|
||||
body?: never
|
||||
@@ -3058,9 +3044,6 @@ export type SessionShareResponse = SessionShareResponses[keyof SessionShareRespo
|
||||
export type SessionDiffData = {
|
||||
body?: never
|
||||
path: {
|
||||
/**
|
||||
* Session ID
|
||||
*/
|
||||
sessionID: string
|
||||
}
|
||||
query?: {
|
||||
@@ -3070,22 +3053,9 @@ export type SessionDiffData = {
|
||||
url: "/session/{sessionID}/diff"
|
||||
}
|
||||
|
||||
export type SessionDiffErrors = {
|
||||
/**
|
||||
* Bad request
|
||||
*/
|
||||
400: BadRequestError
|
||||
/**
|
||||
* Not found
|
||||
*/
|
||||
404: NotFoundError
|
||||
}
|
||||
|
||||
export type SessionDiffError = SessionDiffErrors[keyof SessionDiffErrors]
|
||||
|
||||
export type SessionDiffResponses = {
|
||||
/**
|
||||
* List of diffs
|
||||
* Successfully retrieved diff
|
||||
*/
|
||||
200: Array<FileDiff>
|
||||
}
|
||||
@@ -3757,47 +3727,6 @@ export type QuestionRejectResponses = {
|
||||
|
||||
export type QuestionRejectResponse = QuestionRejectResponses[keyof QuestionRejectResponses]
|
||||
|
||||
export type CommandListData = {
|
||||
body?: never
|
||||
path?: never
|
||||
query?: {
|
||||
directory?: string
|
||||
}
|
||||
url: "/command"
|
||||
}
|
||||
|
||||
export type CommandListResponses = {
|
||||
/**
|
||||
* List of commands
|
||||
*/
|
||||
200: Array<Command>
|
||||
}
|
||||
|
||||
export type CommandListResponse = CommandListResponses[keyof CommandListResponses]
|
||||
|
||||
export type ConfigProvidersData = {
|
||||
body?: never
|
||||
path?: never
|
||||
query?: {
|
||||
directory?: string
|
||||
}
|
||||
url: "/config/providers"
|
||||
}
|
||||
|
||||
export type ConfigProvidersResponses = {
|
||||
/**
|
||||
* List of providers
|
||||
*/
|
||||
200: {
|
||||
providers: Array<Provider>
|
||||
default: {
|
||||
[key: string]: string
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export type ConfigProvidersResponse = ConfigProvidersResponses[keyof ConfigProvidersResponses]
|
||||
|
||||
export type ProviderListData = {
|
||||
body?: never
|
||||
path?: never
|
||||
@@ -4112,70 +4041,6 @@ export type FileStatusResponses = {

export type FileStatusResponse = FileStatusResponses[keyof FileStatusResponses]

export type AppLogData = {
  body?: {
    /**
     * Service name for the log entry
     */
    service: string
    /**
     * Log level
     */
    level: "debug" | "info" | "error" | "warn"
    /**
     * Log message
     */
    message: string
    /**
     * Additional metadata for the log entry
     */
    extra?: {
      [key: string]: unknown
    }
  }
  path?: never
  query?: {
    directory?: string
  }
  url: "/log"
}

export type AppLogErrors = {
  /**
   * Bad request
   */
  400: BadRequestError
}

export type AppLogError = AppLogErrors[keyof AppLogErrors]

export type AppLogResponses = {
  /**
   * Log entry written successfully
   */
  200: boolean
}

export type AppLogResponse = AppLogResponses[keyof AppLogResponses]

export type AppAgentsData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/agent"
}

export type AppAgentsResponses = {
  /**
   * List of agents
   */
  200: Array<Agent>
}

export type AppAgentsResponse = AppAgentsResponses[keyof AppAgentsResponses]

export type McpStatusData = {
  body?: never
  path?: never
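
The AppLog* types above describe the /log route: a request body with service, level, message and optional extra metadata, a 400 error case, and a boolean 200 result. A minimal sketch of writing a log entry with that shape, assuming a plain fetch call with a JSON body and an illustrative base URL; the HTTP method and host are assumptions, not taken from this diff:

import type { AppLogData, AppLogResponse } from "./gen/types" // path is an assumption

// Assumed local server address; the real host/port depends on how the opencode server is started.
const baseUrl = "http://127.0.0.1:4096"

async function writeLog(body: NonNullable<AppLogData["body"]>): Promise<AppLogResponse> {
  const res = await fetch(`${baseUrl}/log`, {
    method: "POST", // assumed method for this route
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  })
  return (await res.json()) as AppLogResponse // boolean per AppLogResponses[200]
}

// Example call; level is constrained to "debug" | "info" | "error" | "warn" by the generated type:
// await writeLog({ service: "my-plugin", level: "info", message: "hello", extra: { attempt: 1 } })
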
@@ -4408,63 +4273,6 @@ export type McpDisconnectResponses = {

export type McpDisconnectResponse = McpDisconnectResponses[keyof McpDisconnectResponses]

export type ExperimentalResourceListData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/experimental/resource"
}

export type ExperimentalResourceListResponses = {
  /**
   * MCP resources
   */
  200: {
    [key: string]: McpResource
  }
}

export type ExperimentalResourceListResponse =
  ExperimentalResourceListResponses[keyof ExperimentalResourceListResponses]

export type LspStatusData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/lsp"
}

export type LspStatusResponses = {
  /**
   * LSP server status
   */
  200: Array<LspStatus>
}

export type LspStatusResponse = LspStatusResponses[keyof LspStatusResponses]

export type FormatterStatusData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/formatter"
}

export type FormatterStatusResponses = {
  /**
   * Formatter status
   */
  200: Array<FormatterStatus>
}

export type FormatterStatusResponse = FormatterStatusResponses[keyof FormatterStatusResponses]

export type TuiAppendPromptData = {
  body?: {
    text: string
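
Every ...Data type in this hunk follows the same shape: optional body, optional path params, an optional query carrying directory, and a string-literal url. A minimal sketch of turning such a Data value into a request URL, using LspStatusData from above; the helper and the base URL are assumptions, not part of the generated SDK:

import type { LspStatusData } from "./gen/types" // path is an assumption

// Build the request URL from the literal `url` field plus the optional directory query parameter.
// baseUrl is an assumption; the real host/port depends on the running opencode server.
function lspStatusUrl(data: LspStatusData, baseUrl = "http://127.0.0.1:4096"): string {
  const u = new URL(data.url, baseUrl)
  if (data.query?.directory) u.searchParams.set("directory", data.query.directory)
  return u.toString()
}

// Example: lspStatusUrl({ url: "/lsp", query: { directory: "/tmp/project" } })
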
@@ -4759,6 +4567,200 @@ export type TuiControlResponseResponses = {

export type TuiControlResponseResponse = TuiControlResponseResponses[keyof TuiControlResponseResponses]

export type InstanceDisposeData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/instance/dispose"
}

export type InstanceDisposeResponses = {
  /**
   * Instance disposed
   */
  200: boolean
}

export type InstanceDisposeResponse = InstanceDisposeResponses[keyof InstanceDisposeResponses]

export type PathGetData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/path"
}

export type PathGetResponses = {
  /**
   * Path
   */
  200: Path
}

export type PathGetResponse = PathGetResponses[keyof PathGetResponses]

export type VcsGetData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/vcs"
}

export type VcsGetResponses = {
  /**
   * VCS info
   */
  200: VcsInfo
}

export type VcsGetResponse = VcsGetResponses[keyof VcsGetResponses]

export type CommandListData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/command"
}

export type CommandListResponses = {
  /**
   * List of commands
   */
  200: Array<Command>
}

export type CommandListResponse = CommandListResponses[keyof CommandListResponses]

export type AppLogData = {
  body?: {
    /**
     * Service name for the log entry
     */
    service: string
    /**
     * Log level
     */
    level: "debug" | "info" | "error" | "warn"
    /**
     * Log message
     */
    message: string
    /**
     * Additional metadata for the log entry
     */
    extra?: {
      [key: string]: unknown
    }
  }
  path?: never
  query?: {
    directory?: string
  }
  url: "/log"
}

export type AppLogErrors = {
  /**
   * Bad request
   */
  400: BadRequestError
}

export type AppLogError = AppLogErrors[keyof AppLogErrors]

export type AppLogResponses = {
  /**
   * Log entry written successfully
   */
  200: boolean
}

export type AppLogResponse = AppLogResponses[keyof AppLogResponses]

export type AppAgentsData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/agent"
}

export type AppAgentsResponses = {
  /**
   * List of agents
   */
  200: Array<Agent>
}

export type AppAgentsResponse = AppAgentsResponses[keyof AppAgentsResponses]

export type AppSkillsData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/skill"
}

export type AppSkillsResponses = {
  /**
   * List of skills
   */
  200: Array<{
    name: string
    description: string
    location: string
  }>
}

export type AppSkillsResponse = AppSkillsResponses[keyof AppSkillsResponses]
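
The /skill endpoint is introduced in this added block, and its 200 payload is an inline array of { name, description, location } records. A minimal sketch that formats that list, using only the type shown above; fetching the data is out of scope here and the import path is an assumption:

import type { AppSkillsResponse } from "./gen/types" // path is an assumption

// Render one line per skill; the element type is inferred from the generated inline shape.
function formatSkills(skills: AppSkillsResponse): string {
  return skills.map((s) => `${s.name} - ${s.description} (${s.location})`).join("\n")
}
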

export type LspStatusData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/lsp"
}

export type LspStatusResponses = {
  /**
   * LSP server status
   */
  200: Array<LspStatus>
}

export type LspStatusResponse = LspStatusResponses[keyof LspStatusResponses]

export type FormatterStatusData = {
  body?: never
  path?: never
  query?: {
    directory?: string
  }
  url: "/formatter"
}

export type FormatterStatusResponses = {
  /**
   * Formatter status
   */
  200: Array<FormatterStatus>
}

export type FormatterStatusResponse = FormatterStatusResponses[keyof FormatterStatusResponses]

export type AuthSetData = {
  body?: Auth
  path: {
Some files were not shown because too many files have changed in this diff.