mirror of
https://github.com/anomalyco/opencode.git
synced 2026-04-23 06:14:53 +00:00
Compare commits
124 Commits
updated-bl
...
apply-patc
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ac3d0cb5a3 | ||
|
|
06d69ab609 | ||
|
|
c2cc486c7d | ||
|
|
8a6b8e5339 | ||
|
|
cfd6a7ae96 | ||
|
|
4173ee0e0b | ||
|
|
22b5d7e570 | ||
|
|
f1ec28176f | ||
|
|
ab78a46396 | ||
|
|
2ed18ea1fe | ||
|
|
40eddce435 | ||
|
|
78f8cc9418 | ||
|
|
58f7da6e9f | ||
|
|
5a199b04cb | ||
|
|
eb968a6651 | ||
|
|
a813fcb41c | ||
|
|
a58d1be822 | ||
|
|
07dc8d8ce4 | ||
|
|
d377246491 | ||
|
|
7030f49a74 | ||
|
|
c4e4f2a058 | ||
|
|
2729705594 | ||
|
|
ea13b6e8aa | ||
|
|
85ab9798c6 | ||
|
|
33290c54cd | ||
|
|
5d613a038d | ||
|
|
db78a59f03 | ||
|
|
7c3eeeb0fa | ||
|
|
e8357a87b0 | ||
|
|
06c543e938 | ||
|
|
759ce8fb8e | ||
|
|
38847e13bb | ||
|
|
e0c6459faa | ||
|
|
80b278ddab | ||
|
|
ef7ef6538e | ||
|
|
d23c21023a | ||
|
|
dfa2a9f225 | ||
|
|
6f78a71fa7 | ||
|
|
f8f1f46a4f | ||
|
|
ab705dacfa | ||
|
|
d1b93616f7 | ||
|
|
69215d456c | ||
|
|
54e52896a4 | ||
|
|
b18fb16e9c | ||
|
|
1250486ddf | ||
|
|
d645e8bbe1 | ||
|
|
cad415872e | ||
|
|
e8746ddb1d | ||
|
|
80020ade2e | ||
|
|
08ef97b162 | ||
|
|
1aedb265dd | ||
|
|
5c13b209aa | ||
|
|
43a9c50389 | ||
|
|
55224d64a2 | ||
|
|
c325aa1142 | ||
|
|
6e020ef9ef | ||
|
|
aca1eb6b5b | ||
|
|
3d095e7fe7 | ||
|
|
632f20558a | ||
|
|
f96c4badd8 | ||
|
|
cbe1c81470 | ||
|
|
c25155586c | ||
|
|
08b94a6890 | ||
|
|
8cddc9ea55 | ||
|
|
578239e0d0 | ||
|
|
626fa1462b | ||
|
|
968239bb76 | ||
|
|
8c24879246 | ||
|
|
9127055ae7 | ||
|
|
f5a6a4af7f | ||
|
|
6e00348bd7 | ||
|
|
95f7403daf | ||
|
|
14d1e20287 | ||
|
|
b8e2895dfc | ||
|
|
6e028ec2dc | ||
|
|
8e0ddd1ac9 | ||
|
|
da78b758d4 | ||
|
|
360765c591 | ||
|
|
db0078bf17 | ||
|
|
98578d3a7b | ||
|
|
bc3616d9c6 | ||
|
|
71306cbd1f | ||
|
|
0866034946 | ||
|
|
2ccaa10e79 | ||
|
|
e92d5b592c | ||
|
|
00ec29dae6 | ||
|
|
438916de5f | ||
|
|
8d4a67324e | ||
|
|
0d683eaa8e | ||
|
|
8fd1b92e6e | ||
|
|
22e3240296 | ||
|
|
e1d0b2ba6e | ||
|
|
ccc27e23df | ||
|
|
ad4bdd9f0f | ||
|
|
5479928a9d | ||
|
|
40836e9683 | ||
|
|
9a48f8e9e3 | ||
|
|
91b8ba2186 | ||
|
|
d075c097ac | ||
|
|
88fd6a294b | ||
|
|
ea8ef37d50 | ||
|
|
d510bd52a4 | ||
|
|
e0a854f035 | ||
|
|
bd914a8c06 | ||
|
|
a49102db01 | ||
|
|
21012fab4b | ||
|
|
9a71a73f50 | ||
|
|
2190e8c656 | ||
|
|
1fd496a5e2 | ||
|
|
74d584af34 | ||
|
|
46f415ecb0 | ||
|
|
d0399045da | ||
|
|
4be0ba19ca | ||
|
|
d5a5e6e062 | ||
|
|
e8dad85233 | ||
|
|
f197b8a0cd | ||
|
|
efaf854e09 | ||
|
|
704276753b | ||
|
|
2c5437791b | ||
|
|
94ab87ffad | ||
|
|
416f419a81 | ||
|
|
3ba03a97dc | ||
|
|
b1a22e08f5 | ||
|
|
c551a4b6e3 |
4
.github/workflows/nix-desktop.yml
vendored
4
.github/workflows/nix-desktop.yml
vendored
@@ -9,6 +9,7 @@ on:
|
||||
- "nix/**"
|
||||
- "packages/app/**"
|
||||
- "packages/desktop/**"
|
||||
- ".github/workflows/nix-desktop.yml"
|
||||
pull_request:
|
||||
paths:
|
||||
- "flake.nix"
|
||||
@@ -16,6 +17,7 @@ on:
|
||||
- "nix/**"
|
||||
- "packages/app/**"
|
||||
- "packages/desktop/**"
|
||||
- ".github/workflows/nix-desktop.yml"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
@@ -26,7 +28,7 @@ jobs:
|
||||
os:
|
||||
- blacksmith-4vcpu-ubuntu-2404
|
||||
- blacksmith-4vcpu-ubuntu-2404-arm
|
||||
- macos-15
|
||||
- macos-15-intel
|
||||
- macos-latest
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 60
|
||||
|
||||
202
.github/workflows/update-nix-hashes.yml
vendored
202
.github/workflows/update-nix-hashes.yml
vendored
@@ -10,11 +10,13 @@ on:
|
||||
- "bun.lock"
|
||||
- "package.json"
|
||||
- "packages/*/package.json"
|
||||
- ".github/workflows/update-nix-hashes.yml"
|
||||
pull_request:
|
||||
paths:
|
||||
- "bun.lock"
|
||||
- "package.json"
|
||||
- "packages/*/package.json"
|
||||
- ".github/workflows/update-nix-hashes.yml"
|
||||
|
||||
jobs:
|
||||
update-flake:
|
||||
@@ -25,7 +27,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
fetch-depth: 0
|
||||
@@ -43,9 +45,9 @@ jobs:
|
||||
- name: Update ${{ env.TITLE }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "📦 Updating $TITLE..."
|
||||
echo "Updating $TITLE..."
|
||||
nix flake update
|
||||
echo "✅ $TITLE updated successfully"
|
||||
echo "$TITLE updated successfully"
|
||||
|
||||
- name: Commit ${{ env.TITLE }} changes
|
||||
env:
|
||||
@@ -53,7 +55,7 @@ jobs:
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "🔍 Checking for changes in tracked files..."
|
||||
echo "Checking for changes in tracked files..."
|
||||
|
||||
summarize() {
|
||||
local status="$1"
|
||||
@@ -71,29 +73,29 @@ jobs:
|
||||
FILES=(flake.lock flake.nix)
|
||||
STATUS="$(git status --short -- "${FILES[@]}" || true)"
|
||||
if [ -z "$STATUS" ]; then
|
||||
echo "✅ No changes detected."
|
||||
echo "No changes detected."
|
||||
summarize "no changes"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "📝 Changes detected:"
|
||||
echo "Changes detected:"
|
||||
echo "$STATUS"
|
||||
echo "🔗 Staging files..."
|
||||
echo "Staging files..."
|
||||
git add "${FILES[@]}"
|
||||
echo "💾 Committing changes..."
|
||||
echo "Committing changes..."
|
||||
git commit -m "Update $TITLE"
|
||||
echo "✅ Changes committed"
|
||||
echo "Changes committed"
|
||||
|
||||
BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
|
||||
echo "🌳 Pulling latest from branch: $BRANCH"
|
||||
git pull --rebase origin "$BRANCH"
|
||||
echo "🚀 Pushing changes to branch: $BRANCH"
|
||||
echo "Pulling latest from branch: $BRANCH"
|
||||
git pull --rebase --autostash origin "$BRANCH"
|
||||
echo "Pushing changes to branch: $BRANCH"
|
||||
git push origin HEAD:"$BRANCH"
|
||||
echo "✅ Changes pushed successfully"
|
||||
echo "Changes pushed successfully"
|
||||
|
||||
summarize "committed $(git rev-parse --short HEAD)"
|
||||
|
||||
update-node-modules-hash:
|
||||
compute-node-modules-hash:
|
||||
needs: update-flake
|
||||
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||
strategy:
|
||||
@@ -111,11 +113,10 @@ jobs:
|
||||
runs-on: ${{ matrix.host }}
|
||||
env:
|
||||
SYSTEM: ${{ matrix.system }}
|
||||
TITLE: node_modules hash (${{ matrix.system }})
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
fetch-depth: 0
|
||||
@@ -125,6 +126,104 @@ jobs:
|
||||
- name: Setup Nix
|
||||
uses: nixbuild/nix-quick-install-action@v34
|
||||
|
||||
- name: Compute node_modules hash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
DUMMY="sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
|
||||
HASH_FILE="nix/hashes.json"
|
||||
OUTPUT_FILE="hash-${SYSTEM}.txt"
|
||||
|
||||
export NIX_KEEP_OUTPUTS=1
|
||||
export NIX_KEEP_DERIVATIONS=1
|
||||
|
||||
BUILD_LOG=$(mktemp)
|
||||
TMP_JSON=$(mktemp)
|
||||
trap 'rm -f "$BUILD_LOG" "$TMP_JSON"' EXIT
|
||||
|
||||
if [ ! -f "$HASH_FILE" ]; then
|
||||
mkdir -p "$(dirname "$HASH_FILE")"
|
||||
echo '{"nodeModules":{}}' > "$HASH_FILE"
|
||||
fi
|
||||
|
||||
# Set dummy hash to force nix to rebuild and reveal correct hash
|
||||
jq --arg system "$SYSTEM" --arg value "$DUMMY" \
|
||||
'.nodeModules = (.nodeModules // {}) | .nodeModules[$system] = $value' "$HASH_FILE" > "$TMP_JSON"
|
||||
mv "$TMP_JSON" "$HASH_FILE"
|
||||
|
||||
MODULES_ATTR=".#packages.${SYSTEM}.default.node_modules"
|
||||
DRV_PATH="$(nix eval --raw "${MODULES_ATTR}.drvPath")"
|
||||
|
||||
echo "Building node_modules for ${SYSTEM} to discover correct hash..."
|
||||
echo "Attempting to realize derivation: ${DRV_PATH}"
|
||||
REALISE_OUT=$(nix-store --realise "$DRV_PATH" --keep-failed 2>&1 | tee "$BUILD_LOG" || true)
|
||||
|
||||
BUILD_PATH=$(echo "$REALISE_OUT" | grep "^/nix/store/" | head -n1 || true)
|
||||
CORRECT_HASH=""
|
||||
|
||||
if [ -n "$BUILD_PATH" ] && [ -d "$BUILD_PATH" ]; then
|
||||
echo "Realized node_modules output: $BUILD_PATH"
|
||||
CORRECT_HASH=$(nix hash path --sri "$BUILD_PATH" 2>/dev/null || true)
|
||||
fi
|
||||
|
||||
# Try to extract hash from build log
|
||||
if [ -z "$CORRECT_HASH" ]; then
|
||||
CORRECT_HASH="$(grep -E 'got:\s+sha256-[A-Za-z0-9+/=]+' "$BUILD_LOG" | awk '{print $2}' | head -n1 || true)"
|
||||
fi
|
||||
|
||||
if [ -z "$CORRECT_HASH" ]; then
|
||||
CORRECT_HASH="$(grep -A2 'hash mismatch' "$BUILD_LOG" | grep 'got:' | awk '{print $2}' | sed 's/sha256:/sha256-/' || true)"
|
||||
fi
|
||||
|
||||
# Try to hash from kept failed build directory
|
||||
if [ -z "$CORRECT_HASH" ]; then
|
||||
KEPT_DIR=$(grep -oE "build directory.*'[^']+'" "$BUILD_LOG" | grep -oE "'/[^']+'" | tr -d "'" | head -n1 || true)
|
||||
if [ -z "$KEPT_DIR" ]; then
|
||||
KEPT_DIR=$(grep -oE '/nix/var/nix/builds/[^ ]+' "$BUILD_LOG" | head -n1 || true)
|
||||
fi
|
||||
|
||||
if [ -n "$KEPT_DIR" ] && [ -d "$KEPT_DIR" ]; then
|
||||
HASH_PATH="$KEPT_DIR"
|
||||
[ -d "$KEPT_DIR/build" ] && HASH_PATH="$KEPT_DIR/build"
|
||||
|
||||
if [ -d "$HASH_PATH/node_modules" ]; then
|
||||
CORRECT_HASH=$(nix hash path --sri "$HASH_PATH" 2>/dev/null || true)
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$CORRECT_HASH" ]; then
|
||||
echo "Failed to determine correct node_modules hash for ${SYSTEM}."
|
||||
cat "$BUILD_LOG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "$CORRECT_HASH" > "$OUTPUT_FILE"
|
||||
echo "Hash for ${SYSTEM}: $CORRECT_HASH"
|
||||
|
||||
- name: Upload hash artifact
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: hash-${{ matrix.system }}
|
||||
path: hash-${{ matrix.system }}.txt
|
||||
retention-days: 1
|
||||
|
||||
commit-node-modules-hashes:
|
||||
needs: compute-node-modules-hash
|
||||
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||
runs-on: blacksmith-4vcpu-ubuntu-2404
|
||||
env:
|
||||
TITLE: node_modules hashes
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.head_ref || github.ref_name }}
|
||||
repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
|
||||
|
||||
- name: Configure git
|
||||
run: |
|
||||
git config --global user.email "action@github.com"
|
||||
@@ -135,14 +234,57 @@ jobs:
|
||||
TARGET_BRANCH: ${{ github.head_ref || github.ref_name }}
|
||||
run: |
|
||||
BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
|
||||
git pull origin "$BRANCH"
|
||||
git pull --rebase --autostash origin "$BRANCH"
|
||||
|
||||
- name: Update ${{ env.TITLE }}
|
||||
- name: Download all hash artifacts
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
pattern: hash-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Merge hashes into hashes.json
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "🔄 Updating $TITLE..."
|
||||
nix/scripts/update-hashes.sh
|
||||
echo "✅ $TITLE updated successfully"
|
||||
|
||||
HASH_FILE="nix/hashes.json"
|
||||
|
||||
if [ ! -f "$HASH_FILE" ]; then
|
||||
mkdir -p "$(dirname "$HASH_FILE")"
|
||||
echo '{"nodeModules":{}}' > "$HASH_FILE"
|
||||
fi
|
||||
|
||||
echo "Merging hashes into ${HASH_FILE}..."
|
||||
|
||||
shopt -s nullglob
|
||||
files=(hash-*.txt)
|
||||
if [ ${#files[@]} -eq 0 ]; then
|
||||
echo "No hash files found, nothing to update"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
EXPECTED_SYSTEMS="x86_64-linux aarch64-linux x86_64-darwin aarch64-darwin"
|
||||
for sys in $EXPECTED_SYSTEMS; do
|
||||
if [ ! -f "hash-${sys}.txt" ]; then
|
||||
echo "WARNING: Missing hash file for $sys"
|
||||
fi
|
||||
done
|
||||
|
||||
for f in "${files[@]}"; do
|
||||
system="${f#hash-}"
|
||||
system="${system%.txt}"
|
||||
hash=$(cat "$f")
|
||||
if [ -z "$hash" ]; then
|
||||
echo "WARNING: Empty hash for $system, skipping"
|
||||
continue
|
||||
fi
|
||||
echo " $system: $hash"
|
||||
jq --arg sys "$system" --arg h "$hash" \
|
||||
'.nodeModules = (.nodeModules // {}) | .nodeModules[$sys] = $h' "$HASH_FILE" > "${HASH_FILE}.tmp"
|
||||
mv "${HASH_FILE}.tmp" "$HASH_FILE"
|
||||
done
|
||||
|
||||
echo "All hashes merged:"
|
||||
cat "$HASH_FILE"
|
||||
|
||||
- name: Commit ${{ env.TITLE }} changes
|
||||
env:
|
||||
@@ -150,7 +292,8 @@ jobs:
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "🔍 Checking for changes in tracked files..."
|
||||
HASH_FILE="nix/hashes.json"
|
||||
echo "Checking for changes..."
|
||||
|
||||
summarize() {
|
||||
local status="$1"
|
||||
@@ -166,27 +309,22 @@ jobs:
|
||||
echo "" >> "$GITHUB_STEP_SUMMARY"
|
||||
}
|
||||
|
||||
FILES=(nix/hashes.json)
|
||||
FILES=("$HASH_FILE")
|
||||
STATUS="$(git status --short -- "${FILES[@]}" || true)"
|
||||
if [ -z "$STATUS" ]; then
|
||||
echo "✅ No changes detected."
|
||||
echo "No changes detected."
|
||||
summarize "no changes"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "📝 Changes detected:"
|
||||
echo "Changes detected:"
|
||||
echo "$STATUS"
|
||||
echo "🔗 Staging files..."
|
||||
git add "${FILES[@]}"
|
||||
echo "💾 Committing changes..."
|
||||
git commit -m "Update $TITLE"
|
||||
echo "✅ Changes committed"
|
||||
|
||||
BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
|
||||
echo "🌳 Pulling latest from branch: $BRANCH"
|
||||
git pull --rebase origin "$BRANCH"
|
||||
echo "🚀 Pushing changes to branch: $BRANCH"
|
||||
git pull --rebase --autostash origin "$BRANCH"
|
||||
git push origin HEAD:"$BRANCH"
|
||||
echo "✅ Changes pushed successfully"
|
||||
echo "Changes pushed successfully"
|
||||
|
||||
summarize "committed $(git rev-parse --short HEAD)"
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -20,6 +20,7 @@ opencode.json
|
||||
a.out
|
||||
target
|
||||
.scripts
|
||||
.direnv/
|
||||
|
||||
# Local dev files
|
||||
opencode-dev
|
||||
|
||||
404
STATS.md
404
STATS.md
@@ -1,203 +1,205 @@
|
||||
# Download Stats
|
||||
|
||||
| Date | GitHub Downloads | npm Downloads | Total |
|
||||
| ---------- | -------------------- | ------------------- | -------------------- |
|
||||
| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
|
||||
| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
|
||||
| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
|
||||
| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
|
||||
| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
|
||||
| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
|
||||
| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
|
||||
| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
|
||||
| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
|
||||
| 2025-07-09 | 40,924 (+2,872) | 67,935 (+3,467) | 108,859 (+6,339) |
|
||||
| 2025-07-10 | 43,796 (+2,872) | 71,402 (+3,467) | 115,198 (+6,339) |
|
||||
| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |
|
||||
| 2025-07-12 | 49,302 (+2,320) | 82,177 (+4,715) | 131,479 (+7,035) |
|
||||
| 2025-07-13 | 50,803 (+1,501) | 86,394 (+4,217) | 137,197 (+5,718) |
|
||||
| 2025-07-14 | 53,283 (+2,480) | 87,860 (+1,466) | 141,143 (+3,946) |
|
||||
| 2025-07-15 | 57,590 (+4,307) | 91,036 (+3,176) | 148,626 (+7,483) |
|
||||
| 2025-07-16 | 62,313 (+4,723) | 95,258 (+4,222) | 157,571 (+8,945) |
|
||||
| 2025-07-17 | 66,684 (+4,371) | 100,048 (+4,790) | 166,732 (+9,161) |
|
||||
| 2025-07-18 | 70,379 (+3,695) | 102,587 (+2,539) | 172,966 (+6,234) |
|
||||
| 2025-07-19 | 73,497 (+3,117) | 105,904 (+3,317) | 179,401 (+6,434) |
|
||||
| 2025-07-20 | 76,453 (+2,956) | 109,044 (+3,140) | 185,497 (+6,096) |
|
||||
| 2025-07-21 | 80,197 (+3,744) | 113,537 (+4,493) | 193,734 (+8,237) |
|
||||
| 2025-07-22 | 84,251 (+4,054) | 118,073 (+4,536) | 202,324 (+8,590) |
|
||||
| 2025-07-23 | 88,589 (+4,338) | 121,436 (+3,363) | 210,025 (+7,701) |
|
||||
| 2025-07-24 | 92,469 (+3,880) | 124,091 (+2,655) | 216,560 (+6,535) |
|
||||
| 2025-07-25 | 96,417 (+3,948) | 126,985 (+2,894) | 223,402 (+6,842) |
|
||||
| 2025-07-26 | 100,646 (+4,229) | 131,411 (+4,426) | 232,057 (+8,655) |
|
||||
| 2025-07-27 | 102,644 (+1,998) | 134,736 (+3,325) | 237,380 (+5,323) |
|
||||
| 2025-07-28 | 105,446 (+2,802) | 136,016 (+1,280) | 241,462 (+4,082) |
|
||||
| 2025-07-29 | 108,998 (+3,552) | 137,542 (+1,526) | 246,540 (+5,078) |
|
||||
| 2025-07-30 | 113,544 (+4,546) | 140,317 (+2,775) | 253,861 (+7,321) |
|
||||
| 2025-07-31 | 118,339 (+4,795) | 143,344 (+3,027) | 261,683 (+7,822) |
|
||||
| 2025-08-01 | 123,539 (+5,200) | 146,680 (+3,336) | 270,219 (+8,536) |
|
||||
| 2025-08-02 | 127,864 (+4,325) | 149,236 (+2,556) | 277,100 (+6,881) |
|
||||
| 2025-08-03 | 131,397 (+3,533) | 150,451 (+1,215) | 281,848 (+4,748) |
|
||||
| 2025-08-04 | 136,266 (+4,869) | 153,260 (+2,809) | 289,526 (+7,678) |
|
||||
| 2025-08-05 | 141,596 (+5,330) | 155,752 (+2,492) | 297,348 (+7,822) |
|
||||
| 2025-08-06 | 147,067 (+5,471) | 158,309 (+2,557) | 305,376 (+8,028) |
|
||||
| 2025-08-07 | 152,591 (+5,524) | 160,889 (+2,580) | 313,480 (+8,104) |
|
||||
| 2025-08-08 | 158,187 (+5,596) | 163,448 (+2,559) | 321,635 (+8,155) |
|
||||
| 2025-08-09 | 162,770 (+4,583) | 165,721 (+2,273) | 328,491 (+6,856) |
|
||||
| 2025-08-10 | 165,695 (+2,925) | 167,109 (+1,388) | 332,804 (+4,313) |
|
||||
| 2025-08-11 | 169,297 (+3,602) | 167,953 (+844) | 337,250 (+4,446) |
|
||||
| 2025-08-12 | 176,307 (+7,010) | 171,876 (+3,923) | 348,183 (+10,933) |
|
||||
| 2025-08-13 | 182,997 (+6,690) | 177,182 (+5,306) | 360,179 (+11,996) |
|
||||
| 2025-08-14 | 189,063 (+6,066) | 179,741 (+2,559) | 368,804 (+8,625) |
|
||||
| 2025-08-15 | 193,608 (+4,545) | 181,792 (+2,051) | 375,400 (+6,596) |
|
||||
| 2025-08-16 | 198,118 (+4,510) | 184,558 (+2,766) | 382,676 (+7,276) |
|
||||
| 2025-08-17 | 201,299 (+3,181) | 186,269 (+1,711) | 387,568 (+4,892) |
|
||||
| 2025-08-18 | 204,559 (+3,260) | 187,399 (+1,130) | 391,958 (+4,390) |
|
||||
| 2025-08-19 | 209,814 (+5,255) | 189,668 (+2,269) | 399,482 (+7,524) |
|
||||
| 2025-08-20 | 214,497 (+4,683) | 191,481 (+1,813) | 405,978 (+6,496) |
|
||||
| 2025-08-21 | 220,465 (+5,968) | 194,784 (+3,303) | 415,249 (+9,271) |
|
||||
| 2025-08-22 | 225,899 (+5,434) | 197,204 (+2,420) | 423,103 (+7,854) |
|
||||
| 2025-08-23 | 229,005 (+3,106) | 199,238 (+2,034) | 428,243 (+5,140) |
|
||||
| 2025-08-24 | 232,098 (+3,093) | 201,157 (+1,919) | 433,255 (+5,012) |
|
||||
| 2025-08-25 | 236,607 (+4,509) | 202,650 (+1,493) | 439,257 (+6,002) |
|
||||
| 2025-08-26 | 242,783 (+6,176) | 205,242 (+2,592) | 448,025 (+8,768) |
|
||||
| 2025-08-27 | 248,409 (+5,626) | 205,242 (+0) | 453,651 (+5,626) |
|
||||
| 2025-08-28 | 252,796 (+4,387) | 205,242 (+0) | 458,038 (+4,387) |
|
||||
| 2025-08-29 | 256,045 (+3,249) | 211,075 (+5,833) | 467,120 (+9,082) |
|
||||
| 2025-08-30 | 258,863 (+2,818) | 212,397 (+1,322) | 471,260 (+4,140) |
|
||||
| 2025-08-31 | 262,004 (+3,141) | 213,944 (+1,547) | 475,948 (+4,688) |
|
||||
| 2025-09-01 | 265,359 (+3,355) | 215,115 (+1,171) | 480,474 (+4,526) |
|
||||
| 2025-09-02 | 270,483 (+5,124) | 217,075 (+1,960) | 487,558 (+7,084) |
|
||||
| 2025-09-03 | 274,793 (+4,310) | 219,755 (+2,680) | 494,548 (+6,990) |
|
||||
| 2025-09-04 | 280,430 (+5,637) | 222,103 (+2,348) | 502,533 (+7,985) |
|
||||
| 2025-09-05 | 283,769 (+3,339) | 223,793 (+1,690) | 507,562 (+5,029) |
|
||||
| 2025-09-06 | 286,245 (+2,476) | 225,036 (+1,243) | 511,281 (+3,719) |
|
||||
| 2025-09-07 | 288,623 (+2,378) | 225,866 (+830) | 514,489 (+3,208) |
|
||||
| 2025-09-08 | 293,341 (+4,718) | 227,073 (+1,207) | 520,414 (+5,925) |
|
||||
| 2025-09-09 | 300,036 (+6,695) | 229,788 (+2,715) | 529,824 (+9,410) |
|
||||
| 2025-09-10 | 307,287 (+7,251) | 233,435 (+3,647) | 540,722 (+10,898) |
|
||||
| 2025-09-11 | 314,083 (+6,796) | 237,356 (+3,921) | 551,439 (+10,717) |
|
||||
| 2025-09-12 | 321,046 (+6,963) | 240,728 (+3,372) | 561,774 (+10,335) |
|
||||
| 2025-09-13 | 324,894 (+3,848) | 245,539 (+4,811) | 570,433 (+8,659) |
|
||||
| 2025-09-14 | 328,876 (+3,982) | 248,245 (+2,706) | 577,121 (+6,688) |
|
||||
| 2025-09-15 | 334,201 (+5,325) | 250,983 (+2,738) | 585,184 (+8,063) |
|
||||
| 2025-09-16 | 342,609 (+8,408) | 255,264 (+4,281) | 597,873 (+12,689) |
|
||||
| 2025-09-17 | 351,117 (+8,508) | 260,970 (+5,706) | 612,087 (+14,214) |
|
||||
| 2025-09-18 | 358,717 (+7,600) | 266,922 (+5,952) | 625,639 (+13,552) |
|
||||
| 2025-09-19 | 365,401 (+6,684) | 271,859 (+4,937) | 637,260 (+11,621) |
|
||||
| 2025-09-20 | 372,092 (+6,691) | 276,917 (+5,058) | 649,009 (+11,749) |
|
||||
| 2025-09-21 | 377,079 (+4,987) | 280,261 (+3,344) | 657,340 (+8,331) |
|
||||
| 2025-09-22 | 382,492 (+5,413) | 284,009 (+3,748) | 666,501 (+9,161) |
|
||||
| 2025-09-23 | 387,008 (+4,516) | 289,129 (+5,120) | 676,137 (+9,636) |
|
||||
| 2025-09-24 | 393,325 (+6,317) | 294,927 (+5,798) | 688,252 (+12,115) |
|
||||
| 2025-09-25 | 398,879 (+5,554) | 301,663 (+6,736) | 700,542 (+12,290) |
|
||||
| 2025-09-26 | 404,334 (+5,455) | 306,713 (+5,050) | 711,047 (+10,505) |
|
||||
| 2025-09-27 | 411,618 (+7,284) | 317,763 (+11,050) | 729,381 (+18,334) |
|
||||
| 2025-09-28 | 414,910 (+3,292) | 322,522 (+4,759) | 737,432 (+8,051) |
|
||||
| 2025-09-29 | 419,919 (+5,009) | 328,033 (+5,511) | 747,952 (+10,520) |
|
||||
| 2025-09-30 | 427,991 (+8,072) | 336,472 (+8,439) | 764,463 (+16,511) |
|
||||
| 2025-10-01 | 433,591 (+5,600) | 341,742 (+5,270) | 775,333 (+10,870) |
|
||||
| 2025-10-02 | 440,852 (+7,261) | 348,099 (+6,357) | 788,951 (+13,618) |
|
||||
| 2025-10-03 | 446,829 (+5,977) | 359,937 (+11,838) | 806,766 (+17,815) |
|
||||
| 2025-10-04 | 452,561 (+5,732) | 370,386 (+10,449) | 822,947 (+16,181) |
|
||||
| 2025-10-05 | 455,559 (+2,998) | 374,745 (+4,359) | 830,304 (+7,357) |
|
||||
| 2025-10-06 | 460,927 (+5,368) | 379,489 (+4,744) | 840,416 (+10,112) |
|
||||
| 2025-10-07 | 467,336 (+6,409) | 385,438 (+5,949) | 852,774 (+12,358) |
|
||||
| 2025-10-08 | 474,643 (+7,307) | 394,139 (+8,701) | 868,782 (+16,008) |
|
||||
| 2025-10-09 | 479,203 (+4,560) | 400,526 (+6,387) | 879,729 (+10,947) |
|
||||
| 2025-10-10 | 484,374 (+5,171) | 406,015 (+5,489) | 890,389 (+10,660) |
|
||||
| 2025-10-11 | 488,427 (+4,053) | 414,699 (+8,684) | 903,126 (+12,737) |
|
||||
| 2025-10-12 | 492,125 (+3,698) | 418,745 (+4,046) | 910,870 (+7,744) |
|
||||
| 2025-10-14 | 505,130 (+13,005) | 429,286 (+10,541) | 934,416 (+23,546) |
|
||||
| 2025-10-15 | 512,717 (+7,587) | 439,290 (+10,004) | 952,007 (+17,591) |
|
||||
| 2025-10-16 | 517,719 (+5,002) | 447,137 (+7,847) | 964,856 (+12,849) |
|
||||
| 2025-10-17 | 526,239 (+8,520) | 457,467 (+10,330) | 983,706 (+18,850) |
|
||||
| 2025-10-18 | 531,564 (+5,325) | 465,272 (+7,805) | 996,836 (+13,130) |
|
||||
| 2025-10-19 | 536,209 (+4,645) | 469,078 (+3,806) | 1,005,287 (+8,451) |
|
||||
| 2025-10-20 | 541,264 (+5,055) | 472,952 (+3,874) | 1,014,216 (+8,929) |
|
||||
| 2025-10-21 | 548,721 (+7,457) | 479,703 (+6,751) | 1,028,424 (+14,208) |
|
||||
| 2025-10-22 | 557,949 (+9,228) | 491,395 (+11,692) | 1,049,344 (+20,920) |
|
||||
| 2025-10-23 | 564,716 (+6,767) | 498,736 (+7,341) | 1,063,452 (+14,108) |
|
||||
| 2025-10-24 | 572,692 (+7,976) | 506,905 (+8,169) | 1,079,597 (+16,145) |
|
||||
| 2025-10-25 | 578,927 (+6,235) | 516,129 (+9,224) | 1,095,056 (+15,459) |
|
||||
| 2025-10-26 | 584,409 (+5,482) | 521,179 (+5,050) | 1,105,588 (+10,532) |
|
||||
| 2025-10-27 | 589,999 (+5,590) | 526,001 (+4,822) | 1,116,000 (+10,412) |
|
||||
| 2025-10-28 | 595,776 (+5,777) | 532,438 (+6,437) | 1,128,214 (+12,214) |
|
||||
| 2025-10-29 | 606,259 (+10,483) | 542,064 (+9,626) | 1,148,323 (+20,109) |
|
||||
| 2025-10-30 | 613,746 (+7,487) | 542,064 (+0) | 1,155,810 (+7,487) |
|
||||
| 2025-10-30 | 617,846 (+4,100) | 555,026 (+12,962) | 1,172,872 (+17,062) |
|
||||
| 2025-10-31 | 626,612 (+8,766) | 564,579 (+9,553) | 1,191,191 (+18,319) |
|
||||
| 2025-11-01 | 636,100 (+9,488) | 581,806 (+17,227) | 1,217,906 (+26,715) |
|
||||
| 2025-11-02 | 644,067 (+7,967) | 590,004 (+8,198) | 1,234,071 (+16,165) |
|
||||
| 2025-11-03 | 653,130 (+9,063) | 597,139 (+7,135) | 1,250,269 (+16,198) |
|
||||
| 2025-11-04 | 663,912 (+10,782) | 608,056 (+10,917) | 1,271,968 (+21,699) |
|
||||
| 2025-11-05 | 675,074 (+11,162) | 619,690 (+11,634) | 1,294,764 (+22,796) |
|
||||
| 2025-11-06 | 686,252 (+11,178) | 630,885 (+11,195) | 1,317,137 (+22,373) |
|
||||
| 2025-11-07 | 696,646 (+10,394) | 642,146 (+11,261) | 1,338,792 (+21,655) |
|
||||
| 2025-11-08 | 706,035 (+9,389) | 653,489 (+11,343) | 1,359,524 (+20,732) |
|
||||
| 2025-11-09 | 713,462 (+7,427) | 660,459 (+6,970) | 1,373,921 (+14,397) |
|
||||
| 2025-11-10 | 722,288 (+8,826) | 668,225 (+7,766) | 1,390,513 (+16,592) |
|
||||
| 2025-11-11 | 729,769 (+7,481) | 677,501 (+9,276) | 1,407,270 (+16,757) |
|
||||
| 2025-11-12 | 740,180 (+10,411) | 686,454 (+8,953) | 1,426,634 (+19,364) |
|
||||
| 2025-11-13 | 749,905 (+9,725) | 696,157 (+9,703) | 1,446,062 (+19,428) |
|
||||
| 2025-11-14 | 759,928 (+10,023) | 705,237 (+9,080) | 1,465,165 (+19,103) |
|
||||
| 2025-11-15 | 765,955 (+6,027) | 712,870 (+7,633) | 1,478,825 (+13,660) |
|
||||
| 2025-11-16 | 771,069 (+5,114) | 716,596 (+3,726) | 1,487,665 (+8,840) |
|
||||
| 2025-11-17 | 780,161 (+9,092) | 723,339 (+6,743) | 1,503,500 (+15,835) |
|
||||
| 2025-11-18 | 791,563 (+11,402) | 732,544 (+9,205) | 1,524,107 (+20,607) |
|
||||
| 2025-11-19 | 804,409 (+12,846) | 747,624 (+15,080) | 1,552,033 (+27,926) |
|
||||
| 2025-11-20 | 814,620 (+10,211) | 757,907 (+10,283) | 1,572,527 (+20,494) |
|
||||
| 2025-11-21 | 826,309 (+11,689) | 769,307 (+11,400) | 1,595,616 (+23,089) |
|
||||
| 2025-11-22 | 837,269 (+10,960) | 780,996 (+11,689) | 1,618,265 (+22,649) |
|
||||
| 2025-11-23 | 846,609 (+9,340) | 795,069 (+14,073) | 1,641,678 (+23,413) |
|
||||
| 2025-11-24 | 856,733 (+10,124) | 804,033 (+8,964) | 1,660,766 (+19,088) |
|
||||
| 2025-11-25 | 869,423 (+12,690) | 817,339 (+13,306) | 1,686,762 (+25,996) |
|
||||
| 2025-11-26 | 881,414 (+11,991) | 832,518 (+15,179) | 1,713,932 (+27,170) |
|
||||
| 2025-11-27 | 893,960 (+12,546) | 846,180 (+13,662) | 1,740,140 (+26,208) |
|
||||
| 2025-11-28 | 901,741 (+7,781) | 856,482 (+10,302) | 1,758,223 (+18,083) |
|
||||
| 2025-11-29 | 908,689 (+6,948) | 863,361 (+6,879) | 1,772,050 (+13,827) |
|
||||
| 2025-11-30 | 916,116 (+7,427) | 870,194 (+6,833) | 1,786,310 (+14,260) |
|
||||
| 2025-12-01 | 925,898 (+9,782) | 876,500 (+6,306) | 1,802,398 (+16,088) |
|
||||
| 2025-12-02 | 939,250 (+13,352) | 890,919 (+14,419) | 1,830,169 (+27,771) |
|
||||
| 2025-12-03 | 952,249 (+12,999) | 903,713 (+12,794) | 1,855,962 (+25,793) |
|
||||
| 2025-12-04 | 965,611 (+13,362) | 916,471 (+12,758) | 1,882,082 (+26,120) |
|
||||
| 2025-12-05 | 977,996 (+12,385) | 930,616 (+14,145) | 1,908,612 (+26,530) |
|
||||
| 2025-12-06 | 987,884 (+9,888) | 943,773 (+13,157) | 1,931,657 (+23,045) |
|
||||
| 2025-12-07 | 994,046 (+6,162) | 951,425 (+7,652) | 1,945,471 (+13,814) |
|
||||
| 2025-12-08 | 1,000,898 (+6,852) | 957,149 (+5,724) | 1,958,047 (+12,576) |
|
||||
| 2025-12-09 | 1,011,488 (+10,590) | 973,922 (+16,773) | 1,985,410 (+27,363) |
|
||||
| 2025-12-10 | 1,025,891 (+14,403) | 991,708 (+17,786) | 2,017,599 (+32,189) |
|
||||
| 2025-12-11 | 1,045,110 (+19,219) | 1,010,559 (+18,851) | 2,055,669 (+38,070) |
|
||||
| 2025-12-12 | 1,061,340 (+16,230) | 1,030,838 (+20,279) | 2,092,178 (+36,509) |
|
||||
| 2025-12-13 | 1,073,561 (+12,221) | 1,044,608 (+13,770) | 2,118,169 (+25,991) |
|
||||
| 2025-12-14 | 1,082,042 (+8,481) | 1,052,425 (+7,817) | 2,134,467 (+16,298) |
|
||||
| 2025-12-15 | 1,093,632 (+11,590) | 1,059,078 (+6,653) | 2,152,710 (+18,243) |
|
||||
| 2025-12-16 | 1,120,477 (+26,845) | 1,078,022 (+18,944) | 2,198,499 (+45,789) |
|
||||
| 2025-12-17 | 1,151,067 (+30,590) | 1,097,661 (+19,639) | 2,248,728 (+50,229) |
|
||||
| 2025-12-18 | 1,178,658 (+27,591) | 1,113,418 (+15,757) | 2,292,076 (+43,348) |
|
||||
| 2025-12-19 | 1,203,485 (+24,827) | 1,129,698 (+16,280) | 2,333,183 (+41,107) |
|
||||
| 2025-12-20 | 1,223,000 (+19,515) | 1,146,258 (+16,560) | 2,369,258 (+36,075) |
|
||||
| 2025-12-21 | 1,242,675 (+19,675) | 1,158,909 (+12,651) | 2,401,584 (+32,326) |
|
||||
| 2025-12-22 | 1,262,522 (+19,847) | 1,169,121 (+10,212) | 2,431,643 (+30,059) |
|
||||
| 2025-12-23 | 1,286,548 (+24,026) | 1,186,439 (+17,318) | 2,472,987 (+41,344) |
|
||||
| 2025-12-24 | 1,309,323 (+22,775) | 1,203,767 (+17,328) | 2,513,090 (+40,103) |
|
||||
| 2025-12-25 | 1,333,032 (+23,709) | 1,217,283 (+13,516) | 2,550,315 (+37,225) |
|
||||
| 2025-12-26 | 1,352,411 (+19,379) | 1,227,615 (+10,332) | 2,580,026 (+29,711) |
|
||||
| 2025-12-27 | 1,371,771 (+19,360) | 1,238,236 (+10,621) | 2,610,007 (+29,981) |
|
||||
| 2025-12-28 | 1,390,388 (+18,617) | 1,245,690 (+7,454) | 2,636,078 (+26,071) |
|
||||
| 2025-12-29 | 1,415,560 (+25,172) | 1,257,101 (+11,411) | 2,672,661 (+36,583) |
|
||||
| 2025-12-30 | 1,445,450 (+29,890) | 1,272,689 (+15,588) | 2,718,139 (+45,478) |
|
||||
| 2025-12-31 | 1,479,598 (+34,148) | 1,293,235 (+20,546) | 2,772,833 (+54,694) |
|
||||
| 2026-01-01 | 1,508,883 (+29,285) | 1,309,874 (+16,639) | 2,818,757 (+45,924) |
|
||||
| 2026-01-02 | 1,563,474 (+54,591) | 1,320,959 (+11,085) | 2,884,433 (+65,676) |
|
||||
| 2026-01-03 | 1,618,065 (+54,591) | 1,331,914 (+10,955) | 2,949,979 (+65,546) |
|
||||
| 2026-01-04 | 1,672,656 (+39,702) | 1,339,883 (+7,969) | 3,012,539 (+62,560) |
|
||||
| 2026-01-05 | 1,738,171 (+65,515) | 1,353,043 (+13,160) | 3,091,214 (+78,675) |
|
||||
| 2026-01-06 | 1,960,988 (+222,817) | 1,377,377 (+24,334) | 3,338,365 (+247,151) |
|
||||
| 2026-01-07 | 2,123,239 (+162,251) | 1,398,648 (+21,271) | 3,521,887 (+183,522) |
|
||||
| 2026-01-08 | 2,272,630 (+149,391) | 1,432,480 (+33,832) | 3,705,110 (+183,223) |
|
||||
| 2026-01-09 | 2,443,565 (+170,935) | 1,469,451 (+36,971) | 3,913,016 (+207,906) |
|
||||
| 2026-01-10 | 2,632,023 (+188,458) | 1,503,670 (+34,219) | 4,135,693 (+222,677) |
|
||||
| 2026-01-11 | 2,836,394 (+204,371) | 1,530,479 (+26,809) | 4,366,873 (+231,180) |
|
||||
| 2026-01-12 | 3,053,594 (+217,200) | 1,553,671 (+23,192) | 4,607,265 (+240,392) |
|
||||
| 2026-01-13 | 3,297,078 (+243,484) | 1,595,062 (+41,391) | 4,892,140 (+284,875) |
|
||||
| 2026-01-14 | 3,568,928 (+271,850) | 1,645,362 (+50,300) | 5,214,290 (+322,150) |
|
||||
| Date | GitHub Downloads | npm Downloads | Total |
|
||||
| ---------- | -------------------- | -------------------- | -------------------- |
|
||||
| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
|
||||
| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
|
||||
| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
|
||||
| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
|
||||
| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
|
||||
| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
|
||||
| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
|
||||
| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
|
||||
| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
|
||||
| 2025-07-09 | 40,924 (+2,872) | 67,935 (+3,467) | 108,859 (+6,339) |
|
||||
| 2025-07-10 | 43,796 (+2,872) | 71,402 (+3,467) | 115,198 (+6,339) |
|
||||
| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |
|
||||
| 2025-07-12 | 49,302 (+2,320) | 82,177 (+4,715) | 131,479 (+7,035) |
|
||||
| 2025-07-13 | 50,803 (+1,501) | 86,394 (+4,217) | 137,197 (+5,718) |
|
||||
| 2025-07-14 | 53,283 (+2,480) | 87,860 (+1,466) | 141,143 (+3,946) |
|
||||
| 2025-07-15 | 57,590 (+4,307) | 91,036 (+3,176) | 148,626 (+7,483) |
|
||||
| 2025-07-16 | 62,313 (+4,723) | 95,258 (+4,222) | 157,571 (+8,945) |
|
||||
| 2025-07-17 | 66,684 (+4,371) | 100,048 (+4,790) | 166,732 (+9,161) |
|
||||
| 2025-07-18 | 70,379 (+3,695) | 102,587 (+2,539) | 172,966 (+6,234) |
|
||||
| 2025-07-19 | 73,497 (+3,117) | 105,904 (+3,317) | 179,401 (+6,434) |
|
||||
| 2025-07-20 | 76,453 (+2,956) | 109,044 (+3,140) | 185,497 (+6,096) |
|
||||
| 2025-07-21 | 80,197 (+3,744) | 113,537 (+4,493) | 193,734 (+8,237) |
|
||||
| 2025-07-22 | 84,251 (+4,054) | 118,073 (+4,536) | 202,324 (+8,590) |
|
||||
| 2025-07-23 | 88,589 (+4,338) | 121,436 (+3,363) | 210,025 (+7,701) |
|
||||
| 2025-07-24 | 92,469 (+3,880) | 124,091 (+2,655) | 216,560 (+6,535) |
|
||||
| 2025-07-25 | 96,417 (+3,948) | 126,985 (+2,894) | 223,402 (+6,842) |
|
||||
| 2025-07-26 | 100,646 (+4,229) | 131,411 (+4,426) | 232,057 (+8,655) |
|
||||
| 2025-07-27 | 102,644 (+1,998) | 134,736 (+3,325) | 237,380 (+5,323) |
|
||||
| 2025-07-28 | 105,446 (+2,802) | 136,016 (+1,280) | 241,462 (+4,082) |
|
||||
| 2025-07-29 | 108,998 (+3,552) | 137,542 (+1,526) | 246,540 (+5,078) |
|
||||
| 2025-07-30 | 113,544 (+4,546) | 140,317 (+2,775) | 253,861 (+7,321) |
|
||||
| 2025-07-31 | 118,339 (+4,795) | 143,344 (+3,027) | 261,683 (+7,822) |
|
||||
| 2025-08-01 | 123,539 (+5,200) | 146,680 (+3,336) | 270,219 (+8,536) |
|
||||
| 2025-08-02 | 127,864 (+4,325) | 149,236 (+2,556) | 277,100 (+6,881) |
|
||||
| 2025-08-03 | 131,397 (+3,533) | 150,451 (+1,215) | 281,848 (+4,748) |
|
||||
| 2025-08-04 | 136,266 (+4,869) | 153,260 (+2,809) | 289,526 (+7,678) |
|
||||
| 2025-08-05 | 141,596 (+5,330) | 155,752 (+2,492) | 297,348 (+7,822) |
|
||||
| 2025-08-06 | 147,067 (+5,471) | 158,309 (+2,557) | 305,376 (+8,028) |
|
||||
| 2025-08-07 | 152,591 (+5,524) | 160,889 (+2,580) | 313,480 (+8,104) |
|
||||
| 2025-08-08 | 158,187 (+5,596) | 163,448 (+2,559) | 321,635 (+8,155) |
|
||||
| 2025-08-09 | 162,770 (+4,583) | 165,721 (+2,273) | 328,491 (+6,856) |
|
||||
| 2025-08-10 | 165,695 (+2,925) | 167,109 (+1,388) | 332,804 (+4,313) |
|
||||
| 2025-08-11 | 169,297 (+3,602) | 167,953 (+844) | 337,250 (+4,446) |
|
||||
| 2025-08-12 | 176,307 (+7,010) | 171,876 (+3,923) | 348,183 (+10,933) |
|
||||
| 2025-08-13 | 182,997 (+6,690) | 177,182 (+5,306) | 360,179 (+11,996) |
|
||||
| 2025-08-14 | 189,063 (+6,066) | 179,741 (+2,559) | 368,804 (+8,625) |
|
||||
| 2025-08-15 | 193,608 (+4,545) | 181,792 (+2,051) | 375,400 (+6,596) |
|
||||
| 2025-08-16 | 198,118 (+4,510) | 184,558 (+2,766) | 382,676 (+7,276) |
|
||||
| 2025-08-17 | 201,299 (+3,181) | 186,269 (+1,711) | 387,568 (+4,892) |
|
||||
| 2025-08-18 | 204,559 (+3,260) | 187,399 (+1,130) | 391,958 (+4,390) |
|
||||
| 2025-08-19 | 209,814 (+5,255) | 189,668 (+2,269) | 399,482 (+7,524) |
|
||||
| 2025-08-20 | 214,497 (+4,683) | 191,481 (+1,813) | 405,978 (+6,496) |
|
||||
| 2025-08-21 | 220,465 (+5,968) | 194,784 (+3,303) | 415,249 (+9,271) |
|
||||
| 2025-08-22 | 225,899 (+5,434) | 197,204 (+2,420) | 423,103 (+7,854) |
|
||||
| 2025-08-23 | 229,005 (+3,106) | 199,238 (+2,034) | 428,243 (+5,140) |
|
||||
| 2025-08-24 | 232,098 (+3,093) | 201,157 (+1,919) | 433,255 (+5,012) |
|
||||
| 2025-08-25 | 236,607 (+4,509) | 202,650 (+1,493) | 439,257 (+6,002) |
|
||||
| 2025-08-26 | 242,783 (+6,176) | 205,242 (+2,592) | 448,025 (+8,768) |
|
||||
| 2025-08-27 | 248,409 (+5,626) | 205,242 (+0) | 453,651 (+5,626) |
|
||||
| 2025-08-28 | 252,796 (+4,387) | 205,242 (+0) | 458,038 (+4,387) |
|
||||
| 2025-08-29 | 256,045 (+3,249) | 211,075 (+5,833) | 467,120 (+9,082) |
|
||||
| 2025-08-30 | 258,863 (+2,818) | 212,397 (+1,322) | 471,260 (+4,140) |
|
||||
| 2025-08-31 | 262,004 (+3,141) | 213,944 (+1,547) | 475,948 (+4,688) |
|
||||
| 2025-09-01 | 265,359 (+3,355) | 215,115 (+1,171) | 480,474 (+4,526) |
|
||||
| 2025-09-02 | 270,483 (+5,124) | 217,075 (+1,960) | 487,558 (+7,084) |
|
||||
| 2025-09-03 | 274,793 (+4,310) | 219,755 (+2,680) | 494,548 (+6,990) |
|
||||
| 2025-09-04 | 280,430 (+5,637) | 222,103 (+2,348) | 502,533 (+7,985) |
|
||||
| 2025-09-05 | 283,769 (+3,339) | 223,793 (+1,690) | 507,562 (+5,029) |
|
||||
| 2025-09-06 | 286,245 (+2,476) | 225,036 (+1,243) | 511,281 (+3,719) |
|
||||
| 2025-09-07 | 288,623 (+2,378) | 225,866 (+830) | 514,489 (+3,208) |
|
||||
| 2025-09-08 | 293,341 (+4,718) | 227,073 (+1,207) | 520,414 (+5,925) |
|
||||
| 2025-09-09 | 300,036 (+6,695) | 229,788 (+2,715) | 529,824 (+9,410) |
|
||||
| 2025-09-10 | 307,287 (+7,251) | 233,435 (+3,647) | 540,722 (+10,898) |
|
||||
| 2025-09-11 | 314,083 (+6,796) | 237,356 (+3,921) | 551,439 (+10,717) |
|
||||
| 2025-09-12 | 321,046 (+6,963) | 240,728 (+3,372) | 561,774 (+10,335) |
|
||||
| 2025-09-13 | 324,894 (+3,848) | 245,539 (+4,811) | 570,433 (+8,659) |
|
||||
| 2025-09-14 | 328,876 (+3,982) | 248,245 (+2,706) | 577,121 (+6,688) |
|
||||
| 2025-09-15 | 334,201 (+5,325) | 250,983 (+2,738) | 585,184 (+8,063) |
|
||||
| 2025-09-16 | 342,609 (+8,408) | 255,264 (+4,281) | 597,873 (+12,689) |
|
||||
| 2025-09-17 | 351,117 (+8,508) | 260,970 (+5,706) | 612,087 (+14,214) |
|
||||
| 2025-09-18 | 358,717 (+7,600) | 266,922 (+5,952) | 625,639 (+13,552) |
|
||||
| 2025-09-19 | 365,401 (+6,684) | 271,859 (+4,937) | 637,260 (+11,621) |
|
||||
| 2025-09-20 | 372,092 (+6,691) | 276,917 (+5,058) | 649,009 (+11,749) |
|
||||
| 2025-09-21 | 377,079 (+4,987) | 280,261 (+3,344) | 657,340 (+8,331) |
|
||||
| 2025-09-22 | 382,492 (+5,413) | 284,009 (+3,748) | 666,501 (+9,161) |
|
||||
| 2025-09-23 | 387,008 (+4,516) | 289,129 (+5,120) | 676,137 (+9,636) |
|
||||
| 2025-09-24 | 393,325 (+6,317) | 294,927 (+5,798) | 688,252 (+12,115) |
|
||||
| 2025-09-25 | 398,879 (+5,554) | 301,663 (+6,736) | 700,542 (+12,290) |
|
||||
| 2025-09-26 | 404,334 (+5,455) | 306,713 (+5,050) | 711,047 (+10,505) |
|
||||
| 2025-09-27 | 411,618 (+7,284) | 317,763 (+11,050) | 729,381 (+18,334) |
|
||||
| 2025-09-28 | 414,910 (+3,292) | 322,522 (+4,759) | 737,432 (+8,051) |
|
||||
| 2025-09-29 | 419,919 (+5,009) | 328,033 (+5,511) | 747,952 (+10,520) |
|
||||
| 2025-09-30 | 427,991 (+8,072) | 336,472 (+8,439) | 764,463 (+16,511) |
|
||||
| 2025-10-01 | 433,591 (+5,600) | 341,742 (+5,270) | 775,333 (+10,870) |
|
||||
| 2025-10-02 | 440,852 (+7,261) | 348,099 (+6,357) | 788,951 (+13,618) |
|
||||
| 2025-10-03 | 446,829 (+5,977) | 359,937 (+11,838) | 806,766 (+17,815) |
|
||||
| 2025-10-04 | 452,561 (+5,732) | 370,386 (+10,449) | 822,947 (+16,181) |
|
||||
| 2025-10-05 | 455,559 (+2,998) | 374,745 (+4,359) | 830,304 (+7,357) |
|
||||
| 2025-10-06 | 460,927 (+5,368) | 379,489 (+4,744) | 840,416 (+10,112) |
|
||||
| 2025-10-07 | 467,336 (+6,409) | 385,438 (+5,949) | 852,774 (+12,358) |
|
||||
| 2025-10-08 | 474,643 (+7,307) | 394,139 (+8,701) | 868,782 (+16,008) |
|
||||
| 2025-10-09 | 479,203 (+4,560) | 400,526 (+6,387) | 879,729 (+10,947) |
|
||||
| 2025-10-10 | 484,374 (+5,171) | 406,015 (+5,489) | 890,389 (+10,660) |
|
||||
| 2025-10-11 | 488,427 (+4,053) | 414,699 (+8,684) | 903,126 (+12,737) |
|
||||
| 2025-10-12 | 492,125 (+3,698) | 418,745 (+4,046) | 910,870 (+7,744) |
|
||||
| 2025-10-14 | 505,130 (+13,005) | 429,286 (+10,541) | 934,416 (+23,546) |
|
||||
| 2025-10-15 | 512,717 (+7,587) | 439,290 (+10,004) | 952,007 (+17,591) |
|
||||
| 2025-10-16 | 517,719 (+5,002) | 447,137 (+7,847) | 964,856 (+12,849) |
|
||||
| 2025-10-17 | 526,239 (+8,520) | 457,467 (+10,330) | 983,706 (+18,850) |
|
||||
| 2025-10-18 | 531,564 (+5,325) | 465,272 (+7,805) | 996,836 (+13,130) |
|
||||
| 2025-10-19 | 536,209 (+4,645) | 469,078 (+3,806) | 1,005,287 (+8,451) |
|
||||
| 2025-10-20 | 541,264 (+5,055) | 472,952 (+3,874) | 1,014,216 (+8,929) |
|
||||
| 2025-10-21 | 548,721 (+7,457) | 479,703 (+6,751) | 1,028,424 (+14,208) |
|
||||
| 2025-10-22 | 557,949 (+9,228) | 491,395 (+11,692) | 1,049,344 (+20,920) |
|
||||
| 2025-10-23 | 564,716 (+6,767) | 498,736 (+7,341) | 1,063,452 (+14,108) |
|
||||
| 2025-10-24 | 572,692 (+7,976) | 506,905 (+8,169) | 1,079,597 (+16,145) |
|
||||
| 2025-10-25 | 578,927 (+6,235) | 516,129 (+9,224) | 1,095,056 (+15,459) |
|
||||
| 2025-10-26 | 584,409 (+5,482) | 521,179 (+5,050) | 1,105,588 (+10,532) |
|
||||
| 2025-10-27 | 589,999 (+5,590) | 526,001 (+4,822) | 1,116,000 (+10,412) |
|
||||
| 2025-10-28 | 595,776 (+5,777) | 532,438 (+6,437) | 1,128,214 (+12,214) |
|
||||
| 2025-10-29 | 606,259 (+10,483) | 542,064 (+9,626) | 1,148,323 (+20,109) |
|
||||
| 2025-10-30 | 613,746 (+7,487) | 542,064 (+0) | 1,155,810 (+7,487) |
|
||||
| 2025-10-30 | 617,846 (+4,100) | 555,026 (+12,962) | 1,172,872 (+17,062) |
|
||||
| 2025-10-31 | 626,612 (+8,766) | 564,579 (+9,553) | 1,191,191 (+18,319) |
|
||||
| 2025-11-01 | 636,100 (+9,488) | 581,806 (+17,227) | 1,217,906 (+26,715) |
|
||||
| 2025-11-02 | 644,067 (+7,967) | 590,004 (+8,198) | 1,234,071 (+16,165) |
|
||||
| 2025-11-03 | 653,130 (+9,063) | 597,139 (+7,135) | 1,250,269 (+16,198) |
|
||||
| 2025-11-04 | 663,912 (+10,782) | 608,056 (+10,917) | 1,271,968 (+21,699) |
|
||||
| 2025-11-05 | 675,074 (+11,162) | 619,690 (+11,634) | 1,294,764 (+22,796) |
|
||||
| 2025-11-06 | 686,252 (+11,178) | 630,885 (+11,195) | 1,317,137 (+22,373) |
|
||||
| 2025-11-07 | 696,646 (+10,394) | 642,146 (+11,261) | 1,338,792 (+21,655) |
|
||||
| 2025-11-08 | 706,035 (+9,389) | 653,489 (+11,343) | 1,359,524 (+20,732) |
|
||||
| 2025-11-09 | 713,462 (+7,427) | 660,459 (+6,970) | 1,373,921 (+14,397) |
|
||||
| 2025-11-10 | 722,288 (+8,826) | 668,225 (+7,766) | 1,390,513 (+16,592) |
|
||||
| 2025-11-11 | 729,769 (+7,481) | 677,501 (+9,276) | 1,407,270 (+16,757) |
|
||||
| 2025-11-12 | 740,180 (+10,411) | 686,454 (+8,953) | 1,426,634 (+19,364) |
|
||||
| 2025-11-13 | 749,905 (+9,725) | 696,157 (+9,703) | 1,446,062 (+19,428) |
|
||||
| 2025-11-14 | 759,928 (+10,023) | 705,237 (+9,080) | 1,465,165 (+19,103) |
|
||||
| 2025-11-15 | 765,955 (+6,027) | 712,870 (+7,633) | 1,478,825 (+13,660) |
|
||||
| 2025-11-16 | 771,069 (+5,114) | 716,596 (+3,726) | 1,487,665 (+8,840) |
|
||||
| 2025-11-17 | 780,161 (+9,092) | 723,339 (+6,743) | 1,503,500 (+15,835) |
|
||||
| 2025-11-18 | 791,563 (+11,402) | 732,544 (+9,205) | 1,524,107 (+20,607) |
|
||||
| 2025-11-19 | 804,409 (+12,846) | 747,624 (+15,080) | 1,552,033 (+27,926) |
|
||||
| 2025-11-20 | 814,620 (+10,211) | 757,907 (+10,283) | 1,572,527 (+20,494) |
|
||||
| 2025-11-21 | 826,309 (+11,689) | 769,307 (+11,400) | 1,595,616 (+23,089) |
|
||||
| 2025-11-22 | 837,269 (+10,960) | 780,996 (+11,689) | 1,618,265 (+22,649) |
|
||||
| 2025-11-23 | 846,609 (+9,340) | 795,069 (+14,073) | 1,641,678 (+23,413) |
|
||||
| 2025-11-24 | 856,733 (+10,124) | 804,033 (+8,964) | 1,660,766 (+19,088) |
|
||||
| 2025-11-25 | 869,423 (+12,690) | 817,339 (+13,306) | 1,686,762 (+25,996) |
|
||||
| 2025-11-26 | 881,414 (+11,991) | 832,518 (+15,179) | 1,713,932 (+27,170) |
|
||||
| 2025-11-27 | 893,960 (+12,546) | 846,180 (+13,662) | 1,740,140 (+26,208) |
|
||||
| 2025-11-28 | 901,741 (+7,781) | 856,482 (+10,302) | 1,758,223 (+18,083) |
|
||||
| 2025-11-29 | 908,689 (+6,948) | 863,361 (+6,879) | 1,772,050 (+13,827) |
|
||||
| 2025-11-30 | 916,116 (+7,427) | 870,194 (+6,833) | 1,786,310 (+14,260) |
|
||||
| 2025-12-01 | 925,898 (+9,782) | 876,500 (+6,306) | 1,802,398 (+16,088) |
|
||||
| 2025-12-02 | 939,250 (+13,352) | 890,919 (+14,419) | 1,830,169 (+27,771) |
|
||||
| 2025-12-03 | 952,249 (+12,999) | 903,713 (+12,794) | 1,855,962 (+25,793) |
|
||||
| 2025-12-04 | 965,611 (+13,362) | 916,471 (+12,758) | 1,882,082 (+26,120) |
|
||||
| 2025-12-05 | 977,996 (+12,385) | 930,616 (+14,145) | 1,908,612 (+26,530) |
|
||||
| 2025-12-06 | 987,884 (+9,888) | 943,773 (+13,157) | 1,931,657 (+23,045) |
|
||||
| 2025-12-07 | 994,046 (+6,162) | 951,425 (+7,652) | 1,945,471 (+13,814) |
|
||||
| 2025-12-08 | 1,000,898 (+6,852) | 957,149 (+5,724) | 1,958,047 (+12,576) |
|
||||
| 2025-12-09 | 1,011,488 (+10,590) | 973,922 (+16,773) | 1,985,410 (+27,363) |
|
||||
| 2025-12-10 | 1,025,891 (+14,403) | 991,708 (+17,786) | 2,017,599 (+32,189) |
|
||||
| 2025-12-11 | 1,045,110 (+19,219) | 1,010,559 (+18,851) | 2,055,669 (+38,070) |
|
||||
| 2025-12-12 | 1,061,340 (+16,230) | 1,030,838 (+20,279) | 2,092,178 (+36,509) |
|
||||
| 2025-12-13 | 1,073,561 (+12,221) | 1,044,608 (+13,770) | 2,118,169 (+25,991) |
|
||||
| 2025-12-14 | 1,082,042 (+8,481) | 1,052,425 (+7,817) | 2,134,467 (+16,298) |
|
||||
| 2025-12-15 | 1,093,632 (+11,590) | 1,059,078 (+6,653) | 2,152,710 (+18,243) |
|
||||
| 2025-12-16 | 1,120,477 (+26,845) | 1,078,022 (+18,944) | 2,198,499 (+45,789) |
|
||||
| 2025-12-17 | 1,151,067 (+30,590) | 1,097,661 (+19,639) | 2,248,728 (+50,229) |
|
||||
| 2025-12-18 | 1,178,658 (+27,591) | 1,113,418 (+15,757) | 2,292,076 (+43,348) |
|
||||
| 2025-12-19 | 1,203,485 (+24,827) | 1,129,698 (+16,280) | 2,333,183 (+41,107) |
|
||||
| 2025-12-20 | 1,223,000 (+19,515) | 1,146,258 (+16,560) | 2,369,258 (+36,075) |
|
||||
| 2025-12-21 | 1,242,675 (+19,675) | 1,158,909 (+12,651) | 2,401,584 (+32,326) |
|
||||
| 2025-12-22 | 1,262,522 (+19,847) | 1,169,121 (+10,212) | 2,431,643 (+30,059) |
|
||||
| 2025-12-23 | 1,286,548 (+24,026) | 1,186,439 (+17,318) | 2,472,987 (+41,344) |
|
||||
| 2025-12-24 | 1,309,323 (+22,775) | 1,203,767 (+17,328) | 2,513,090 (+40,103) |
|
||||
| 2025-12-25 | 1,333,032 (+23,709) | 1,217,283 (+13,516) | 2,550,315 (+37,225) |
|
||||
| 2025-12-26 | 1,352,411 (+19,379) | 1,227,615 (+10,332) | 2,580,026 (+29,711) |
|
||||
| 2025-12-27 | 1,371,771 (+19,360) | 1,238,236 (+10,621) | 2,610,007 (+29,981) |
|
||||
| 2025-12-28 | 1,390,388 (+18,617) | 1,245,690 (+7,454) | 2,636,078 (+26,071) |
|
||||
| 2025-12-29 | 1,415,560 (+25,172) | 1,257,101 (+11,411) | 2,672,661 (+36,583) |
|
||||
| 2025-12-30 | 1,445,450 (+29,890) | 1,272,689 (+15,588) | 2,718,139 (+45,478) |
|
||||
| 2025-12-31 | 1,479,598 (+34,148) | 1,293,235 (+20,546) | 2,772,833 (+54,694) |
|
||||
| 2026-01-01 | 1,508,883 (+29,285) | 1,309,874 (+16,639) | 2,818,757 (+45,924) |
|
||||
| 2026-01-02 | 1,563,474 (+54,591) | 1,320,959 (+11,085) | 2,884,433 (+65,676) |
|
||||
| 2026-01-03 | 1,618,065 (+54,591) | 1,331,914 (+10,955) | 2,949,979 (+65,546) |
|
||||
| 2026-01-04 | 1,672,656 (+54,591) | 1,339,883 (+7,969) | 3,012,539 (+62,560) |
|
||||
| 2026-01-05 | 1,738,171 (+65,515) | 1,353,043 (+13,160) | 3,091,214 (+78,675) |
|
||||
| 2026-01-06 | 1,960,988 (+222,817) | 1,377,377 (+24,334) | 3,338,365 (+247,151) |
|
||||
| 2026-01-07 | 2,123,239 (+162,251) | 1,398,648 (+21,271) | 3,521,887 (+183,522) |
|
||||
| 2026-01-08 | 2,272,630 (+149,391) | 1,432,480 (+33,832) | 3,705,110 (+183,223) |
|
||||
| 2026-01-09 | 2,443,565 (+170,935) | 1,469,451 (+36,971) | 3,913,016 (+207,906) |
|
||||
| 2026-01-10 | 2,632,023 (+188,458) | 1,503,670 (+34,219) | 4,135,693 (+222,677) |
|
||||
| 2026-01-11 | 2,836,394 (+204,371) | 1,530,479 (+26,809) | 4,366,873 (+231,180) |
|
||||
| 2026-01-12 | 3,053,594 (+217,200) | 1,553,671 (+23,192) | 4,607,265 (+240,392) |
|
||||
| 2026-01-13 | 3,297,078 (+243,484) | 1,595,062 (+41,391) | 4,892,140 (+284,875) |
|
||||
| 2026-01-14 | 3,568,928 (+271,850) | 1,645,362 (+50,300) | 5,214,290 (+322,150) |
|
||||
| 2026-01-16 | 4,121,550 (+552,622) | 1,754,418 (+109,056) | 5,875,968 (+661,678) |
|
||||
| 2026-01-17 | 4,389,558 (+268,008) | 1,805,315 (+50,897) | 6,194,873 (+318,905) |
|
||||
|
||||
75
bun.lock
75
bun.lock
@@ -22,7 +22,7 @@
|
||||
},
|
||||
"packages/app": {
|
||||
"name": "@opencode-ai/app",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
@@ -70,7 +70,7 @@
|
||||
},
|
||||
"packages/console/app": {
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@cloudflare/vite-plugin": "1.15.2",
|
||||
"@ibm/plex": "6.4.1",
|
||||
@@ -104,7 +104,7 @@
|
||||
},
|
||||
"packages/console/core": {
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-sts": "3.782.0",
|
||||
"@jsx-email/render": "1.1.1",
|
||||
@@ -131,7 +131,7 @@
|
||||
},
|
||||
"packages/console/function": {
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@ai-sdk/anthropic": "2.0.0",
|
||||
"@ai-sdk/openai": "2.0.2",
|
||||
@@ -155,7 +155,7 @@
|
||||
},
|
||||
"packages/console/mail": {
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
@@ -179,7 +179,7 @@
|
||||
},
|
||||
"packages/desktop": {
|
||||
"name": "@opencode-ai/desktop",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@opencode-ai/app": "workspace:*",
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
@@ -208,7 +208,7 @@
|
||||
},
|
||||
"packages/enterprise": {
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
"@opencode-ai/util": "workspace:*",
|
||||
@@ -237,7 +237,7 @@
|
||||
},
|
||||
"packages/function": {
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@octokit/auth-app": "8.0.1",
|
||||
"@octokit/rest": "catalog:",
|
||||
@@ -253,7 +253,7 @@
|
||||
},
|
||||
"packages/opencode": {
|
||||
"name": "opencode",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"bin": {
|
||||
"opencode": "./bin/opencode",
|
||||
},
|
||||
@@ -293,8 +293,8 @@
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@opencode-ai/util": "workspace:*",
|
||||
"@openrouter/ai-sdk-provider": "1.5.2",
|
||||
"@opentui/core": "0.1.73",
|
||||
"@opentui/solid": "0.1.73",
|
||||
"@opentui/core": "0.1.74",
|
||||
"@opentui/solid": "0.1.74",
|
||||
"@parcel/watcher": "2.5.1",
|
||||
"@pierre/diffs": "catalog:",
|
||||
"@solid-primitives/event-bus": "1.1.2",
|
||||
@@ -357,7 +357,7 @@
|
||||
},
|
||||
"packages/plugin": {
|
||||
"name": "@opencode-ai/plugin",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"zod": "catalog:",
|
||||
@@ -377,9 +377,9 @@
|
||||
},
|
||||
"packages/sdk/js": {
|
||||
"name": "@opencode-ai/sdk",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"devDependencies": {
|
||||
"@hey-api/openapi-ts": "0.88.1",
|
||||
"@hey-api/openapi-ts": "0.90.4",
|
||||
"@tsconfig/node22": "catalog:",
|
||||
"@types/node": "catalog:",
|
||||
"@typescript/native-preview": "catalog:",
|
||||
@@ -388,7 +388,7 @@
|
||||
},
|
||||
"packages/slack": {
|
||||
"name": "@opencode-ai/slack",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@slack/bolt": "^3.17.1",
|
||||
@@ -401,7 +401,7 @@
|
||||
},
|
||||
"packages/ui": {
|
||||
"name": "@opencode-ai/ui",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
@@ -424,6 +424,7 @@
|
||||
"shiki": "catalog:",
|
||||
"solid-js": "catalog:",
|
||||
"solid-list": "catalog:",
|
||||
"strip-ansi": "7.1.2",
|
||||
"virtua": "catalog:",
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -441,7 +442,7 @@
|
||||
},
|
||||
"packages/util": {
|
||||
"name": "@opencode-ai/util",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"zod": "catalog:",
|
||||
},
|
||||
@@ -452,7 +453,7 @@
|
||||
},
|
||||
"packages/web": {
|
||||
"name": "@opencode-ai/web",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@astrojs/cloudflare": "12.6.3",
|
||||
"@astrojs/markdown-remark": "6.3.1",
|
||||
@@ -922,11 +923,11 @@
|
||||
|
||||
"@happy-dom/global-registrator": ["@happy-dom/global-registrator@20.0.11", "", { "dependencies": { "@types/node": "^20.0.0", "happy-dom": "^20.0.11" } }, "sha512-GqNqiShBT/lzkHTMC/slKBrvN0DsD4Di8ssBk4aDaVgEn+2WMzE6DXxq701ndSXj7/0cJ8mNT71pM7Bnrr6JRw=="],
|
||||
|
||||
"@hey-api/codegen-core": ["@hey-api/codegen-core@0.3.3", "", { "peerDependencies": { "typescript": ">=5.5.3" } }, "sha512-vArVDtrvdzFewu1hnjUm4jX1NBITlSCeO81EdWq676MxQbyxsGcDPAgohaSA+Wvr4HjPSvsg2/1s2zYxUtXebg=="],
|
||||
"@hey-api/codegen-core": ["@hey-api/codegen-core@0.5.2", "", { "dependencies": { "ansi-colors": "4.1.3", "color-support": "1.1.3" }, "peerDependencies": { "typescript": ">=5.5.3" } }, "sha512-88cqrrB2cLXN8nMOHidQTcVOnZsJ5kebEbBefjMCifaUCwTA30ouSSWvTZqrOX4O104zjJyu7M8Gcv/NNYQuaA=="],
|
||||
|
||||
"@hey-api/json-schema-ref-parser": ["@hey-api/json-schema-ref-parser@1.2.2", "", { "dependencies": { "@jsdevtools/ono": "^7.1.3", "@types/json-schema": "^7.0.15", "js-yaml": "^4.1.1", "lodash": "^4.17.21" } }, "sha512-oS+5yAdwnK20lSeFO1d53Ku+yaGCsY8PcrmSq2GtSs3bsBfRnHAbpPKSVzQcaxAOrzj5NB+f34WhZglVrNayBA=="],
|
||||
|
||||
"@hey-api/openapi-ts": ["@hey-api/openapi-ts@0.88.1", "", { "dependencies": { "@hey-api/codegen-core": "^0.3.3", "@hey-api/json-schema-ref-parser": "1.2.2", "ansi-colors": "4.1.3", "c12": "3.3.2", "color-support": "1.1.3", "commander": "14.0.2", "open": "11.0.0", "semver": "7.7.2" }, "peerDependencies": { "typescript": ">=5.5.3" }, "bin": { "openapi-ts": "bin/run.js" } }, "sha512-x/nDTupOnV9VuSeNIiJpgIpc915GHduhyseJeMTnI0JMsXaObmpa0rgPr3ASVEYMLgpvqozIEG1RTOOnal6zLQ=="],
|
||||
"@hey-api/openapi-ts": ["@hey-api/openapi-ts@0.90.4", "", { "dependencies": { "@hey-api/codegen-core": "^0.5.2", "@hey-api/json-schema-ref-parser": "1.2.2", "ansi-colors": "4.1.3", "c12": "3.3.3", "color-support": "1.1.3", "commander": "14.0.2", "open": "11.0.0", "semver": "7.7.3" }, "peerDependencies": { "typescript": ">=5.5.3" }, "bin": { "openapi-ts": "bin/run.js" } }, "sha512-9l++kjcb0ui4JqPlueZ6OZ9zKn6eK/8//Z2jHcIXb5MRwDRgubOOSpTU5llEv3uvWfT10VzcMp99dySWq0AASw=="],
|
||||
|
||||
"@hono/node-server": ["@hono/node-server@1.19.7", "", { "peerDependencies": { "hono": "^4" } }, "sha512-vUcD0uauS7EU2caukW8z5lJKtoGMokxNbJtBiwHgpqxEXokaHCBkQUmCHhjFB1VUTWdqj25QoMkMKzgjq+uhrw=="],
|
||||
|
||||
@@ -1218,21 +1219,21 @@
|
||||
|
||||
"@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="],
|
||||
|
||||
"@opentui/core": ["@opentui/core@0.1.73", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.73", "@opentui/core-darwin-x64": "0.1.73", "@opentui/core-linux-arm64": "0.1.73", "@opentui/core-linux-x64": "0.1.73", "@opentui/core-win32-arm64": "0.1.73", "@opentui/core-win32-x64": "0.1.73", "bun-webgpu": "0.1.4", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-1OqLlArzUh3QjrYXGro5WKNgoCcacGJaaFvwOHg5lAOoSigFQRiqEUEEJLbSo3pyV8u7XEdC3M0rOP6K+oThzw=="],
|
||||
"@opentui/core": ["@opentui/core@0.1.74", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.74", "@opentui/core-darwin-x64": "0.1.74", "@opentui/core-linux-arm64": "0.1.74", "@opentui/core-linux-x64": "0.1.74", "@opentui/core-win32-arm64": "0.1.74", "@opentui/core-win32-x64": "0.1.74", "bun-webgpu": "0.1.4", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-g4W16ymv12JdgZ+9B4t7mpIICvzWy2+eHERfmDf80ALduOQCUedKQdULcBFhVCYUXIkDRtIy6CID5thMAah3FA=="],
|
||||
|
||||
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.73", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Xnc8S6kGIVcdwqqTq6jk50UVe1QtOXp+B0v4iH85iNW1Ljf198OoA7RcVA+edFb6o01PVwnhIIPtpkB/A4710w=="],
|
||||
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.74", "", { "os": "darwin", "cpu": "arm64" }, "sha512-rfmlDLtm/u17CnuhJgCxPeYMvOST+A2MOdVOk46IurtHO849bdYqK6iudKNlFRs1FOrymgSKF9GlWBHAOKeRjg=="],
|
||||
|
||||
"@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.73", "", { "os": "darwin", "cpu": "x64" }, "sha512-RlgxQxu+kxsCZzeXRnpYrqbrpxbG8M/lnDf4sTPWmhXUiuDvY5BdB4YiBY5bv8eNdJ1j9HiMLtx6ZxElEviidA=="],
|
||||
"@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.74", "", { "os": "darwin", "cpu": "x64" }, "sha512-WAD8orsDV0ZdW/5GwjOOB4FY96772xbkz+rcV7WRzEFUVaqoBaC04IuqYzS9d5s+cjkbT5Cpj47hrVYkkVQKng=="],
|
||||
|
||||
"@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.73", "", { "os": "linux", "cpu": "arm64" }, "sha512-9I88BdZMB3qtDPtDzFTg1EEt6sAGFSpOEmIIMB3MhqZqoq9+WSEyJZxM0/kff5vt4RJnqG7vz4fKMVRwNrUPGA=="],
|
||||
"@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.74", "", { "os": "linux", "cpu": "arm64" }, "sha512-lgmHzrzLy4e+rgBS+lhtsMLLgIMLbtLNMm6EzVPyYVDlLDGjM7+ulXMem7AtpaRrWrUUl4REiG9BoQUsCFDwYA=="],
|
||||
|
||||
"@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.73", "", { "os": "linux", "cpu": "x64" }, "sha512-50cGZkCh/i3nzijsjUnkmtWJtnJ6l9WpdIwSJsO2Id7nZdzupT1b6AkgGZdOgNl23MHXpAitmb+MhEAjAimCRA=="],
|
||||
"@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.74", "", { "os": "linux", "cpu": "x64" }, "sha512-8Mn2WbdBQ29xCThuPZezjDhd1N3+fXwKkGvCBOdTI0le6h2A/vCNbfUVjwfr/EGZSRXxCG+Yapol34BAULGpOA=="],
|
||||
|
||||
"@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.73", "", { "os": "win32", "cpu": "arm64" }, "sha512-mFiEeoiim5cmi6qu8CDfeecl9ivuMilfby/GnqTsr9G8e52qfT6nWF2m9Nevh9ebhXK+D/VnVhJIbObc0WIchA=="],
|
||||
"@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.74", "", { "os": "win32", "cpu": "arm64" }, "sha512-dvYUXz03avnI6ZluyLp00HPmR0UT/IE/6QS97XBsgJlUTtpnbKkBtB5jD1NHwWkElaRj1Qv2QP36ngFoJqbl9g=="],
|
||||
|
||||
"@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.73", "", { "os": "win32", "cpu": "x64" }, "sha512-vzWHUi2vgwImuyxl+hlmK0aeCbnwozeuicIcHJE0orPOwp2PAKyR9WO330szAvfIO5ZPbNkjWfh6xIYnASM0lQ=="],
|
||||
"@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.74", "", { "os": "win32", "cpu": "x64" }, "sha512-3wfWXaAKOIlDQz6ZZIESf2M+YGZ7uFHijjTEM8w/STRlLw8Y6+QyGYi1myHSM4d6RSO+/s2EMDxvjDf899W9vQ=="],
|
||||
|
||||
"@opentui/solid": ["@opentui/solid@0.1.73", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.73", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.9", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.9" } }, "sha512-FBSTiuWl+hHqFxmrJfC93cbJ0PJ4QoFbvRFuD6Gzrea5rH+G7BidjyI8YZuCcNnriDuIYaXTJdvBqe15lgKR1A=="],
|
||||
"@opentui/solid": ["@opentui/solid@0.1.74", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.74", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.9", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.9" } }, "sha512-Vz82cI8T9YeJjGsVg4ULp6ral4N+xyt1j9A6Tbu3aaQgEKiB74LW03EXREehfjPr1irOFxtKfWPbx5NKH0Upag=="],
|
||||
|
||||
"@oslojs/asn1": ["@oslojs/asn1@1.0.0", "", { "dependencies": { "@oslojs/binary": "1.0.0" } }, "sha512-zw/wn0sj0j0QKbIXfIlnEcTviaCzYOY3V5rAyjR6YtOByFtJiT574+8p9Wlach0lZH9fddD4yb9laEAIl4vXQA=="],
|
||||
|
||||
@@ -2094,7 +2095,7 @@
|
||||
|
||||
"bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="],
|
||||
|
||||
"c12": ["c12@3.3.2", "", { "dependencies": { "chokidar": "^4.0.3", "confbox": "^0.2.2", "defu": "^6.1.4", "dotenv": "^17.2.3", "exsolve": "^1.0.8", "giget": "^2.0.0", "jiti": "^2.6.1", "ohash": "^2.0.11", "pathe": "^2.0.3", "perfect-debounce": "^2.0.0", "pkg-types": "^2.3.0", "rc9": "^2.1.2" }, "peerDependencies": { "magicast": "*" }, "optionalPeers": ["magicast"] }, "sha512-QkikB2X5voO1okL3QsES0N690Sn/K9WokXqUsDQsWy5SnYb+psYQFGA10iy1bZHj3fjISKsI67Q90gruvWWM3A=="],
|
||||
"c12": ["c12@3.3.3", "", { "dependencies": { "chokidar": "^5.0.0", "confbox": "^0.2.2", "defu": "^6.1.4", "dotenv": "^17.2.3", "exsolve": "^1.0.8", "giget": "^2.0.0", "jiti": "^2.6.1", "ohash": "^2.0.11", "pathe": "^2.0.3", "perfect-debounce": "^2.0.0", "pkg-types": "^2.3.0", "rc9": "^2.1.2" }, "peerDependencies": { "magicast": "*" }, "optionalPeers": ["magicast"] }, "sha512-750hTRvgBy5kcMNPdh95Qo+XUBeGo8C7nsKSmedDmaQI+E0r82DwHeM6vBewDe4rGFbnxoa4V9pw+sPh5+Iz8Q=="],
|
||||
|
||||
"call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="],
|
||||
|
||||
@@ -3494,7 +3495,7 @@
|
||||
|
||||
"selderee": ["selderee@0.11.0", "", { "dependencies": { "parseley": "^0.12.0" } }, "sha512-5TF+l7p4+OsnP8BCCvSyZiSPc4x4//p5uPwK8TCnVPJYRmU2aYKMpOXvw8zM5a5JvuuCGN1jmsMwuU2W02ukfA=="],
|
||||
|
||||
"semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
"semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
|
||||
|
||||
"send": ["send@0.19.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw=="],
|
||||
|
||||
@@ -4280,6 +4281,8 @@
|
||||
|
||||
"astro/diff": ["diff@5.2.0", "", {}, "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A=="],
|
||||
|
||||
"astro/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"astro/shiki": ["shiki@3.15.0", "", { "dependencies": { "@shikijs/core": "3.15.0", "@shikijs/engine-javascript": "3.15.0", "@shikijs/engine-oniguruma": "3.15.0", "@shikijs/langs": "3.15.0", "@shikijs/themes": "3.15.0", "@shikijs/types": "3.15.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-kLdkY6iV3dYbtPwS9KXU7mjfmDm25f5m0IPNFnaXO7TBPcvbUOY72PYXSuSqDzwp+vlH/d7MXpHlKO/x+QoLXw=="],
|
||||
|
||||
"astro/unstorage": ["unstorage@1.17.3", "", { "dependencies": { "anymatch": "^3.1.3", "chokidar": "^4.0.3", "destr": "^2.0.5", "h3": "^1.15.4", "lru-cache": "^10.4.3", "node-fetch-native": "^1.6.7", "ofetch": "^1.5.1", "ufo": "^1.6.1" }, "peerDependencies": { "@azure/app-configuration": "^1.8.0", "@azure/cosmos": "^4.2.0", "@azure/data-tables": "^13.3.0", "@azure/identity": "^4.6.0", "@azure/keyvault-secrets": "^4.9.0", "@azure/storage-blob": "^12.26.0", "@capacitor/preferences": "^6.0.3 || ^7.0.0", "@deno/kv": ">=0.9.0", "@netlify/blobs": "^6.5.0 || ^7.0.0 || ^8.1.0 || ^9.0.0 || ^10.0.0", "@planetscale/database": "^1.19.0", "@upstash/redis": "^1.34.3", "@vercel/blob": ">=0.27.1", "@vercel/functions": "^2.2.12 || ^3.0.0", "@vercel/kv": "^1.0.1", "aws4fetch": "^1.0.20", "db0": ">=0.2.1", "idb-keyval": "^6.2.1", "ioredis": "^5.4.2", "uploadthing": "^7.4.4" }, "optionalPeers": ["@azure/app-configuration", "@azure/cosmos", "@azure/data-tables", "@azure/identity", "@azure/keyvault-secrets", "@azure/storage-blob", "@capacitor/preferences", "@deno/kv", "@netlify/blobs", "@planetscale/database", "@upstash/redis", "@vercel/blob", "@vercel/functions", "@vercel/kv", "aws4fetch", "db0", "idb-keyval", "ioredis", "uploadthing"] }, "sha512-i+JYyy0DoKmQ3FximTHbGadmIYb8JEpq7lxUjnjeB702bCPum0vzo6oy5Mfu0lpqISw7hCyMW2yj4nWC8bqJ3Q=="],
|
||||
@@ -4302,6 +4305,8 @@
|
||||
|
||||
"bun-webgpu/@webgpu/types": ["@webgpu/types@0.1.66", "", {}, "sha512-YA2hLrwLpDsRueNDXIMqN9NTzD6bCDkuXbOSe0heS+f8YE8usA6Gbv1prj81pzVHrbaAma7zObnIC+I6/sXJgA=="],
|
||||
|
||||
"c12/chokidar": ["chokidar@5.0.0", "", { "dependencies": { "readdirp": "^5.0.0" } }, "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw=="],
|
||||
|
||||
"clean-css/source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
|
||||
|
||||
"compress-commons/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="],
|
||||
@@ -4318,6 +4323,8 @@
|
||||
|
||||
"editorconfig/minimatch": ["minimatch@9.0.1", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w=="],
|
||||
|
||||
"editorconfig/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"engine.io-client/ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="],
|
||||
|
||||
"es-get-iterator/isarray": ["isarray@2.0.5", "", {}, "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw=="],
|
||||
@@ -4344,6 +4351,8 @@
|
||||
|
||||
"gaxios/node-fetch": ["node-fetch@3.3.2", "", { "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", "formdata-polyfill": "^4.0.10" } }, "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA=="],
|
||||
|
||||
"gel/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"glob/minimatch": ["minimatch@10.1.1", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ=="],
|
||||
|
||||
"globby/ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="],
|
||||
@@ -4358,6 +4367,8 @@
|
||||
|
||||
"jsonwebtoken/jws": ["jws@3.2.2", "", { "dependencies": { "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } }, "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA=="],
|
||||
|
||||
"jsonwebtoken/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"katex/commander": ["commander@8.3.0", "", {}, "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww=="],
|
||||
|
||||
"lazystream/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],
|
||||
@@ -4442,6 +4453,8 @@
|
||||
|
||||
"sharp/detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
||||
|
||||
"sharp/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"shiki/@shikijs/core": ["@shikijs/core@3.20.0", "", { "dependencies": { "@shikijs/types": "3.20.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4", "hast-util-to-html": "^9.0.5" } }, "sha512-f2ED7HYV4JEk827mtMDwe/yQ25pRiXZmtHjWF8uzZKuKiEsJR7Ce1nuQ+HhV9FzDcbIo4ObBCD9GPTzNuy9S1g=="],
|
||||
|
||||
"shiki/@shikijs/types": ["@shikijs/types@3.20.0", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-lhYAATn10nkZcBQ0BlzSbJA3wcmL5MXUUF8d2Zzon6saZDlToKaiRX60n2+ZaHJCmXEcZRWNzn+k9vplr8Jhsw=="],
|
||||
@@ -4920,6 +4933,8 @@
|
||||
|
||||
"body-parser/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
|
||||
|
||||
"c12/chokidar/readdirp": ["readdirp@5.0.0", "", {}, "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ=="],
|
||||
|
||||
"cross-spawn/which/isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.19.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA=="],
|
||||
|
||||
6
flake.lock
generated
6
flake.lock
generated
@@ -2,11 +2,11 @@
|
||||
"nodes": {
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1768395095,
|
||||
"narHash": "sha256-ZhuYJbwbZT32QA95tSkXd9zXHcdZj90EzHpEXBMabaw=",
|
||||
"lastModified": 1768456270,
|
||||
"narHash": "sha256-NgaL2CCiUR6nsqUIY4yxkzz07iQUlUCany44CFv+OxY=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "13868c071cc73a5e9f610c47d7bb08e5da64fdd5",
|
||||
"rev": "f4606b01b39e09065df37905a2133905246db9ed",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
||||
28
flake.nix
28
flake.nix
@@ -7,6 +7,7 @@
|
||||
|
||||
outputs =
|
||||
{
|
||||
self,
|
||||
nixpkgs,
|
||||
...
|
||||
}:
|
||||
@@ -107,33 +108,10 @@
|
||||
};
|
||||
in
|
||||
{
|
||||
default = opencodePkg;
|
||||
default = self.packages.${system}.opencode;
|
||||
opencode = opencodePkg;
|
||||
desktop = desktopPkg;
|
||||
}
|
||||
);
|
||||
|
||||
apps = forEachSystem (
|
||||
system:
|
||||
let
|
||||
pkgs = pkgsFor system;
|
||||
in
|
||||
{
|
||||
opencode-dev = {
|
||||
type = "app";
|
||||
meta = {
|
||||
description = "Nix devshell shell for OpenCode";
|
||||
runtimeInputs = [ pkgs.bun ];
|
||||
};
|
||||
program = "${
|
||||
pkgs.writeShellApplication {
|
||||
name = "opencode-dev";
|
||||
text = ''
|
||||
exec bun run dev "$@"
|
||||
'';
|
||||
}
|
||||
}/bin/opencode-dev";
|
||||
};
|
||||
}
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
cargo,
|
||||
rustc,
|
||||
makeBinaryWrapper,
|
||||
copyDesktopItems,
|
||||
makeDesktopItem,
|
||||
nodejs,
|
||||
jq,
|
||||
}:
|
||||
@@ -57,12 +59,28 @@ rustPlatform.buildRustPackage rec {
|
||||
pkg-config
|
||||
bun
|
||||
makeBinaryWrapper
|
||||
copyDesktopItems
|
||||
cargo
|
||||
rustc
|
||||
nodejs
|
||||
jq
|
||||
];
|
||||
|
||||
# based on packages/desktop/src-tauri/release/appstream.metainfo.xml
|
||||
desktopItems = lib.optionals stdenv.isLinux [
|
||||
(makeDesktopItem {
|
||||
name = "ai.opencode.opencode";
|
||||
desktopName = "OpenCode";
|
||||
comment = "Open source AI coding agent";
|
||||
exec = "opencode-desktop";
|
||||
icon = "opencode";
|
||||
terminal = false;
|
||||
type = "Application";
|
||||
categories = [ "Development" "IDE" ];
|
||||
startupWMClass = "opencode";
|
||||
})
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
openssl
|
||||
]
|
||||
@@ -121,6 +139,10 @@ rustPlatform.buildRustPackage rec {
|
||||
# It looks for them in the location specified in tauri.conf.json.
|
||||
|
||||
postInstall = lib.optionalString stdenv.isLinux ''
|
||||
# Install icon
|
||||
mkdir -p $out/share/icons/hicolor/128x128/apps
|
||||
cp ../../../packages/desktop/src-tauri/icons/prod/128x128.png $out/share/icons/hicolor/128x128/apps/opencode.png
|
||||
|
||||
# Wrap the binary to ensure it finds the libraries
|
||||
wrapProgram $out/bin/opencode-desktop \
|
||||
--prefix LD_LIBRARY_PATH : ${
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"nodeModules": {
|
||||
"x86_64-linux": "sha256-qjXrRkNAJsarbUBMiEL18lGkr65w74YvCsFVjrSCQHI=",
|
||||
"aarch64-linux": "sha256-E6lyYFApS1cw3jE7ISx5QZxDDJ9V3HU0ICYFdY+aIBw=",
|
||||
"aarch64-darwin": "sha256-7UajHu40n7JKqurU/+CGlitErsVFA2qDneUytI8+/zQ=",
|
||||
"x86_64-darwin": "sha256-LxBsYdq5AzInQJzF89taXvS2vigew5C5hjaIEH8rTb8="
|
||||
"x86_64-linux": "sha256-4zchRpxzvHnPMcwumgL9yaX0deIXS5IGPp131eYsSvg=",
|
||||
"aarch64-linux": "sha256-3/BSRsl5pI0Iz3qAFZxIkOehFLZ2Ox9UsbdDHYzqlVg=",
|
||||
"aarch64-darwin": "sha256-86d/G1q6xiHSSlm+/irXoKLb/yLQbV348uuSrBV70+Q=",
|
||||
"x86_64-darwin": "sha256-WYaP44PWRGtoG1DIuUJUH4DvuaCuFhlJZ9fPzGsiIfE="
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,119 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
DUMMY="sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
|
||||
SYSTEM=${SYSTEM:-x86_64-linux}
|
||||
DEFAULT_HASH_FILE=${MODULES_HASH_FILE:-nix/hashes.json}
|
||||
HASH_FILE=${HASH_FILE:-$DEFAULT_HASH_FILE}
|
||||
|
||||
if [ ! -f "$HASH_FILE" ]; then
|
||||
cat >"$HASH_FILE" <<EOF
|
||||
{
|
||||
"nodeModules": {}
|
||||
}
|
||||
EOF
|
||||
fi
|
||||
|
||||
if git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
|
||||
if ! git ls-files --error-unmatch "$HASH_FILE" >/dev/null 2>&1; then
|
||||
git add -N "$HASH_FILE" >/dev/null 2>&1 || true
|
||||
fi
|
||||
fi
|
||||
|
||||
export DUMMY
|
||||
export NIX_KEEP_OUTPUTS=1
|
||||
export NIX_KEEP_DERIVATIONS=1
|
||||
|
||||
cleanup() {
|
||||
rm -f "${JSON_OUTPUT:-}" "${BUILD_LOG:-}" "${TMP_EXPR:-}"
|
||||
}
|
||||
|
||||
trap cleanup EXIT
|
||||
|
||||
write_node_modules_hash() {
|
||||
local value="$1"
|
||||
local system="${2:-$SYSTEM}"
|
||||
local temp
|
||||
temp=$(mktemp)
|
||||
|
||||
if jq -e '.nodeModules | type == "object"' "$HASH_FILE" >/dev/null 2>&1; then
|
||||
jq --arg system "$system" --arg value "$value" '.nodeModules[$system] = $value' "$HASH_FILE" >"$temp"
|
||||
else
|
||||
jq --arg system "$system" --arg value "$value" '.nodeModules = {($system): $value}' "$HASH_FILE" >"$temp"
|
||||
fi
|
||||
|
||||
mv "$temp" "$HASH_FILE"
|
||||
}
|
||||
|
||||
TARGET="packages.${SYSTEM}.default"
|
||||
MODULES_ATTR=".#packages.${SYSTEM}.default.node_modules"
|
||||
CORRECT_HASH=""
|
||||
|
||||
DRV_PATH="$(nix eval --raw "${MODULES_ATTR}.drvPath")"
|
||||
|
||||
echo "Setting dummy node_modules outputHash for ${SYSTEM}..."
|
||||
write_node_modules_hash "$DUMMY"
|
||||
|
||||
BUILD_LOG=$(mktemp)
|
||||
JSON_OUTPUT=$(mktemp)
|
||||
|
||||
echo "Building node_modules for ${SYSTEM} to discover correct outputHash..."
|
||||
echo "Attempting to realize derivation: ${DRV_PATH}"
|
||||
REALISE_OUT=$(nix-store --realise "$DRV_PATH" --keep-failed 2>&1 | tee "$BUILD_LOG" || true)
|
||||
|
||||
BUILD_PATH=$(echo "$REALISE_OUT" | grep "^/nix/store/" | head -n1 || true)
|
||||
if [ -n "$BUILD_PATH" ] && [ -d "$BUILD_PATH" ]; then
|
||||
echo "Realized node_modules output: $BUILD_PATH"
|
||||
CORRECT_HASH=$(nix hash path --sri "$BUILD_PATH" 2>/dev/null || true)
|
||||
fi
|
||||
|
||||
if [ -z "$CORRECT_HASH" ]; then
|
||||
CORRECT_HASH="$(grep -E 'got:\s+sha256-[A-Za-z0-9+/=]+' "$BUILD_LOG" | awk '{print $2}' | head -n1 || true)"
|
||||
|
||||
if [ -z "$CORRECT_HASH" ]; then
|
||||
CORRECT_HASH="$(grep -A2 'hash mismatch' "$BUILD_LOG" | grep 'got:' | awk '{print $2}' | sed 's/sha256:/sha256-/' || true)"
|
||||
fi
|
||||
|
||||
if [ -z "$CORRECT_HASH" ]; then
|
||||
echo "Searching for kept failed build directory..."
|
||||
KEPT_DIR=$(grep -oE "build directory.*'[^']+'" "$BUILD_LOG" | grep -oE "'/[^']+'" | tr -d "'" | head -n1)
|
||||
|
||||
if [ -z "$KEPT_DIR" ]; then
|
||||
KEPT_DIR=$(grep -oE '/nix/var/nix/builds/[^ ]+' "$BUILD_LOG" | head -n1)
|
||||
fi
|
||||
|
||||
if [ -n "$KEPT_DIR" ] && [ -d "$KEPT_DIR" ]; then
|
||||
echo "Found kept build directory: $KEPT_DIR"
|
||||
if [ -d "$KEPT_DIR/build" ]; then
|
||||
HASH_PATH="$KEPT_DIR/build"
|
||||
else
|
||||
HASH_PATH="$KEPT_DIR"
|
||||
fi
|
||||
|
||||
echo "Attempting to hash: $HASH_PATH"
|
||||
ls -la "$HASH_PATH" || true
|
||||
|
||||
if [ -d "$HASH_PATH/node_modules" ]; then
|
||||
CORRECT_HASH=$(nix hash path --sri "$HASH_PATH" 2>/dev/null || true)
|
||||
echo "Computed hash from kept build: $CORRECT_HASH"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$CORRECT_HASH" ]; then
|
||||
echo "Failed to determine correct node_modules hash for ${SYSTEM}."
|
||||
echo "Build log:"
|
||||
cat "$BUILD_LOG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
write_node_modules_hash "$CORRECT_HASH"
|
||||
|
||||
jq -e --arg system "$SYSTEM" --arg hash "$CORRECT_HASH" '.nodeModules[$system] == $hash' "$HASH_FILE" >/dev/null
|
||||
|
||||
echo "node_modules hash updated for ${SYSTEM}: $CORRECT_HASH"
|
||||
|
||||
rm -f "$BUILD_LOG"
|
||||
unset BUILD_LOG
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/app",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"description": "",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useDialog } from "@opencode-ai/ui/context/dialog"
|
||||
import { Dialog } from "@opencode-ai/ui/dialog"
|
||||
import { FileIcon } from "@opencode-ai/ui/file-icon"
|
||||
import { Keybind } from "@opencode-ai/ui/keybind"
|
||||
import { List } from "@opencode-ai/ui/list"
|
||||
import { getDirectory, getFilename } from "@opencode-ai/util/path"
|
||||
import { useParams } from "@solidjs/router"
|
||||
@@ -133,14 +134,14 @@ export function DialogSelectFile() {
|
||||
})
|
||||
|
||||
return (
|
||||
<Dialog title="Search">
|
||||
<Dialog class="pt-3 pb-0 !max-h-[480px]">
|
||||
<List
|
||||
search={{ placeholder: "Search files and commands", autofocus: true }}
|
||||
search={{ placeholder: "Search files and commands", autofocus: true, hideIcon: true, class: "pl-3 pr-2 !mb-0" }}
|
||||
emptyMessage="No results found"
|
||||
items={items}
|
||||
key={(item) => item.id}
|
||||
filterKeys={["title", "description", "category"]}
|
||||
groupBy={(item) => (grouped() ? item.category : "")}
|
||||
groupBy={(item) => item.category}
|
||||
onMove={handleMove}
|
||||
onSelect={handleSelect}
|
||||
>
|
||||
@@ -161,7 +162,7 @@ export function DialogSelectFile() {
|
||||
</div>
|
||||
}
|
||||
>
|
||||
<div class="w-full flex items-center justify-between gap-4">
|
||||
<div class="w-full flex items-center justify-between gap-4 pl-1">
|
||||
<div class="flex items-center gap-2 min-w-0">
|
||||
<span class="text-14-regular text-text-strong whitespace-nowrap">{item.title}</span>
|
||||
<Show when={item.description}>
|
||||
@@ -169,7 +170,7 @@ export function DialogSelectFile() {
|
||||
</Show>
|
||||
</div>
|
||||
<Show when={item.keybind}>
|
||||
<span class="text-12-regular text-text-subtle shrink-0">{formatKeybind(item.keybind ?? "")}</span>
|
||||
<Keybind class="rounded-[4px]">{formatKeybind(item.keybind ?? "")}</Keybind>
|
||||
</Show>
|
||||
</div>
|
||||
</Show>
|
||||
|
||||
@@ -16,6 +16,7 @@ import { Button } from "@opencode-ai/ui/button"
|
||||
import { Tooltip, TooltipKeybind } from "@opencode-ai/ui/tooltip"
|
||||
import { Popover } from "@opencode-ai/ui/popover"
|
||||
import { TextField } from "@opencode-ai/ui/text-field"
|
||||
import { Keybind } from "@opencode-ai/ui/keybind"
|
||||
|
||||
export function SessionHeader() {
|
||||
const globalSDK = useGlobalSDK()
|
||||
@@ -54,21 +55,17 @@ export function SessionHeader() {
|
||||
<Portal mount={mount()}>
|
||||
<button
|
||||
type="button"
|
||||
class="hidden md:flex w-[320px] h-8 p-1.5 items-center gap-2 justify-between rounded-md border border-border-weak-base bg-surface-raised-base transition-colors cursor-default hover:bg-surface-raised-base-hover focus:bg-surface-raised-base-hover active:bg-surface-raised-base-active"
|
||||
class="hidden md:flex w-[320px] p-1 pl-1.5 items-center gap-2 justify-between rounded-md border border-border-weak-base bg-surface-raised-base transition-colors cursor-default hover:bg-surface-raised-base-hover focus:bg-surface-raised-base-hover active:bg-surface-raised-base-active"
|
||||
onClick={() => command.trigger("file.open")}
|
||||
>
|
||||
<div class="flex items-center gap-2">
|
||||
<Icon name="magnifying-glass" size="normal" class="icon-base" />
|
||||
<span class="flex-1 min-w-0 text-14-regular text-text-weak truncate">Search {name()}</span>
|
||||
<span class="flex-1 min-w-0 text-14-regular text-text-weak truncate h-3.5 flex items-center overflow-visible">
|
||||
Search {name()}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<Show when={hotkey()}>
|
||||
{(keybind) => (
|
||||
<span class="shrink-0 flex items-center justify-center h-5 px-2 rounded-[2px] border border-border-weak-base bg-surface-base text-12-medium text-text-weak">
|
||||
{keybind()}
|
||||
</span>
|
||||
)}
|
||||
</Show>
|
||||
<Show when={hotkey()}>{(keybind) => <Keybind>{keybind()}</Keybind>}</Show>
|
||||
</button>
|
||||
</Portal>
|
||||
)}
|
||||
|
||||
@@ -81,13 +81,15 @@ export function Titlebar() {
|
||||
>
|
||||
<Show when={mac()}>
|
||||
<div class="w-[72px] h-full shrink-0" data-tauri-drag-region />
|
||||
<div class="xl:hidden w-10 shrink-0 flex items-center justify-center">
|
||||
<IconButton icon="menu" variant="ghost" class="size-8 rounded-md" onClick={layout.mobileSidebar.toggle} />
|
||||
</div>
|
||||
</Show>
|
||||
<Show when={!mac()}>
|
||||
<div class="xl:hidden w-[48px] shrink-0 flex items-center justify-center">
|
||||
<IconButton icon="menu" variant="ghost" class="size-8 rounded-md" onClick={layout.mobileSidebar.toggle} />
|
||||
</div>
|
||||
</Show>
|
||||
<IconButton
|
||||
icon="menu"
|
||||
variant="ghost"
|
||||
class="xl:hidden size-8 rounded-md"
|
||||
onClick={layout.mobileSidebar.toggle}
|
||||
/>
|
||||
<TooltipKeybind
|
||||
class={web() ? "hidden xl:flex shrink-0 ml-14" : "hidden xl:flex shrink-0"}
|
||||
placement="bottom"
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { createMemo, createSignal, onCleanup, onMount, type Accessor } from "solid-js"
|
||||
import { createSimpleContext } from "@opencode-ai/ui/context"
|
||||
import { useDialog } from "@opencode-ai/ui/context/dialog"
|
||||
|
||||
const IS_MAC = typeof navigator === "object" && /(Mac|iPod|iPhone|iPad)/.test(navigator.platform)
|
||||
|
||||
@@ -104,7 +105,15 @@ export function formatKeybind(config: string): string {
|
||||
if (kb.meta) parts.push(IS_MAC ? "⌘" : "Meta")
|
||||
|
||||
if (kb.key) {
|
||||
const displayKey = kb.key.length === 1 ? kb.key.toUpperCase() : kb.key.charAt(0).toUpperCase() + kb.key.slice(1)
|
||||
const arrows: Record<string, string> = {
|
||||
arrowup: "↑",
|
||||
arrowdown: "↓",
|
||||
arrowleft: "←",
|
||||
arrowright: "→",
|
||||
}
|
||||
const displayKey =
|
||||
arrows[kb.key.toLowerCase()] ??
|
||||
(kb.key.length === 1 ? kb.key.toUpperCase() : kb.key.charAt(0).toUpperCase() + kb.key.slice(1))
|
||||
parts.push(displayKey)
|
||||
}
|
||||
|
||||
@@ -114,6 +123,7 @@ export function formatKeybind(config: string): string {
|
||||
export const { use: useCommand, provider: CommandProvider } = createSimpleContext({
|
||||
name: "Command",
|
||||
init: () => {
|
||||
const dialog = useDialog()
|
||||
const [registrations, setRegistrations] = createSignal<Accessor<CommandOption[]>[]>([])
|
||||
const [suspendCount, setSuspendCount] = createSignal(0)
|
||||
|
||||
@@ -157,7 +167,7 @@ export const { use: useCommand, provider: CommandProvider } = createSimpleContex
|
||||
}
|
||||
|
||||
const handleKeyDown = (event: KeyboardEvent) => {
|
||||
if (suspended()) return
|
||||
if (suspended() || dialog.active) return
|
||||
|
||||
const paletteKeybinds = parseKeybind("mod+shift+p")
|
||||
if (matchKeybind(paletteKeybinds, event)) {
|
||||
|
||||
@@ -110,6 +110,7 @@ function createGlobalSync() {
|
||||
})
|
||||
|
||||
const children: Record<string, [Store<State>, SetStoreFunction<State>]> = {}
|
||||
|
||||
function child(directory: string) {
|
||||
if (!directory) console.error("No directory provided")
|
||||
if (!children[directory]) {
|
||||
@@ -122,29 +123,33 @@ function createGlobalSync() {
|
||||
if (!cache) throw new Error("Failed to create persisted cache")
|
||||
vcsCache.set(directory, { store: cache[0], setStore: cache[1], ready: cache[3] })
|
||||
|
||||
children[directory] = createStore<State>({
|
||||
project: "",
|
||||
provider: { all: [], connected: [], default: {} },
|
||||
config: {},
|
||||
path: { state: "", config: "", worktree: "", directory: "", home: "" },
|
||||
status: "loading" as const,
|
||||
agent: [],
|
||||
command: [],
|
||||
session: [],
|
||||
sessionTotal: 0,
|
||||
session_status: {},
|
||||
session_diff: {},
|
||||
todo: {},
|
||||
permission: {},
|
||||
question: {},
|
||||
mcp: {},
|
||||
lsp: [],
|
||||
vcs: cache[0].value,
|
||||
limit: 5,
|
||||
message: {},
|
||||
part: {},
|
||||
})
|
||||
bootstrapInstance(directory)
|
||||
const init = () => {
|
||||
children[directory] = createStore<State>({
|
||||
project: "",
|
||||
provider: { all: [], connected: [], default: {} },
|
||||
config: {},
|
||||
path: { state: "", config: "", worktree: "", directory: "", home: "" },
|
||||
status: "loading" as const,
|
||||
agent: [],
|
||||
command: [],
|
||||
session: [],
|
||||
sessionTotal: 0,
|
||||
session_status: {},
|
||||
session_diff: {},
|
||||
todo: {},
|
||||
permission: {},
|
||||
question: {},
|
||||
mcp: {},
|
||||
lsp: [],
|
||||
vcs: cache[0].value,
|
||||
limit: 5,
|
||||
message: {},
|
||||
part: {},
|
||||
})
|
||||
bootstrapInstance(directory)
|
||||
}
|
||||
|
||||
runWithOwner(owner, init)
|
||||
}
|
||||
const childStore = children[directory]
|
||||
if (!childStore) throw new Error("Failed to create store")
|
||||
@@ -346,6 +351,23 @@ function createGlobalSync() {
|
||||
bootstrapInstance(directory)
|
||||
break
|
||||
}
|
||||
case "session.created": {
|
||||
const result = Binary.search(store.session, event.properties.info.id, (s) => s.id)
|
||||
if (result.found) {
|
||||
setStore("session", result.index, reconcile(event.properties.info))
|
||||
break
|
||||
}
|
||||
setStore(
|
||||
"session",
|
||||
produce((draft) => {
|
||||
draft.splice(result.index, 0, event.properties.info)
|
||||
}),
|
||||
)
|
||||
if (!event.properties.info.parentID) {
|
||||
setStore("sessionTotal", store.sessionTotal + 1)
|
||||
}
|
||||
break
|
||||
}
|
||||
case "session.updated": {
|
||||
const result = Binary.search(store.session, event.properties.info.id, (s) => s.id)
|
||||
if (event.properties.info.time.archived) {
|
||||
@@ -357,6 +379,8 @@ function createGlobalSync() {
|
||||
}),
|
||||
)
|
||||
}
|
||||
if (event.properties.info.parentID) break
|
||||
setStore("sessionTotal", (value) => Math.max(0, value - 1))
|
||||
break
|
||||
}
|
||||
if (result.found) {
|
||||
|
||||
@@ -72,7 +72,7 @@ export const { use: useLayout, provider: LayoutProvider } = createSimpleContext(
|
||||
createStore({
|
||||
sidebar: {
|
||||
opened: false,
|
||||
width: 280,
|
||||
width: 344,
|
||||
workspaces: {} as Record<string, boolean>,
|
||||
workspacesDefault: false,
|
||||
},
|
||||
|
||||
@@ -36,6 +36,7 @@ export const { use: useServer, provider: ServerProvider } = createSimpleContext(
|
||||
createStore({
|
||||
list: [] as string[],
|
||||
projects: {} as Record<string, StoredProject[]>,
|
||||
lastProject: {} as Record<string, string>,
|
||||
}),
|
||||
)
|
||||
|
||||
@@ -197,6 +198,16 @@ export const { use: useServer, provider: ServerProvider } = createSimpleContext(
|
||||
result.splice(toIndex, 0, item)
|
||||
setStore("projects", key, result)
|
||||
},
|
||||
last() {
|
||||
const key = origin()
|
||||
if (!key) return
|
||||
return store.lastProject[key]
|
||||
},
|
||||
touch(directory: string) {
|
||||
const key = origin()
|
||||
if (!key) return
|
||||
setStore("lastProject", key, directory)
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { useGlobalSync } from "@/context/global-sync"
|
||||
import { createMemo, For, Match, Show, Switch } from "solid-js"
|
||||
import { Button } from "@opencode-ai/ui/button"
|
||||
import { Logo } from "@opencode-ai/ui/logo"
|
||||
@@ -12,6 +11,7 @@ import { useDialog } from "@opencode-ai/ui/context/dialog"
|
||||
import { DialogSelectDirectory } from "@/components/dialog-select-directory"
|
||||
import { DialogSelectServer } from "@/components/dialog-select-server"
|
||||
import { useServer } from "@/context/server"
|
||||
import { useGlobalSync } from "@/context/global-sync"
|
||||
|
||||
export default function Home() {
|
||||
const sync = useGlobalSync()
|
||||
@@ -24,6 +24,7 @@ export default function Home() {
|
||||
|
||||
function openProject(directory: string) {
|
||||
layout.projects.open(directory)
|
||||
server.projects.touch(directory)
|
||||
navigate(`/${base64Encode(directory)}`)
|
||||
}
|
||||
|
||||
|
||||
@@ -5,12 +5,14 @@ import {
|
||||
createSignal,
|
||||
For,
|
||||
Match,
|
||||
on,
|
||||
onCleanup,
|
||||
onMount,
|
||||
ParentProps,
|
||||
Show,
|
||||
Switch,
|
||||
untrack,
|
||||
type Accessor,
|
||||
type JSX,
|
||||
} from "solid-js"
|
||||
import { A, useNavigate, useParams } from "@solidjs/router"
|
||||
@@ -23,6 +25,7 @@ import { ResizeHandle } from "@opencode-ai/ui/resize-handle"
|
||||
import { Button } from "@opencode-ai/ui/button"
|
||||
import { Icon } from "@opencode-ai/ui/icon"
|
||||
import { IconButton } from "@opencode-ai/ui/icon-button"
|
||||
import { InlineInput } from "@opencode-ai/ui/inline-input"
|
||||
import { Tooltip, TooltipKeybind } from "@opencode-ai/ui/tooltip"
|
||||
import { HoverCard } from "@opencode-ai/ui/hover-card"
|
||||
import { DropdownMenu } from "@opencode-ai/ui/dropdown-menu"
|
||||
@@ -64,16 +67,19 @@ import { useServer } from "@/context/server"
|
||||
|
||||
export default function Layout(props: ParentProps) {
|
||||
const [store, setStore, , ready] = persisted(
|
||||
Persist.global("layout", ["layout.v6"]),
|
||||
Persist.global("layout.page", ["layout.page.v1"]),
|
||||
createStore({
|
||||
lastSession: {} as { [directory: string]: string },
|
||||
activeProject: undefined as string | undefined,
|
||||
activeWorkspace: undefined as string | undefined,
|
||||
workspaceOrder: {} as Record<string, string[]>,
|
||||
workspaceName: {} as Record<string, string>,
|
||||
workspaceExpanded: {} as Record<string, boolean>,
|
||||
}),
|
||||
)
|
||||
|
||||
const pageReady = createMemo(() => ready())
|
||||
|
||||
let scrollContainerRef: HTMLDivElement | undefined
|
||||
const xlQuery = window.matchMedia("(min-width: 1280px)")
|
||||
const [isLargeViewport, setIsLargeViewport] = createSignal(xlQuery.matches)
|
||||
@@ -82,9 +88,11 @@ export default function Layout(props: ParentProps) {
|
||||
onCleanup(() => xlQuery.removeEventListener("change", handleViewportChange))
|
||||
|
||||
const params = useParams()
|
||||
const [autoselect, setAutoselect] = createSignal(!params.dir)
|
||||
const globalSDK = useGlobalSDK()
|
||||
const globalSync = useGlobalSync()
|
||||
const layout = useLayout()
|
||||
const layoutReady = createMemo(() => layout.ready())
|
||||
const platform = usePlatform()
|
||||
const server = useServer()
|
||||
const notification = useNotification()
|
||||
@@ -94,6 +102,7 @@ export default function Layout(props: ParentProps) {
|
||||
const dialog = useDialog()
|
||||
const command = useCommand()
|
||||
const theme = useTheme()
|
||||
const initialDir = params.dir
|
||||
const availableThemeEntries = createMemo(() => Object.entries(theme.themes()))
|
||||
const colorSchemeOrder: ColorScheme[] = ["system", "light", "dark"]
|
||||
const colorSchemeLabel: Record<ColorScheme, string> = {
|
||||
@@ -102,6 +111,104 @@ export default function Layout(props: ParentProps) {
|
||||
dark: "Dark",
|
||||
}
|
||||
|
||||
const [editor, setEditor] = createStore({
|
||||
active: "" as string,
|
||||
value: "",
|
||||
})
|
||||
const editorRef = { current: undefined as HTMLInputElement | undefined }
|
||||
|
||||
const editorOpen = (id: string) => editor.active === id
|
||||
const editorValue = () => editor.value
|
||||
|
||||
const openEditor = (id: string, value: string) => {
|
||||
if (!id) return
|
||||
setEditor({ active: id, value })
|
||||
queueMicrotask(() => editorRef.current?.focus())
|
||||
}
|
||||
|
||||
const closeEditor = () => setEditor({ active: "", value: "" })
|
||||
|
||||
const saveEditor = (callback: (next: string) => void) => {
|
||||
const next = editor.value.trim()
|
||||
if (!next) {
|
||||
closeEditor()
|
||||
return
|
||||
}
|
||||
closeEditor()
|
||||
callback(next)
|
||||
}
|
||||
|
||||
const editorKeyDown = (event: KeyboardEvent, callback: (next: string) => void) => {
|
||||
if (event.key === "Enter") {
|
||||
event.preventDefault()
|
||||
saveEditor(callback)
|
||||
return
|
||||
}
|
||||
if (event.key === "Escape") {
|
||||
event.preventDefault()
|
||||
closeEditor()
|
||||
}
|
||||
}
|
||||
|
||||
const InlineEditor = (props: {
|
||||
id: string
|
||||
value: Accessor<string>
|
||||
onSave: (next: string) => void
|
||||
class?: string
|
||||
displayClass?: string
|
||||
editing?: boolean
|
||||
stopPropagation?: boolean
|
||||
openOnDblClick?: boolean
|
||||
}) => {
|
||||
const isEditing = () => props.editing ?? editorOpen(props.id)
|
||||
const stopEvents = () => props.stopPropagation ?? false
|
||||
const allowDblClick = () => props.openOnDblClick ?? true
|
||||
const stopPropagation = (event: Event) => {
|
||||
if (!stopEvents()) return
|
||||
event.stopPropagation()
|
||||
}
|
||||
const handleDblClick = (event: MouseEvent) => {
|
||||
if (!allowDblClick()) return
|
||||
stopPropagation(event)
|
||||
openEditor(props.id, props.value())
|
||||
}
|
||||
|
||||
return (
|
||||
<Show
|
||||
when={isEditing()}
|
||||
fallback={
|
||||
<span
|
||||
class={props.displayClass ?? props.class}
|
||||
onDblClick={handleDblClick}
|
||||
onPointerDown={stopPropagation}
|
||||
onMouseDown={stopPropagation}
|
||||
onClick={stopPropagation}
|
||||
onTouchStart={stopPropagation}
|
||||
>
|
||||
{props.value()}
|
||||
</span>
|
||||
}
|
||||
>
|
||||
<InlineInput
|
||||
ref={(el) => {
|
||||
editorRef.current = el
|
||||
}}
|
||||
value={editorValue()}
|
||||
class={props.class}
|
||||
onInput={(event) => setEditor("value", event.currentTarget.value)}
|
||||
onKeyDown={(event) => editorKeyDown(event, props.onSave)}
|
||||
onBlur={() => closeEditor()}
|
||||
onPointerDown={stopPropagation}
|
||||
onClick={stopPropagation}
|
||||
onDblClick={stopPropagation}
|
||||
onMouseDown={stopPropagation}
|
||||
onMouseUp={stopPropagation}
|
||||
onTouchStart={stopPropagation}
|
||||
/>
|
||||
</Show>
|
||||
)
|
||||
}
|
||||
|
||||
function cycleTheme(direction = 1) {
|
||||
const ids = availableThemeEntries().map(([id]) => id)
|
||||
if (ids.length === 0) return
|
||||
@@ -272,12 +379,21 @@ export default function Layout(props: ParentProps) {
|
||||
return bUpdated - aUpdated
|
||||
}
|
||||
|
||||
function scrollToSession(sessionId: string) {
|
||||
const [scrollSessionKey, setScrollSessionKey] = createSignal<string | undefined>(undefined)
|
||||
|
||||
function scrollToSession(sessionId: string, sessionKey: string) {
|
||||
if (!scrollContainerRef) return
|
||||
if (scrollSessionKey() === sessionKey) return
|
||||
const element = scrollContainerRef.querySelector(`[data-session-id="${sessionId}"]`)
|
||||
if (element) {
|
||||
element.scrollIntoView({ block: "nearest", behavior: "smooth" })
|
||||
if (!element) return
|
||||
const containerRect = scrollContainerRef.getBoundingClientRect()
|
||||
const elementRect = element.getBoundingClientRect()
|
||||
if (elementRect.top >= containerRect.top && elementRect.bottom <= containerRect.bottom) {
|
||||
setScrollSessionKey(sessionKey)
|
||||
return
|
||||
}
|
||||
setScrollSessionKey(sessionKey)
|
||||
element.scrollIntoView({ block: "nearest", behavior: "smooth" })
|
||||
}
|
||||
|
||||
const currentProject = createMemo(() => {
|
||||
@@ -286,6 +402,49 @@ export default function Layout(props: ParentProps) {
|
||||
return layout.projects.list().find((p) => p.worktree === directory || p.sandboxes?.includes(directory))
|
||||
})
|
||||
|
||||
createEffect(
|
||||
on(
|
||||
() => ({ ready: pageReady(), project: currentProject() }),
|
||||
(value) => {
|
||||
if (!value.ready) return
|
||||
const project = value.project
|
||||
if (!project) return
|
||||
const last = server.projects.last()
|
||||
if (last === project.worktree) return
|
||||
server.projects.touch(project.worktree)
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
|
||||
createEffect(
|
||||
on(
|
||||
() => ({ ready: pageReady(), layoutReady: layoutReady(), dir: params.dir, list: layout.projects.list() }),
|
||||
(value) => {
|
||||
if (!value.ready) return
|
||||
if (!value.layoutReady) return
|
||||
if (!autoselect()) return
|
||||
if (initialDir) return
|
||||
if (value.dir) return
|
||||
if (value.list.length === 0) return
|
||||
|
||||
const last = server.projects.last()
|
||||
const next = value.list.find((project) => project.worktree === last) ?? value.list[0]
|
||||
if (!next) return
|
||||
setAutoselect(false)
|
||||
openProject(next.worktree, false)
|
||||
navigateToProject(next.worktree)
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
|
||||
const workspaceName = (directory: string) => store.workspaceName[directory]
|
||||
const workspaceLabel = (directory: string, branch?: string) =>
|
||||
workspaceName(directory) ?? branch ?? getFilename(directory)
|
||||
|
||||
const isWorkspaceEditing = () => editor.active.startsWith("workspace:")
|
||||
|
||||
const workspaceSetting = createMemo(() => {
|
||||
const project = currentProject()
|
||||
if (!project) return false
|
||||
@@ -293,7 +452,8 @@ export default function Layout(props: ParentProps) {
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
if (!ready()) return
|
||||
if (!pageReady()) return
|
||||
if (!layoutReady()) return
|
||||
const project = currentProject()
|
||||
if (!project) return
|
||||
|
||||
@@ -318,6 +478,19 @@ export default function Layout(props: ParentProps) {
|
||||
}
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
if (!pageReady()) return
|
||||
if (!layoutReady()) return
|
||||
const projects = layout.projects.list()
|
||||
for (const [directory, expanded] of Object.entries(store.workspaceExpanded)) {
|
||||
if (!expanded) continue
|
||||
const project = projects.find((item) => item.worktree === directory || item.sandboxes?.includes(directory))
|
||||
if (!project) continue
|
||||
if (layout.sidebar.workspaces(project.worktree)()) continue
|
||||
setStore("workspaceExpanded", directory, false)
|
||||
}
|
||||
})
|
||||
|
||||
const currentSessions = createMemo(() => {
|
||||
const project = currentProject()
|
||||
if (!project) return [] as Session[]
|
||||
@@ -328,7 +501,7 @@ export default function Layout(props: ParentProps) {
|
||||
const [dirStore] = globalSync.child(dir)
|
||||
const dirSessions = dirStore.session
|
||||
.filter((session) => session.directory === dirStore.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions)
|
||||
result.push(...dirSessions)
|
||||
}
|
||||
@@ -337,7 +510,7 @@ export default function Layout(props: ParentProps) {
|
||||
const [projectStore] = globalSync.child(project.worktree)
|
||||
return projectStore.session
|
||||
.filter((session) => session.directory === projectStore.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions)
|
||||
})
|
||||
|
||||
@@ -519,7 +692,7 @@ export default function Layout(props: ParentProps) {
|
||||
})
|
||||
}
|
||||
navigateToSession(session)
|
||||
queueMicrotask(() => scrollToSession(session.id))
|
||||
queueMicrotask(() => scrollToSession(session.id, `${session.directory}:${session.id}`))
|
||||
}
|
||||
|
||||
async function archiveSession(session: Session) {
|
||||
@@ -657,6 +830,7 @@ export default function Layout(props: ParentProps) {
|
||||
|
||||
function navigateToProject(directory: string | undefined) {
|
||||
if (!directory) return
|
||||
server.projects.touch(directory)
|
||||
const lastSession = store.lastSession[directory]
|
||||
navigate(`/${base64Encode(directory)}${lastSession ? `/session/${lastSession}` : ""}`)
|
||||
layout.mobileSidebar.hide()
|
||||
@@ -673,6 +847,31 @@ export default function Layout(props: ParentProps) {
|
||||
if (navigate) navigateToProject(directory)
|
||||
}
|
||||
|
||||
const displayName = (project: LocalProject) => project.name || getFilename(project.worktree)
|
||||
|
||||
async function renameProject(project: LocalProject, next: string) {
|
||||
if (!project.id) return
|
||||
const current = displayName(project)
|
||||
if (next === current) return
|
||||
const name = next === getFilename(project.worktree) ? "" : next
|
||||
await globalSDK.client.project.update({ projectID: project.id, name })
|
||||
}
|
||||
|
||||
async function renameSession(session: Session, next: string) {
|
||||
if (next === session.title) return
|
||||
await globalSDK.client.session.update({
|
||||
directory: session.directory,
|
||||
sessionID: session.id,
|
||||
title: next,
|
||||
})
|
||||
}
|
||||
|
||||
const renameWorkspace = (directory: string, next: string) => {
|
||||
const current = workspaceName(directory) ?? getFilename(directory)
|
||||
if (current === next) return
|
||||
setStore("workspaceName", directory, next)
|
||||
}
|
||||
|
||||
function closeProject(directory: string) {
|
||||
const index = layout.projects.list().findIndex((x) => x.worktree === directory)
|
||||
const next = layout.projects.list()[index + 1]
|
||||
@@ -707,15 +906,26 @@ export default function Layout(props: ParentProps) {
|
||||
}
|
||||
}
|
||||
|
||||
createEffect(() => {
|
||||
if (!params.dir || !params.id) return
|
||||
const directory = base64Decode(params.dir)
|
||||
const id = params.id
|
||||
setStore("lastSession", directory, id)
|
||||
notification.session.markViewed(id)
|
||||
untrack(() => setStore("workspaceExpanded", directory, (value) => value ?? true))
|
||||
requestAnimationFrame(() => scrollToSession(id))
|
||||
})
|
||||
createEffect(
|
||||
on(
|
||||
() => ({ ready: pageReady(), dir: params.dir, id: params.id }),
|
||||
(value) => {
|
||||
if (!value.ready) return
|
||||
const dir = value.dir
|
||||
const id = value.id
|
||||
if (!dir || !id) return
|
||||
const directory = base64Decode(dir)
|
||||
setStore("lastSession", directory, id)
|
||||
notification.session.markViewed(id)
|
||||
const expanded = untrack(() => store.workspaceExpanded[directory])
|
||||
if (expanded === false) {
|
||||
setStore("workspaceExpanded", directory, true)
|
||||
}
|
||||
requestAnimationFrame(() => scrollToSession(id, `${directory}:${id}`))
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
|
||||
createEffect(() => {
|
||||
const project = currentProject()
|
||||
@@ -732,15 +942,6 @@ export default function Layout(props: ParentProps) {
|
||||
globalSync.project.loadSessions(project.worktree)
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
if (isLargeViewport()) {
|
||||
const sidebarWidth = layout.sidebar.opened() ? layout.sidebar.width() : 64
|
||||
document.documentElement.style.setProperty("--dialog-left-margin", `${sidebarWidth}px`)
|
||||
return
|
||||
}
|
||||
document.documentElement.style.setProperty("--dialog-left-margin", "0px")
|
||||
})
|
||||
|
||||
function getDraggableId(event: unknown): string | undefined {
|
||||
if (typeof event !== "object" || event === null) return undefined
|
||||
if (!("draggable" in event)) return undefined
|
||||
@@ -885,7 +1086,7 @@ export default function Layout(props: ParentProps) {
|
||||
return (
|
||||
<div
|
||||
data-session-id={props.session.id}
|
||||
class="group/session relative w-full rounded-md cursor-default transition-colors px-3
|
||||
class="group/session relative w-full rounded-md cursor-default transition-colors pl-2 pr-3
|
||||
hover:bg-surface-raised-base-hover focus-within:bg-surface-raised-base-hover has-[.active]:bg-surface-base-active"
|
||||
>
|
||||
<Tooltip placement={props.mobile ? "bottom" : "right"} value={props.session.title} gutter={16} openDelay={1000}>
|
||||
@@ -900,7 +1101,7 @@ export default function Layout(props: ParentProps) {
|
||||
class="shrink-0 size-6 flex items-center justify-center"
|
||||
style={{ color: tint() ?? "var(--icon-interactive-base)" }}
|
||||
>
|
||||
<Switch>
|
||||
<Switch fallback={<Icon name="dash" size="small" class="text-icon-weak" />}>
|
||||
<Match when={isWorking()}>
|
||||
<Spinner class="size-[15px]" />
|
||||
</Match>
|
||||
@@ -915,9 +1116,14 @@ export default function Layout(props: ParentProps) {
|
||||
</Match>
|
||||
</Switch>
|
||||
</div>
|
||||
<span class="text-14-regular text-text-strong grow-1 min-w-0 overflow-hidden text-ellipsis truncate">
|
||||
{props.session.title}
|
||||
</span>
|
||||
<InlineEditor
|
||||
id={`session:${props.session.id}`}
|
||||
value={() => props.session.title}
|
||||
onSave={(next) => renameSession(props.session, next)}
|
||||
class="text-14-regular text-text-strong grow-1 min-w-0 overflow-hidden text-ellipsis truncate"
|
||||
displayClass="text-14-regular text-text-strong grow-1 min-w-0 overflow-hidden text-ellipsis truncate"
|
||||
stopPropagation
|
||||
/>
|
||||
<Show when={props.session.summary}>
|
||||
{(summary) => (
|
||||
<div class="group-hover/session:hidden group-active/session:hidden group-focus-within/session:hidden">
|
||||
@@ -955,115 +1161,6 @@ export default function Layout(props: ParentProps) {
|
||||
)
|
||||
}
|
||||
|
||||
const SortableProject = (props: { project: LocalProject; mobile?: boolean }): JSX.Element => {
|
||||
const sortable = createSortable(props.project.worktree)
|
||||
const selected = createMemo(() => {
|
||||
const current = params.dir ? base64Decode(params.dir) : ""
|
||||
return props.project.worktree === current || props.project.sandboxes?.includes(current)
|
||||
})
|
||||
|
||||
const workspaces = createMemo(() => workspaceIds(props.project).slice(0, 2))
|
||||
const workspaceEnabled = createMemo(() => layout.sidebar.workspaces(props.project.worktree)())
|
||||
const label = (directory: string) => {
|
||||
const [data] = globalSync.child(directory)
|
||||
const kind = directory === props.project.worktree ? "local" : "sandbox"
|
||||
const name = data.vcs?.branch ?? getFilename(directory)
|
||||
return `${kind} : ${name}`
|
||||
}
|
||||
|
||||
const sessions = (directory: string) => {
|
||||
const [data] = globalSync.child(directory)
|
||||
return data.session
|
||||
.filter((session) => session.directory === data.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.toSorted(sortSessions)
|
||||
.slice(0, 2)
|
||||
}
|
||||
|
||||
const projectSessions = () => {
|
||||
const [data] = globalSync.child(props.project.worktree)
|
||||
return data.session
|
||||
.filter((session) => session.directory === data.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.toSorted(sortSessions)
|
||||
.slice(0, 2)
|
||||
}
|
||||
|
||||
const trigger = (
|
||||
<button
|
||||
type="button"
|
||||
classList={{
|
||||
"flex items-center justify-center size-10 p-1 rounded-lg border transition-colors cursor-default": true,
|
||||
"bg-transparent border-icon-strong-base hover:bg-surface-base-hover": selected(),
|
||||
"bg-transparent border-transparent hover:bg-surface-base-hover hover:border-border-weak-base": !selected(),
|
||||
}}
|
||||
onClick={() => navigateToProject(props.project.worktree)}
|
||||
>
|
||||
<ProjectIcon project={props.project} notify />
|
||||
</button>
|
||||
)
|
||||
|
||||
return (
|
||||
// @ts-ignore
|
||||
<div use:sortable classList={{ "opacity-30": sortable.isActiveDraggable }}>
|
||||
<HoverCard openDelay={0} closeDelay={0} placement="right-start" gutter={8} trigger={trigger}>
|
||||
<div class="-m-3 flex flex-col w-72">
|
||||
<div class="px-3 py-2 text-12-medium text-text-weak">Recent sessions</div>
|
||||
<div class="px-2 pb-2 flex flex-col gap-2">
|
||||
<Show
|
||||
when={workspaceEnabled()}
|
||||
fallback={
|
||||
<For each={projectSessions()}>
|
||||
{(session) => (
|
||||
<SessionItem
|
||||
session={session}
|
||||
slug={base64Encode(props.project.worktree)}
|
||||
dense
|
||||
mobile={props.mobile}
|
||||
/>
|
||||
)}
|
||||
</For>
|
||||
}
|
||||
>
|
||||
<For each={workspaces()}>
|
||||
{(directory) => (
|
||||
<div class="flex flex-col gap-1">
|
||||
<div class="px-2 py-0.5 flex items-center gap-1 min-w-0">
|
||||
<div class="shrink-0 size-6 flex items-center justify-center">
|
||||
<Icon name="branch" size="small" class="text-icon-base" />
|
||||
</div>
|
||||
<span class="truncate text-14-medium text-text-base">{label(directory)}</span>
|
||||
</div>
|
||||
<For each={sessions(directory)}>
|
||||
{(session) => (
|
||||
<SessionItem session={session} slug={base64Encode(directory)} dense mobile={props.mobile} />
|
||||
)}
|
||||
</For>
|
||||
</div>
|
||||
)}
|
||||
</For>
|
||||
</Show>
|
||||
</div>
|
||||
<Show when={!selected()}>
|
||||
<div class="px-2 py-2 border-t border-border-weak-base">
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="flex w-full text-left justify-start text-text-base px-2"
|
||||
onClick={() => {
|
||||
layout.sidebar.open()
|
||||
navigateToProject(props.project.worktree)
|
||||
}}
|
||||
>
|
||||
View all sessions
|
||||
</Button>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
</HoverCard>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const ProjectDragOverlay = (): JSX.Element => {
|
||||
const project = createMemo(() => layout.projects.list().find((p) => p.worktree === store.activeProject))
|
||||
return (
|
||||
@@ -1086,7 +1183,7 @@ export default function Layout(props: ParentProps) {
|
||||
|
||||
const [workspaceStore] = globalSync.child(directory)
|
||||
const kind = directory === project.worktree ? "local" : "sandbox"
|
||||
const name = workspaceStore.vcs?.branch ?? getFilename(directory)
|
||||
const name = workspaceLabel(directory, workspaceStore.vcs?.branch)
|
||||
return `${kind} : ${name}`
|
||||
})
|
||||
|
||||
@@ -1106,14 +1203,13 @@ export default function Layout(props: ParentProps) {
|
||||
const sessions = createMemo(() =>
|
||||
workspaceStore.session
|
||||
.filter((session) => session.directory === workspaceStore.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions),
|
||||
)
|
||||
const local = createMemo(() => props.directory === props.project.worktree)
|
||||
const title = createMemo(() => {
|
||||
const kind = local() ? "local" : "sandbox"
|
||||
const workspaceValue = createMemo(() => {
|
||||
const name = workspaceStore.vcs?.branch ?? getFilename(props.directory)
|
||||
return `${kind} : ${name}`
|
||||
return workspaceName(props.directory) ?? name
|
||||
})
|
||||
const open = createMemo(() => store.workspaceExpanded[props.directory] ?? true)
|
||||
const loading = createMemo(() => open() && workspaceStore.status !== "complete" && sessions().length === 0)
|
||||
@@ -1124,23 +1220,50 @@ export default function Layout(props: ParentProps) {
|
||||
await globalSync.project.loadSessions(props.directory)
|
||||
}
|
||||
|
||||
const workspaceEditActive = createMemo(() => editorOpen(`workspace:${props.directory}`))
|
||||
|
||||
const openWrapper = (value: boolean) => {
|
||||
setStore("workspaceExpanded", props.directory, value)
|
||||
if (value) return
|
||||
if (editorOpen(`workspace:${props.directory}`)) closeEditor()
|
||||
}
|
||||
|
||||
return (
|
||||
// @ts-ignore
|
||||
<div use:sortable classList={{ "opacity-30": sortable.isActiveDraggable }}>
|
||||
<Collapsible
|
||||
variant="ghost"
|
||||
open={open()}
|
||||
class="shrink-0"
|
||||
onOpenChange={(value) => setStore("workspaceExpanded", props.directory, value)}
|
||||
>
|
||||
<Collapsible variant="ghost" open={open()} class="shrink-0" onOpenChange={openWrapper}>
|
||||
<div class="px-2 py-1">
|
||||
<div class="group/trigger relative">
|
||||
<Collapsible.Trigger class="flex items-center justify-between w-full pl-2 pr-16 py-1.5 rounded-md hover:bg-surface-raised-base-hover">
|
||||
<div class="flex items-center gap-1 min-w-0">
|
||||
<div class="flex items-center gap-1 min-w-0 flex-1">
|
||||
<div class="flex items-center justify-center shrink-0 size-6">
|
||||
<Icon name="branch" size="small" />
|
||||
</div>
|
||||
<span class="truncate text-14-medium text-text-base">{title()}</span>
|
||||
<span class="text-14-medium text-text-base shrink-0">{local() ? "local" : "sandbox"} :</span>
|
||||
<Show
|
||||
when={!local()}
|
||||
fallback={
|
||||
<span class="text-14-medium text-text-base min-w-0 truncate">
|
||||
{workspaceStore.vcs?.branch ?? getFilename(props.directory)}
|
||||
</span>
|
||||
}
|
||||
>
|
||||
<InlineEditor
|
||||
id={`workspace:${props.directory}`}
|
||||
value={workspaceValue}
|
||||
onSave={(next) => {
|
||||
const trimmed = next.trim()
|
||||
if (!trimmed) return
|
||||
renameWorkspace(props.directory, trimmed)
|
||||
setEditor("value", workspaceValue())
|
||||
}}
|
||||
class="text-14-medium text-text-base min-w-0 truncate"
|
||||
displayClass="text-14-medium text-text-base min-w-0 truncate"
|
||||
editing={workspaceEditActive()}
|
||||
stopPropagation={false}
|
||||
openOnDblClick={false}
|
||||
/>
|
||||
</Show>
|
||||
<Icon
|
||||
name={open() ? "chevron-down" : "chevron-right"}
|
||||
size="small"
|
||||
@@ -1188,7 +1311,7 @@ export default function Layout(props: ParentProps) {
|
||||
<div class="relative w-full py-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="flex w-full text-left justify-start text-14-regular text-text-weak px-10"
|
||||
class="flex w-full text-left justify-start text-14-regular text-text-weak pl-9 pr-10"
|
||||
size="large"
|
||||
onClick={(e: MouseEvent) => {
|
||||
loadMore()
|
||||
@@ -1206,13 +1329,123 @@ export default function Layout(props: ParentProps) {
|
||||
)
|
||||
}
|
||||
|
||||
const SortableProject = (props: { project: LocalProject; mobile?: boolean }): JSX.Element => {
|
||||
const sortable = createSortable(props.project.worktree)
|
||||
const selected = createMemo(() => {
|
||||
const current = params.dir ? base64Decode(params.dir) : ""
|
||||
return props.project.worktree === current || props.project.sandboxes?.includes(current)
|
||||
})
|
||||
|
||||
const workspaces = createMemo(() => workspaceIds(props.project).slice(0, 2))
|
||||
const workspaceEnabled = createMemo(() => layout.sidebar.workspaces(props.project.worktree)())
|
||||
const label = (directory: string) => {
|
||||
const [data] = globalSync.child(directory)
|
||||
const kind = directory === props.project.worktree ? "local" : "sandbox"
|
||||
const name = workspaceLabel(directory, data.vcs?.branch)
|
||||
return `${kind} : ${name}`
|
||||
}
|
||||
|
||||
const sessions = (directory: string) => {
|
||||
const [data] = globalSync.child(directory)
|
||||
return data.session
|
||||
.filter((session) => session.directory === data.path.directory)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions)
|
||||
.slice(0, 2)
|
||||
}
|
||||
|
||||
const projectSessions = () => {
|
||||
const [data] = globalSync.child(props.project.worktree)
|
||||
return data.session
|
||||
.filter((session) => session.directory === data.path.directory)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions)
|
||||
.slice(0, 2)
|
||||
}
|
||||
|
||||
const trigger = (
|
||||
<button
|
||||
type="button"
|
||||
classList={{
|
||||
"flex items-center justify-center size-10 p-1 rounded-lg overflow-hidden transition-colors cursor-default": true,
|
||||
"bg-transparent border-2 border-icon-strong-base hover:bg-surface-base-hover": selected(),
|
||||
"bg-transparent border border-transparent hover:bg-surface-base-hover hover:border-border-weak-base":
|
||||
!selected(),
|
||||
}}
|
||||
onClick={() => navigateToProject(props.project.worktree)}
|
||||
>
|
||||
<ProjectIcon project={props.project} notify />
|
||||
</button>
|
||||
)
|
||||
|
||||
return (
|
||||
// @ts-ignore
|
||||
<div use:sortable classList={{ "opacity-30": sortable.isActiveDraggable }}>
|
||||
<HoverCard openDelay={0} closeDelay={0} placement="right-start" gutter={6} trigger={trigger}>
|
||||
<div class="-m-3 flex flex-col w-72">
|
||||
<div class="px-3 py-2 text-12-medium text-text-weak">Recent sessions</div>
|
||||
<div class="px-2 pb-2 flex flex-col gap-2">
|
||||
<Show
|
||||
when={workspaceEnabled()}
|
||||
fallback={
|
||||
<For each={projectSessions()}>
|
||||
{(session) => (
|
||||
<SessionItem
|
||||
session={session}
|
||||
slug={base64Encode(props.project.worktree)}
|
||||
dense
|
||||
mobile={props.mobile}
|
||||
/>
|
||||
)}
|
||||
</For>
|
||||
}
|
||||
>
|
||||
<For each={workspaces()}>
|
||||
{(directory) => (
|
||||
<div class="flex flex-col gap-1">
|
||||
<div class="px-2 py-0.5 flex items-center gap-1 min-w-0">
|
||||
<div class="shrink-0 size-6 flex items-center justify-center">
|
||||
<Icon name="branch" size="small" class="text-icon-base" />
|
||||
</div>
|
||||
<span class="truncate text-14-medium text-text-base">{label(directory)}</span>
|
||||
</div>
|
||||
<For each={sessions(directory)}>
|
||||
{(session) => (
|
||||
<SessionItem session={session} slug={base64Encode(directory)} dense mobile={props.mobile} />
|
||||
)}
|
||||
</For>
|
||||
</div>
|
||||
)}
|
||||
</For>
|
||||
</Show>
|
||||
</div>
|
||||
<Show when={!selected()}>
|
||||
<div class="px-2 py-2 border-t border-border-weak-base">
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="flex w-full text-left justify-start text-text-base px-2 hover:bg-transparent active:bg-transparent"
|
||||
onClick={() => {
|
||||
layout.sidebar.open()
|
||||
navigateToProject(props.project.worktree)
|
||||
}}
|
||||
>
|
||||
View all sessions
|
||||
</Button>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
</HoverCard>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const LocalWorkspace = (props: { project: LocalProject; mobile?: boolean }): JSX.Element => {
|
||||
const [workspaceStore, setWorkspaceStore] = globalSync.child(props.project.worktree)
|
||||
const slug = createMemo(() => base64Encode(props.project.worktree))
|
||||
const sessions = createMemo(() =>
|
||||
workspaceStore.session
|
||||
.filter((session) => session.directory === workspaceStore.path.directory)
|
||||
.filter((session) => !session.parentID)
|
||||
.filter((session) => !session.parentID && !session.time?.archived)
|
||||
.toSorted(sortSessions),
|
||||
)
|
||||
const loading = createMemo(() => workspaceStore.status !== "complete" && sessions().length === 0)
|
||||
@@ -1228,6 +1461,7 @@ export default function Layout(props: ParentProps) {
|
||||
if (!props.mobile) scrollContainerRef = el
|
||||
}}
|
||||
class="size-full flex flex-col py-2 overflow-y-auto no-scrollbar"
|
||||
style={{ "overflow-anchor": "none" }}
|
||||
>
|
||||
<nav class="flex flex-col gap-1 px-2">
|
||||
<Show when={loading()}>
|
||||
@@ -1240,7 +1474,7 @@ export default function Layout(props: ParentProps) {
|
||||
<div class="relative w-full py-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="flex w-full text-left justify-start text-14-regular text-text-weak px-10"
|
||||
class="flex w-full text-left justify-start text-14-regular text-text-weak pl-9 pr-10"
|
||||
size="large"
|
||||
onClick={(e: MouseEvent) => {
|
||||
loadMore()
|
||||
@@ -1266,6 +1500,7 @@ export default function Layout(props: ParentProps) {
|
||||
if (!current) return ""
|
||||
return current.name || getFilename(current.worktree)
|
||||
})
|
||||
const projectId = createMemo(() => project()?.id ?? "")
|
||||
const workspaces = createMemo(() => workspaceIds(project()))
|
||||
|
||||
const errorMessage = (err: unknown) => {
|
||||
@@ -1366,13 +1601,22 @@ export default function Layout(props: ParentProps) {
|
||||
<div class="shrink-0 px-2 py-1">
|
||||
<div class="group/project flex items-start justify-between gap-2 p-2 pr-1">
|
||||
<div class="flex flex-col min-w-0">
|
||||
<span class="text-16-medium text-text-strong truncate">{projectName()}</span>
|
||||
<InlineEditor
|
||||
id={`project:${projectId()}`}
|
||||
value={projectName}
|
||||
onSave={(next) => project() && renameProject(project()!, next)}
|
||||
class="text-16-medium text-text-strong truncate"
|
||||
displayClass="text-16-medium text-text-strong truncate"
|
||||
stopPropagation
|
||||
/>
|
||||
|
||||
<Tooltip placement="right" value={project()?.worktree} class="shrink-0">
|
||||
<span class="text-12-regular text-text-base truncate">
|
||||
{project()?.worktree.replace(homedir(), "~")}
|
||||
</span>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<DropdownMenu>
|
||||
<DropdownMenu.Trigger
|
||||
as={IconButton}
|
||||
@@ -1408,7 +1652,7 @@ export default function Layout(props: ParentProps) {
|
||||
<Button
|
||||
size="large"
|
||||
icon="plus-small"
|
||||
class="w-full"
|
||||
class="w-full max-w-[256px]"
|
||||
onClick={() => {
|
||||
navigate(`/${base64Encode(p.worktree)}/session`)
|
||||
layout.mobileSidebar.hide()
|
||||
@@ -1425,11 +1669,11 @@ export default function Layout(props: ParentProps) {
|
||||
>
|
||||
<>
|
||||
<div class="py-4 px-3">
|
||||
<Button size="large" icon="plus-small" class="w-full" onClick={createWorkspace}>
|
||||
<Button size="large" icon="plus-small" class="w-full max-w-[256px]" onClick={createWorkspace}>
|
||||
New workspace
|
||||
</Button>
|
||||
</div>
|
||||
<div class="flex-1 min-h-0">
|
||||
<div class="relative flex-1 min-h-0">
|
||||
<DragDropProvider
|
||||
onDragStart={handleWorkspaceDragStart}
|
||||
onDragEnd={handleWorkspaceDragEnd}
|
||||
@@ -1443,6 +1687,7 @@ export default function Layout(props: ParentProps) {
|
||||
if (!sidebarProps.mobile) scrollContainerRef = el
|
||||
}}
|
||||
class="size-full flex flex-col py-2 gap-4 overflow-y-auto no-scrollbar"
|
||||
style={{ "overflow-anchor": "none" }}
|
||||
>
|
||||
<SortableProvider ids={workspaces()}>
|
||||
<For each={workspaces()}>
|
||||
@@ -1496,7 +1741,7 @@ export default function Layout(props: ParentProps) {
|
||||
"hidden xl:block": true,
|
||||
"relative shrink-0": true,
|
||||
}}
|
||||
style={{ width: layout.sidebar.opened() ? `${layout.sidebar.width()}px` : "64px" }}
|
||||
style={{ width: layout.sidebar.opened() ? `${Math.max(layout.sidebar.width(), 244)}px` : "64px" }}
|
||||
>
|
||||
<div class="@container w-full h-full contain-strict">
|
||||
<SidebarContent />
|
||||
@@ -1505,9 +1750,9 @@ export default function Layout(props: ParentProps) {
|
||||
<ResizeHandle
|
||||
direction="horizontal"
|
||||
size={layout.sidebar.width()}
|
||||
min={214}
|
||||
min={244}
|
||||
max={window.innerWidth * 0.3 + 64}
|
||||
collapseThreshold={144}
|
||||
collapseThreshold={244}
|
||||
onResize={layout.sidebar.resize}
|
||||
onCollapse={layout.sidebar.close}
|
||||
/>
|
||||
@@ -1516,7 +1761,7 @@ export default function Layout(props: ParentProps) {
|
||||
<div class="xl:hidden">
|
||||
<div
|
||||
classList={{
|
||||
"fixed inset-0 z-40 transition-opacity duration-200": true,
|
||||
"fixed inset-x-0 top-10 bottom-0 z-40 transition-opacity duration-200": true,
|
||||
"opacity-100 pointer-events-auto": layout.mobileSidebar.opened(),
|
||||
"opacity-0 pointer-events-none": !layout.mobileSidebar.opened(),
|
||||
}}
|
||||
@@ -1526,7 +1771,7 @@ export default function Layout(props: ParentProps) {
|
||||
/>
|
||||
<div
|
||||
classList={{
|
||||
"@container fixed inset-y-0 left-0 z-50 w-72 bg-background-base transition-transform duration-200 ease-out": true,
|
||||
"@container fixed top-10 bottom-0 left-0 z-50 w-72 bg-background-base transition-transform duration-200 ease-out": true,
|
||||
"translate-x-0": layout.mobileSidebar.opened(),
|
||||
"-translate-x-full": !layout.mobileSidebar.opened(),
|
||||
}}
|
||||
@@ -1539,7 +1784,7 @@ export default function Layout(props: ParentProps) {
|
||||
<main
|
||||
classList={{
|
||||
"size-full overflow-x-hidden flex flex-col items-start contain-strict border-t border-border-weak-base": true,
|
||||
"border-l rounded-tl-sm": !layout.sidebar.opened(),
|
||||
"xl:border-l xl:rounded-tl-sm": !layout.sidebar.opened(),
|
||||
}}
|
||||
>
|
||||
{props.children}
|
||||
|
||||
@@ -419,7 +419,6 @@ export default function Page() {
|
||||
{
|
||||
id: "session.new",
|
||||
title: "New session",
|
||||
description: "Create a new session",
|
||||
category: "Session",
|
||||
keybind: "mod+shift+s",
|
||||
slash: "new",
|
||||
@@ -437,7 +436,7 @@ export default function Page() {
|
||||
{
|
||||
id: "terminal.toggle",
|
||||
title: "Toggle terminal",
|
||||
description: "Show or hide the terminal",
|
||||
description: "",
|
||||
category: "View",
|
||||
keybind: "ctrl+`",
|
||||
slash: "terminal",
|
||||
@@ -446,7 +445,7 @@ export default function Page() {
|
||||
{
|
||||
id: "review.toggle",
|
||||
title: "Toggle review",
|
||||
description: "Show or hide the review panel",
|
||||
description: "",
|
||||
category: "View",
|
||||
keybind: "mod+shift+r",
|
||||
onSelect: () => view().reviewPanel.toggle(),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,186 +0,0 @@
|
||||
.light-rays-container {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
pointer-events: none;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.light-rays-container canvas {
|
||||
display: block;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.light-rays-controls {
|
||||
position: fixed;
|
||||
top: 16px;
|
||||
right: 16px;
|
||||
z-index: 9999;
|
||||
font-family: var(--font-mono, monospace);
|
||||
font-size: 12px;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.light-rays-controls-toggle {
|
||||
background: rgba(0, 0, 0, 0.8);
|
||||
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||
border-radius: 4px;
|
||||
padding: 8px 12px;
|
||||
color: #fff;
|
||||
cursor: pointer;
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
width: 100%;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.light-rays-controls-toggle:hover {
|
||||
background: rgba(0, 0, 0, 0.9);
|
||||
border-color: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
.light-rays-controls-panel {
|
||||
background: rgba(0, 0, 0, 0.85);
|
||||
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||
border-radius: 4px;
|
||||
padding: 12px;
|
||||
margin-top: 4px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
min-width: 240px;
|
||||
max-height: calc(100vh - 100px);
|
||||
overflow-y: auto;
|
||||
backdrop-filter: blur(8px);
|
||||
}
|
||||
|
||||
.control-group {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.control-group label {
|
||||
color: rgba(255, 255, 255, 0.7);
|
||||
font-size: 11px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
}
|
||||
|
||||
.control-group.checkbox {
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.control-group.checkbox label {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
cursor: pointer;
|
||||
text-transform: none;
|
||||
}
|
||||
|
||||
.control-group input[type="range"] {
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
width: 100%;
|
||||
height: 4px;
|
||||
background: rgba(255, 255, 255, 0.2);
|
||||
border-radius: 2px;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.control-group input[type="range"]::-webkit-slider-thumb {
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
background: #fff;
|
||||
border-radius: 50%;
|
||||
cursor: pointer;
|
||||
transition: transform 0.1s;
|
||||
}
|
||||
|
||||
.control-group input[type="range"]::-webkit-slider-thumb:hover {
|
||||
transform: scale(1.1);
|
||||
}
|
||||
|
||||
.control-group input[type="range"]::-moz-range-thumb {
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
background: #fff;
|
||||
border-radius: 50%;
|
||||
cursor: pointer;
|
||||
border: none;
|
||||
}
|
||||
|
||||
.control-group input[type="color"] {
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
width: 100%;
|
||||
height: 32px;
|
||||
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||
border-radius: 4px;
|
||||
background: transparent;
|
||||
cursor: pointer;
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
.control-group input[type="color"]::-webkit-color-swatch-wrapper {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.control-group input[type="color"]::-webkit-color-swatch {
|
||||
border: none;
|
||||
border-radius: 2px;
|
||||
}
|
||||
|
||||
.control-group select {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||
border-radius: 4px;
|
||||
padding: 6px 8px;
|
||||
color: #fff;
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
cursor: pointer;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.control-group select:hover {
|
||||
border-color: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
.control-group select option {
|
||||
background: #1a1a1a;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.control-group input[type="checkbox"] {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
accent-color: #fff;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.reset-button {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||
border-radius: 4px;
|
||||
padding: 8px 12px;
|
||||
color: rgba(255, 255, 255, 0.7);
|
||||
cursor: pointer;
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
margin-top: 4px;
|
||||
transition: all 0.15s;
|
||||
}
|
||||
|
||||
.reset-button:hover {
|
||||
background: rgba(255, 255, 255, 0.15);
|
||||
border-color: rgba(255, 255, 255, 0.3);
|
||||
color: #fff;
|
||||
}
|
||||
@@ -1,924 +0,0 @@
|
||||
import { createSignal, createEffect, onMount, onCleanup, Show, For, Accessor, Setter } from "solid-js"
|
||||
import "./light-rays.css"
|
||||
|
||||
export type RaysOrigin =
|
||||
| "top-center"
|
||||
| "top-left"
|
||||
| "top-right"
|
||||
| "right"
|
||||
| "left"
|
||||
| "bottom-center"
|
||||
| "bottom-right"
|
||||
| "bottom-left"
|
||||
|
||||
export interface LightRaysConfig {
|
||||
raysOrigin: RaysOrigin
|
||||
raysColor: string
|
||||
raysSpeed: number
|
||||
lightSpread: number
|
||||
rayLength: number
|
||||
sourceWidth: number
|
||||
pulsating: boolean
|
||||
pulsatingMin: number
|
||||
pulsatingMax: number
|
||||
fadeDistance: number
|
||||
saturation: number
|
||||
followMouse: boolean
|
||||
mouseInfluence: number
|
||||
noiseAmount: number
|
||||
distortion: number
|
||||
opacity: number
|
||||
}
|
||||
|
||||
export const defaultConfig: LightRaysConfig = {
|
||||
raysOrigin: "top-center",
|
||||
raysColor: "#ffffff",
|
||||
raysSpeed: 1.0,
|
||||
lightSpread: 1.2,
|
||||
rayLength: 4.5,
|
||||
sourceWidth: 0.1,
|
||||
pulsating: true,
|
||||
pulsatingMin: 0.9,
|
||||
pulsatingMax: 1.05,
|
||||
fadeDistance: 1.25,
|
||||
saturation: 0.35,
|
||||
followMouse: false,
|
||||
mouseInfluence: 0.05,
|
||||
noiseAmount: 0.5,
|
||||
distortion: 0.0,
|
||||
opacity: 0.35,
|
||||
}
|
||||
|
||||
export interface LightRaysAnimationState {
|
||||
time: number
|
||||
intensity: number
|
||||
pulseValue: number
|
||||
}
|
||||
|
||||
interface LightRaysProps {
|
||||
config: Accessor<LightRaysConfig>
|
||||
class?: string
|
||||
onAnimationFrame?: (state: LightRaysAnimationState) => void
|
||||
}
|
||||
|
||||
const hexToRgb = (hex: string): [number, number, number] => {
|
||||
const m = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex)
|
||||
return m ? [parseInt(m[1], 16) / 255, parseInt(m[2], 16) / 255, parseInt(m[3], 16) / 255] : [1, 1, 1]
|
||||
}
|
||||
|
||||
const getAnchorAndDir = (
|
||||
origin: RaysOrigin,
|
||||
w: number,
|
||||
h: number,
|
||||
): { anchor: [number, number]; dir: [number, number] } => {
|
||||
const outside = 0.2
|
||||
switch (origin) {
|
||||
case "top-left":
|
||||
return { anchor: [0, -outside * h], dir: [0, 1] }
|
||||
case "top-right":
|
||||
return { anchor: [w, -outside * h], dir: [0, 1] }
|
||||
case "left":
|
||||
return { anchor: [-outside * w, 0.5 * h], dir: [1, 0] }
|
||||
case "right":
|
||||
return { anchor: [(1 + outside) * w, 0.5 * h], dir: [-1, 0] }
|
||||
case "bottom-left":
|
||||
return { anchor: [0, (1 + outside) * h], dir: [0, -1] }
|
||||
case "bottom-center":
|
||||
return { anchor: [0.5 * w, (1 + outside) * h], dir: [0, -1] }
|
||||
case "bottom-right":
|
||||
return { anchor: [w, (1 + outside) * h], dir: [0, -1] }
|
||||
default: // "top-center"
|
||||
return { anchor: [0.5 * w, -outside * h], dir: [0, 1] }
|
||||
}
|
||||
}
|
||||
|
||||
interface UniformData {
|
||||
iTime: number
|
||||
iResolution: [number, number]
|
||||
rayPos: [number, number]
|
||||
rayDir: [number, number]
|
||||
raysColor: [number, number, number]
|
||||
raysSpeed: number
|
||||
lightSpread: number
|
||||
rayLength: number
|
||||
sourceWidth: number
|
||||
pulsating: number
|
||||
pulsatingMin: number
|
||||
pulsatingMax: number
|
||||
fadeDistance: number
|
||||
saturation: number
|
||||
mousePos: [number, number]
|
||||
mouseInfluence: number
|
||||
noiseAmount: number
|
||||
distortion: number
|
||||
}
|
||||
|
||||
const WGSL_SHADER = `
|
||||
struct Uniforms {
|
||||
iTime: f32,
|
||||
_pad0: f32,
|
||||
iResolution: vec2<f32>,
|
||||
rayPos: vec2<f32>,
|
||||
rayDir: vec2<f32>,
|
||||
raysColor: vec3<f32>,
|
||||
raysSpeed: f32,
|
||||
lightSpread: f32,
|
||||
rayLength: f32,
|
||||
sourceWidth: f32,
|
||||
pulsating: f32,
|
||||
pulsatingMin: f32,
|
||||
pulsatingMax: f32,
|
||||
fadeDistance: f32,
|
||||
saturation: f32,
|
||||
mousePos: vec2<f32>,
|
||||
mouseInfluence: f32,
|
||||
noiseAmount: f32,
|
||||
distortion: f32,
|
||||
_pad1: f32,
|
||||
_pad2: f32,
|
||||
_pad3: f32,
|
||||
};
|
||||
|
||||
@group(0) @binding(0) var<uniform> uniforms: Uniforms;
|
||||
|
||||
struct VertexOutput {
|
||||
@builtin(position) position: vec4<f32>,
|
||||
@location(0) vUv: vec2<f32>,
|
||||
};
|
||||
|
||||
@vertex
|
||||
fn vertexMain(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput {
|
||||
var positions = array<vec2<f32>, 3>(
|
||||
vec2<f32>(-1.0, -1.0),
|
||||
vec2<f32>(3.0, -1.0),
|
||||
vec2<f32>(-1.0, 3.0)
|
||||
);
|
||||
|
||||
var output: VertexOutput;
|
||||
let pos = positions[vertexIndex];
|
||||
output.position = vec4<f32>(pos, 0.0, 1.0);
|
||||
output.vUv = pos * 0.5 + 0.5;
|
||||
return output;
|
||||
}
|
||||
|
||||
fn noise(st: vec2<f32>) -> f32 {
|
||||
return fract(sin(dot(st, vec2<f32>(12.9898, 78.233))) * 43758.5453123);
|
||||
}
|
||||
|
||||
fn rayStrength(raySource: vec2<f32>, rayRefDirection: vec2<f32>, coord: vec2<f32>,
|
||||
seedA: f32, seedB: f32, speed: f32) -> f32 {
|
||||
let sourceToCoord = coord - raySource;
|
||||
let dirNorm = normalize(sourceToCoord);
|
||||
let cosAngle = dot(dirNorm, rayRefDirection);
|
||||
|
||||
let distortedAngle = cosAngle + uniforms.distortion * sin(uniforms.iTime * 2.0 + length(sourceToCoord) * 0.01) * 0.2;
|
||||
|
||||
let spreadFactor = pow(max(distortedAngle, 0.0), 1.0 / max(uniforms.lightSpread, 0.001));
|
||||
|
||||
let distance = length(sourceToCoord);
|
||||
let maxDistance = uniforms.iResolution.x * uniforms.rayLength;
|
||||
let lengthFalloff = clamp((maxDistance - distance) / maxDistance, 0.0, 1.0);
|
||||
|
||||
let fadeFalloff = clamp((uniforms.iResolution.x * uniforms.fadeDistance - distance) / (uniforms.iResolution.x * uniforms.fadeDistance), 0.5, 1.0);
|
||||
let pulseCenter = (uniforms.pulsatingMin + uniforms.pulsatingMax) * 0.5;
|
||||
let pulseAmplitude = (uniforms.pulsatingMax - uniforms.pulsatingMin) * 0.5;
|
||||
var pulse: f32;
|
||||
if (uniforms.pulsating > 0.5) {
|
||||
pulse = pulseCenter + pulseAmplitude * sin(uniforms.iTime * speed * 3.0);
|
||||
} else {
|
||||
pulse = 1.0;
|
||||
}
|
||||
|
||||
let baseStrength = clamp(
|
||||
(0.45 + 0.15 * sin(distortedAngle * seedA + uniforms.iTime * speed)) +
|
||||
(0.3 + 0.2 * cos(-distortedAngle * seedB + uniforms.iTime * speed)),
|
||||
0.0, 1.0
|
||||
);
|
||||
|
||||
return baseStrength * lengthFalloff * fadeFalloff * spreadFactor * pulse;
|
||||
}
|
||||
|
||||
@fragment
|
||||
fn fragmentMain(@builtin(position) fragCoord: vec4<f32>, @location(0) vUv: vec2<f32>) -> @location(0) vec4<f32> {
|
||||
let coord = vec2<f32>(fragCoord.x, fragCoord.y);
|
||||
|
||||
let normalizedX = (coord.x / uniforms.iResolution.x) - 0.5;
|
||||
let widthOffset = -normalizedX * uniforms.sourceWidth * uniforms.iResolution.x;
|
||||
|
||||
let perpDir = vec2<f32>(-uniforms.rayDir.y, uniforms.rayDir.x);
|
||||
let adjustedRayPos = uniforms.rayPos + perpDir * widthOffset;
|
||||
|
||||
var finalRayDir = uniforms.rayDir;
|
||||
if (uniforms.mouseInfluence > 0.0) {
|
||||
let mouseScreenPos = uniforms.mousePos * uniforms.iResolution;
|
||||
let mouseDirection = normalize(mouseScreenPos - adjustedRayPos);
|
||||
finalRayDir = normalize(mix(uniforms.rayDir, mouseDirection, uniforms.mouseInfluence));
|
||||
}
|
||||
|
||||
let rays1 = vec4<f32>(1.0) *
|
||||
rayStrength(adjustedRayPos, finalRayDir, coord, 36.2214, 21.11349,
|
||||
1.5 * uniforms.raysSpeed);
|
||||
let rays2 = vec4<f32>(1.0) *
|
||||
rayStrength(adjustedRayPos, finalRayDir, coord, 22.3991, 18.0234,
|
||||
1.1 * uniforms.raysSpeed);
|
||||
|
||||
var fragColor = rays1 * 0.5 + rays2 * 0.4;
|
||||
|
||||
if (uniforms.noiseAmount > 0.0) {
|
||||
let n = noise(coord * 0.01 + uniforms.iTime * 0.1);
|
||||
fragColor = vec4<f32>(fragColor.rgb * (1.0 - uniforms.noiseAmount + uniforms.noiseAmount * n), fragColor.a);
|
||||
}
|
||||
|
||||
let brightness = 1.0 - (coord.y / uniforms.iResolution.y);
|
||||
fragColor.x = fragColor.x * (0.1 + brightness * 0.8);
|
||||
fragColor.y = fragColor.y * (0.3 + brightness * 0.6);
|
||||
fragColor.z = fragColor.z * (0.5 + brightness * 0.5);
|
||||
|
||||
if (uniforms.saturation != 1.0) {
|
||||
let gray = dot(fragColor.rgb, vec3<f32>(0.299, 0.587, 0.114));
|
||||
fragColor = vec4<f32>(mix(vec3<f32>(gray), fragColor.rgb, uniforms.saturation), fragColor.a);
|
||||
}
|
||||
|
||||
fragColor = vec4<f32>(fragColor.rgb * uniforms.raysColor, fragColor.a);
|
||||
|
||||
return fragColor;
|
||||
}
|
||||
`
|
||||
|
||||
const UNIFORM_BUFFER_SIZE = 96
|
||||
|
||||
function createUniformBuffer(data: UniformData): Float32Array {
|
||||
const buffer = new Float32Array(24)
|
||||
buffer[0] = data.iTime
|
||||
buffer[1] = 0
|
||||
buffer[2] = data.iResolution[0]
|
||||
buffer[3] = data.iResolution[1]
|
||||
buffer[4] = data.rayPos[0]
|
||||
buffer[5] = data.rayPos[1]
|
||||
buffer[6] = data.rayDir[0]
|
||||
buffer[7] = data.rayDir[1]
|
||||
buffer[8] = data.raysColor[0]
|
||||
buffer[9] = data.raysColor[1]
|
||||
buffer[10] = data.raysColor[2]
|
||||
buffer[11] = data.raysSpeed
|
||||
buffer[12] = data.lightSpread
|
||||
buffer[13] = data.rayLength
|
||||
buffer[14] = data.sourceWidth
|
||||
buffer[15] = data.pulsating
|
||||
buffer[16] = data.pulsatingMin
|
||||
buffer[17] = data.pulsatingMax
|
||||
buffer[18] = data.fadeDistance
|
||||
buffer[19] = data.saturation
|
||||
buffer[20] = data.mousePos[0]
|
||||
buffer[21] = data.mousePos[1]
|
||||
buffer[22] = data.mouseInfluence
|
||||
buffer[23] = data.noiseAmount
|
||||
return buffer
|
||||
}
|
||||
|
||||
const UNIFORM_BUFFER_SIZE_CORRECTED = 112
|
||||
|
||||
function createUniformBufferCorrected(data: UniformData): Float32Array {
|
||||
const buffer = new Float32Array(28)
|
||||
buffer[0] = data.iTime
|
||||
buffer[1] = 0
|
||||
buffer[2] = data.iResolution[0]
|
||||
buffer[3] = data.iResolution[1]
|
||||
buffer[4] = data.rayPos[0]
|
||||
buffer[5] = data.rayPos[1]
|
||||
buffer[6] = data.rayDir[0]
|
||||
buffer[7] = data.rayDir[1]
|
||||
buffer[8] = data.raysColor[0]
|
||||
buffer[9] = data.raysColor[1]
|
||||
buffer[10] = data.raysColor[2]
|
||||
buffer[11] = data.raysSpeed
|
||||
buffer[12] = data.lightSpread
|
||||
buffer[13] = data.rayLength
|
||||
buffer[14] = data.sourceWidth
|
||||
buffer[15] = data.pulsating
|
||||
buffer[16] = data.pulsatingMin
|
||||
buffer[17] = data.pulsatingMax
|
||||
buffer[18] = data.fadeDistance
|
||||
buffer[19] = data.saturation
|
||||
buffer[20] = data.mousePos[0]
|
||||
buffer[21] = data.mousePos[1]
|
||||
buffer[22] = data.mouseInfluence
|
||||
buffer[23] = data.noiseAmount
|
||||
buffer[24] = data.distortion
|
||||
buffer[25] = 0
|
||||
buffer[26] = 0
|
||||
buffer[27] = 0
|
||||
return buffer
|
||||
}
|
||||
|
||||
export default function LightRays(props: LightRaysProps) {
|
||||
let containerRef: HTMLDivElement | undefined
|
||||
let canvasRef: HTMLCanvasElement | null = null
|
||||
let deviceRef: GPUDevice | null = null
|
||||
let contextRef: GPUCanvasContext | null = null
|
||||
let pipelineRef: GPURenderPipeline | null = null
|
||||
let uniformBufferRef: GPUBuffer | null = null
|
||||
let bindGroupRef: GPUBindGroup | null = null
|
||||
let animationIdRef: number | null = null
|
||||
let cleanupFunctionRef: (() => void) | null = null
|
||||
let uniformDataRef: UniformData | null = null
|
||||
|
||||
const mouseRef = { x: 0.5, y: 0.5 }
|
||||
const smoothMouseRef = { x: 0.5, y: 0.5 }
|
||||
|
||||
const [isVisible, setIsVisible] = createSignal(false)
|
||||
|
||||
onMount(() => {
|
||||
if (!containerRef) return
|
||||
|
||||
const observer = new IntersectionObserver(
|
||||
(entries) => {
|
||||
const entry = entries[0]
|
||||
setIsVisible(entry.isIntersecting)
|
||||
},
|
||||
{ threshold: 0.1 },
|
||||
)
|
||||
|
||||
observer.observe(containerRef)
|
||||
|
||||
onCleanup(() => {
|
||||
observer.disconnect()
|
||||
})
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
const visible = isVisible()
|
||||
const config = props.config()
|
||||
if (!visible || !containerRef) {
|
||||
return
|
||||
}
|
||||
|
||||
if (cleanupFunctionRef) {
|
||||
cleanupFunctionRef()
|
||||
cleanupFunctionRef = null
|
||||
}
|
||||
|
||||
const initializeWebGPU = async () => {
|
||||
if (!containerRef) {
|
||||
return
|
||||
}
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
|
||||
if (!containerRef) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!navigator.gpu) {
|
||||
console.warn("WebGPU is not supported in this browser")
|
||||
return
|
||||
}
|
||||
|
||||
const adapter = await navigator.gpu.requestAdapter()
|
||||
if (!adapter) {
|
||||
console.warn("Failed to get WebGPU adapter")
|
||||
return
|
||||
}
|
||||
|
||||
const device = await adapter.requestDevice()
|
||||
deviceRef = device
|
||||
|
||||
const canvas = document.createElement("canvas")
|
||||
canvas.style.width = "100%"
|
||||
canvas.style.height = "100%"
|
||||
canvasRef = canvas
|
||||
|
||||
while (containerRef.firstChild) {
|
||||
containerRef.removeChild(containerRef.firstChild)
|
||||
}
|
||||
containerRef.appendChild(canvas)
|
||||
|
||||
const context = canvas.getContext("webgpu")
|
||||
if (!context) {
|
||||
console.warn("Failed to get WebGPU context")
|
||||
return
|
||||
}
|
||||
contextRef = context
|
||||
|
||||
const presentationFormat = navigator.gpu.getPreferredCanvasFormat()
|
||||
context.configure({
|
||||
device,
|
||||
format: presentationFormat,
|
||||
alphaMode: "premultiplied",
|
||||
})
|
||||
|
||||
const shaderModule = device.createShaderModule({
|
||||
code: WGSL_SHADER,
|
||||
})
|
||||
|
||||
const uniformBuffer = device.createBuffer({
|
||||
size: UNIFORM_BUFFER_SIZE_CORRECTED,
|
||||
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
|
||||
})
|
||||
uniformBufferRef = uniformBuffer
|
||||
|
||||
const bindGroupLayout = device.createBindGroupLayout({
|
||||
entries: [
|
||||
{
|
||||
binding: 0,
|
||||
visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
|
||||
buffer: { type: "uniform" },
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const bindGroup = device.createBindGroup({
|
||||
layout: bindGroupLayout,
|
||||
entries: [
|
||||
{
|
||||
binding: 0,
|
||||
resource: { buffer: uniformBuffer },
|
||||
},
|
||||
],
|
||||
})
|
||||
bindGroupRef = bindGroup
|
||||
|
||||
const pipelineLayout = device.createPipelineLayout({
|
||||
bindGroupLayouts: [bindGroupLayout],
|
||||
})
|
||||
|
||||
const pipeline = device.createRenderPipeline({
|
||||
layout: pipelineLayout,
|
||||
vertex: {
|
||||
module: shaderModule,
|
||||
entryPoint: "vertexMain",
|
||||
},
|
||||
fragment: {
|
||||
module: shaderModule,
|
||||
entryPoint: "fragmentMain",
|
||||
targets: [
|
||||
{
|
||||
format: presentationFormat,
|
||||
blend: {
|
||||
color: {
|
||||
srcFactor: "src-alpha",
|
||||
dstFactor: "one-minus-src-alpha",
|
||||
operation: "add",
|
||||
},
|
||||
alpha: {
|
||||
srcFactor: "one",
|
||||
dstFactor: "one-minus-src-alpha",
|
||||
operation: "add",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
primitive: {
|
||||
topology: "triangle-list",
|
||||
},
|
||||
})
|
||||
pipelineRef = pipeline
|
||||
|
||||
const { clientWidth: wCSS, clientHeight: hCSS } = containerRef
|
||||
const dpr = Math.min(window.devicePixelRatio, 2)
|
||||
const w = wCSS * dpr
|
||||
const h = hCSS * dpr
|
||||
const { anchor, dir } = getAnchorAndDir(config.raysOrigin, w, h)
|
||||
|
||||
uniformDataRef = {
|
||||
iTime: 0,
|
||||
iResolution: [w, h],
|
||||
rayPos: anchor,
|
||||
rayDir: dir,
|
||||
raysColor: hexToRgb(config.raysColor),
|
||||
raysSpeed: config.raysSpeed,
|
||||
lightSpread: config.lightSpread,
|
||||
rayLength: config.rayLength,
|
||||
sourceWidth: config.sourceWidth,
|
||||
pulsating: config.pulsating ? 1.0 : 0.0,
|
||||
pulsatingMin: config.pulsatingMin,
|
||||
pulsatingMax: config.pulsatingMax,
|
||||
fadeDistance: config.fadeDistance,
|
||||
saturation: config.saturation,
|
||||
mousePos: [0.5, 0.5],
|
||||
mouseInfluence: config.mouseInfluence,
|
||||
noiseAmount: config.noiseAmount,
|
||||
distortion: config.distortion,
|
||||
}
|
||||
|
||||
const updatePlacement = () => {
|
||||
if (!containerRef || !canvasRef || !uniformDataRef) {
|
||||
return
|
||||
}
|
||||
|
||||
const dpr = Math.min(window.devicePixelRatio, 2)
|
||||
const { clientWidth: wCSS, clientHeight: hCSS } = containerRef
|
||||
const w = Math.floor(wCSS * dpr)
|
||||
const h = Math.floor(hCSS * dpr)
|
||||
|
||||
canvasRef.width = w
|
||||
canvasRef.height = h
|
||||
|
||||
uniformDataRef.iResolution = [w, h]
|
||||
|
||||
const currentConfig = props.config()
|
||||
const { anchor, dir } = getAnchorAndDir(currentConfig.raysOrigin, w, h)
|
||||
uniformDataRef.rayPos = anchor
|
||||
uniformDataRef.rayDir = dir
|
||||
}
|
||||
|
||||
const loop = (t: number) => {
|
||||
if (!deviceRef || !contextRef || !pipelineRef || !uniformBufferRef || !bindGroupRef || !uniformDataRef) {
|
||||
return
|
||||
}
|
||||
|
||||
const currentConfig = props.config()
|
||||
const timeSeconds = t * 0.001
|
||||
uniformDataRef.iTime = timeSeconds
|
||||
|
||||
if (currentConfig.followMouse && currentConfig.mouseInfluence > 0.0) {
|
||||
const smoothing = 0.92
|
||||
|
||||
smoothMouseRef.x = smoothMouseRef.x * smoothing + mouseRef.x * (1 - smoothing)
|
||||
smoothMouseRef.y = smoothMouseRef.y * smoothing + mouseRef.y * (1 - smoothing)
|
||||
|
||||
uniformDataRef.mousePos = [smoothMouseRef.x, smoothMouseRef.y]
|
||||
}
|
||||
|
||||
if (props.onAnimationFrame) {
|
||||
const pulseCenter = (currentConfig.pulsatingMin + currentConfig.pulsatingMax) * 0.5
|
||||
const pulseAmplitude = (currentConfig.pulsatingMax - currentConfig.pulsatingMin) * 0.5
|
||||
const pulseValue = currentConfig.pulsating
|
||||
? pulseCenter + pulseAmplitude * Math.sin(timeSeconds * currentConfig.raysSpeed * 3.0)
|
||||
: 1.0
|
||||
|
||||
const baseIntensity1 = 0.45 + 0.15 * Math.sin(timeSeconds * currentConfig.raysSpeed * 1.5)
|
||||
const baseIntensity2 = 0.3 + 0.2 * Math.cos(timeSeconds * currentConfig.raysSpeed * 1.1)
|
||||
const intensity = (baseIntensity1 + baseIntensity2) * pulseValue
|
||||
|
||||
props.onAnimationFrame({
|
||||
time: timeSeconds,
|
||||
intensity,
|
||||
pulseValue,
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
const uniformData = createUniformBufferCorrected(uniformDataRef)
|
||||
deviceRef.queue.writeBuffer(uniformBufferRef, 0, uniformData.buffer)
|
||||
|
||||
const commandEncoder = deviceRef.createCommandEncoder()
|
||||
|
||||
const textureView = contextRef.getCurrentTexture().createView()
|
||||
|
||||
const renderPass = commandEncoder.beginRenderPass({
|
||||
colorAttachments: [
|
||||
{
|
||||
view: textureView,
|
||||
clearValue: { r: 0, g: 0, b: 0, a: 0 },
|
||||
loadOp: "clear",
|
||||
storeOp: "store",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
renderPass.setPipeline(pipelineRef)
|
||||
renderPass.setBindGroup(0, bindGroupRef)
|
||||
renderPass.draw(3)
|
||||
renderPass.end()
|
||||
|
||||
deviceRef.queue.submit([commandEncoder.finish()])
|
||||
|
||||
animationIdRef = requestAnimationFrame(loop)
|
||||
} catch (error) {
|
||||
console.warn("WebGPU rendering error:", error)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
window.addEventListener("resize", updatePlacement)
|
||||
updatePlacement()
|
||||
animationIdRef = requestAnimationFrame(loop)
|
||||
|
||||
cleanupFunctionRef = () => {
|
||||
if (animationIdRef) {
|
||||
cancelAnimationFrame(animationIdRef)
|
||||
animationIdRef = null
|
||||
}
|
||||
|
||||
window.removeEventListener("resize", updatePlacement)
|
||||
|
||||
if (uniformBufferRef) {
|
||||
uniformBufferRef.destroy()
|
||||
uniformBufferRef = null
|
||||
}
|
||||
|
||||
if (deviceRef) {
|
||||
deviceRef.destroy()
|
||||
deviceRef = null
|
||||
}
|
||||
|
||||
if (canvasRef && canvasRef.parentNode) {
|
||||
canvasRef.parentNode.removeChild(canvasRef)
|
||||
}
|
||||
|
||||
canvasRef = null
|
||||
contextRef = null
|
||||
pipelineRef = null
|
||||
bindGroupRef = null
|
||||
uniformDataRef = null
|
||||
}
|
||||
}
|
||||
|
||||
initializeWebGPU()
|
||||
|
||||
onCleanup(() => {
|
||||
if (cleanupFunctionRef) {
|
||||
cleanupFunctionRef()
|
||||
cleanupFunctionRef = null
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
if (!uniformDataRef || !containerRef) {
|
||||
return
|
||||
}
|
||||
|
||||
const config = props.config()
|
||||
|
||||
uniformDataRef.raysColor = hexToRgb(config.raysColor)
|
||||
uniformDataRef.raysSpeed = config.raysSpeed
|
||||
uniformDataRef.lightSpread = config.lightSpread
|
||||
uniformDataRef.rayLength = config.rayLength
|
||||
uniformDataRef.sourceWidth = config.sourceWidth
|
||||
uniformDataRef.pulsating = config.pulsating ? 1.0 : 0.0
|
||||
uniformDataRef.pulsatingMin = config.pulsatingMin
|
||||
uniformDataRef.pulsatingMax = config.pulsatingMax
|
||||
uniformDataRef.fadeDistance = config.fadeDistance
|
||||
uniformDataRef.saturation = config.saturation
|
||||
uniformDataRef.mouseInfluence = config.mouseInfluence
|
||||
uniformDataRef.noiseAmount = config.noiseAmount
|
||||
uniformDataRef.distortion = config.distortion
|
||||
|
||||
const dpr = Math.min(window.devicePixelRatio, 2)
|
||||
const { clientWidth: wCSS, clientHeight: hCSS } = containerRef
|
||||
const { anchor, dir } = getAnchorAndDir(config.raysOrigin, wCSS * dpr, hCSS * dpr)
|
||||
uniformDataRef.rayPos = anchor
|
||||
uniformDataRef.rayDir = dir
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
const config = props.config()
|
||||
if (!config.followMouse) {
|
||||
return
|
||||
}
|
||||
|
||||
const handleMouseMove = (e: MouseEvent) => {
|
||||
if (!containerRef) {
|
||||
return
|
||||
}
|
||||
const rect = containerRef.getBoundingClientRect()
|
||||
const x = (e.clientX - rect.left) / rect.width
|
||||
const y = (e.clientY - rect.top) / rect.height
|
||||
mouseRef.x = x
|
||||
mouseRef.y = y
|
||||
}
|
||||
|
||||
window.addEventListener("mousemove", handleMouseMove)
|
||||
|
||||
onCleanup(() => {
|
||||
window.removeEventListener("mousemove", handleMouseMove)
|
||||
})
|
||||
})
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={containerRef}
|
||||
class={`light-rays-container ${props.class ?? ""}`.trim()}
|
||||
style={{ opacity: props.config().opacity }}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
interface LightRaysControlsProps {
|
||||
config: Accessor<LightRaysConfig>
|
||||
setConfig: Setter<LightRaysConfig>
|
||||
}
|
||||
|
||||
export function LightRaysControls(props: LightRaysControlsProps) {
|
||||
const [isOpen, setIsOpen] = createSignal(true)
|
||||
|
||||
const updateConfig = <K extends keyof LightRaysConfig>(key: K, value: LightRaysConfig[K]) => {
|
||||
props.setConfig((prev) => ({ ...prev, [key]: value }))
|
||||
}
|
||||
|
||||
const origins: RaysOrigin[] = [
|
||||
"top-center",
|
||||
"top-left",
|
||||
"top-right",
|
||||
"left",
|
||||
"right",
|
||||
"bottom-center",
|
||||
"bottom-left",
|
||||
"bottom-right",
|
||||
]
|
||||
|
||||
return (
|
||||
<div class="light-rays-controls">
|
||||
<button class="light-rays-controls-toggle" onClick={() => setIsOpen(!isOpen())}>
|
||||
{isOpen() ? "▼" : "▶"} Light Rays
|
||||
</button>
|
||||
<Show when={isOpen()}>
|
||||
<div class="light-rays-controls-panel">
|
||||
<div class="control-group">
|
||||
<label>Origin</label>
|
||||
<select
|
||||
value={props.config().raysOrigin}
|
||||
onChange={(e) => updateConfig("raysOrigin", e.currentTarget.value as RaysOrigin)}
|
||||
>
|
||||
<For each={origins}>{(origin) => <option value={origin}>{origin}</option>}</For>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Color</label>
|
||||
<input
|
||||
type="color"
|
||||
value={props.config().raysColor}
|
||||
onInput={(e) => updateConfig("raysColor", e.currentTarget.value)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Speed: {props.config().raysSpeed.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="3"
|
||||
step="0.01"
|
||||
value={props.config().raysSpeed}
|
||||
onInput={(e) => updateConfig("raysSpeed", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Light Spread: {props.config().lightSpread.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0.1"
|
||||
max="5"
|
||||
step="0.01"
|
||||
value={props.config().lightSpread}
|
||||
onInput={(e) => updateConfig("lightSpread", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Ray Length: {props.config().rayLength.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0.1"
|
||||
max="5"
|
||||
step="0.01"
|
||||
value={props.config().rayLength}
|
||||
onInput={(e) => updateConfig("rayLength", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Source Width: {props.config().sourceWidth.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="2"
|
||||
step="0.01"
|
||||
value={props.config().sourceWidth}
|
||||
onInput={(e) => updateConfig("sourceWidth", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Fade Distance: {props.config().fadeDistance.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0.1"
|
||||
max="3"
|
||||
step="0.01"
|
||||
value={props.config().fadeDistance}
|
||||
onInput={(e) => updateConfig("fadeDistance", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Saturation: {props.config().saturation.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="2"
|
||||
step="0.01"
|
||||
value={props.config().saturation}
|
||||
onInput={(e) => updateConfig("saturation", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Mouse Influence: {props.config().mouseInfluence.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="1"
|
||||
step="0.01"
|
||||
value={props.config().mouseInfluence}
|
||||
onInput={(e) => updateConfig("mouseInfluence", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Noise: {props.config().noiseAmount.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="1"
|
||||
step="0.01"
|
||||
value={props.config().noiseAmount}
|
||||
onInput={(e) => updateConfig("noiseAmount", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Distortion: {props.config().distortion.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="2"
|
||||
step="0.01"
|
||||
value={props.config().distortion}
|
||||
onInput={(e) => updateConfig("distortion", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Opacity: {props.config().opacity.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="1"
|
||||
step="0.01"
|
||||
value={props.config().opacity}
|
||||
onInput={(e) => updateConfig("opacity", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group checkbox">
|
||||
<label>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={props.config().pulsating}
|
||||
onChange={(e) => updateConfig("pulsating", e.currentTarget.checked)}
|
||||
/>
|
||||
Pulsating
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<Show when={props.config().pulsating}>
|
||||
<div class="control-group">
|
||||
<label>Pulse Min: {props.config().pulsatingMin.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="1"
|
||||
step="0.01"
|
||||
value={props.config().pulsatingMin}
|
||||
onInput={(e) => updateConfig("pulsatingMin", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<label>Pulse Max: {props.config().pulsatingMax.toFixed(2)}</label>
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="2"
|
||||
step="0.01"
|
||||
value={props.config().pulsatingMax}
|
||||
onInput={(e) => updateConfig("pulsatingMax", parseFloat(e.currentTarget.value))}
|
||||
/>
|
||||
</div>
|
||||
</Show>
|
||||
|
||||
<div class="control-group checkbox">
|
||||
<label>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={props.config().followMouse}
|
||||
onChange={(e) => updateConfig("followMouse", e.currentTarget.checked)}
|
||||
/>
|
||||
Follow Mouse
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<button class="reset-button" onClick={() => props.setConfig(defaultConfig)}>
|
||||
Reset to Defaults
|
||||
</button>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
15
packages/console/app/src/component/spotlight.css
Normal file
15
packages/console/app/src/component/spotlight.css
Normal file
@@ -0,0 +1,15 @@
|
||||
.spotlight-container {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 50dvh;
|
||||
pointer-events: none;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.spotlight-container canvas {
|
||||
display: block;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
820
packages/console/app/src/component/spotlight.tsx
Normal file
820
packages/console/app/src/component/spotlight.tsx
Normal file
@@ -0,0 +1,820 @@
|
||||
import { createSignal, createEffect, onMount, onCleanup, Accessor } from "solid-js"
|
||||
import "./spotlight.css"
|
||||
|
||||
export interface ParticlesConfig {
|
||||
enabled: boolean
|
||||
amount: number
|
||||
size: [number, number]
|
||||
speed: number
|
||||
opacity: number
|
||||
drift: number
|
||||
}
|
||||
|
||||
export interface SpotlightConfig {
|
||||
placement: [number, number]
|
||||
color: string
|
||||
speed: number
|
||||
spread: number
|
||||
length: number
|
||||
width: number
|
||||
pulsating: false | [number, number]
|
||||
distance: number
|
||||
saturation: number
|
||||
noiseAmount: number
|
||||
distortion: number
|
||||
opacity: number
|
||||
particles: ParticlesConfig
|
||||
}
|
||||
|
||||
export const defaultConfig: SpotlightConfig = {
|
||||
placement: [0.5, -0.15],
|
||||
color: "#ffffff",
|
||||
speed: 0.8,
|
||||
spread: 0.5,
|
||||
length: 4.0,
|
||||
width: 0.15,
|
||||
pulsating: [0.95, 1.1],
|
||||
distance: 3.5,
|
||||
saturation: 0.35,
|
||||
noiseAmount: 0.15,
|
||||
distortion: 0.05,
|
||||
opacity: 0.325,
|
||||
particles: {
|
||||
enabled: true,
|
||||
amount: 70,
|
||||
size: [1.25, 1.5],
|
||||
speed: 0.75,
|
||||
opacity: 0.9,
|
||||
drift: 1.5,
|
||||
},
|
||||
}
|
||||
|
||||
export interface SpotlightAnimationState {
|
||||
time: number
|
||||
intensity: number
|
||||
pulseValue: number
|
||||
}
|
||||
|
||||
interface SpotlightProps {
|
||||
config: Accessor<SpotlightConfig>
|
||||
class?: string
|
||||
onAnimationFrame?: (state: SpotlightAnimationState) => void
|
||||
}
|
||||
|
||||
const hexToRgb = (hex: string): [number, number, number] => {
|
||||
const m = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex)
|
||||
return m ? [parseInt(m[1], 16) / 255, parseInt(m[2], 16) / 255, parseInt(m[3], 16) / 255] : [1, 1, 1]
|
||||
}
|
||||
|
||||
const getAnchorAndDir = (
|
||||
placement: [number, number],
|
||||
w: number,
|
||||
h: number,
|
||||
): { anchor: [number, number]; dir: [number, number] } => {
|
||||
const [px, py] = placement
|
||||
const outside = 0.2
|
||||
|
||||
let anchorX = px * w
|
||||
let anchorY = py * h
|
||||
let dirX = 0
|
||||
let dirY = 0
|
||||
|
||||
const centerX = 0.5
|
||||
const centerY = 0.5
|
||||
|
||||
if (py <= 0.25) {
|
||||
anchorY = -outside * h + py * h
|
||||
dirY = 1
|
||||
dirX = (centerX - px) * 0.5
|
||||
} else if (py >= 0.75) {
|
||||
anchorY = (1 + outside) * h - (1 - py) * h
|
||||
dirY = -1
|
||||
dirX = (centerX - px) * 0.5
|
||||
} else if (px <= 0.25) {
|
||||
anchorX = -outside * w + px * w
|
||||
dirX = 1
|
||||
dirY = (centerY - py) * 0.5
|
||||
} else if (px >= 0.75) {
|
||||
anchorX = (1 + outside) * w - (1 - px) * w
|
||||
dirX = -1
|
||||
dirY = (centerY - py) * 0.5
|
||||
} else {
|
||||
dirY = 1
|
||||
}
|
||||
|
||||
const len = Math.sqrt(dirX * dirX + dirY * dirY)
|
||||
if (len > 0) {
|
||||
dirX /= len
|
||||
dirY /= len
|
||||
}
|
||||
|
||||
return { anchor: [anchorX, anchorY], dir: [dirX, dirY] }
|
||||
}
|
||||
|
||||
interface UniformData {
|
||||
iTime: number
|
||||
iResolution: [number, number]
|
||||
lightPos: [number, number]
|
||||
lightDir: [number, number]
|
||||
color: [number, number, number]
|
||||
speed: number
|
||||
lightSpread: number
|
||||
lightLength: number
|
||||
sourceWidth: number
|
||||
pulsating: number
|
||||
pulsatingMin: number
|
||||
pulsatingMax: number
|
||||
fadeDistance: number
|
||||
saturation: number
|
||||
noiseAmount: number
|
||||
distortion: number
|
||||
particlesEnabled: number
|
||||
particleAmount: number
|
||||
particleSizeMin: number
|
||||
particleSizeMax: number
|
||||
particleSpeed: number
|
||||
particleOpacity: number
|
||||
particleDrift: number
|
||||
}
|
||||
|
||||
const WGSL_SHADER = `
|
||||
struct Uniforms {
|
||||
iTime: f32,
|
||||
_pad0: f32,
|
||||
iResolution: vec2<f32>,
|
||||
lightPos: vec2<f32>,
|
||||
lightDir: vec2<f32>,
|
||||
color: vec3<f32>,
|
||||
speed: f32,
|
||||
lightSpread: f32,
|
||||
lightLength: f32,
|
||||
sourceWidth: f32,
|
||||
pulsating: f32,
|
||||
pulsatingMin: f32,
|
||||
pulsatingMax: f32,
|
||||
fadeDistance: f32,
|
||||
saturation: f32,
|
||||
noiseAmount: f32,
|
||||
distortion: f32,
|
||||
particlesEnabled: f32,
|
||||
particleAmount: f32,
|
||||
particleSizeMin: f32,
|
||||
particleSizeMax: f32,
|
||||
particleSpeed: f32,
|
||||
particleOpacity: f32,
|
||||
particleDrift: f32,
|
||||
_pad1: f32,
|
||||
_pad2: f32,
|
||||
};
|
||||
|
||||
@group(0) @binding(0) var<uniform> uniforms: Uniforms;
|
||||
|
||||
struct VertexOutput {
|
||||
@builtin(position) position: vec4<f32>,
|
||||
@location(0) vUv: vec2<f32>,
|
||||
};
|
||||
|
||||
@vertex
|
||||
fn vertexMain(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput {
|
||||
var positions = array<vec2<f32>, 3>(
|
||||
vec2<f32>(-1.0, -1.0),
|
||||
vec2<f32>(3.0, -1.0),
|
||||
vec2<f32>(-1.0, 3.0)
|
||||
);
|
||||
|
||||
var output: VertexOutput;
|
||||
let pos = positions[vertexIndex];
|
||||
output.position = vec4<f32>(pos, 0.0, 1.0);
|
||||
output.vUv = pos * 0.5 + 0.5;
|
||||
return output;
|
||||
}
|
||||
|
||||
fn hash(p: vec2<f32>) -> f32 {
|
||||
let p3 = fract(p.xyx * 0.1031);
|
||||
return fract((p3.x + p3.y) * p3.z + dot(p3, p3.yzx + 33.33));
|
||||
}
|
||||
|
||||
fn hash2(p: vec2<f32>) -> vec2<f32> {
|
||||
let n = sin(dot(p, vec2<f32>(41.0, 289.0)));
|
||||
return fract(vec2<f32>(n * 262144.0, n * 32768.0));
|
||||
}
|
||||
|
||||
fn fastNoise(st: vec2<f32>) -> f32 {
|
||||
return fract(sin(dot(st, vec2<f32>(12.9898, 78.233))) * 43758.5453);
|
||||
}
|
||||
|
||||
fn lightStrengthCombined(lightSource: vec2<f32>, lightRefDirection: vec2<f32>, coord: vec2<f32>) -> f32 {
|
||||
let sourceToCoord = coord - lightSource;
|
||||
let distSq = dot(sourceToCoord, sourceToCoord);
|
||||
let distance = sqrt(distSq);
|
||||
|
||||
let baseSize = min(uniforms.iResolution.x, uniforms.iResolution.y);
|
||||
let maxDistance = max(baseSize * uniforms.lightLength, 0.001);
|
||||
if (distance > maxDistance) {
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
let invDist = 1.0 / max(distance, 0.001);
|
||||
let dirNorm = sourceToCoord * invDist;
|
||||
let cosAngle = dot(dirNorm, lightRefDirection);
|
||||
|
||||
if (cosAngle < 0.0) {
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
let side = dot(dirNorm, vec2<f32>(-lightRefDirection.y, lightRefDirection.x));
|
||||
let time = uniforms.iTime;
|
||||
let speed = uniforms.speed;
|
||||
|
||||
let asymNoise = fastNoise(vec2<f32>(side * 6.0 + time * 0.12, distance * 0.004 + cosAngle * 2.0));
|
||||
let asymShift = (asymNoise - 0.5) * uniforms.distortion * 0.6;
|
||||
|
||||
let distortPhase = time * 1.4 + distance * 0.006 + cosAngle * 4.5 + side * 1.7;
|
||||
let distortedAngle = cosAngle + uniforms.distortion * sin(distortPhase) * 0.22 + asymShift;
|
||||
|
||||
let flickerSeed = cosAngle * 9.0 + side * 4.0 + time * speed * 0.35;
|
||||
let flicker = 0.86 + fastNoise(vec2<f32>(flickerSeed, distance * 0.01)) * 0.28;
|
||||
|
||||
let asymSpread = max(uniforms.lightSpread * (0.9 + (asymNoise - 0.5) * 0.25), 0.001);
|
||||
let spreadFactor = pow(max(distortedAngle, 0.0), 1.0 / asymSpread);
|
||||
let lengthFalloff = clamp(1.0 - distance / maxDistance, 0.0, 1.0);
|
||||
|
||||
let fadeMaxDist = max(baseSize * uniforms.fadeDistance, 0.001);
|
||||
let fadeFalloff = clamp((fadeMaxDist - distance) / fadeMaxDist, 0.0, 1.0);
|
||||
|
||||
var pulse: f32 = 1.0;
|
||||
if (uniforms.pulsating > 0.5) {
|
||||
let pulseCenter = (uniforms.pulsatingMin + uniforms.pulsatingMax) * 0.5;
|
||||
let pulseAmplitude = (uniforms.pulsatingMax - uniforms.pulsatingMin) * 0.5;
|
||||
pulse = pulseCenter + pulseAmplitude * sin(time * speed * 3.0);
|
||||
}
|
||||
|
||||
let timeSpeed = time * speed;
|
||||
let wave = 0.5
|
||||
+ 0.25 * sin(cosAngle * 28.0 + side * 8.0 + timeSpeed * 1.2)
|
||||
+ 0.18 * cos(cosAngle * 22.0 - timeSpeed * 0.95 + side * 6.0)
|
||||
+ 0.12 * sin(cosAngle * 35.0 + timeSpeed * 1.6 + asymNoise * 3.0);
|
||||
let minStrength = 0.14 + asymNoise * 0.06;
|
||||
let baseStrength = max(clamp(wave * (0.85 + asymNoise * 0.3), 0.0, 1.0), minStrength);
|
||||
|
||||
let lightStrength = baseStrength * lengthFalloff * fadeFalloff * spreadFactor * pulse * flicker;
|
||||
let ambientLight = (0.06 + asymNoise * 0.04) * lengthFalloff * fadeFalloff * spreadFactor;
|
||||
|
||||
return max(lightStrength, ambientLight);
|
||||
}
|
||||
|
||||
fn particle(coord: vec2<f32>, particlePos: vec2<f32>, size: f32) -> f32 {
|
||||
let delta = coord - particlePos;
|
||||
let distSq = dot(delta, delta);
|
||||
let sizeSq = size * size;
|
||||
|
||||
if (distSq > sizeSq * 9.0) {
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
let d = sqrt(distSq);
|
||||
let core = smoothstep(size, size * 0.35, d);
|
||||
let glow = smoothstep(size * 3.0, 0.0, d) * 0.55;
|
||||
return core + glow;
|
||||
}
|
||||
|
||||
fn renderParticles(coord: vec2<f32>, lightSource: vec2<f32>, lightDir: vec2<f32>) -> f32 {
|
||||
if (uniforms.particlesEnabled < 0.5 || uniforms.particleAmount < 1.0) {
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
var particleSum: f32 = 0.0;
|
||||
let particleCount = i32(uniforms.particleAmount);
|
||||
let time = uniforms.iTime * uniforms.particleSpeed;
|
||||
let perpDir = vec2<f32>(-lightDir.y, lightDir.x);
|
||||
let baseSize = min(uniforms.iResolution.x, uniforms.iResolution.y);
|
||||
let maxDist = max(baseSize * uniforms.lightLength, 1.0);
|
||||
let spreadScale = uniforms.lightSpread * baseSize * 0.65;
|
||||
let coneHalfWidth = uniforms.lightSpread * baseSize * 0.55;
|
||||
|
||||
for (var i: i32 = 0; i < particleCount; i = i + 1) {
|
||||
let fi = f32(i);
|
||||
let seed = vec2<f32>(fi * 127.1, fi * 311.7);
|
||||
let rnd = hash2(seed);
|
||||
|
||||
let lifeDuration = 2.0 + hash(seed + vec2<f32>(19.0, 73.0)) * 3.0;
|
||||
let lifeOffset = hash(seed + vec2<f32>(91.0, 37.0)) * lifeDuration;
|
||||
let lifeProgress = fract((time + lifeOffset) / lifeDuration);
|
||||
|
||||
let fadeIn = smoothstep(0.0, 0.2, lifeProgress);
|
||||
let fadeOut = 1.0 - smoothstep(0.8, 1.0, lifeProgress);
|
||||
let lifeFade = fadeIn * fadeOut;
|
||||
if (lifeFade < 0.01) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let alongLight = rnd.x * maxDist * 0.8;
|
||||
let perpOffset = (rnd.y - 0.5) * spreadScale;
|
||||
|
||||
let floatPhase = rnd.y * 6.28318 + fi * 0.37;
|
||||
let floatSpeed = 0.35 + rnd.x * 0.9;
|
||||
let drift = vec2<f32>(
|
||||
sin(time * floatSpeed + floatPhase),
|
||||
cos(time * floatSpeed * 0.85 + floatPhase * 1.3)
|
||||
) * uniforms.particleDrift * baseSize * 0.08;
|
||||
|
||||
let wobble = vec2<f32>(
|
||||
sin(time * 1.4 + floatPhase * 2.1),
|
||||
cos(time * 1.1 + floatPhase * 1.6)
|
||||
) * uniforms.particleDrift * baseSize * 0.03;
|
||||
|
||||
let flowOffset = (rnd.x - 0.5) * baseSize * 0.12 + fract(time * 0.06 + rnd.y) * baseSize * 0.1;
|
||||
|
||||
let basePos = lightSource + lightDir * (alongLight + flowOffset) + perpDir * perpOffset + drift + wobble;
|
||||
|
||||
let toParticle = basePos - lightSource;
|
||||
let projLen = dot(toParticle, lightDir);
|
||||
if (projLen < 0.0 || projLen > maxDist) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let sideDist = abs(dot(toParticle, perpDir));
|
||||
if (sideDist > coneHalfWidth) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let size = mix(uniforms.particleSizeMin, uniforms.particleSizeMax, rnd.x);
|
||||
let twinkle = 0.7 + 0.3 * sin(time * (1.5 + rnd.y * 2.0) + floatPhase);
|
||||
let distFade = 1.0 - smoothstep(maxDist * 0.2, maxDist * 0.95, projLen);
|
||||
if (distFade < 0.01) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let p = particle(coord, basePos, size);
|
||||
if (p > 0.0) {
|
||||
particleSum = particleSum + p * lifeFade * twinkle * distFade * uniforms.particleOpacity;
|
||||
if (particleSum >= 1.0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return min(particleSum, 1.0);
|
||||
}
|
||||
|
||||
@fragment
|
||||
fn fragmentMain(@builtin(position) fragCoord: vec4<f32>, @location(0) vUv: vec2<f32>) -> @location(0) vec4<f32> {
|
||||
let coord = vec2<f32>(fragCoord.x, fragCoord.y);
|
||||
|
||||
let normalizedX = (coord.x / uniforms.iResolution.x) - 0.5;
|
||||
let widthOffset = -normalizedX * uniforms.sourceWidth * uniforms.iResolution.x;
|
||||
|
||||
let perpDir = vec2<f32>(-uniforms.lightDir.y, uniforms.lightDir.x);
|
||||
let adjustedLightPos = uniforms.lightPos + perpDir * widthOffset;
|
||||
|
||||
let lightValue = lightStrengthCombined(adjustedLightPos, uniforms.lightDir, coord);
|
||||
|
||||
if (lightValue < 0.001) {
|
||||
let particles = renderParticles(coord, adjustedLightPos, uniforms.lightDir);
|
||||
if (particles < 0.001) {
|
||||
return vec4<f32>(0.0, 0.0, 0.0, 0.0);
|
||||
}
|
||||
let particleBrightness = particles * 1.8;
|
||||
return vec4<f32>(uniforms.color * particleBrightness, particles * 0.9);
|
||||
}
|
||||
|
||||
var fragColor = vec4<f32>(lightValue, lightValue, lightValue, lightValue);
|
||||
|
||||
if (uniforms.noiseAmount > 0.01) {
|
||||
let n = fastNoise(coord * 0.5 + uniforms.iTime * 0.5);
|
||||
let grain = mix(1.0, n, uniforms.noiseAmount * 0.5);
|
||||
fragColor = vec4<f32>(fragColor.rgb * grain, fragColor.a);
|
||||
}
|
||||
|
||||
let brightness = 1.0 - (coord.y / uniforms.iResolution.y);
|
||||
fragColor = vec4<f32>(
|
||||
fragColor.x * (0.15 + brightness * 0.85),
|
||||
fragColor.y * (0.35 + brightness * 0.65),
|
||||
fragColor.z * (0.55 + brightness * 0.45),
|
||||
fragColor.a
|
||||
);
|
||||
|
||||
if (abs(uniforms.saturation - 1.0) > 0.01) {
|
||||
let gray = dot(fragColor.rgb, vec3<f32>(0.299, 0.587, 0.114));
|
||||
fragColor = vec4<f32>(mix(vec3<f32>(gray), fragColor.rgb, uniforms.saturation), fragColor.a);
|
||||
}
|
||||
|
||||
fragColor = vec4<f32>(fragColor.rgb * uniforms.color, fragColor.a);
|
||||
|
||||
let particles = renderParticles(coord, adjustedLightPos, uniforms.lightDir);
|
||||
if (particles > 0.001) {
|
||||
let particleBrightness = particles * 1.8;
|
||||
fragColor = vec4<f32>(fragColor.rgb + uniforms.color * particleBrightness, max(fragColor.a, particles * 0.9));
|
||||
}
|
||||
|
||||
return fragColor;
|
||||
}
|
||||
`
|
||||
|
||||
const UNIFORM_BUFFER_SIZE = 144
|
||||
|
||||
function updateUniformBuffer(buffer: Float32Array, data: UniformData): void {
|
||||
buffer[0] = data.iTime
|
||||
buffer[2] = data.iResolution[0]
|
||||
buffer[3] = data.iResolution[1]
|
||||
buffer[4] = data.lightPos[0]
|
||||
buffer[5] = data.lightPos[1]
|
||||
buffer[6] = data.lightDir[0]
|
||||
buffer[7] = data.lightDir[1]
|
||||
buffer[8] = data.color[0]
|
||||
buffer[9] = data.color[1]
|
||||
buffer[10] = data.color[2]
|
||||
buffer[11] = data.speed
|
||||
buffer[12] = data.lightSpread
|
||||
buffer[13] = data.lightLength
|
||||
buffer[14] = data.sourceWidth
|
||||
buffer[15] = data.pulsating
|
||||
buffer[16] = data.pulsatingMin
|
||||
buffer[17] = data.pulsatingMax
|
||||
buffer[18] = data.fadeDistance
|
||||
buffer[19] = data.saturation
|
||||
buffer[20] = data.noiseAmount
|
||||
buffer[21] = data.distortion
|
||||
buffer[22] = data.particlesEnabled
|
||||
buffer[23] = data.particleAmount
|
||||
buffer[24] = data.particleSizeMin
|
||||
buffer[25] = data.particleSizeMax
|
||||
buffer[26] = data.particleSpeed
|
||||
buffer[27] = data.particleOpacity
|
||||
buffer[28] = data.particleDrift
|
||||
}
|
||||
|
||||
export default function Spotlight(props: SpotlightProps) {
|
||||
let containerRef: HTMLDivElement | undefined
|
||||
let canvasRef: HTMLCanvasElement | null = null
|
||||
let deviceRef: GPUDevice | null = null
|
||||
let contextRef: GPUCanvasContext | null = null
|
||||
let pipelineRef: GPURenderPipeline | null = null
|
||||
let uniformBufferRef: GPUBuffer | null = null
|
||||
let bindGroupRef: GPUBindGroup | null = null
|
||||
let animationIdRef: number | null = null
|
||||
let cleanupFunctionRef: (() => void) | null = null
|
||||
let uniformDataRef: UniformData | null = null
|
||||
let uniformArrayRef: Float32Array | null = null
|
||||
let configRef: SpotlightConfig = props.config()
|
||||
let frameCount = 0
|
||||
|
||||
const [isVisible, setIsVisible] = createSignal(false)
|
||||
|
||||
createEffect(() => {
|
||||
configRef = props.config()
|
||||
})
|
||||
|
||||
onMount(() => {
|
||||
if (!containerRef) return
|
||||
|
||||
const observer = new IntersectionObserver(
|
||||
(entries) => {
|
||||
const entry = entries[0]
|
||||
setIsVisible(entry.isIntersecting)
|
||||
},
|
||||
{ threshold: 0.1 },
|
||||
)
|
||||
|
||||
observer.observe(containerRef)
|
||||
|
||||
onCleanup(() => {
|
||||
observer.disconnect()
|
||||
})
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
const visible = isVisible()
|
||||
const config = props.config()
|
||||
if (!visible || !containerRef) {
|
||||
return
|
||||
}
|
||||
|
||||
if (cleanupFunctionRef) {
|
||||
cleanupFunctionRef()
|
||||
cleanupFunctionRef = null
|
||||
}
|
||||
|
||||
const initializeWebGPU = async () => {
|
||||
if (!containerRef) {
|
||||
return
|
||||
}
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
|
||||
if (!containerRef) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!navigator.gpu) {
|
||||
console.warn("WebGPU is not supported in this browser")
|
||||
return
|
||||
}
|
||||
|
||||
const adapter = await navigator.gpu.requestAdapter({
|
||||
powerPreference: "high-performance",
|
||||
})
|
||||
if (!adapter) {
|
||||
console.warn("Failed to get WebGPU adapter")
|
||||
return
|
||||
}
|
||||
|
||||
const device = await adapter.requestDevice()
|
||||
deviceRef = device
|
||||
|
||||
const canvas = document.createElement("canvas")
|
||||
canvas.style.width = "100%"
|
||||
canvas.style.height = "100%"
|
||||
canvasRef = canvas
|
||||
|
||||
while (containerRef.firstChild) {
|
||||
containerRef.removeChild(containerRef.firstChild)
|
||||
}
|
||||
containerRef.appendChild(canvas)
|
||||
|
||||
const context = canvas.getContext("webgpu")
|
||||
if (!context) {
|
||||
console.warn("Failed to get WebGPU context")
|
||||
return
|
||||
}
|
||||
contextRef = context
|
||||
|
||||
const presentationFormat = navigator.gpu.getPreferredCanvasFormat()
|
||||
context.configure({
|
||||
device,
|
||||
format: presentationFormat,
|
||||
alphaMode: "premultiplied",
|
||||
})
|
||||
|
||||
const shaderModule = device.createShaderModule({
|
||||
code: WGSL_SHADER,
|
||||
})
|
||||
|
||||
const uniformBuffer = device.createBuffer({
|
||||
size: UNIFORM_BUFFER_SIZE,
|
||||
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
|
||||
})
|
||||
uniformBufferRef = uniformBuffer
|
||||
|
||||
const bindGroupLayout = device.createBindGroupLayout({
|
||||
entries: [
|
||||
{
|
||||
binding: 0,
|
||||
visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
|
||||
buffer: { type: "uniform" },
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const bindGroup = device.createBindGroup({
|
||||
layout: bindGroupLayout,
|
||||
entries: [
|
||||
{
|
||||
binding: 0,
|
||||
resource: { buffer: uniformBuffer },
|
||||
},
|
||||
],
|
||||
})
|
||||
bindGroupRef = bindGroup
|
||||
|
||||
const pipelineLayout = device.createPipelineLayout({
|
||||
bindGroupLayouts: [bindGroupLayout],
|
||||
})
|
||||
|
||||
const pipeline = device.createRenderPipeline({
|
||||
layout: pipelineLayout,
|
||||
vertex: {
|
||||
module: shaderModule,
|
||||
entryPoint: "vertexMain",
|
||||
},
|
||||
fragment: {
|
||||
module: shaderModule,
|
||||
entryPoint: "fragmentMain",
|
||||
targets: [
|
||||
{
|
||||
format: presentationFormat,
|
||||
blend: {
|
||||
color: {
|
||||
srcFactor: "src-alpha",
|
||||
dstFactor: "one-minus-src-alpha",
|
||||
operation: "add",
|
||||
},
|
||||
alpha: {
|
||||
srcFactor: "one",
|
||||
dstFactor: "one-minus-src-alpha",
|
||||
operation: "add",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
primitive: {
|
||||
topology: "triangle-list",
|
||||
},
|
||||
})
|
||||
pipelineRef = pipeline
|
||||
|
||||
const { clientWidth: wCSS, clientHeight: hCSS } = containerRef
|
||||
const dpr = Math.min(window.devicePixelRatio, 2)
|
||||
const w = wCSS * dpr
|
||||
const h = hCSS * dpr
|
||||
const { anchor, dir } = getAnchorAndDir(config.placement, w, h)
|
||||
|
||||
uniformDataRef = {
|
||||
iTime: 0,
|
||||
iResolution: [w, h],
|
||||
lightPos: anchor,
|
||||
lightDir: dir,
|
||||
color: hexToRgb(config.color),
|
||||
speed: config.speed,
|
||||
lightSpread: config.spread,
|
||||
lightLength: config.length,
|
||||
sourceWidth: config.width,
|
||||
pulsating: config.pulsating !== false ? 1.0 : 0.0,
|
||||
pulsatingMin: config.pulsating !== false ? config.pulsating[0] : 1.0,
|
||||
pulsatingMax: config.pulsating !== false ? config.pulsating[1] : 1.0,
|
||||
fadeDistance: config.distance,
|
||||
saturation: config.saturation,
|
||||
noiseAmount: config.noiseAmount,
|
||||
distortion: config.distortion,
|
||||
particlesEnabled: config.particles.enabled ? 1.0 : 0.0,
|
||||
particleAmount: config.particles.amount,
|
||||
particleSizeMin: config.particles.size[0],
|
||||
particleSizeMax: config.particles.size[1],
|
||||
particleSpeed: config.particles.speed,
|
||||
particleOpacity: config.particles.opacity,
|
||||
particleDrift: config.particles.drift,
|
||||
}
|
||||
|
||||
const updatePlacement = () => {
|
||||
if (!containerRef || !canvasRef || !uniformDataRef) {
|
||||
return
|
||||
}
|
||||
|
||||
const dpr = Math.min(window.devicePixelRatio, 2)
|
||||
const { clientWidth: wCSS, clientHeight: hCSS } = containerRef
|
||||
const w = Math.floor(wCSS * dpr)
|
||||
const h = Math.floor(hCSS * dpr)
|
||||
|
||||
canvasRef.width = w
|
||||
canvasRef.height = h
|
||||
|
||||
uniformDataRef.iResolution = [w, h]
|
||||
|
||||
const { anchor, dir } = getAnchorAndDir(configRef.placement, w, h)
|
||||
uniformDataRef.lightPos = anchor
|
||||
uniformDataRef.lightDir = dir
|
||||
}
|
||||
|
||||
const loop = (t: number) => {
|
||||
if (!deviceRef || !contextRef || !pipelineRef || !uniformBufferRef || !bindGroupRef || !uniformDataRef) {
|
||||
return
|
||||
}
|
||||
|
||||
const timeSeconds = t * 0.001
|
||||
uniformDataRef.iTime = timeSeconds
|
||||
frameCount++
|
||||
|
||||
if (props.onAnimationFrame && frameCount % 2 === 0) {
|
||||
const pulsatingMin = configRef.pulsating !== false ? configRef.pulsating[0] : 1.0
|
||||
const pulsatingMax = configRef.pulsating !== false ? configRef.pulsating[1] : 1.0
|
||||
const pulseCenter = (pulsatingMin + pulsatingMax) * 0.5
|
||||
const pulseAmplitude = (pulsatingMax - pulsatingMin) * 0.5
|
||||
const pulseValue =
|
||||
configRef.pulsating !== false
|
||||
? pulseCenter + pulseAmplitude * Math.sin(timeSeconds * configRef.speed * 3.0)
|
||||
: 1.0
|
||||
|
||||
const baseIntensity1 = 0.45 + 0.15 * Math.sin(timeSeconds * configRef.speed * 1.5)
|
||||
const baseIntensity2 = 0.3 + 0.2 * Math.cos(timeSeconds * configRef.speed * 1.1)
|
||||
const intensity = Math.max((baseIntensity1 + baseIntensity2) * pulseValue, 0.55)
|
||||
|
||||
props.onAnimationFrame({
|
||||
time: timeSeconds,
|
||||
intensity,
|
||||
pulseValue: Math.max(pulseValue, 0.9),
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
if (!uniformArrayRef) {
|
||||
uniformArrayRef = new Float32Array(36)
|
||||
}
|
||||
updateUniformBuffer(uniformArrayRef, uniformDataRef)
|
||||
deviceRef.queue.writeBuffer(uniformBufferRef, 0, uniformArrayRef.buffer)
|
||||
|
||||
const commandEncoder = deviceRef.createCommandEncoder()
|
||||
|
||||
const textureView = contextRef.getCurrentTexture().createView()
|
||||
|
||||
const renderPass = commandEncoder.beginRenderPass({
|
||||
colorAttachments: [
|
||||
{
|
||||
view: textureView,
|
||||
clearValue: { r: 0, g: 0, b: 0, a: 0 },
|
||||
loadOp: "clear",
|
||||
storeOp: "store",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
renderPass.setPipeline(pipelineRef)
|
||||
renderPass.setBindGroup(0, bindGroupRef)
|
||||
renderPass.draw(3)
|
||||
renderPass.end()
|
||||
|
||||
deviceRef.queue.submit([commandEncoder.finish()])
|
||||
|
||||
animationIdRef = requestAnimationFrame(loop)
|
||||
} catch (error) {
|
||||
console.warn("WebGPU rendering error:", error)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
window.addEventListener("resize", updatePlacement)
|
||||
updatePlacement()
|
||||
animationIdRef = requestAnimationFrame(loop)
|
||||
|
||||
cleanupFunctionRef = () => {
|
||||
if (animationIdRef) {
|
||||
cancelAnimationFrame(animationIdRef)
|
||||
animationIdRef = null
|
||||
}
|
||||
|
||||
window.removeEventListener("resize", updatePlacement)
|
||||
|
||||
if (uniformBufferRef) {
|
||||
uniformBufferRef.destroy()
|
||||
uniformBufferRef = null
|
||||
}
|
||||
|
||||
if (deviceRef) {
|
||||
deviceRef.destroy()
|
||||
deviceRef = null
|
||||
}
|
||||
|
||||
if (canvasRef && canvasRef.parentNode) {
|
||||
canvasRef.parentNode.removeChild(canvasRef)
|
||||
}
|
||||
|
||||
canvasRef = null
|
||||
contextRef = null
|
||||
pipelineRef = null
|
||||
bindGroupRef = null
|
||||
uniformDataRef = null
|
||||
}
|
||||
}
|
||||
|
||||
initializeWebGPU()
|
||||
|
||||
onCleanup(() => {
|
||||
if (cleanupFunctionRef) {
|
||||
cleanupFunctionRef()
|
||||
cleanupFunctionRef = null
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
if (!uniformDataRef || !containerRef) {
|
||||
return
|
||||
}
|
||||
|
||||
const config = props.config()
|
||||
|
||||
uniformDataRef.color = hexToRgb(config.color)
|
||||
uniformDataRef.speed = config.speed
|
||||
uniformDataRef.lightSpread = config.spread
|
||||
uniformDataRef.lightLength = config.length
|
||||
uniformDataRef.sourceWidth = config.width
|
||||
uniformDataRef.pulsating = config.pulsating !== false ? 1.0 : 0.0
|
||||
uniformDataRef.pulsatingMin = config.pulsating !== false ? config.pulsating[0] : 1.0
|
||||
uniformDataRef.pulsatingMax = config.pulsating !== false ? config.pulsating[1] : 1.0
|
||||
uniformDataRef.fadeDistance = config.distance
|
||||
uniformDataRef.saturation = config.saturation
|
||||
uniformDataRef.noiseAmount = config.noiseAmount
|
||||
uniformDataRef.distortion = config.distortion
|
||||
uniformDataRef.particlesEnabled = config.particles.enabled ? 1.0 : 0.0
|
||||
uniformDataRef.particleAmount = config.particles.amount
|
||||
uniformDataRef.particleSizeMin = config.particles.size[0]
|
||||
uniformDataRef.particleSizeMax = config.particles.size[1]
|
||||
uniformDataRef.particleSpeed = config.particles.speed
|
||||
uniformDataRef.particleOpacity = config.particles.opacity
|
||||
uniformDataRef.particleDrift = config.particles.drift
|
||||
|
||||
const dpr = Math.min(window.devicePixelRatio, 2)
|
||||
const { clientWidth: wCSS, clientHeight: hCSS } = containerRef
|
||||
const { anchor, dir } = getAnchorAndDir(config.placement, wCSS * dpr, hCSS * dpr)
|
||||
uniformDataRef.lightPos = anchor
|
||||
uniformDataRef.lightDir = dir
|
||||
})
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={containerRef}
|
||||
class={`spotlight-container ${props.class ?? ""}`.trim()}
|
||||
style={{ opacity: props.config().opacity }}
|
||||
/>
|
||||
)
|
||||
}
|
||||
@@ -3,7 +3,7 @@ import { Title, Meta, Link } from "@solidjs/meta"
|
||||
import { createMemo, createSignal } from "solid-js"
|
||||
import { github } from "~/lib/github"
|
||||
import { config } from "~/config"
|
||||
import LightRays, { defaultConfig, type LightRaysConfig, type LightRaysAnimationState } from "~/component/light-rays"
|
||||
import Spotlight, { defaultConfig, type SpotlightAnimationState } from "~/component/spotlight"
|
||||
import "./black.css"
|
||||
|
||||
export default function BlackLayout(props: RouteSectionProps) {
|
||||
@@ -17,15 +17,14 @@ export default function BlackLayout(props: RouteSectionProps) {
|
||||
: config.github.starsFormatted.compact,
|
||||
)
|
||||
|
||||
const [lightRaysConfig, setLightRaysConfig] = createSignal<LightRaysConfig>(defaultConfig)
|
||||
const [rayAnimationState, setRayAnimationState] = createSignal<LightRaysAnimationState>({
|
||||
const [spotlightAnimationState, setSpotlightAnimationState] = createSignal<SpotlightAnimationState>({
|
||||
time: 0,
|
||||
intensity: 0.5,
|
||||
pulseValue: 1,
|
||||
})
|
||||
|
||||
const svgLightingValues = createMemo(() => {
|
||||
const state = rayAnimationState()
|
||||
const state = spotlightAnimationState()
|
||||
const t = state.time
|
||||
|
||||
const wave1 = Math.sin(t * 1.5) * 0.5 + 0.5
|
||||
@@ -33,11 +32,11 @@ export default function BlackLayout(props: RouteSectionProps) {
|
||||
const wave3 = Math.sin(t * 0.8 + 2.5) * 0.5 + 0.5
|
||||
|
||||
const shimmerPos = Math.sin(t * 0.7) * 0.5 + 0.5
|
||||
const glowIntensity = state.intensity * state.pulseValue * 0.35
|
||||
const fillOpacity = 0.1 + wave1 * 0.08 * state.pulseValue
|
||||
const strokeBrightness = 55 + wave2 * 25 * state.pulseValue
|
||||
const glowIntensity = Math.max(state.intensity * state.pulseValue * 0.35, 0.15)
|
||||
const fillOpacity = Math.max(0.1 + wave1 * 0.08 * state.pulseValue, 0.12)
|
||||
const strokeBrightness = Math.max(55 + wave2 * 25 * state.pulseValue, 60)
|
||||
|
||||
const shimmerIntensity = wave3 * 0.15 * state.pulseValue
|
||||
const shimmerIntensity = Math.max(wave3 * 0.15 * state.pulseValue, 0.08)
|
||||
|
||||
return {
|
||||
glowIntensity,
|
||||
@@ -56,10 +55,12 @@ export default function BlackLayout(props: RouteSectionProps) {
|
||||
} as Record<string, string>
|
||||
})
|
||||
|
||||
const handleAnimationFrame = (state: LightRaysAnimationState) => {
|
||||
setRayAnimationState(state)
|
||||
const handleAnimationFrame = (state: SpotlightAnimationState) => {
|
||||
setSpotlightAnimationState(state)
|
||||
}
|
||||
|
||||
const spotlightConfig = () => defaultConfig
|
||||
|
||||
return (
|
||||
<div data-page="black">
|
||||
<Title>OpenCode Black | Access all the world's best coding models</Title>
|
||||
@@ -84,7 +85,7 @@ export default function BlackLayout(props: RouteSectionProps) {
|
||||
/>
|
||||
<Meta name="twitter:image" content="/social-share-black.png" />
|
||||
|
||||
<LightRays config={lightRaysConfig} class="header-light-rays" onAnimationFrame={handleAnimationFrame} />
|
||||
<Spotlight config={spotlightConfig} class="header-spotlight" onAnimationFrame={handleAnimationFrame} />
|
||||
|
||||
<header data-component="header">
|
||||
<A href="/" data-component="header-logo">
|
||||
|
||||
@@ -183,7 +183,12 @@ export async function POST(input: APIEvent) {
|
||||
.set({
|
||||
customerID,
|
||||
subscriptionID,
|
||||
subscriptionCouponID: couponID,
|
||||
subscription: {
|
||||
status: "subscribed",
|
||||
coupon: couponID,
|
||||
seats: 1,
|
||||
plan: "200",
|
||||
},
|
||||
paymentMethodID: paymentMethod.id,
|
||||
paymentMethodLast4: paymentMethod.card?.last4 ?? null,
|
||||
paymentMethodType: paymentMethod.type,
|
||||
@@ -408,7 +413,7 @@ export async function POST(input: APIEvent) {
|
||||
await Database.transaction(async (tx) => {
|
||||
await tx
|
||||
.update(BillingTable)
|
||||
.set({ subscriptionID: null, subscriptionCouponID: null })
|
||||
.set({ subscriptionID: null, subscription: null })
|
||||
.where(eq(BillingTable.workspaceID, workspaceID))
|
||||
|
||||
await tx.delete(SubscriptionTable).where(eq(SubscriptionTable.workspaceID, workspaceID))
|
||||
|
||||
@@ -64,23 +64,20 @@ export const anthropicHelper: ProviderHelper = ({ reqModel, providerModel }) =>
|
||||
newBuffer.set(value, buffer.length)
|
||||
buffer = newBuffer
|
||||
|
||||
if (buffer.length < 4) return
|
||||
// The first 4 bytes are the total length (big-endian).
|
||||
const totalLength = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength).getUint32(0, false)
|
||||
const messages = []
|
||||
while (buffer.length >= 4) {
|
||||
// first 4 bytes are the total length (big-endian)
|
||||
const totalLength = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength).getUint32(0, false)
|
||||
|
||||
// If we don't have the full message yet, wait for more chunks.
|
||||
if (buffer.length < totalLength) return
|
||||
// wait for more chunks
|
||||
if (buffer.length < totalLength) break
|
||||
|
||||
try {
|
||||
// Decode exactly the sub-slice for this event.
|
||||
const subView = buffer.subarray(0, totalLength)
|
||||
const decoded = codec.decode(subView)
|
||||
try {
|
||||
const subView = buffer.subarray(0, totalLength)
|
||||
const decoded = codec.decode(subView)
|
||||
buffer = buffer.slice(totalLength)
|
||||
|
||||
// Slice the used bytes out of the buffer, removing this message.
|
||||
buffer = buffer.slice(totalLength)
|
||||
|
||||
// Process message
|
||||
/* Example of Bedrock data
|
||||
/* Example of Bedrock data
|
||||
```
|
||||
{
|
||||
bytes: 'eyJ0eXBlIjoibWVzc2FnZV9zdGFydCIsIm1lc3NhZ2UiOnsibW9kZWwiOiJjbGF1ZGUtb3B1cy00LTUtMjAyNTExMDEiLCJpZCI6Im1zZ19iZHJrXzAxMjVGdHRGb2lkNGlwWmZ4SzZMbktxeCIsInR5cGUiOiJtZXNzYWdlIiwicm9sZSI6ImFzc2lzdGFudCIsImNvbnRlbnQiOltdLCJzdG9wX3JlYXNvbiI6bnVsbCwic3RvcF9zZXF1ZW5jZSI6bnVsbCwidXNhZ2UiOnsiaW5wdXRfdG9rZW5zIjo0LCJjYWNoZV9jcmVhdGlvbl9pbnB1dF90b2tlbnMiOjEsImNhY2hlX3JlYWRfaW5wdXRfdG9rZW5zIjoxMTk2MywiY2FjaGVfY3JlYXRpb24iOnsiZXBoZW1lcmFsXzVtX2lucHV0X3Rva2VucyI6MSwiZXBoZW1lcmFsXzFoX2lucHV0X3Rva2VucyI6MH0sIm91dHB1dF90b2tlbnMiOjF9fX0=',
|
||||
@@ -112,22 +109,30 @@ export const anthropicHelper: ProviderHelper = ({ reqModel, providerModel }) =>
|
||||
```
|
||||
*/
|
||||
|
||||
/* Example of Anthropic data
|
||||
/* Example of Anthropic data
|
||||
```
|
||||
event: message_delta
|
||||
data: {"type":"message_start","message":{"model":"claude-opus-4-5-20251101","id":"msg_01ETvwVWSKULxzPdkQ1xAnk2","type":"message","role":"assistant","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":3,"cache_creation_input_tokens":11543,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":11543,"ephemeral_1h_input_tokens":0},"output_tokens":1,"service_tier":"standard"}}}
|
||||
```
|
||||
*/
|
||||
if (decoded.headers[":message-type"]?.value !== "event") return
|
||||
const data = decoder.decode(decoded.body, { stream: true })
|
||||
if (decoded.headers[":message-type"]?.value === "event") {
|
||||
const data = decoder.decode(decoded.body, { stream: true })
|
||||
|
||||
const parsedDataResult = JSON.parse(data)
|
||||
delete parsedDataResult.p
|
||||
const utf8 = atob(parsedDataResult.bytes)
|
||||
return encoder.encode(["event: message_start", "\n", "data: " + utf8, "\n\n"].join(""))
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
const parsedDataResult = JSON.parse(data)
|
||||
delete parsedDataResult.p
|
||||
const binary = atob(parsedDataResult.bytes)
|
||||
const uint8 = Uint8Array.from(binary, (c) => c.charCodeAt(0))
|
||||
const bytes = decoder.decode(uint8)
|
||||
const eventName = JSON.parse(bytes).type
|
||||
messages.push([`event: ${eventName}`, "\n", `data: ${bytes}`, "\n\n"].join(""))
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("@@@EE@@@")
|
||||
console.log(e)
|
||||
break
|
||||
}
|
||||
}
|
||||
return encoder.encode(messages.join(""))
|
||||
}
|
||||
},
|
||||
streamSeparator: "\n\n",
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE `billing` ADD `subscription` json;
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE `billing` DROP COLUMN `subscription_coupon_id`;
|
||||
1242
packages/console/core/migrations/meta/0053_snapshot.json
Normal file
1242
packages/console/core/migrations/meta/0053_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1235
packages/console/core/migrations/meta/0054_snapshot.json
Normal file
1235
packages/console/core/migrations/meta/0054_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -372,6 +372,20 @@
|
||||
"when": 1768343920467,
|
||||
"tag": "0052_aromatic_agent_zero",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 53,
|
||||
"version": "5",
|
||||
"when": 1768599366758,
|
||||
"tag": "0053_gigantic_hardball",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 54,
|
||||
"version": "5",
|
||||
"when": 1768603665356,
|
||||
"tag": "0054_numerous_annihilus",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
112
packages/console/core/script/black-gift.ts
Normal file
112
packages/console/core/script/black-gift.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import { Billing } from "../src/billing.js"
|
||||
import { and, Database, eq, isNull, sql } from "../src/drizzle/index.js"
|
||||
import { UserTable } from "../src/schema/user.sql.js"
|
||||
import { BillingTable, PaymentTable, SubscriptionTable } from "../src/schema/billing.sql.js"
|
||||
import { Identifier } from "../src/identifier.js"
|
||||
import { centsToMicroCents } from "../src/util/price.js"
|
||||
import { AuthTable } from "../src/schema/auth.sql.js"
|
||||
|
||||
const plan = "200"
|
||||
const workspaceID = process.argv[2]
|
||||
const seats = parseInt(process.argv[3])
|
||||
|
||||
console.log(`Gifting ${seats} seats of Black to workspace ${workspaceID}`)
|
||||
|
||||
if (!workspaceID || !seats) throw new Error("Usage: bun foo.ts <workspaceID> <seats>")
|
||||
|
||||
// Get workspace user
|
||||
const users = await Database.use((tx) =>
|
||||
tx
|
||||
.select({
|
||||
id: UserTable.id,
|
||||
role: UserTable.role,
|
||||
email: AuthTable.subject,
|
||||
})
|
||||
.from(UserTable)
|
||||
.leftJoin(AuthTable, and(eq(AuthTable.accountID, UserTable.accountID), eq(AuthTable.provider, "email")))
|
||||
.where(and(eq(UserTable.workspaceID, workspaceID), isNull(UserTable.timeDeleted))),
|
||||
)
|
||||
if (users.length === 0) throw new Error(`Error: No users found in workspace ${workspaceID}`)
|
||||
if (users.length !== seats)
|
||||
throw new Error(`Error: Workspace ${workspaceID} has ${users.length} users, expected ${seats}`)
|
||||
const adminUser = users.find((user) => user.role === "admin")
|
||||
if (!adminUser) throw new Error(`Error: No admin user found in workspace ${workspaceID}`)
|
||||
if (!adminUser.email) throw new Error(`Error: Admin user ${adminUser.id} has no email`)
|
||||
|
||||
// Get Billing
|
||||
const billing = await Database.use((tx) =>
|
||||
tx
|
||||
.select({
|
||||
customerID: BillingTable.customerID,
|
||||
subscriptionID: BillingTable.subscriptionID,
|
||||
})
|
||||
.from(BillingTable)
|
||||
.where(eq(BillingTable.workspaceID, workspaceID))
|
||||
.then((rows) => rows[0]),
|
||||
)
|
||||
if (!billing) throw new Error(`Error: Workspace ${workspaceID} has no billing record`)
|
||||
if (billing.subscriptionID) throw new Error(`Error: Workspace ${workspaceID} already has a subscription`)
|
||||
|
||||
// Look up the Stripe customer by email
|
||||
const customerID =
|
||||
billing.customerID ??
|
||||
(await (() =>
|
||||
Billing.stripe()
|
||||
.customers.create({
|
||||
email: adminUser.email,
|
||||
metadata: {
|
||||
workspaceID,
|
||||
},
|
||||
})
|
||||
.then((customer) => customer.id))())
|
||||
console.log(`Customer ID: ${customerID}`)
|
||||
|
||||
const couponID = "JAIr0Pe1"
|
||||
const subscription = await Billing.stripe().subscriptions.create({
|
||||
customer: customerID!,
|
||||
items: [
|
||||
{
|
||||
price: `price_1SmfyI2StuRr0lbXovxJNeZn`,
|
||||
discounts: [{ coupon: couponID }],
|
||||
quantity: 2,
|
||||
},
|
||||
],
|
||||
})
|
||||
console.log(`Subscription ID: ${subscription.id}`)
|
||||
|
||||
await Database.transaction(async (tx) => {
|
||||
// Set customer id, subscription id, and payment method on workspace billing
|
||||
await tx
|
||||
.update(BillingTable)
|
||||
.set({
|
||||
customerID,
|
||||
subscriptionID: subscription.id,
|
||||
subscription: { status: "subscribed", coupon: couponID, seats, plan },
|
||||
})
|
||||
.where(eq(BillingTable.workspaceID, workspaceID))
|
||||
|
||||
// Create a row in subscription table
|
||||
for (const user of users) {
|
||||
await tx.insert(SubscriptionTable).values({
|
||||
workspaceID,
|
||||
id: Identifier.create("subscription"),
|
||||
userID: user.id,
|
||||
})
|
||||
}
|
||||
//
|
||||
// // Create a row in payments table
|
||||
// await tx.insert(PaymentTable).values({
|
||||
// workspaceID,
|
||||
// id: Identifier.create("payment"),
|
||||
// amount: centsToMicroCents(amountInCents),
|
||||
// customerID,
|
||||
// invoiceID,
|
||||
// paymentID,
|
||||
// enrichment: {
|
||||
// type: "subscription",
|
||||
// couponID,
|
||||
// },
|
||||
// })
|
||||
})
|
||||
|
||||
console.log(`done`)
|
||||
@@ -12,7 +12,7 @@ const email = process.argv[3]
|
||||
console.log(`Onboarding workspace ${workspaceID} for email ${email}`)
|
||||
|
||||
if (!workspaceID || !email) {
|
||||
console.error("Usage: bun onboard-zen-black.ts <workspaceID> <email>")
|
||||
console.error("Usage: bun foo.ts <workspaceID> <email>")
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
@@ -50,7 +50,7 @@ const existingSubscription = await Database.use((tx) =>
|
||||
tx
|
||||
.select({ workspaceID: BillingTable.workspaceID })
|
||||
.from(BillingTable)
|
||||
.where(eq(BillingTable.subscriptionID, subscriptionID))
|
||||
.where(sql`JSON_EXTRACT(${BillingTable.subscription}, '$.id') = ${subscriptionID}`)
|
||||
.then((rows) => rows[0]),
|
||||
)
|
||||
if (existingSubscription) {
|
||||
@@ -128,10 +128,15 @@ await Database.transaction(async (tx) => {
|
||||
.set({
|
||||
customerID,
|
||||
subscriptionID,
|
||||
subscriptionCouponID: couponID,
|
||||
paymentMethodID,
|
||||
paymentMethodLast4,
|
||||
paymentMethodType,
|
||||
subscription: {
|
||||
status: "subscribed",
|
||||
coupon: couponID,
|
||||
seats: 1,
|
||||
plan: "200",
|
||||
},
|
||||
})
|
||||
.where(eq(BillingTable.workspaceID, workspaceID))
|
||||
|
||||
@@ -18,7 +18,7 @@ const fromBilling = await Database.use((tx) =>
|
||||
.select({
|
||||
customerID: BillingTable.customerID,
|
||||
subscriptionID: BillingTable.subscriptionID,
|
||||
subscriptionCouponID: BillingTable.subscriptionCouponID,
|
||||
subscription: BillingTable.subscription,
|
||||
paymentMethodID: BillingTable.paymentMethodID,
|
||||
paymentMethodType: BillingTable.paymentMethodType,
|
||||
paymentMethodLast4: BillingTable.paymentMethodLast4,
|
||||
@@ -119,7 +119,7 @@ await Database.transaction(async (tx) => {
|
||||
.set({
|
||||
customerID: fromPrevPayment.customerID,
|
||||
subscriptionID: null,
|
||||
subscriptionCouponID: null,
|
||||
subscription: null,
|
||||
paymentMethodID: fromPrevPaymentMethods.data[0].id,
|
||||
paymentMethodLast4: fromPrevPaymentMethods.data[0].card?.last4 ?? null,
|
||||
paymentMethodType: fromPrevPaymentMethods.data[0].type,
|
||||
@@ -131,7 +131,7 @@ await Database.transaction(async (tx) => {
|
||||
.set({
|
||||
customerID: fromBilling.customerID,
|
||||
subscriptionID: fromBilling.subscriptionID,
|
||||
subscriptionCouponID: fromBilling.subscriptionCouponID,
|
||||
subscription: fromBilling.subscription,
|
||||
paymentMethodID: fromBilling.paymentMethodID,
|
||||
paymentMethodLast4: fromBilling.paymentMethodLast4,
|
||||
paymentMethodType: fromBilling.paymentMethodType,
|
||||
|
||||
@@ -55,8 +55,9 @@ if (identifier.startsWith("wrk_")) {
|
||||
),
|
||||
)
|
||||
|
||||
// Get all payments for these workspaces
|
||||
await Promise.all(users.map((u: { workspaceID: string }) => printWorkspace(u.workspaceID)))
|
||||
for (const user of users) {
|
||||
await printWorkspace(user.workspaceID)
|
||||
}
|
||||
}
|
||||
|
||||
async function printWorkspace(workspaceID: string) {
|
||||
@@ -114,11 +115,11 @@ async function printWorkspace(workspaceID: string) {
|
||||
balance: BillingTable.balance,
|
||||
customerID: BillingTable.customerID,
|
||||
reload: BillingTable.reload,
|
||||
subscriptionID: BillingTable.subscriptionID,
|
||||
subscription: {
|
||||
id: BillingTable.subscriptionID,
|
||||
couponID: BillingTable.subscriptionCouponID,
|
||||
plan: BillingTable.subscriptionPlan,
|
||||
booked: BillingTable.timeSubscriptionBooked,
|
||||
enrichment: BillingTable.subscription,
|
||||
},
|
||||
})
|
||||
.from(BillingTable)
|
||||
@@ -128,8 +129,13 @@ async function printWorkspace(workspaceID: string) {
|
||||
rows.map((row) => ({
|
||||
...row,
|
||||
balance: `$${(row.balance / 100000000).toFixed(2)}`,
|
||||
subscription: row.subscription.id
|
||||
? `Subscribed ${row.subscription.couponID ? `(coupon: ${row.subscription.couponID}) ` : ""}`
|
||||
subscription: row.subscriptionID
|
||||
? [
|
||||
`Black ${row.subscription.enrichment!.plan}`,
|
||||
row.subscription.enrichment!.seats > 1 ? `X ${row.subscription.enrichment!.seats} seats` : "",
|
||||
row.subscription.enrichment!.coupon ? `(coupon: ${row.subscription.enrichment!.coupon})` : "",
|
||||
`(ref: ${row.subscriptionID})`,
|
||||
].join(" ")
|
||||
: row.subscription.booked
|
||||
? `Waitlist ${row.subscription.plan} plan`
|
||||
: undefined,
|
||||
|
||||
@@ -21,8 +21,13 @@ export const BillingTable = mysqlTable(
|
||||
reloadError: varchar("reload_error", { length: 255 }),
|
||||
timeReloadError: utc("time_reload_error"),
|
||||
timeReloadLockedTill: utc("time_reload_locked_till"),
|
||||
subscription: json("subscription").$type<{
|
||||
status: "subscribed"
|
||||
coupon?: string
|
||||
seats: number
|
||||
plan: "20" | "100" | "200"
|
||||
}>(),
|
||||
subscriptionID: varchar("subscription_id", { length: 28 }),
|
||||
subscriptionCouponID: varchar("subscription_coupon_id", { length: 28 }),
|
||||
subscriptionPlan: mysqlEnum("subscription_plan", ["20", "100", "200"] as const),
|
||||
timeSubscriptionBooked: utc("time_subscription_booked"),
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@opencode-ai/desktop",
|
||||
"private": true,
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -26,6 +26,18 @@ if (import.meta.env.DEV && !(root instanceof HTMLElement)) {
|
||||
)
|
||||
}
|
||||
|
||||
const isWindows = ostype() === "windows"
|
||||
if (isWindows) {
|
||||
const originalGetComputedStyle = window.getComputedStyle
|
||||
window.getComputedStyle = ((elt: Element, pseudoElt?: string | null) => {
|
||||
if (!(elt instanceof Element)) {
|
||||
// WebView2 can call into Floating UI with non-elements; fall back to a safe element.
|
||||
return originalGetComputedStyle(document.documentElement, pseudoElt ?? undefined)
|
||||
}
|
||||
return originalGetComputedStyle(elt, pseudoElt ?? undefined)
|
||||
}) as typeof window.getComputedStyle
|
||||
}
|
||||
|
||||
let update: Update | null = null
|
||||
|
||||
const createPlatform = (password: Accessor<string | null>): Platform => ({
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
id = "opencode"
|
||||
name = "OpenCode"
|
||||
description = "The open source coding agent."
|
||||
version = "1.1.23"
|
||||
version = "1.1.25"
|
||||
schema_version = 1
|
||||
authors = ["Anomaly"]
|
||||
repository = "https://github.com/anomalyco/opencode"
|
||||
@@ -11,26 +11,26 @@ name = "OpenCode"
|
||||
icon = "./icons/opencode.svg"
|
||||
|
||||
[agent_servers.opencode.targets.darwin-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-darwin-arm64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-darwin-arm64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.darwin-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-darwin-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-darwin-x64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-linux-arm64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-linux-arm64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-linux-x64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-linux-x64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.windows-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.23/opencode-windows-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.1.25/opencode-windows-x64.zip"
|
||||
cmd = "./opencode.exe"
|
||||
args = ["acp"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"name": "opencode",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
@@ -82,8 +82,8 @@
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@opencode-ai/util": "workspace:*",
|
||||
"@openrouter/ai-sdk-provider": "1.5.2",
|
||||
"@opentui/core": "0.1.73",
|
||||
"@opentui/solid": "0.1.73",
|
||||
"@opentui/core": "0.1.74",
|
||||
"@opentui/solid": "0.1.74",
|
||||
"@parcel/watcher": "2.5.1",
|
||||
"@pierre/diffs": "catalog:",
|
||||
"@solid-primitives/event-bus": "1.1.2",
|
||||
|
||||
@@ -2,6 +2,7 @@ import z from "zod"
|
||||
import { Global } from "../global"
|
||||
import { Log } from "../util/log"
|
||||
import path from "path"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import { readableStreamToText } from "bun"
|
||||
import { createRequire } from "module"
|
||||
@@ -71,7 +72,10 @@ export namespace BunProc {
|
||||
await Bun.write(pkgjson.name!, JSON.stringify(result, null, 2))
|
||||
return result
|
||||
})
|
||||
if (parsed.dependencies[pkg] === version) return mod
|
||||
const dependencies = parsed.dependencies ?? {}
|
||||
if (!parsed.dependencies) parsed.dependencies = dependencies
|
||||
const modExists = await Filesystem.exists(mod)
|
||||
if (dependencies[pkg] === version && modExists) return mod
|
||||
|
||||
const proxied = !!(
|
||||
process.env.HTTP_PROXY ||
|
||||
|
||||
@@ -70,8 +70,8 @@ export const AgentCommand = cmd({
|
||||
})
|
||||
|
||||
async function getAvailableTools(agent: Agent.Info) {
|
||||
const providerID = agent.model?.providerID ?? (await Provider.defaultModel()).providerID
|
||||
return ToolRegistry.tools(providerID, agent)
|
||||
const model = agent.model ?? (await Provider.defaultModel())
|
||||
return ToolRegistry.tools(model, agent)
|
||||
}
|
||||
|
||||
async function resolveTools(agent: Agent.Info, availableTools: Awaited<ReturnType<typeof getAvailableTools>>) {
|
||||
|
||||
@@ -6,7 +6,6 @@ import * as prompts from "@clack/prompts"
|
||||
import { UI } from "../ui"
|
||||
import { MCP } from "../../mcp"
|
||||
import { McpAuth } from "../../mcp/auth"
|
||||
import { McpOAuthCallback } from "../../mcp/oauth-callback"
|
||||
import { McpOAuthProvider } from "../../mcp/oauth-provider"
|
||||
import { Config } from "../../config/config"
|
||||
import { Instance } from "../../project/instance"
|
||||
@@ -683,10 +682,6 @@ export const McpDebugCommand = cmd({
|
||||
|
||||
// Try to discover OAuth metadata
|
||||
const oauthConfig = typeof serverConfig.oauth === "object" ? serverConfig.oauth : undefined
|
||||
|
||||
// Start callback server
|
||||
await McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)
|
||||
|
||||
const authProvider = new McpOAuthProvider(
|
||||
serverName,
|
||||
serverConfig.url,
|
||||
@@ -694,7 +689,6 @@ export const McpDebugCommand = cmd({
|
||||
clientId: oauthConfig?.clientId,
|
||||
clientSecret: oauthConfig?.clientSecret,
|
||||
scope: oauthConfig?.scope,
|
||||
redirectUri: oauthConfig?.redirectUri,
|
||||
},
|
||||
{
|
||||
onRedirect: async () => {},
|
||||
|
||||
@@ -200,11 +200,6 @@ function App() {
|
||||
renderer.console.onCopySelection = async (text: string) => {
|
||||
if (!text || text.length === 0) return
|
||||
|
||||
const base64 = Buffer.from(text).toString("base64")
|
||||
const osc52 = `\x1b]52;c;${base64}\x07`
|
||||
const finalOsc52 = process.env["TMUX"] ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
|
||||
// @ts-expect-error writeOut is not in type definitions
|
||||
renderer.writeOut(finalOsc52)
|
||||
await Clipboard.copy(text)
|
||||
.then(() => toast.show({ message: "Copied to clipboard", variant: "info" }))
|
||||
.catch(toast.error)
|
||||
@@ -627,11 +622,6 @@ function App() {
|
||||
}
|
||||
const text = renderer.getSelection()?.getSelectedText()
|
||||
if (text && text.length > 0) {
|
||||
const base64 = Buffer.from(text).toString("base64")
|
||||
const osc52 = `\x1b]52;c;${base64}\x07`
|
||||
const finalOsc52 = process.env["TMUX"] ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
|
||||
/* @ts-expect-error */
|
||||
renderer.writeOut(finalOsc52)
|
||||
await Clipboard.copy(text)
|
||||
.then(() => toast.show({ message: "Copied to clipboard", variant: "info" }))
|
||||
.catch(toast.error)
|
||||
|
||||
@@ -145,9 +145,9 @@ export function Prompt(props: PromptProps) {
|
||||
const isPrimaryAgent = local.agent.list().some((x) => x.name === msg.agent)
|
||||
if (msg.agent && isPrimaryAgent) {
|
||||
local.agent.set(msg.agent)
|
||||
if (msg.model) local.model.set(msg.model)
|
||||
if (msg.variant) local.model.variant.set(msg.variant)
|
||||
}
|
||||
if (msg.model) local.model.set(msg.model)
|
||||
if (msg.variant) local.model.variant.set(msg.variant)
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ import { TodoWriteTool } from "@/tool/todo"
|
||||
import type { GrepTool } from "@/tool/grep"
|
||||
import type { ListTool } from "@/tool/ls"
|
||||
import type { EditTool } from "@/tool/edit"
|
||||
import type { PatchTool } from "@/tool/patch"
|
||||
import type { ApplyPatchTool } from "@/tool/apply_patch"
|
||||
import type { WebFetchTool } from "@/tool/webfetch"
|
||||
import type { TaskTool } from "@/tool/task"
|
||||
import type { QuestionTool } from "@/tool/question"
|
||||
@@ -697,11 +697,6 @@ export function Session() {
|
||||
return
|
||||
}
|
||||
|
||||
const base64 = Buffer.from(text).toString("base64")
|
||||
const osc52 = `\x1b]52;c;${base64}\x07`
|
||||
const finalOsc52 = process.env["TMUX"] ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
|
||||
/* @ts-expect-error */
|
||||
renderer.writeOut(finalOsc52)
|
||||
Clipboard.copy(text)
|
||||
.then(() => toast.show({ message: "Message copied to clipboard!", variant: "success" }))
|
||||
.catch(() => toast.show({ message: "Failed to copy to clipboard", variant: "error" }))
|
||||
@@ -1390,8 +1385,8 @@ function ToolPart(props: { last: boolean; part: ToolPart; message: AssistantMess
|
||||
<Match when={props.part.tool === "task"}>
|
||||
<Task {...toolprops} />
|
||||
</Match>
|
||||
<Match when={props.part.tool === "patch"}>
|
||||
<Patch {...toolprops} />
|
||||
<Match when={props.part.tool === "apply_patch"}>
|
||||
<ApplyPatch {...toolprops} />
|
||||
</Match>
|
||||
<Match when={props.part.tool === "todowrite"}>
|
||||
<TodoWrite {...toolprops} />
|
||||
@@ -1840,20 +1835,74 @@ function Edit(props: ToolProps<typeof EditTool>) {
|
||||
)
|
||||
}
|
||||
|
||||
function Patch(props: ToolProps<typeof PatchTool>) {
|
||||
const { theme } = useTheme()
|
||||
function ApplyPatch(props: ToolProps<typeof ApplyPatchTool>) {
|
||||
const ctx = use()
|
||||
const { theme, syntax } = useTheme()
|
||||
|
||||
const files = createMemo(() => props.metadata.files ?? [])
|
||||
|
||||
const view = createMemo(() => {
|
||||
const diffStyle = ctx.sync.data.config.tui?.diff_style
|
||||
if (diffStyle === "stacked") return "unified"
|
||||
return ctx.width > 120 ? "split" : "unified"
|
||||
})
|
||||
|
||||
function Diff(p: { diff: string; filePath: string }) {
|
||||
return (
|
||||
<box paddingLeft={1}>
|
||||
<diff
|
||||
diff={p.diff}
|
||||
view={view()}
|
||||
filetype={filetype(p.filePath)}
|
||||
syntaxStyle={syntax()}
|
||||
showLineNumbers={true}
|
||||
width="100%"
|
||||
wrapMode={ctx.diffWrapMode()}
|
||||
fg={theme.text}
|
||||
addedBg={theme.diffAddedBg}
|
||||
removedBg={theme.diffRemovedBg}
|
||||
contextBg={theme.diffContextBg}
|
||||
addedSignColor={theme.diffHighlightAdded}
|
||||
removedSignColor={theme.diffHighlightRemoved}
|
||||
lineNumberFg={theme.diffLineNumber}
|
||||
lineNumberBg={theme.diffContextBg}
|
||||
addedLineNumberBg={theme.diffAddedLineNumberBg}
|
||||
removedLineNumberBg={theme.diffRemovedLineNumberBg}
|
||||
/>
|
||||
</box>
|
||||
)
|
||||
}
|
||||
|
||||
function title(file: { type: string; relativePath: string; filePath: string; deletions: number }) {
|
||||
if (file.type === "delete") return "# Deleted " + file.relativePath
|
||||
if (file.type === "add") return "# Created " + file.relativePath
|
||||
if (file.type === "move") return "# Moved " + normalizePath(file.filePath) + " → " + file.relativePath
|
||||
return "← Patched " + file.relativePath
|
||||
}
|
||||
|
||||
return (
|
||||
<Switch>
|
||||
<Match when={props.output !== undefined}>
|
||||
<BlockTool title="# Patch" part={props.part}>
|
||||
<box>
|
||||
<text fg={theme.text}>{props.output?.trim()}</text>
|
||||
</box>
|
||||
</BlockTool>
|
||||
<Match when={files().length > 0}>
|
||||
<For each={files()}>
|
||||
{(file) => (
|
||||
<BlockTool title={title(file)} part={props.part}>
|
||||
<Show
|
||||
when={file.type !== "delete"}
|
||||
fallback={
|
||||
<text fg={theme.diffRemoved}>
|
||||
-{file.deletions} line{file.deletions !== 1 ? "s" : ""}
|
||||
</text>
|
||||
}
|
||||
>
|
||||
<Diff diff={file.diff} filePath={file.filePath} />
|
||||
</Show>
|
||||
</BlockTool>
|
||||
)}
|
||||
</For>
|
||||
</Match>
|
||||
<Match when={true}>
|
||||
<InlineTool icon="%" pending="Preparing patch..." complete={false} part={props.part}>
|
||||
Patch
|
||||
<InlineTool icon="%" pending="Preparing apply_patch..." complete={false} part={props.part}>
|
||||
apply_patch
|
||||
</InlineTool>
|
||||
</Match>
|
||||
</Switch>
|
||||
|
||||
@@ -157,26 +157,12 @@ export function DialogSelect<T>(props: DialogSelectProps<T>) {
|
||||
|
||||
const keybind = useKeybind()
|
||||
useKeyboard((evt) => {
|
||||
if (evt.name === "up" || (evt.ctrl && evt.name === "p")) {
|
||||
evt.preventDefault()
|
||||
evt.stopPropagation()
|
||||
move(-1)
|
||||
}
|
||||
if (evt.name === "down" || (evt.ctrl && evt.name === "n")) {
|
||||
evt.preventDefault()
|
||||
evt.stopPropagation()
|
||||
move(1)
|
||||
}
|
||||
if (evt.name === "pageup") {
|
||||
evt.preventDefault()
|
||||
evt.stopPropagation()
|
||||
move(-10)
|
||||
}
|
||||
if (evt.name === "pagedown") {
|
||||
evt.preventDefault()
|
||||
evt.stopPropagation()
|
||||
move(10)
|
||||
}
|
||||
if (evt.name === "up" || (evt.ctrl && evt.name === "p")) move(-1)
|
||||
if (evt.name === "down" || (evt.ctrl && evt.name === "n")) move(1)
|
||||
if (evt.name === "pageup") move(-10)
|
||||
if (evt.name === "pagedown") move(10)
|
||||
if (evt.name === "home") moveTo(0)
|
||||
if (evt.name === "end") moveTo(flat().length - 1)
|
||||
if (evt.name === "return") {
|
||||
const option = selected()
|
||||
if (option) {
|
||||
|
||||
@@ -141,11 +141,6 @@ export function DialogProvider(props: ParentProps) {
|
||||
onMouseUp={async () => {
|
||||
const text = renderer.getSelection()?.getSelectedText()
|
||||
if (text && text.length > 0) {
|
||||
const base64 = Buffer.from(text).toString("base64")
|
||||
const osc52 = `\x1b]52;c;${base64}\x07`
|
||||
const finalOsc52 = process.env["TMUX"] ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
|
||||
/* @ts-expect-error */
|
||||
renderer.writeOut(finalOsc52)
|
||||
await Clipboard.copy(text)
|
||||
.then(() => toast.show({ message: "Copied to clipboard", variant: "info" }))
|
||||
.catch(toast.error)
|
||||
|
||||
@@ -5,6 +5,21 @@ import { lazy } from "../../../../util/lazy.js"
|
||||
import { tmpdir } from "os"
|
||||
import path from "path"
|
||||
|
||||
/**
|
||||
* Writes text to clipboard via OSC 52 escape sequence.
|
||||
* This allows clipboard operations to work over SSH by having
|
||||
* the terminal emulator handle the clipboard locally.
|
||||
*/
|
||||
function writeOsc52(text: string): void {
|
||||
if (!process.stdout.isTTY) return
|
||||
const base64 = Buffer.from(text).toString("base64")
|
||||
const osc52 = `\x1b]52;c;${base64}\x07`
|
||||
// tmux and screen require DCS passthrough wrapping
|
||||
const passthrough = process.env["TMUX"] || process.env["STY"]
|
||||
const sequence = passthrough ? `\x1bPtmux;\x1b${osc52}\x1b\\` : osc52
|
||||
process.stdout.write(sequence)
|
||||
}
|
||||
|
||||
export namespace Clipboard {
|
||||
export interface Content {
|
||||
data: string
|
||||
@@ -123,6 +138,7 @@ export namespace Clipboard {
|
||||
})
|
||||
|
||||
export async function copy(text: string): Promise<void> {
|
||||
writeOsc52(text)
|
||||
await getCopyMethod()(text)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,7 +60,11 @@ export const WebCommand = cmd({
|
||||
}
|
||||
|
||||
if (opts.mdns) {
|
||||
UI.println(UI.Style.TEXT_INFO_BOLD + " mDNS: ", UI.Style.TEXT_NORMAL, "opencode.local")
|
||||
UI.println(
|
||||
UI.Style.TEXT_INFO_BOLD + " mDNS: ",
|
||||
UI.Style.TEXT_NORMAL,
|
||||
`opencode.local:${server.port}`,
|
||||
)
|
||||
}
|
||||
|
||||
// Open localhost in browser
|
||||
|
||||
@@ -20,7 +20,6 @@ import { Installation } from "@/installation"
|
||||
import { ConfigMarkdown } from "./markdown"
|
||||
import { existsSync } from "fs"
|
||||
import { Bus } from "@/bus"
|
||||
import { Session } from "@/session"
|
||||
|
||||
export namespace Config {
|
||||
const log = Log.create({ service: "config" })
|
||||
@@ -233,10 +232,11 @@ export namespace Config {
|
||||
dot: true,
|
||||
cwd: dir,
|
||||
})) {
|
||||
const md = await ConfigMarkdown.parse(item).catch((err) => {
|
||||
const md = await ConfigMarkdown.parse(item).catch(async (err) => {
|
||||
const message = ConfigMarkdown.FrontmatterError.isInstance(err)
|
||||
? err.data.message
|
||||
: `Failed to parse command ${item}`
|
||||
const { Session } = await import("@/session")
|
||||
Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })
|
||||
log.error("failed to load command", { command: item, err })
|
||||
return undefined
|
||||
@@ -272,10 +272,11 @@ export namespace Config {
|
||||
dot: true,
|
||||
cwd: dir,
|
||||
})) {
|
||||
const md = await ConfigMarkdown.parse(item).catch((err) => {
|
||||
const md = await ConfigMarkdown.parse(item).catch(async (err) => {
|
||||
const message = ConfigMarkdown.FrontmatterError.isInstance(err)
|
||||
? err.data.message
|
||||
: `Failed to parse agent ${item}`
|
||||
const { Session } = await import("@/session")
|
||||
Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })
|
||||
log.error("failed to load agent", { agent: item, err })
|
||||
return undefined
|
||||
@@ -310,10 +311,11 @@ export namespace Config {
|
||||
dot: true,
|
||||
cwd: dir,
|
||||
})) {
|
||||
const md = await ConfigMarkdown.parse(item).catch((err) => {
|
||||
const md = await ConfigMarkdown.parse(item).catch(async (err) => {
|
||||
const message = ConfigMarkdown.FrontmatterError.isInstance(err)
|
||||
? err.data.message
|
||||
: `Failed to parse mode ${item}`
|
||||
const { Session } = await import("@/session")
|
||||
Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })
|
||||
log.error("failed to load mode", { mode: item, err })
|
||||
return undefined
|
||||
@@ -433,10 +435,6 @@ export namespace Config {
|
||||
.describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."),
|
||||
clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"),
|
||||
scope: z.string().optional().describe("OAuth scopes to request during authorization"),
|
||||
redirectUri: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."),
|
||||
})
|
||||
.strict()
|
||||
.meta({
|
||||
@@ -942,7 +940,7 @@ export namespace Config {
|
||||
})
|
||||
.catchall(Agent)
|
||||
.optional()
|
||||
.describe("Agent configuration, see https://opencode.ai/docs/agent"),
|
||||
.describe("Agent configuration, see https://opencode.ai/docs/agents"),
|
||||
provider: z
|
||||
.record(z.string(), Provider)
|
||||
.optional()
|
||||
|
||||
@@ -308,8 +308,6 @@ export namespace MCP {
|
||||
let authProvider: McpOAuthProvider | undefined
|
||||
|
||||
if (!oauthDisabled) {
|
||||
await McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)
|
||||
|
||||
authProvider = new McpOAuthProvider(
|
||||
key,
|
||||
mcp.url,
|
||||
@@ -317,7 +315,6 @@ export namespace MCP {
|
||||
clientId: oauthConfig?.clientId,
|
||||
clientSecret: oauthConfig?.clientSecret,
|
||||
scope: oauthConfig?.scope,
|
||||
redirectUri: oauthConfig?.redirectUri,
|
||||
},
|
||||
{
|
||||
onRedirect: async (url) => {
|
||||
@@ -347,7 +344,6 @@ export namespace MCP {
|
||||
|
||||
let lastError: Error | undefined
|
||||
const connectTimeout = mcp.timeout ?? DEFAULT_TIMEOUT
|
||||
|
||||
for (const { name, transport } of transports) {
|
||||
try {
|
||||
const client = new Client({
|
||||
@@ -574,8 +570,7 @@ export namespace MCP {
|
||||
|
||||
for (const [clientName, client] of Object.entries(clientsSnapshot)) {
|
||||
// Only include tools from connected MCPs (skip disabled ones)
|
||||
const clientStatus = s.status[clientName]?.status
|
||||
if (clientStatus !== "connected") {
|
||||
if (s.status[clientName]?.status !== "connected") {
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -725,10 +720,8 @@ export namespace MCP {
|
||||
throw new Error(`MCP server ${mcpName} has OAuth explicitly disabled`)
|
||||
}
|
||||
|
||||
// OAuth config is optional - if not provided, we'll use auto-discovery
|
||||
const oauthConfig = typeof mcpConfig.oauth === "object" ? mcpConfig.oauth : undefined
|
||||
|
||||
await McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)
|
||||
// Start the callback server
|
||||
await McpOAuthCallback.ensureRunning()
|
||||
|
||||
// Generate and store a cryptographically secure state parameter BEFORE creating the provider
|
||||
// The SDK will call provider.state() to read this value
|
||||
@@ -738,6 +731,8 @@ export namespace MCP {
|
||||
await McpAuth.updateOAuthState(mcpName, oauthState)
|
||||
|
||||
// Create a new auth provider for this flow
|
||||
// OAuth config is optional - if not provided, we'll use auto-discovery
|
||||
const oauthConfig = typeof mcpConfig.oauth === "object" ? mcpConfig.oauth : undefined
|
||||
let capturedUrl: URL | undefined
|
||||
const authProvider = new McpOAuthProvider(
|
||||
mcpName,
|
||||
@@ -746,7 +741,6 @@ export namespace MCP {
|
||||
clientId: oauthConfig?.clientId,
|
||||
clientSecret: oauthConfig?.clientSecret,
|
||||
scope: oauthConfig?.scope,
|
||||
redirectUri: oauthConfig?.redirectUri,
|
||||
},
|
||||
{
|
||||
onRedirect: async (url) => {
|
||||
@@ -775,7 +769,6 @@ export namespace MCP {
|
||||
pendingOAuthTransports.set(mcpName, transport)
|
||||
return { authorizationUrl: capturedUrl.toString() }
|
||||
}
|
||||
|
||||
throw error
|
||||
}
|
||||
}
|
||||
@@ -785,9 +778,9 @@ export namespace MCP {
|
||||
* Opens the browser and waits for callback.
|
||||
*/
|
||||
export async function authenticate(mcpName: string): Promise<Status> {
|
||||
const result = await startAuth(mcpName)
|
||||
const { authorizationUrl } = await startAuth(mcpName)
|
||||
|
||||
if (!result.authorizationUrl) {
|
||||
if (!authorizationUrl) {
|
||||
// Already authenticated
|
||||
const s = await state()
|
||||
return s.status[mcpName] ?? { status: "connected" }
|
||||
@@ -801,9 +794,9 @@ export namespace MCP {
|
||||
|
||||
// The SDK has already added the state parameter to the authorization URL
|
||||
// We just need to open the browser
|
||||
log.info("opening browser for oauth", { mcpName, url: result.authorizationUrl, state: oauthState })
|
||||
log.info("opening browser for oauth", { mcpName, url: authorizationUrl, state: oauthState })
|
||||
try {
|
||||
const subprocess = await open(result.authorizationUrl)
|
||||
const subprocess = await open(authorizationUrl)
|
||||
// The open package spawns a detached process and returns immediately.
|
||||
// We need to listen for errors which fire asynchronously:
|
||||
// - "error" event: command not found (ENOENT)
|
||||
@@ -826,7 +819,7 @@ export namespace MCP {
|
||||
// Browser opening failed (e.g., in remote/headless sessions like SSH, devcontainers)
|
||||
// Emit event so CLI can display the URL for manual opening
|
||||
log.warn("failed to open browser, user must open URL manually", { mcpName, error })
|
||||
Bus.publish(BrowserOpenFailed, { mcpName, url: result.authorizationUrl })
|
||||
Bus.publish(BrowserOpenFailed, { mcpName, url: authorizationUrl })
|
||||
}
|
||||
|
||||
// Wait for callback using the OAuth state parameter
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
import { Log } from "../util/log"
|
||||
import { OAUTH_CALLBACK_PORT, OAUTH_CALLBACK_PATH, parseRedirectUri } from "./oauth-provider"
|
||||
import { OAUTH_CALLBACK_PORT, OAUTH_CALLBACK_PATH } from "./oauth-provider"
|
||||
|
||||
const log = Log.create({ service: "mcp.oauth-callback" })
|
||||
|
||||
// Current callback server configuration (may differ from defaults if custom redirectUri is used)
|
||||
let currentPort = OAUTH_CALLBACK_PORT
|
||||
let currentPath = OAUTH_CALLBACK_PATH
|
||||
|
||||
const HTML_SUCCESS = `<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
@@ -60,33 +56,21 @@ export namespace McpOAuthCallback {
|
||||
|
||||
const CALLBACK_TIMEOUT_MS = 5 * 60 * 1000 // 5 minutes
|
||||
|
||||
export async function ensureRunning(redirectUri?: string): Promise<void> {
|
||||
// Parse the redirect URI to get port and path (uses defaults if not provided)
|
||||
const { port, path } = parseRedirectUri(redirectUri)
|
||||
|
||||
// If server is running on a different port/path, stop it first
|
||||
if (server && (currentPort !== port || currentPath !== path)) {
|
||||
log.info("stopping oauth callback server to reconfigure", { oldPort: currentPort, newPort: port })
|
||||
await stop()
|
||||
}
|
||||
|
||||
export async function ensureRunning(): Promise<void> {
|
||||
if (server) return
|
||||
|
||||
const running = await isPortInUse(port)
|
||||
const running = await isPortInUse()
|
||||
if (running) {
|
||||
log.info("oauth callback server already running on another instance", { port })
|
||||
log.info("oauth callback server already running on another instance", { port: OAUTH_CALLBACK_PORT })
|
||||
return
|
||||
}
|
||||
|
||||
currentPort = port
|
||||
currentPath = path
|
||||
|
||||
server = Bun.serve({
|
||||
port: currentPort,
|
||||
port: OAUTH_CALLBACK_PORT,
|
||||
fetch(req) {
|
||||
const url = new URL(req.url)
|
||||
|
||||
if (url.pathname !== currentPath) {
|
||||
if (url.pathname !== OAUTH_CALLBACK_PATH) {
|
||||
return new Response("Not found", { status: 404 })
|
||||
}
|
||||
|
||||
@@ -149,7 +133,7 @@ export namespace McpOAuthCallback {
|
||||
},
|
||||
})
|
||||
|
||||
log.info("oauth callback server started", { port: currentPort, path: currentPath })
|
||||
log.info("oauth callback server started", { port: OAUTH_CALLBACK_PORT })
|
||||
}
|
||||
|
||||
export function waitForCallback(oauthState: string): Promise<string> {
|
||||
@@ -174,11 +158,11 @@ export namespace McpOAuthCallback {
|
||||
}
|
||||
}
|
||||
|
||||
export async function isPortInUse(port: number = OAUTH_CALLBACK_PORT): Promise<boolean> {
|
||||
export async function isPortInUse(): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
Bun.connect({
|
||||
hostname: "127.0.0.1",
|
||||
port,
|
||||
port: OAUTH_CALLBACK_PORT,
|
||||
socket: {
|
||||
open(socket) {
|
||||
socket.end()
|
||||
|
||||
@@ -17,7 +17,6 @@ export interface McpOAuthConfig {
|
||||
clientId?: string
|
||||
clientSecret?: string
|
||||
scope?: string
|
||||
redirectUri?: string
|
||||
}
|
||||
|
||||
export interface McpOAuthCallbacks {
|
||||
@@ -33,10 +32,6 @@ export class McpOAuthProvider implements OAuthClientProvider {
|
||||
) {}
|
||||
|
||||
get redirectUrl(): string {
|
||||
// Use configured redirectUri if provided, otherwise use OpenCode defaults
|
||||
if (this.config.redirectUri) {
|
||||
return this.config.redirectUri
|
||||
}
|
||||
return `http://127.0.0.1:${OAUTH_CALLBACK_PORT}${OAUTH_CALLBACK_PATH}`
|
||||
}
|
||||
|
||||
@@ -157,22 +152,3 @@ export class McpOAuthProvider implements OAuthClientProvider {
|
||||
}
|
||||
|
||||
export { OAUTH_CALLBACK_PORT, OAUTH_CALLBACK_PATH }
|
||||
|
||||
/**
|
||||
* Parse a redirect URI to extract port and path for the callback server.
|
||||
* Returns defaults if the URI can't be parsed.
|
||||
*/
|
||||
export function parseRedirectUri(redirectUri?: string): { port: number; path: string } {
|
||||
if (!redirectUri) {
|
||||
return { port: OAUTH_CALLBACK_PORT, path: OAUTH_CALLBACK_PATH }
|
||||
}
|
||||
|
||||
try {
|
||||
const url = new URL(redirectUri)
|
||||
const port = url.port ? parseInt(url.port, 10) : url.protocol === "https:" ? 443 : 80
|
||||
const path = url.pathname || OAUTH_CALLBACK_PATH
|
||||
return { port, path }
|
||||
} catch {
|
||||
return { port: OAUTH_CALLBACK_PORT, path: OAUTH_CALLBACK_PATH }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -177,8 +177,18 @@ export namespace Patch {
|
||||
return { content, nextIdx: i }
|
||||
}
|
||||
|
||||
function stripHeredoc(input: string): string {
|
||||
// Match heredoc patterns like: cat <<'EOF'\n...\nEOF or <<EOF\n...\nEOF
|
||||
const heredocMatch = input.match(/^(?:cat\s+)?<<['"]?(\w+)['"]?\s*\n([\s\S]*?)\n\1\s*$/)
|
||||
if (heredocMatch) {
|
||||
return heredocMatch[2]
|
||||
}
|
||||
return input
|
||||
}
|
||||
|
||||
export function parsePatch(patchText: string): { hunks: Hunk[] } {
|
||||
const lines = patchText.split("\n")
|
||||
const cleaned = stripHeredoc(patchText.trim())
|
||||
const lines = cleaned.split("\n")
|
||||
const hunks: Hunk[] = []
|
||||
let i = 0
|
||||
|
||||
@@ -363,7 +373,7 @@ export namespace Patch {
|
||||
// Try to match old lines in the file
|
||||
let pattern = chunk.old_lines
|
||||
let newSlice = chunk.new_lines
|
||||
let found = seekSequence(originalLines, pattern, lineIndex)
|
||||
let found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file)
|
||||
|
||||
// Retry without trailing empty line if not found
|
||||
if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") {
|
||||
@@ -371,7 +381,7 @@ export namespace Patch {
|
||||
if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") {
|
||||
newSlice = newSlice.slice(0, -1)
|
||||
}
|
||||
found = seekSequence(originalLines, pattern, lineIndex)
|
||||
found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file)
|
||||
}
|
||||
|
||||
if (found !== -1) {
|
||||
@@ -407,28 +417,75 @@ export namespace Patch {
|
||||
return result
|
||||
}
|
||||
|
||||
function seekSequence(lines: string[], pattern: string[], startIndex: number): number {
|
||||
if (pattern.length === 0) return -1
|
||||
// Normalize Unicode punctuation to ASCII equivalents (like Rust's normalize_unicode)
|
||||
function normalizeUnicode(str: string): string {
|
||||
return str
|
||||
.replace(/[\u2018\u2019\u201A\u201B]/g, "'") // single quotes
|
||||
.replace(/[\u201C\u201D\u201E\u201F]/g, '"') // double quotes
|
||||
.replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, "-") // dashes
|
||||
.replace(/\u2026/g, "...") // ellipsis
|
||||
.replace(/\u00A0/g, " ") // non-breaking space
|
||||
}
|
||||
|
||||
// Simple substring search implementation
|
||||
type Comparator = (a: string, b: string) => boolean
|
||||
|
||||
function tryMatch(lines: string[], pattern: string[], startIndex: number, compare: Comparator, eof: boolean): number {
|
||||
// If EOF anchor, try matching from end of file first
|
||||
if (eof) {
|
||||
const fromEnd = lines.length - pattern.length
|
||||
if (fromEnd >= startIndex) {
|
||||
let matches = true
|
||||
for (let j = 0; j < pattern.length; j++) {
|
||||
if (!compare(lines[fromEnd + j], pattern[j])) {
|
||||
matches = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if (matches) return fromEnd
|
||||
}
|
||||
}
|
||||
|
||||
// Forward search from startIndex
|
||||
for (let i = startIndex; i <= lines.length - pattern.length; i++) {
|
||||
let matches = true
|
||||
|
||||
for (let j = 0; j < pattern.length; j++) {
|
||||
if (lines[i + j] !== pattern[j]) {
|
||||
if (!compare(lines[i + j], pattern[j])) {
|
||||
matches = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (matches) {
|
||||
return i
|
||||
}
|
||||
if (matches) return i
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
function seekSequence(lines: string[], pattern: string[], startIndex: number, eof = false): number {
|
||||
if (pattern.length === 0) return -1
|
||||
|
||||
// Pass 1: exact match
|
||||
const exact = tryMatch(lines, pattern, startIndex, (a, b) => a === b, eof)
|
||||
if (exact !== -1) return exact
|
||||
|
||||
// Pass 2: rstrip (trim trailing whitespace)
|
||||
const rstrip = tryMatch(lines, pattern, startIndex, (a, b) => a.trimEnd() === b.trimEnd(), eof)
|
||||
if (rstrip !== -1) return rstrip
|
||||
|
||||
// Pass 3: trim (both ends)
|
||||
const trim = tryMatch(lines, pattern, startIndex, (a, b) => a.trim() === b.trim(), eof)
|
||||
if (trim !== -1) return trim
|
||||
|
||||
// Pass 4: normalized (Unicode punctuation to ASCII)
|
||||
const normalized = tryMatch(
|
||||
lines,
|
||||
pattern,
|
||||
startIndex,
|
||||
(a, b) => normalizeUnicode(a.trim()) === normalizeUnicode(b.trim()),
|
||||
eof,
|
||||
)
|
||||
return normalized
|
||||
}
|
||||
|
||||
function generateUnifiedDiff(oldContent: string, newContent: string): string {
|
||||
const oldLines = oldContent.split("\n")
|
||||
const newLines = newContent.split("\n")
|
||||
|
||||
@@ -3,6 +3,9 @@ import { Installation } from "@/installation"
|
||||
import { iife } from "@/util/iife"
|
||||
|
||||
const CLIENT_ID = "Ov23li8tweQw6odWQebz"
|
||||
// Add a small safety buffer when polling to avoid hitting the server
|
||||
// slightly too early due to clock skew / timer drift.
|
||||
const OAUTH_POLLING_SAFETY_MARGIN_MS = 3000 // 3 seconds
|
||||
|
||||
function normalizeDomain(url: string) {
|
||||
return url.replace(/^https?:\/\//, "").replace(/\/$/, "")
|
||||
@@ -204,6 +207,7 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
|
||||
const data = (await response.json()) as {
|
||||
access_token?: string
|
||||
error?: string
|
||||
interval?: number
|
||||
}
|
||||
|
||||
if (data.access_token) {
|
||||
@@ -230,13 +234,29 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
|
||||
}
|
||||
|
||||
if (data.error === "authorization_pending") {
|
||||
await new Promise((resolve) => setTimeout(resolve, deviceData.interval * 1000))
|
||||
await Bun.sleep(deviceData.interval * 1000 + OAUTH_POLLING_SAFETY_MARGIN_MS)
|
||||
continue
|
||||
}
|
||||
|
||||
if (data.error === "slow_down") {
|
||||
// Based on the RFC spec, we must add 5 seconds to our current polling interval.
|
||||
// (See https://www.rfc-editor.org/rfc/rfc8628#section-3.5)
|
||||
let newInterval = (deviceData.interval + 5) * 1000
|
||||
|
||||
// GitHub OAuth API may return the new interval in seconds in the response.
|
||||
// We should try to use that if provided with safety margin.
|
||||
const serverInterval = data.interval
|
||||
if (serverInterval && typeof serverInterval === "number" && serverInterval > 0) {
|
||||
newInterval = serverInterval * 1000
|
||||
}
|
||||
|
||||
await Bun.sleep(newInterval + OAUTH_POLLING_SAFETY_MARGIN_MS)
|
||||
continue
|
||||
}
|
||||
|
||||
if (data.error) return { type: "failed" as const }
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, deviceData.interval * 1000))
|
||||
await Bun.sleep(deviceData.interval * 1000 + OAUTH_POLLING_SAFETY_MARGIN_MS)
|
||||
continue
|
||||
}
|
||||
},
|
||||
|
||||
@@ -999,6 +999,24 @@ export namespace Provider {
|
||||
opts.signal = combined
|
||||
}
|
||||
|
||||
// Strip openai itemId metadata following what codex does
|
||||
// Codex uses #[serde(skip_serializing)] on id fields for all item types:
|
||||
// Message, Reasoning, FunctionCall, LocalShellCall, CustomToolCall, WebSearchCall
|
||||
// IDs are only re-attached for Azure with store=true
|
||||
if (model.api.npm === "@ai-sdk/openai" && opts.body && opts.method === "POST") {
|
||||
const body = JSON.parse(opts.body as string)
|
||||
const isAzure = model.providerID.includes("azure")
|
||||
const keepIds = isAzure && body.store === true
|
||||
if (!keepIds && Array.isArray(body.input)) {
|
||||
for (const item of body.input) {
|
||||
if ("id" in item) {
|
||||
delete item.id
|
||||
}
|
||||
}
|
||||
opts.body = JSON.stringify(body)
|
||||
}
|
||||
}
|
||||
|
||||
return fetchFn(input, {
|
||||
...opts,
|
||||
// @ts-ignore see here: https://github.com/oven-sh/bun/issues/16682
|
||||
|
||||
@@ -16,30 +16,33 @@ function mimeToModality(mime: string): Modality | undefined {
|
||||
}
|
||||
|
||||
export namespace ProviderTransform {
|
||||
// Maps npm package to the key the AI SDK expects for providerOptions
|
||||
function sdkKey(npm: string): string | undefined {
|
||||
switch (npm) {
|
||||
case "@ai-sdk/github-copilot":
|
||||
case "@ai-sdk/openai":
|
||||
case "@ai-sdk/azure":
|
||||
return "openai"
|
||||
case "@ai-sdk/amazon-bedrock":
|
||||
return "bedrock"
|
||||
case "@ai-sdk/anthropic":
|
||||
return "anthropic"
|
||||
case "@ai-sdk/google-vertex":
|
||||
case "@ai-sdk/google":
|
||||
return "google"
|
||||
case "@ai-sdk/gateway":
|
||||
return "gateway"
|
||||
case "@openrouter/ai-sdk-provider":
|
||||
return "openrouter"
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
function normalizeMessages(
|
||||
msgs: ModelMessage[],
|
||||
model: Provider.Model,
|
||||
options: Record<string, unknown>,
|
||||
): ModelMessage[] {
|
||||
// Strip openai itemId metadata following what codex does
|
||||
if (model.api.npm === "@ai-sdk/openai" || options.store === false) {
|
||||
msgs = msgs.map((msg) => {
|
||||
if (msg.providerOptions?.openai) {
|
||||
delete msg.providerOptions.openai["itemId"]
|
||||
}
|
||||
if (!Array.isArray(msg.content)) {
|
||||
return msg
|
||||
}
|
||||
const content = msg.content.map((part) => {
|
||||
if (part.providerOptions?.openai) {
|
||||
delete part.providerOptions.openai["itemId"]
|
||||
}
|
||||
return part
|
||||
})
|
||||
return { ...msg, content } as typeof msg
|
||||
})
|
||||
}
|
||||
|
||||
// Anthropic rejects messages with empty content - filter out empty string messages
|
||||
// and remove empty text/reasoning parts from array content
|
||||
if (model.api.npm === "@ai-sdk/anthropic") {
|
||||
@@ -253,6 +256,28 @@ export namespace ProviderTransform {
|
||||
msgs = applyCaching(msgs, model.providerID)
|
||||
}
|
||||
|
||||
// Remap providerOptions keys from stored providerID to expected SDK key
|
||||
const key = sdkKey(model.api.npm)
|
||||
if (key && key !== model.providerID && model.api.npm !== "@ai-sdk/azure") {
|
||||
const remap = (opts: Record<string, any> | undefined) => {
|
||||
if (!opts) return opts
|
||||
if (!(model.providerID in opts)) return opts
|
||||
const result = { ...opts }
|
||||
result[key] = result[model.providerID]
|
||||
delete result[model.providerID]
|
||||
return result
|
||||
}
|
||||
|
||||
msgs = msgs.map((msg) => {
|
||||
if (!Array.isArray(msg.content)) return { ...msg, providerOptions: remap(msg.providerOptions) }
|
||||
return {
|
||||
...msg,
|
||||
providerOptions: remap(msg.providerOptions),
|
||||
content: msg.content.map((part) => ({ ...part, providerOptions: remap(part.providerOptions) })),
|
||||
} as typeof msg
|
||||
})
|
||||
}
|
||||
|
||||
return msgs
|
||||
}
|
||||
|
||||
@@ -570,39 +595,8 @@ export namespace ProviderTransform {
|
||||
}
|
||||
|
||||
export function providerOptions(model: Provider.Model, options: { [x: string]: any }) {
|
||||
switch (model.api.npm) {
|
||||
case "@ai-sdk/github-copilot":
|
||||
case "@ai-sdk/openai":
|
||||
case "@ai-sdk/azure":
|
||||
return {
|
||||
["openai" as string]: options,
|
||||
}
|
||||
case "@ai-sdk/amazon-bedrock":
|
||||
return {
|
||||
["bedrock" as string]: options,
|
||||
}
|
||||
case "@ai-sdk/anthropic":
|
||||
return {
|
||||
["anthropic" as string]: options,
|
||||
}
|
||||
case "@ai-sdk/google-vertex":
|
||||
case "@ai-sdk/google":
|
||||
return {
|
||||
["google" as string]: options,
|
||||
}
|
||||
case "@ai-sdk/gateway":
|
||||
return {
|
||||
["gateway" as string]: options,
|
||||
}
|
||||
case "@openrouter/ai-sdk-provider":
|
||||
return {
|
||||
["openrouter" as string]: options,
|
||||
}
|
||||
default:
|
||||
return {
|
||||
[model.providerID]: options,
|
||||
}
|
||||
}
|
||||
const key = sdkKey(model.api.npm) ?? model.providerID
|
||||
return { [key]: options }
|
||||
}
|
||||
|
||||
export function maxOutputTokens(
|
||||
|
||||
@@ -7,15 +7,17 @@ export namespace MDNS {
|
||||
let bonjour: Bonjour | undefined
|
||||
let currentPort: number | undefined
|
||||
|
||||
export function publish(port: number, name = "opencode") {
|
||||
export function publish(port: number) {
|
||||
if (currentPort === port) return
|
||||
if (bonjour) unpublish()
|
||||
|
||||
try {
|
||||
const name = `opencode-${port}`
|
||||
bonjour = new Bonjour()
|
||||
const service = bonjour.publish({
|
||||
name,
|
||||
type: "http",
|
||||
host: "opencode.local",
|
||||
port,
|
||||
txt: { path: "/" },
|
||||
})
|
||||
|
||||
@@ -1,79 +0,0 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator } from "hono-openapi"
|
||||
import { resolver } from "hono-openapi"
|
||||
import { Instance } from "../project/instance"
|
||||
import { Project } from "../project/project"
|
||||
import z from "zod"
|
||||
import { errors } from "./error"
|
||||
|
||||
export const ProjectRoute = new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List all projects",
|
||||
description: "Get a list of projects that have been opened with OpenCode.",
|
||||
operationId: "project.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of projects",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Project.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const projects = await Project.list()
|
||||
return c.json(projects)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/current",
|
||||
describeRoute({
|
||||
summary: "Get current project",
|
||||
description: "Retrieve the currently active project that OpenCode is working with.",
|
||||
operationId: "project.current",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Current project information",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Project.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(Instance.project)
|
||||
},
|
||||
)
|
||||
.patch(
|
||||
"/:projectID",
|
||||
describeRoute({
|
||||
summary: "Update project",
|
||||
description: "Update project properties such as name, icon and color.",
|
||||
operationId: "project.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Updated project information",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Project.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ projectID: z.string() })),
|
||||
validator("json", Project.update.schema.omit({ projectID: true })),
|
||||
async (c) => {
|
||||
const projectID = c.req.valid("param").projectID
|
||||
const body = c.req.valid("json")
|
||||
const project = await Project.update({ ...body, projectID })
|
||||
return c.json(project)
|
||||
},
|
||||
)
|
||||
@@ -1,95 +0,0 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator } from "hono-openapi"
|
||||
import { resolver } from "hono-openapi"
|
||||
import { Question } from "../question"
|
||||
import z from "zod"
|
||||
import { errors } from "./error"
|
||||
|
||||
export const QuestionRoute = new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List pending questions",
|
||||
description: "Get all pending question requests across all sessions.",
|
||||
operationId: "question.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of pending questions",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Question.Request.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const questions = await Question.list()
|
||||
return c.json(questions)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:requestID/reply",
|
||||
describeRoute({
|
||||
summary: "Reply to question request",
|
||||
description: "Provide answers to a question request from the AI assistant.",
|
||||
operationId: "question.reply",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Question answered successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
requestID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator("json", Question.Reply),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
const json = c.req.valid("json")
|
||||
await Question.reply({
|
||||
requestID: params.requestID,
|
||||
answers: json.answers,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:requestID/reject",
|
||||
describeRoute({
|
||||
summary: "Reject question request",
|
||||
description: "Reject a question request from the AI assistant.",
|
||||
operationId: "question.reject",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Question rejected successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
requestID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
await Question.reject(params.requestID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
92
packages/opencode/src/server/routes/config.ts
Normal file
92
packages/opencode/src/server/routes/config.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Config } from "../../config/config"
|
||||
import { Provider } from "../../provider/provider"
|
||||
import { mapValues } from "remeda"
|
||||
import { errors } from "../error"
|
||||
import { Log } from "../../util/log"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
const log = Log.create({ service: "server" })
|
||||
|
||||
export const ConfigRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Get configuration",
|
||||
description: "Retrieve the current OpenCode configuration settings and preferences.",
|
||||
operationId: "config.get",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Get config info",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Config.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await Config.get())
|
||||
},
|
||||
)
|
||||
.patch(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Update configuration",
|
||||
description: "Update OpenCode configuration settings and preferences.",
|
||||
operationId: "config.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully updated config",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Config.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", Config.Info),
|
||||
async (c) => {
|
||||
const config = c.req.valid("json")
|
||||
await Config.update(config)
|
||||
return c.json(config)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/providers",
|
||||
describeRoute({
|
||||
summary: "List config providers",
|
||||
description: "Get a list of all configured AI providers and their default models.",
|
||||
operationId: "config.providers",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of providers",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
providers: Provider.Info.array(),
|
||||
default: z.record(z.string(), z.string()),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
using _ = log.time("providers")
|
||||
const providers = await Provider.list().then((x) => mapValues(x, (item) => item))
|
||||
return c.json({
|
||||
providers: Object.values(providers),
|
||||
default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id),
|
||||
})
|
||||
},
|
||||
),
|
||||
)
|
||||
157
packages/opencode/src/server/routes/experimental.ts
Normal file
157
packages/opencode/src/server/routes/experimental.ts
Normal file
@@ -0,0 +1,157 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { ToolRegistry } from "../../tool/registry"
|
||||
import { Worktree } from "../../worktree"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { Project } from "../../project/project"
|
||||
import { MCP } from "../../mcp"
|
||||
import { zodToJsonSchema } from "zod-to-json-schema"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const ExperimentalRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/tool/ids",
|
||||
describeRoute({
|
||||
summary: "List tool IDs",
|
||||
description:
|
||||
"Get a list of all available tool IDs, including both built-in tools and dynamically registered tools.",
|
||||
operationId: "tool.ids",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Tool IDs",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.array(z.string()).meta({ ref: "ToolIDs" })),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await ToolRegistry.ids())
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/tool",
|
||||
describeRoute({
|
||||
summary: "List tools",
|
||||
description:
|
||||
"Get a list of available tools with their JSON schema parameters for a specific provider and model combination.",
|
||||
operationId: "tool.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Tools",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z
|
||||
.array(
|
||||
z
|
||||
.object({
|
||||
id: z.string(),
|
||||
description: z.string(),
|
||||
parameters: z.any(),
|
||||
})
|
||||
.meta({ ref: "ToolListItem" }),
|
||||
)
|
||||
.meta({ ref: "ToolList" }),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
provider: z.string(),
|
||||
model: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const { provider, model } = c.req.valid("query")
|
||||
const tools = await ToolRegistry.tools({ providerID: provider, modelID: model })
|
||||
return c.json(
|
||||
tools.map((t) => ({
|
||||
id: t.id,
|
||||
description: t.description,
|
||||
// Handle both Zod schemas and plain JSON schemas
|
||||
parameters: (t.parameters as any)?._def ? zodToJsonSchema(t.parameters as any) : t.parameters,
|
||||
})),
|
||||
)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/worktree",
|
||||
describeRoute({
|
||||
summary: "Create worktree",
|
||||
description: "Create a new git worktree for the current project.",
|
||||
operationId: "worktree.create",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Worktree created",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Worktree.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", Worktree.create.schema),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
const worktree = await Worktree.create(body)
|
||||
return c.json(worktree)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/worktree",
|
||||
describeRoute({
|
||||
summary: "List worktrees",
|
||||
description: "List all sandbox worktrees for the current project.",
|
||||
operationId: "worktree.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of worktree directories",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.array(z.string())),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const sandboxes = await Project.sandboxes(Instance.project.id)
|
||||
return c.json(sandboxes)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/resource",
|
||||
describeRoute({
|
||||
summary: "Get MCP resources",
|
||||
description: "Get all available MCP resources from connected servers. Optionally filter by name.",
|
||||
operationId: "experimental.resource.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP resources",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), MCP.Resource)),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await MCP.resources())
|
||||
},
|
||||
),
|
||||
)
|
||||
197
packages/opencode/src/server/routes/file.ts
Normal file
197
packages/opencode/src/server/routes/file.ts
Normal file
@@ -0,0 +1,197 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { File } from "../../file"
|
||||
import { Ripgrep } from "../../file/ripgrep"
|
||||
import { LSP } from "../../lsp"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const FileRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/find",
|
||||
describeRoute({
|
||||
summary: "Find text",
|
||||
description: "Search for text patterns across files in the project using ripgrep.",
|
||||
operationId: "find.text",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Matches",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Ripgrep.Match.shape.data.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
pattern: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const pattern = c.req.valid("query").pattern
|
||||
const result = await Ripgrep.search({
|
||||
cwd: Instance.directory,
|
||||
pattern,
|
||||
limit: 10,
|
||||
})
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/find/file",
|
||||
describeRoute({
|
||||
summary: "Find files",
|
||||
description: "Search for files or directories by name or pattern in the project directory.",
|
||||
operationId: "find.files",
|
||||
responses: {
|
||||
200: {
|
||||
description: "File paths",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.string().array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
query: z.string(),
|
||||
dirs: z.enum(["true", "false"]).optional(),
|
||||
type: z.enum(["file", "directory"]).optional(),
|
||||
limit: z.coerce.number().int().min(1).max(200).optional(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const query = c.req.valid("query").query
|
||||
const dirs = c.req.valid("query").dirs
|
||||
const type = c.req.valid("query").type
|
||||
const limit = c.req.valid("query").limit
|
||||
const results = await File.search({
|
||||
query,
|
||||
limit: limit ?? 10,
|
||||
dirs: dirs !== "false",
|
||||
type,
|
||||
})
|
||||
return c.json(results)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/find/symbol",
|
||||
describeRoute({
|
||||
summary: "Find symbols",
|
||||
description: "Search for workspace symbols like functions, classes, and variables using LSP.",
|
||||
operationId: "find.symbols",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Symbols",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(LSP.Symbol.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
query: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
/*
|
||||
const query = c.req.valid("query").query
|
||||
const result = await LSP.workspaceSymbol(query)
|
||||
return c.json(result)
|
||||
*/
|
||||
return c.json([])
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/file",
|
||||
describeRoute({
|
||||
summary: "List files",
|
||||
description: "List files and directories in a specified path.",
|
||||
operationId: "file.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Files and directories",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(File.Node.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const path = c.req.valid("query").path
|
||||
const content = await File.list(path)
|
||||
return c.json(content)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/file/content",
|
||||
describeRoute({
|
||||
summary: "Read file",
|
||||
description: "Read the content of a specified file.",
|
||||
operationId: "file.read",
|
||||
responses: {
|
||||
200: {
|
||||
description: "File content",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(File.Content),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const path = c.req.valid("query").path
|
||||
const content = await File.read(path)
|
||||
return c.json(content)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/file/status",
|
||||
describeRoute({
|
||||
summary: "Get file status",
|
||||
description: "Get the git status of all files in the project.",
|
||||
operationId: "file.status",
|
||||
responses: {
|
||||
200: {
|
||||
description: "File status",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(File.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const content = await File.status()
|
||||
return c.json(content)
|
||||
},
|
||||
),
|
||||
)
|
||||
135
packages/opencode/src/server/routes/global.ts
Normal file
135
packages/opencode/src/server/routes/global.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, resolver } from "hono-openapi"
|
||||
import { streamSSE } from "hono/streaming"
|
||||
import z from "zod"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { GlobalBus } from "@/bus/global"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { Installation } from "@/installation"
|
||||
import { Log } from "../../util/log"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
const log = Log.create({ service: "server" })
|
||||
|
||||
export const GlobalDisposedEvent = BusEvent.define("global.disposed", z.object({}))
|
||||
|
||||
export const GlobalRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/health",
|
||||
describeRoute({
|
||||
summary: "Get health",
|
||||
description: "Get health information about the OpenCode server.",
|
||||
operationId: "global.health",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Health information",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.object({ healthy: z.literal(true), version: z.string() })),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json({ healthy: true, version: Installation.VERSION })
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/event",
|
||||
describeRoute({
|
||||
summary: "Get global events",
|
||||
description: "Subscribe to global events from the OpenCode system using server-sent events.",
|
||||
operationId: "global.event",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Event stream",
|
||||
content: {
|
||||
"text/event-stream": {
|
||||
schema: resolver(
|
||||
z
|
||||
.object({
|
||||
directory: z.string(),
|
||||
payload: BusEvent.payloads(),
|
||||
})
|
||||
.meta({
|
||||
ref: "GlobalEvent",
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
log.info("global event connected")
|
||||
return streamSSE(c, async (stream) => {
|
||||
stream.writeSSE({
|
||||
data: JSON.stringify({
|
||||
payload: {
|
||||
type: "server.connected",
|
||||
properties: {},
|
||||
},
|
||||
}),
|
||||
})
|
||||
async function handler(event: any) {
|
||||
await stream.writeSSE({
|
||||
data: JSON.stringify(event),
|
||||
})
|
||||
}
|
||||
GlobalBus.on("event", handler)
|
||||
|
||||
// Send heartbeat every 30s to prevent WKWebView timeout (60s default)
|
||||
const heartbeat = setInterval(() => {
|
||||
stream.writeSSE({
|
||||
data: JSON.stringify({
|
||||
payload: {
|
||||
type: "server.heartbeat",
|
||||
properties: {},
|
||||
},
|
||||
}),
|
||||
})
|
||||
}, 30000)
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
stream.onAbort(() => {
|
||||
clearInterval(heartbeat)
|
||||
GlobalBus.off("event", handler)
|
||||
resolve()
|
||||
log.info("global event disconnected")
|
||||
})
|
||||
})
|
||||
})
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/dispose",
|
||||
describeRoute({
|
||||
summary: "Dispose instance",
|
||||
description: "Clean up and dispose all OpenCode instances, releasing all resources.",
|
||||
operationId: "global.dispose",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Global disposed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Instance.disposeAll()
|
||||
GlobalBus.emit("event", {
|
||||
directory: "global",
|
||||
payload: {
|
||||
type: GlobalDisposedEvent.type,
|
||||
properties: {},
|
||||
},
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
),
|
||||
)
|
||||
225
packages/opencode/src/server/routes/mcp.ts
Normal file
225
packages/opencode/src/server/routes/mcp.ts
Normal file
@@ -0,0 +1,225 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { MCP } from "../../mcp"
|
||||
import { Config } from "../../config/config"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const McpRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Get MCP status",
|
||||
description: "Get the status of all Model Context Protocol (MCP) servers.",
|
||||
operationId: "mcp.status",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP server status",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), MCP.Status)),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await MCP.status())
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Add MCP server",
|
||||
description: "Dynamically add a new Model Context Protocol (MCP) server to the system.",
|
||||
operationId: "mcp.add",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP server added successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), MCP.Status)),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
name: z.string(),
|
||||
config: Config.Mcp,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const { name, config } = c.req.valid("json")
|
||||
const result = await MCP.add(name, config)
|
||||
return c.json(result.status)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/auth",
|
||||
describeRoute({
|
||||
summary: "Start MCP OAuth",
|
||||
description: "Start OAuth authentication flow for a Model Context Protocol (MCP) server.",
|
||||
operationId: "mcp.auth.start",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth flow started",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
authorizationUrl: z.string().describe("URL to open in browser for authorization"),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const name = c.req.param("name")
|
||||
const supportsOAuth = await MCP.supportsOAuth(name)
|
||||
if (!supportsOAuth) {
|
||||
return c.json({ error: `MCP server ${name} does not support OAuth` }, 400)
|
||||
}
|
||||
const result = await MCP.startAuth(name)
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/auth/callback",
|
||||
describeRoute({
|
||||
summary: "Complete MCP OAuth",
|
||||
description:
|
||||
"Complete OAuth authentication for a Model Context Protocol (MCP) server using the authorization code.",
|
||||
operationId: "mcp.auth.callback",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth authentication completed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MCP.Status),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
code: z.string().describe("Authorization code from OAuth callback"),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const name = c.req.param("name")
|
||||
const { code } = c.req.valid("json")
|
||||
const status = await MCP.finishAuth(name, code)
|
||||
return c.json(status)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/auth/authenticate",
|
||||
describeRoute({
|
||||
summary: "Authenticate MCP OAuth",
|
||||
description: "Start OAuth flow and wait for callback (opens browser)",
|
||||
operationId: "mcp.auth.authenticate",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth authentication completed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MCP.Status),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const name = c.req.param("name")
|
||||
const supportsOAuth = await MCP.supportsOAuth(name)
|
||||
if (!supportsOAuth) {
|
||||
return c.json({ error: `MCP server ${name} does not support OAuth` }, 400)
|
||||
}
|
||||
const status = await MCP.authenticate(name)
|
||||
return c.json(status)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:name/auth",
|
||||
describeRoute({
|
||||
summary: "Remove MCP OAuth",
|
||||
description: "Remove OAuth credentials for an MCP server",
|
||||
operationId: "mcp.auth.remove",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth credentials removed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.object({ success: z.literal(true) })),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(404),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const name = c.req.param("name")
|
||||
await MCP.removeAuth(name)
|
||||
return c.json({ success: true as const })
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/connect",
|
||||
describeRoute({
|
||||
description: "Connect an MCP server",
|
||||
operationId: "mcp.connect",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP server connected successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ name: z.string() })),
|
||||
async (c) => {
|
||||
const { name } = c.req.valid("param")
|
||||
await MCP.connect(name)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:name/disconnect",
|
||||
describeRoute({
|
||||
description: "Disconnect an MCP server",
|
||||
operationId: "mcp.disconnect",
|
||||
responses: {
|
||||
200: {
|
||||
description: "MCP server disconnected successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ name: z.string() })),
|
||||
async (c) => {
|
||||
const { name } = c.req.valid("param")
|
||||
await MCP.disconnect(name)
|
||||
return c.json(true)
|
||||
},
|
||||
),
|
||||
)
|
||||
68
packages/opencode/src/server/routes/permission.ts
Normal file
68
packages/opencode/src/server/routes/permission.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { PermissionNext } from "@/permission/next"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const PermissionRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.post(
|
||||
"/:requestID/reply",
|
||||
describeRoute({
|
||||
summary: "Respond to permission request",
|
||||
description: "Approve or deny a permission request from the AI assistant.",
|
||||
operationId: "permission.reply",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Permission processed successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
requestID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator("json", z.object({ reply: PermissionNext.Reply, message: z.string().optional() })),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
const json = c.req.valid("json")
|
||||
await PermissionNext.reply({
|
||||
requestID: params.requestID,
|
||||
reply: json.reply,
|
||||
message: json.message,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List pending permissions",
|
||||
description: "Get all pending permission requests across all sessions.",
|
||||
operationId: "permission.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of pending permissions",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(PermissionNext.Request.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const permissions = await PermissionNext.list()
|
||||
return c.json(permissions)
|
||||
},
|
||||
),
|
||||
)
|
||||
82
packages/opencode/src/server/routes/project.ts
Normal file
82
packages/opencode/src/server/routes/project.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator } from "hono-openapi"
|
||||
import { resolver } from "hono-openapi"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { Project } from "../../project/project"
|
||||
import z from "zod"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const ProjectRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List all projects",
|
||||
description: "Get a list of projects that have been opened with OpenCode.",
|
||||
operationId: "project.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of projects",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Project.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const projects = await Project.list()
|
||||
return c.json(projects)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/current",
|
||||
describeRoute({
|
||||
summary: "Get current project",
|
||||
description: "Retrieve the currently active project that OpenCode is working with.",
|
||||
operationId: "project.current",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Current project information",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Project.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(Instance.project)
|
||||
},
|
||||
)
|
||||
.patch(
|
||||
"/:projectID",
|
||||
describeRoute({
|
||||
summary: "Update project",
|
||||
description: "Update project properties such as name, icon and color.",
|
||||
operationId: "project.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Updated project information",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Project.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ projectID: z.string() })),
|
||||
validator("json", Project.update.schema.omit({ projectID: true })),
|
||||
async (c) => {
|
||||
const projectID = c.req.valid("param").projectID
|
||||
const body = c.req.valid("json")
|
||||
const project = await Project.update({ ...body, projectID })
|
||||
return c.json(project)
|
||||
},
|
||||
),
|
||||
)
|
||||
165
packages/opencode/src/server/routes/provider.ts
Normal file
165
packages/opencode/src/server/routes/provider.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Config } from "../../config/config"
|
||||
import { Provider } from "../../provider/provider"
|
||||
import { ModelsDev } from "../../provider/models"
|
||||
import { ProviderAuth } from "../../provider/auth"
|
||||
import { mapValues } from "remeda"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const ProviderRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List providers",
|
||||
description: "Get a list of all available AI providers, including both available and connected ones.",
|
||||
operationId: "provider.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of providers",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
all: ModelsDev.Provider.array(),
|
||||
default: z.record(z.string(), z.string()),
|
||||
connected: z.array(z.string()),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const config = await Config.get()
|
||||
const disabled = new Set(config.disabled_providers ?? [])
|
||||
const enabled = config.enabled_providers ? new Set(config.enabled_providers) : undefined
|
||||
|
||||
const allProviders = await ModelsDev.get()
|
||||
const filteredProviders: Record<string, (typeof allProviders)[string]> = {}
|
||||
for (const [key, value] of Object.entries(allProviders)) {
|
||||
if ((enabled ? enabled.has(key) : true) && !disabled.has(key)) {
|
||||
filteredProviders[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
const connected = await Provider.list()
|
||||
const providers = Object.assign(
|
||||
mapValues(filteredProviders, (x) => Provider.fromModelsDevProvider(x)),
|
||||
connected,
|
||||
)
|
||||
return c.json({
|
||||
all: Object.values(providers),
|
||||
default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id),
|
||||
connected: Object.keys(connected),
|
||||
})
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/auth",
|
||||
describeRoute({
|
||||
summary: "Get provider auth methods",
|
||||
description: "Retrieve available authentication methods for all AI providers.",
|
||||
operationId: "provider.auth",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Provider auth methods",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), z.array(ProviderAuth.Method))),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(await ProviderAuth.methods())
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:providerID/oauth/authorize",
|
||||
describeRoute({
|
||||
summary: "OAuth authorize",
|
||||
description: "Initiate OAuth authorization for a specific AI provider to get an authorization URL.",
|
||||
operationId: "provider.oauth.authorize",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Authorization URL and method",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(ProviderAuth.Authorization.optional()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
providerID: z.string().meta({ description: "Provider ID" }),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
method: z.number().meta({ description: "Auth method index" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const providerID = c.req.valid("param").providerID
|
||||
const { method } = c.req.valid("json")
|
||||
const result = await ProviderAuth.authorize({
|
||||
providerID,
|
||||
method,
|
||||
})
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:providerID/oauth/callback",
|
||||
describeRoute({
|
||||
summary: "OAuth callback",
|
||||
description: "Handle the OAuth callback from a provider after user authorization.",
|
||||
operationId: "provider.oauth.callback",
|
||||
responses: {
|
||||
200: {
|
||||
description: "OAuth callback processed successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
providerID: z.string().meta({ description: "Provider ID" }),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
method: z.number().meta({ description: "Auth method index" }),
|
||||
code: z.string().optional().meta({ description: "OAuth authorization code" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const providerID = c.req.valid("param").providerID
|
||||
const { method, code } = c.req.valid("json")
|
||||
await ProviderAuth.callback({
|
||||
providerID,
|
||||
method,
|
||||
code,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
),
|
||||
)
|
||||
169
packages/opencode/src/server/routes/pty.ts
Normal file
169
packages/opencode/src/server/routes/pty.ts
Normal file
@@ -0,0 +1,169 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import { upgradeWebSocket } from "hono/bun"
|
||||
import z from "zod"
|
||||
import { Pty } from "@/pty"
|
||||
import { Storage } from "../../storage/storage"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const PtyRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List PTY sessions",
|
||||
description: "Get a list of all active pseudo-terminal (PTY) sessions managed by OpenCode.",
|
||||
operationId: "pty.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of sessions",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Pty.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
return c.json(Pty.list())
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Create PTY session",
|
||||
description: "Create a new pseudo-terminal (PTY) session for running shell commands and processes.",
|
||||
operationId: "pty.create",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Created session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Pty.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", Pty.CreateInput),
|
||||
async (c) => {
|
||||
const info = await Pty.create(c.req.valid("json"))
|
||||
return c.json(info)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:ptyID",
|
||||
describeRoute({
|
||||
summary: "Get PTY session",
|
||||
description: "Retrieve detailed information about a specific pseudo-terminal (PTY) session.",
|
||||
operationId: "pty.get",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Session info",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Pty.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(404),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ ptyID: z.string() })),
|
||||
async (c) => {
|
||||
const info = Pty.get(c.req.valid("param").ptyID)
|
||||
if (!info) {
|
||||
throw new Storage.NotFoundError({ message: "Session not found" })
|
||||
}
|
||||
return c.json(info)
|
||||
},
|
||||
)
|
||||
.put(
|
||||
"/:ptyID",
|
||||
describeRoute({
|
||||
summary: "Update PTY session",
|
||||
description: "Update properties of an existing pseudo-terminal (PTY) session.",
|
||||
operationId: "pty.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Updated session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Pty.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ ptyID: z.string() })),
|
||||
validator("json", Pty.UpdateInput),
|
||||
async (c) => {
|
||||
const info = await Pty.update(c.req.valid("param").ptyID, c.req.valid("json"))
|
||||
return c.json(info)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:ptyID",
|
||||
describeRoute({
|
||||
summary: "Remove PTY session",
|
||||
description: "Remove and terminate a specific pseudo-terminal (PTY) session.",
|
||||
operationId: "pty.remove",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Session removed",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(404),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ ptyID: z.string() })),
|
||||
async (c) => {
|
||||
await Pty.remove(c.req.valid("param").ptyID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:ptyID/connect",
|
||||
describeRoute({
|
||||
summary: "Connect to PTY session",
|
||||
description: "Establish a WebSocket connection to interact with a pseudo-terminal (PTY) session in real-time.",
|
||||
operationId: "pty.connect",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Connected session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(404),
|
||||
},
|
||||
}),
|
||||
validator("param", z.object({ ptyID: z.string() })),
|
||||
upgradeWebSocket((c) => {
|
||||
const id = c.req.param("ptyID")
|
||||
let handler: ReturnType<typeof Pty.connect>
|
||||
if (!Pty.get(id)) throw new Error("Session not found")
|
||||
return {
|
||||
onOpen(_event, ws) {
|
||||
handler = Pty.connect(id, ws)
|
||||
},
|
||||
onMessage(event) {
|
||||
handler?.onMessage(String(event.data))
|
||||
},
|
||||
onClose() {
|
||||
handler?.onClose()
|
||||
},
|
||||
}
|
||||
}),
|
||||
),
|
||||
)
|
||||
98
packages/opencode/src/server/routes/question.ts
Normal file
98
packages/opencode/src/server/routes/question.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, validator } from "hono-openapi"
|
||||
import { resolver } from "hono-openapi"
|
||||
import { Question } from "../../question"
|
||||
import z from "zod"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
export const QuestionRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List pending questions",
|
||||
description: "Get all pending question requests across all sessions.",
|
||||
operationId: "question.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of pending questions",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Question.Request.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const questions = await Question.list()
|
||||
return c.json(questions)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:requestID/reply",
|
||||
describeRoute({
|
||||
summary: "Reply to question request",
|
||||
description: "Provide answers to a question request from the AI assistant.",
|
||||
operationId: "question.reply",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Question answered successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
requestID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator("json", Question.Reply),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
const json = c.req.valid("json")
|
||||
await Question.reply({
|
||||
requestID: params.requestID,
|
||||
answers: json.answers,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:requestID/reject",
|
||||
describeRoute({
|
||||
summary: "Reject question request",
|
||||
description: "Reject a question request from the AI assistant.",
|
||||
operationId: "question.reject",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Question rejected successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
requestID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
await Question.reject(params.requestID)
|
||||
return c.json(true)
|
||||
},
|
||||
),
|
||||
)
|
||||
935
packages/opencode/src/server/routes/session.ts
Normal file
935
packages/opencode/src/server/routes/session.ts
Normal file
@@ -0,0 +1,935 @@
|
||||
import { Hono } from "hono"
|
||||
import { stream } from "hono/streaming"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Session } from "../../session"
|
||||
import { MessageV2 } from "../../session/message-v2"
|
||||
import { SessionPrompt } from "../../session/prompt"
|
||||
import { SessionCompaction } from "../../session/compaction"
|
||||
import { SessionRevert } from "../../session/revert"
|
||||
import { SessionStatus } from "@/session/status"
|
||||
import { SessionSummary } from "@/session/summary"
|
||||
import { Todo } from "../../session/todo"
|
||||
import { Agent } from "../../agent/agent"
|
||||
import { Snapshot } from "@/snapshot"
|
||||
import { Log } from "../../util/log"
|
||||
import { PermissionNext } from "@/permission/next"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
const log = Log.create({ service: "server" })
|
||||
|
||||
export const SessionRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.get(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "List sessions",
|
||||
description: "Get a list of all OpenCode sessions, sorted by most recently updated.",
|
||||
operationId: "session.list",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of sessions",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
directory: z.string().optional().meta({ description: "Filter sessions by project directory" }),
|
||||
roots: z.coerce.boolean().optional().meta({ description: "Only return root sessions (no parentID)" }),
|
||||
start: z.coerce
|
||||
.number()
|
||||
.optional()
|
||||
.meta({ description: "Filter sessions updated on or after this timestamp (milliseconds since epoch)" }),
|
||||
search: z.string().optional().meta({ description: "Filter sessions by title (case-insensitive)" }),
|
||||
limit: z.coerce.number().optional().meta({ description: "Maximum number of sessions to return" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const query = c.req.valid("query")
|
||||
const term = query.search?.toLowerCase()
|
||||
const sessions: Session.Info[] = []
|
||||
for await (const session of Session.list()) {
|
||||
if (query.directory !== undefined && session.directory !== query.directory) continue
|
||||
if (query.roots && session.parentID) continue
|
||||
if (query.start !== undefined && session.time.updated < query.start) continue
|
||||
if (term !== undefined && !session.title.toLowerCase().includes(term)) continue
|
||||
sessions.push(session)
|
||||
if (query.limit !== undefined && sessions.length >= query.limit) break
|
||||
}
|
||||
return c.json(sessions)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/status",
|
||||
describeRoute({
|
||||
summary: "Get session status",
|
||||
description: "Retrieve the current status of all sessions, including active, idle, and completed states.",
|
||||
operationId: "session.status",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Get session status",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.record(z.string(), SessionStatus.Info)),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const result = SessionStatus.list()
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID",
|
||||
describeRoute({
|
||||
summary: "Get session",
|
||||
description: "Retrieve detailed information about a specific OpenCode session.",
|
||||
tags: ["Session"],
|
||||
operationId: "session.get",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Get session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.get.schema,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
log.info("SEARCH", { url: c.req.url })
|
||||
const session = await Session.get(sessionID)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/children",
|
||||
describeRoute({
|
||||
summary: "Get session children",
|
||||
tags: ["Session"],
|
||||
description: "Retrieve all child sessions that were forked from the specified parent session.",
|
||||
operationId: "session.children",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of children",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.children.schema,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const session = await Session.children(sessionID)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/todo",
|
||||
describeRoute({
|
||||
summary: "Get session todos",
|
||||
description: "Retrieve the todo list associated with a specific session, showing tasks and action items.",
|
||||
operationId: "session.todo",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Todo list",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Todo.Info.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const todos = await Todo.get(sessionID)
|
||||
return c.json(todos)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/",
|
||||
describeRoute({
|
||||
summary: "Create session",
|
||||
description: "Create a new OpenCode session for interacting with AI assistants and managing conversations.",
|
||||
operationId: "session.create",
|
||||
responses: {
|
||||
...errors(400),
|
||||
200: {
|
||||
description: "Successfully created session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("json", Session.create.schema.optional()),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json") ?? {}
|
||||
const session = await Session.create(body)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:sessionID",
|
||||
describeRoute({
|
||||
summary: "Delete session",
|
||||
description: "Delete a session and permanently remove all associated data, including messages and history.",
|
||||
operationId: "session.delete",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully deleted session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.remove.schema,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
await Session.remove(sessionID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.patch(
|
||||
"/:sessionID",
|
||||
describeRoute({
|
||||
summary: "Update session",
|
||||
description: "Update properties of an existing session, such as title or other metadata.",
|
||||
operationId: "session.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully updated session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
title: z.string().optional(),
|
||||
time: z
|
||||
.object({
|
||||
archived: z.number().optional(),
|
||||
})
|
||||
.optional(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const updates = c.req.valid("json")
|
||||
|
||||
const updatedSession = await Session.update(sessionID, (session) => {
|
||||
if (updates.title !== undefined) {
|
||||
session.title = updates.title
|
||||
}
|
||||
if (updates.time?.archived !== undefined) session.time.archived = updates.time.archived
|
||||
})
|
||||
|
||||
return c.json(updatedSession)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/init",
|
||||
describeRoute({
|
||||
summary: "Initialize session",
|
||||
description:
|
||||
"Analyze the current application and create an AGENTS.md file with project-specific agent configurations.",
|
||||
operationId: "session.init",
|
||||
responses: {
|
||||
200: {
|
||||
description: "200",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", Session.initialize.schema.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
await Session.initialize({ ...body, sessionID })
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/fork",
|
||||
describeRoute({
|
||||
summary: "Fork session",
|
||||
description: "Create a new session by forking an existing session at a specific message point.",
|
||||
operationId: "session.fork",
|
||||
responses: {
|
||||
200: {
|
||||
description: "200",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.fork.schema.shape.sessionID,
|
||||
}),
|
||||
),
|
||||
validator("json", Session.fork.schema.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const result = await Session.fork({ ...body, sessionID })
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/abort",
|
||||
describeRoute({
|
||||
summary: "Abort session",
|
||||
description: "Abort an active session and stop any ongoing AI processing or command execution.",
|
||||
operationId: "session.abort",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Aborted session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
SessionPrompt.cancel(c.req.valid("param").sessionID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/share",
|
||||
describeRoute({
|
||||
summary: "Share session",
|
||||
description: "Create a shareable link for a session, allowing others to view the conversation.",
|
||||
operationId: "session.share",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully shared session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
await Session.share(sessionID)
|
||||
const session = await Session.get(sessionID)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/diff",
|
||||
describeRoute({
|
||||
summary: "Get message diff",
|
||||
description: "Get the file changes (diff) that resulted from a specific user message in the session.",
|
||||
operationId: "session.diff",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully retrieved diff",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Snapshot.FileDiff.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: SessionSummary.diff.schema.shape.sessionID,
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
messageID: SessionSummary.diff.schema.shape.messageID,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const query = c.req.valid("query")
|
||||
const params = c.req.valid("param")
|
||||
const result = await SessionSummary.diff({
|
||||
sessionID: params.sessionID,
|
||||
messageID: query.messageID,
|
||||
})
|
||||
return c.json(result)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:sessionID/share",
|
||||
describeRoute({
|
||||
summary: "Unshare session",
|
||||
description: "Remove the shareable link for a session, making it private again.",
|
||||
operationId: "session.unshare",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully unshared session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: Session.unshare.schema,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
await Session.unshare(sessionID)
|
||||
const session = await Session.get(sessionID)
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/summarize",
|
||||
describeRoute({
|
||||
summary: "Summarize session",
|
||||
description: "Generate a concise summary of the session using AI compaction to preserve key information.",
|
||||
operationId: "session.summarize",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Summarized session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
providerID: z.string(),
|
||||
modelID: z.string(),
|
||||
auto: z.boolean().optional().default(false),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const session = await Session.get(sessionID)
|
||||
await SessionRevert.cleanup(session)
|
||||
const msgs = await Session.messages({ sessionID })
|
||||
let currentAgent = await Agent.defaultAgent()
|
||||
for (let i = msgs.length - 1; i >= 0; i--) {
|
||||
const info = msgs[i].info
|
||||
if (info.role === "user") {
|
||||
currentAgent = info.agent || (await Agent.defaultAgent())
|
||||
break
|
||||
}
|
||||
}
|
||||
await SessionCompaction.create({
|
||||
sessionID,
|
||||
agent: currentAgent,
|
||||
model: {
|
||||
providerID: body.providerID,
|
||||
modelID: body.modelID,
|
||||
},
|
||||
auto: body.auto,
|
||||
})
|
||||
await SessionPrompt.loop(sessionID)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/message",
|
||||
describeRoute({
|
||||
summary: "Get session messages",
|
||||
description: "Retrieve all messages in a session, including user prompts and AI responses.",
|
||||
operationId: "session.messages",
|
||||
responses: {
|
||||
200: {
|
||||
description: "List of messages",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MessageV2.WithParts.array()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
limit: z.coerce.number().optional(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const query = c.req.valid("query")
|
||||
const messages = await Session.messages({
|
||||
sessionID: c.req.valid("param").sessionID,
|
||||
limit: query.limit,
|
||||
})
|
||||
return c.json(messages)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/:sessionID/message/:messageID",
|
||||
describeRoute({
|
||||
summary: "Get message",
|
||||
description: "Retrieve a specific message from a session by its message ID.",
|
||||
operationId: "session.message",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Message",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
info: MessageV2.Info,
|
||||
parts: MessageV2.Part.array(),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
messageID: z.string().meta({ description: "Message ID" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
const message = await MessageV2.get({
|
||||
sessionID: params.sessionID,
|
||||
messageID: params.messageID,
|
||||
})
|
||||
return c.json(message)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/:sessionID/message/:messageID/part/:partID",
|
||||
describeRoute({
|
||||
description: "Delete a part from a message",
|
||||
operationId: "part.delete",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully deleted part",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
messageID: z.string().meta({ description: "Message ID" }),
|
||||
partID: z.string().meta({ description: "Part ID" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
await Session.removePart({
|
||||
sessionID: params.sessionID,
|
||||
messageID: params.messageID,
|
||||
partID: params.partID,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.patch(
|
||||
"/:sessionID/message/:messageID/part/:partID",
|
||||
describeRoute({
|
||||
description: "Update a part in a message",
|
||||
operationId: "part.update",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully updated part",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MessageV2.Part),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
messageID: z.string().meta({ description: "Message ID" }),
|
||||
partID: z.string().meta({ description: "Part ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", MessageV2.Part),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
const body = c.req.valid("json")
|
||||
if (body.id !== params.partID || body.messageID !== params.messageID || body.sessionID !== params.sessionID) {
|
||||
throw new Error(
|
||||
`Part mismatch: body.id='${body.id}' vs partID='${params.partID}', body.messageID='${body.messageID}' vs messageID='${params.messageID}', body.sessionID='${body.sessionID}' vs sessionID='${params.sessionID}'`,
|
||||
)
|
||||
}
|
||||
const part = await Session.updatePart(body)
|
||||
return c.json(part)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/message",
|
||||
describeRoute({
|
||||
summary: "Send message",
|
||||
description: "Create and send a new message to a session, streaming the AI response.",
|
||||
operationId: "session.prompt",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Created message",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
info: MessageV2.Assistant,
|
||||
parts: MessageV2.Part.array(),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionPrompt.PromptInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
c.status(200)
|
||||
c.header("Content-Type", "application/json")
|
||||
return stream(c, async (stream) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const msg = await SessionPrompt.prompt({ ...body, sessionID })
|
||||
stream.write(JSON.stringify(msg))
|
||||
})
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/prompt_async",
|
||||
describeRoute({
|
||||
summary: "Send async message",
|
||||
description:
|
||||
"Create and send a new message to a session asynchronously, starting the session if needed and returning immediately.",
|
||||
operationId: "session.prompt_async",
|
||||
responses: {
|
||||
204: {
|
||||
description: "Prompt accepted",
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionPrompt.PromptInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
c.status(204)
|
||||
c.header("Content-Type", "application/json")
|
||||
return stream(c, async () => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
SessionPrompt.prompt({ ...body, sessionID })
|
||||
})
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/command",
|
||||
describeRoute({
|
||||
summary: "Send command",
|
||||
description: "Send a new command to a session for execution by the AI assistant.",
|
||||
operationId: "session.command",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Created message",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z.object({
|
||||
info: MessageV2.Assistant,
|
||||
parts: MessageV2.Part.array(),
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionPrompt.CommandInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const msg = await SessionPrompt.command({ ...body, sessionID })
|
||||
return c.json(msg)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/shell",
|
||||
describeRoute({
|
||||
summary: "Run shell command",
|
||||
description: "Execute a shell command within the session context and return the AI's response.",
|
||||
operationId: "session.shell",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Created message",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(MessageV2.Assistant),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string().meta({ description: "Session ID" }),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionPrompt.ShellInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const body = c.req.valid("json")
|
||||
const msg = await SessionPrompt.shell({ ...body, sessionID })
|
||||
return c.json(msg)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/revert",
|
||||
describeRoute({
|
||||
summary: "Revert message",
|
||||
description: "Revert a specific message in a session, undoing its effects and restoring the previous state.",
|
||||
operationId: "session.revert",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Updated session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator("json", SessionRevert.RevertInput.omit({ sessionID: true })),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
log.info("revert", c.req.valid("json"))
|
||||
const session = await SessionRevert.revert({
|
||||
sessionID,
|
||||
...c.req.valid("json"),
|
||||
})
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/unrevert",
|
||||
describeRoute({
|
||||
summary: "Restore reverted messages",
|
||||
description: "Restore all previously reverted messages in a session.",
|
||||
operationId: "session.unrevert",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Updated session",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(Session.Info),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const sessionID = c.req.valid("param").sessionID
|
||||
const session = await SessionRevert.unrevert({ sessionID })
|
||||
return c.json(session)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/:sessionID/permissions/:permissionID",
|
||||
describeRoute({
|
||||
summary: "Respond to permission",
|
||||
deprecated: true,
|
||||
description: "Approve or deny a permission request from the AI assistant.",
|
||||
operationId: "permission.respond",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Permission processed successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
permissionID: z.string(),
|
||||
}),
|
||||
),
|
||||
validator("json", z.object({ response: PermissionNext.Reply })),
|
||||
async (c) => {
|
||||
const params = c.req.valid("param")
|
||||
PermissionNext.reply({
|
||||
requestID: params.permissionID,
|
||||
reply: c.req.valid("json").response,
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
),
|
||||
)
|
||||
377
packages/opencode/src/server/routes/tui.ts
Normal file
377
packages/opencode/src/server/routes/tui.ts
Normal file
@@ -0,0 +1,377 @@
|
||||
import { Hono, type Context } from "hono"
|
||||
import { describeRoute, validator, resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Bus } from "../../bus"
|
||||
import { Session } from "../../session"
|
||||
import { TuiEvent } from "@/cli/cmd/tui/event"
|
||||
import { AsyncQueue } from "../../util/queue"
|
||||
import { errors } from "../error"
|
||||
import { lazy } from "../../util/lazy"
|
||||
|
||||
const TuiRequest = z.object({
|
||||
path: z.string(),
|
||||
body: z.any(),
|
||||
})
|
||||
|
||||
type TuiRequest = z.infer<typeof TuiRequest>
|
||||
|
||||
const request = new AsyncQueue<TuiRequest>()
|
||||
const response = new AsyncQueue<any>()
|
||||
|
||||
export async function callTui(ctx: Context) {
|
||||
const body = await ctx.req.json()
|
||||
request.push({
|
||||
path: ctx.req.path,
|
||||
body,
|
||||
})
|
||||
return response.next()
|
||||
}
|
||||
|
||||
const TuiControlRoutes = new Hono()
|
||||
.get(
|
||||
"/next",
|
||||
describeRoute({
|
||||
summary: "Get next TUI request",
|
||||
description: "Retrieve the next TUI (Terminal User Interface) request from the queue for processing.",
|
||||
operationId: "tui.control.next",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Next TUI request",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(TuiRequest),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const req = await request.next()
|
||||
return c.json(req)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/response",
|
||||
describeRoute({
|
||||
summary: "Submit TUI response",
|
||||
description: "Submit a response to the TUI request queue to complete a pending request.",
|
||||
operationId: "tui.control.response",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Response submitted successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("json", z.any()),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
response.push(body)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
|
||||
export const TuiRoutes = lazy(() =>
|
||||
new Hono()
|
||||
.post(
|
||||
"/append-prompt",
|
||||
describeRoute({
|
||||
summary: "Append TUI prompt",
|
||||
description: "Append prompt to the TUI",
|
||||
operationId: "tui.appendPrompt",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Prompt processed successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", TuiEvent.PromptAppend.properties),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.PromptAppend, c.req.valid("json"))
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/open-help",
|
||||
describeRoute({
|
||||
summary: "Open help dialog",
|
||||
description: "Open the help dialog in the TUI to display user assistance information.",
|
||||
operationId: "tui.openHelp",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Help dialog opened successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "help.show",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/open-sessions",
|
||||
describeRoute({
|
||||
summary: "Open sessions dialog",
|
||||
description: "Open the session dialog",
|
||||
operationId: "tui.openSessions",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Session dialog opened successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "session.list",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/open-themes",
|
||||
describeRoute({
|
||||
summary: "Open themes dialog",
|
||||
description: "Open the theme dialog",
|
||||
operationId: "tui.openThemes",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Theme dialog opened successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "session.list",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/open-models",
|
||||
describeRoute({
|
||||
summary: "Open models dialog",
|
||||
description: "Open the model dialog",
|
||||
operationId: "tui.openModels",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Model dialog opened successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "model.list",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/submit-prompt",
|
||||
describeRoute({
|
||||
summary: "Submit TUI prompt",
|
||||
description: "Submit the prompt",
|
||||
operationId: "tui.submitPrompt",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Prompt submitted successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "prompt.submit",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/clear-prompt",
|
||||
describeRoute({
|
||||
summary: "Clear TUI prompt",
|
||||
description: "Clear the prompt",
|
||||
operationId: "tui.clearPrompt",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Prompt cleared successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
command: "prompt.clear",
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/execute-command",
|
||||
describeRoute({
|
||||
summary: "Execute TUI command",
|
||||
description: "Execute a TUI command (e.g. agent_cycle)",
|
||||
operationId: "tui.executeCommand",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Command executed successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator("json", z.object({ command: z.string() })),
|
||||
async (c) => {
|
||||
const command = c.req.valid("json").command
|
||||
await Bus.publish(TuiEvent.CommandExecute, {
|
||||
// @ts-expect-error
|
||||
command: {
|
||||
session_new: "session.new",
|
||||
session_share: "session.share",
|
||||
session_interrupt: "session.interrupt",
|
||||
session_compact: "session.compact",
|
||||
messages_page_up: "session.page.up",
|
||||
messages_page_down: "session.page.down",
|
||||
messages_half_page_up: "session.half.page.up",
|
||||
messages_half_page_down: "session.half.page.down",
|
||||
messages_first: "session.first",
|
||||
messages_last: "session.last",
|
||||
agent_cycle: "agent.cycle",
|
||||
}[command],
|
||||
})
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/show-toast",
|
||||
describeRoute({
|
||||
summary: "Show TUI toast",
|
||||
description: "Show a toast notification in the TUI",
|
||||
operationId: "tui.showToast",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Toast notification shown successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("json", TuiEvent.ToastShow.properties),
|
||||
async (c) => {
|
||||
await Bus.publish(TuiEvent.ToastShow, c.req.valid("json"))
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/publish",
|
||||
describeRoute({
|
||||
summary: "Publish TUI event",
|
||||
description: "Publish a TUI event",
|
||||
operationId: "tui.publish",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Event published successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"json",
|
||||
z.union(
|
||||
Object.values(TuiEvent).map((def) => {
|
||||
return z
|
||||
.object({
|
||||
type: z.literal(def.type),
|
||||
properties: def.properties,
|
||||
})
|
||||
.meta({
|
||||
ref: "Event" + "." + def.type,
|
||||
})
|
||||
}),
|
||||
),
|
||||
),
|
||||
async (c) => {
|
||||
const evt = c.req.valid("json")
|
||||
await Bus.publish(Object.values(TuiEvent).find((def) => def.type === evt.type)!, evt.properties)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/select-session",
|
||||
describeRoute({
|
||||
summary: "Select session",
|
||||
description: "Navigate the TUI to display the specified session.",
|
||||
operationId: "tui.selectSession",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Session selected successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400, 404),
|
||||
},
|
||||
}),
|
||||
validator("json", TuiEvent.SessionSelect.properties),
|
||||
async (c) => {
|
||||
const { sessionID } = c.req.valid("json")
|
||||
await Session.get(sessionID)
|
||||
await Bus.publish(TuiEvent.SessionSelect, { sessionID })
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.route("/control", TuiControlRoutes),
|
||||
)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,71 +0,0 @@
|
||||
import { Hono, type Context } from "hono"
|
||||
import { describeRoute, resolver, validator } from "hono-openapi"
|
||||
import { z } from "zod"
|
||||
import { AsyncQueue } from "../util/queue"
|
||||
|
||||
const TuiRequest = z.object({
|
||||
path: z.string(),
|
||||
body: z.any(),
|
||||
})
|
||||
|
||||
type TuiRequest = z.infer<typeof TuiRequest>
|
||||
|
||||
const request = new AsyncQueue<TuiRequest>()
|
||||
const response = new AsyncQueue<any>()
|
||||
|
||||
export async function callTui(ctx: Context) {
|
||||
const body = await ctx.req.json()
|
||||
request.push({
|
||||
path: ctx.req.path,
|
||||
body,
|
||||
})
|
||||
return response.next()
|
||||
}
|
||||
|
||||
export const TuiRoute = new Hono()
|
||||
.get(
|
||||
"/next",
|
||||
describeRoute({
|
||||
summary: "Get next TUI request",
|
||||
description: "Retrieve the next TUI (Terminal User Interface) request from the queue for processing.",
|
||||
operationId: "tui.control.next",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Next TUI request",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(TuiRequest),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
async (c) => {
|
||||
const req = await request.next()
|
||||
return c.json(req)
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/response",
|
||||
describeRoute({
|
||||
summary: "Submit TUI response",
|
||||
description: "Submit a response to the TUI request queue to complete a pending request.",
|
||||
operationId: "tui.control.response",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Response submitted successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
validator("json", z.any()),
|
||||
async (c) => {
|
||||
const body = c.req.valid("json")
|
||||
response.push(body)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
@@ -1,14 +1,7 @@
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import z from "zod"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import {
|
||||
APICallError,
|
||||
convertToModelMessages,
|
||||
LoadAPIKeyError,
|
||||
type ModelMessage,
|
||||
type UIMessage,
|
||||
type ToolSet,
|
||||
} from "ai"
|
||||
import { APICallError, convertToModelMessages, LoadAPIKeyError, type ModelMessage, type UIMessage } from "ai"
|
||||
import { Identifier } from "../id/id"
|
||||
import { LSP } from "../lsp"
|
||||
import { Snapshot } from "@/snapshot"
|
||||
@@ -439,7 +432,7 @@ export namespace MessageV2 {
|
||||
})
|
||||
export type WithParts = z.infer<typeof WithParts>
|
||||
|
||||
export function toModelMessage(input: WithParts[], options?: { tools?: ToolSet }): ModelMessage[] {
|
||||
export function toModelMessage(input: WithParts[]): ModelMessage[] {
|
||||
const result: UIMessage[] = []
|
||||
|
||||
for (const msg of input) {
|
||||
@@ -510,14 +503,30 @@ export namespace MessageV2 {
|
||||
})
|
||||
if (part.type === "tool") {
|
||||
if (part.state.status === "completed") {
|
||||
if (part.state.attachments?.length) {
|
||||
result.push({
|
||||
id: Identifier.ascending("message"),
|
||||
role: "user",
|
||||
parts: [
|
||||
{
|
||||
type: "text",
|
||||
text: `The tool ${part.tool} returned the following attachments:`,
|
||||
},
|
||||
...part.state.attachments.map((attachment) => ({
|
||||
type: "file" as const,
|
||||
url: attachment.url,
|
||||
mediaType: attachment.mime,
|
||||
filename: attachment.filename,
|
||||
})),
|
||||
],
|
||||
})
|
||||
}
|
||||
assistantMessage.parts.push({
|
||||
type: ("tool-" + part.tool) as `tool-${string}`,
|
||||
state: "output-available",
|
||||
toolCallId: part.callID,
|
||||
input: part.state.input,
|
||||
output: part.state.time.compacted
|
||||
? "[Old tool result content cleared]"
|
||||
: { output: part.state.output, attachments: part.state.attachments },
|
||||
output: part.state.time.compacted ? "[Old tool result content cleared]" : part.state.output,
|
||||
callProviderMetadata: part.metadata,
|
||||
})
|
||||
}
|
||||
@@ -556,12 +565,7 @@ export namespace MessageV2 {
|
||||
}
|
||||
}
|
||||
|
||||
return convertToModelMessages(
|
||||
result.filter((msg) => msg.parts.some((part) => part.type !== "step-start")),
|
||||
{
|
||||
tools: options?.tools,
|
||||
},
|
||||
)
|
||||
return convertToModelMessages(result.filter((msg) => msg.parts.some((part) => part.type !== "step-start")))
|
||||
}
|
||||
|
||||
export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
|
||||
|
||||
@@ -597,7 +597,7 @@ export namespace SessionPrompt {
|
||||
sessionID,
|
||||
system: [...(await SystemPrompt.environment()), ...(await SystemPrompt.custom())],
|
||||
messages: [
|
||||
...MessageV2.toModelMessage(sessionMessages, { tools }),
|
||||
...MessageV2.toModelMessage(sessionMessages),
|
||||
...(isLastStep
|
||||
? [
|
||||
{
|
||||
@@ -685,7 +685,10 @@ export namespace SessionPrompt {
|
||||
},
|
||||
})
|
||||
|
||||
for (const item of await ToolRegistry.tools(input.model.providerID, input.agent)) {
|
||||
for (const item of await ToolRegistry.tools(
|
||||
{ modelID: input.model.api.id, providerID: input.model.providerID },
|
||||
input.agent,
|
||||
)) {
|
||||
const schema = ProviderTransform.schema(input.model, z.toJSONSchema(item.parameters))
|
||||
tools[item.id] = tool({
|
||||
id: item.id as any,
|
||||
@@ -716,18 +719,10 @@ export namespace SessionPrompt {
|
||||
)
|
||||
return result
|
||||
},
|
||||
toModelOutput(result: { output: string; attachments?: MessageV2.FilePart[] }) {
|
||||
if (!result.attachments?.length) return { type: "text", value: result.output }
|
||||
toModelOutput(result) {
|
||||
return {
|
||||
type: "content",
|
||||
value: [
|
||||
{ type: "text", text: result.output },
|
||||
...result.attachments.map((a) => ({
|
||||
type: "media" as const,
|
||||
data: a.url.slice(a.url.indexOf(",") + 1),
|
||||
mediaType: a.mime,
|
||||
})),
|
||||
],
|
||||
type: "text",
|
||||
value: result.output,
|
||||
}
|
||||
},
|
||||
})
|
||||
@@ -814,18 +809,10 @@ export namespace SessionPrompt {
|
||||
content: result.content, // directly return content to preserve ordering when outputting to model
|
||||
}
|
||||
}
|
||||
item.toModelOutput = (result: { output: string; attachments?: MessageV2.FilePart[] }) => {
|
||||
if (!result.attachments?.length) return { type: "text", value: result.output }
|
||||
item.toModelOutput = (result) => {
|
||||
return {
|
||||
type: "content",
|
||||
value: [
|
||||
{ type: "text", text: result.output },
|
||||
...result.attachments.map((a) => ({
|
||||
type: "media" as const,
|
||||
data: a.url.slice(a.url.indexOf(",") + 1),
|
||||
mediaType: a.mime,
|
||||
})),
|
||||
],
|
||||
type: "text",
|
||||
value: result.output,
|
||||
}
|
||||
}
|
||||
tools[key] = item
|
||||
|
||||
@@ -5,6 +5,7 @@ You are an interactive CLI tool that helps users with software engineering tasks
|
||||
## Editing constraints
|
||||
- Default to ASCII when editing or creating files. Only introduce non-ASCII or other Unicode characters when there is a clear justification and the file already uses them.
|
||||
- Only add comments if they are necessary to make a non-obvious block easier to understand.
|
||||
- Try to use apply_patch for single file edits, but it is fine to explore other options to make the edit if it does not work well. Do not use apply_patch for changes that are auto-generated (i.e. generating package.json or running a lint or format command like gofmt) or when scripting is more efficient (such as search and replacing a string across a codebase).
|
||||
|
||||
## Tool usage
|
||||
- Prefer specialized tools over shell for file operations:
|
||||
|
||||
277
packages/opencode/src/tool/apply_patch.ts
Normal file
277
packages/opencode/src/tool/apply_patch.ts
Normal file
@@ -0,0 +1,277 @@
|
||||
import z from "zod"
|
||||
import * as path from "path"
|
||||
import * as fs from "fs/promises"
|
||||
import { Tool } from "./tool"
|
||||
import { FileTime } from "../file/time"
|
||||
import { Bus } from "../bus"
|
||||
import { FileWatcher } from "../file/watcher"
|
||||
import { Instance } from "../project/instance"
|
||||
import { Patch } from "../patch"
|
||||
import { createTwoFilesPatch, diffLines } from "diff"
|
||||
import { assertExternalDirectory } from "./external-directory"
|
||||
import { trimDiff } from "./edit"
|
||||
import { LSP } from "../lsp"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
|
||||
const PatchParams = z.object({
|
||||
patchText: z.string().describe("The full patch text that describes all changes to be made"),
|
||||
})
|
||||
|
||||
export const ApplyPatchTool = Tool.define("apply_patch", {
|
||||
description: "Use the `apply_patch` tool to edit files. This is a FREEFORM tool, so do not wrap the patch in JSON.",
|
||||
parameters: PatchParams,
|
||||
async execute(params, ctx) {
|
||||
if (!params.patchText) {
|
||||
throw new Error("patchText is required")
|
||||
}
|
||||
|
||||
// Parse the patch to get hunks
|
||||
let hunks: Patch.Hunk[]
|
||||
try {
|
||||
const parseResult = Patch.parsePatch(params.patchText)
|
||||
hunks = parseResult.hunks
|
||||
} catch (error) {
|
||||
throw new Error(`apply_patch verification failed: ${error}`)
|
||||
}
|
||||
|
||||
if (hunks.length === 0) {
|
||||
const normalized = params.patchText.replace(/\r\n/g, "\n").replace(/\r/g, "\n").trim()
|
||||
if (normalized === "*** Begin Patch\n*** End Patch") {
|
||||
throw new Error("patch rejected: empty patch")
|
||||
}
|
||||
throw new Error("apply_patch verification failed: no hunks found")
|
||||
}
|
||||
|
||||
// Validate file paths and check permissions
|
||||
const fileChanges: Array<{
|
||||
filePath: string
|
||||
oldContent: string
|
||||
newContent: string
|
||||
type: "add" | "update" | "delete" | "move"
|
||||
movePath?: string
|
||||
diff: string
|
||||
additions: number
|
||||
deletions: number
|
||||
}> = []
|
||||
|
||||
let totalDiff = ""
|
||||
|
||||
for (const hunk of hunks) {
|
||||
const filePath = path.resolve(Instance.directory, hunk.path)
|
||||
await assertExternalDirectory(ctx, filePath)
|
||||
|
||||
switch (hunk.type) {
|
||||
case "add": {
|
||||
const oldContent = ""
|
||||
const newContent =
|
||||
hunk.contents.length === 0 || hunk.contents.endsWith("\n") ? hunk.contents : `${hunk.contents}\n`
|
||||
const diff = trimDiff(createTwoFilesPatch(filePath, filePath, oldContent, newContent))
|
||||
|
||||
let additions = 0
|
||||
let deletions = 0
|
||||
for (const change of diffLines(oldContent, newContent)) {
|
||||
if (change.added) additions += change.count || 0
|
||||
if (change.removed) deletions += change.count || 0
|
||||
}
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: "add",
|
||||
diff,
|
||||
additions,
|
||||
deletions,
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
break
|
||||
}
|
||||
|
||||
case "update": {
|
||||
// Check if file exists for update
|
||||
const stats = await fs.stat(filePath).catch(() => null)
|
||||
if (!stats || stats.isDirectory()) {
|
||||
throw new Error(`apply_patch verification failed: Failed to read file to update: ${filePath}`)
|
||||
}
|
||||
|
||||
// Read file and update time tracking (like edit tool does)
|
||||
await FileTime.assert(ctx.sessionID, filePath)
|
||||
const oldContent = await fs.readFile(filePath, "utf-8")
|
||||
let newContent = oldContent
|
||||
|
||||
// Apply the update chunks to get new content
|
||||
try {
|
||||
const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks)
|
||||
newContent = fileUpdate.content
|
||||
} catch (error) {
|
||||
throw new Error(`apply_patch verification failed: ${error}`)
|
||||
}
|
||||
|
||||
const diff = trimDiff(createTwoFilesPatch(filePath, filePath, oldContent, newContent))
|
||||
|
||||
let additions = 0
|
||||
let deletions = 0
|
||||
for (const change of diffLines(oldContent, newContent)) {
|
||||
if (change.added) additions += change.count || 0
|
||||
if (change.removed) deletions += change.count || 0
|
||||
}
|
||||
|
||||
const movePath = hunk.move_path ? path.resolve(Instance.directory, hunk.move_path) : undefined
|
||||
await assertExternalDirectory(ctx, movePath)
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: hunk.move_path ? "move" : "update",
|
||||
movePath,
|
||||
diff,
|
||||
additions,
|
||||
deletions,
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
break
|
||||
}
|
||||
|
||||
case "delete": {
|
||||
const contentToDelete = await fs.readFile(filePath, "utf-8").catch((error) => {
|
||||
throw new Error(`apply_patch verification failed: ${error}`)
|
||||
})
|
||||
const deleteDiff = trimDiff(createTwoFilesPatch(filePath, filePath, contentToDelete, ""))
|
||||
|
||||
const deletions = contentToDelete.split("\n").length
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent: contentToDelete,
|
||||
newContent: "",
|
||||
type: "delete",
|
||||
diff: deleteDiff,
|
||||
additions: 0,
|
||||
deletions,
|
||||
})
|
||||
|
||||
totalDiff += deleteDiff + "\n"
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check permissions if needed
|
||||
await ctx.ask({
|
||||
permission: "edit",
|
||||
patterns: fileChanges.map((c) => path.relative(Instance.worktree, c.filePath)),
|
||||
always: ["*"],
|
||||
metadata: {
|
||||
diff: totalDiff,
|
||||
},
|
||||
})
|
||||
|
||||
// Apply the changes
|
||||
const changedFiles: string[] = []
|
||||
|
||||
for (const change of fileChanges) {
|
||||
switch (change.type) {
|
||||
case "add":
|
||||
// Create parent directories (recursive: true is safe on existing/root dirs)
|
||||
await fs.mkdir(path.dirname(change.filePath), { recursive: true })
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "update":
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "move":
|
||||
if (change.movePath) {
|
||||
// Create parent directories (recursive: true is safe on existing/root dirs)
|
||||
await fs.mkdir(path.dirname(change.movePath), { recursive: true })
|
||||
await fs.writeFile(change.movePath, change.newContent, "utf-8")
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.movePath)
|
||||
}
|
||||
break
|
||||
|
||||
case "delete":
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
}
|
||||
|
||||
// Update file time tracking
|
||||
FileTime.read(ctx.sessionID, change.filePath)
|
||||
if (change.movePath) {
|
||||
FileTime.read(ctx.sessionID, change.movePath)
|
||||
}
|
||||
}
|
||||
|
||||
// Publish file change events
|
||||
for (const filePath of changedFiles) {
|
||||
await Bus.publish(FileWatcher.Event.Updated, { file: filePath, event: "change" })
|
||||
}
|
||||
|
||||
// Notify LSP of file changes and collect diagnostics
|
||||
for (const change of fileChanges) {
|
||||
if (change.type === "delete") continue
|
||||
const target = change.movePath ?? change.filePath
|
||||
await LSP.touchFile(target, true)
|
||||
}
|
||||
const diagnostics = await LSP.diagnostics()
|
||||
|
||||
// Generate output summary
|
||||
const summaryLines = fileChanges.map((change) => {
|
||||
if (change.type === "add") {
|
||||
return `A ${path.relative(Instance.worktree, change.filePath)}`
|
||||
}
|
||||
if (change.type === "delete") {
|
||||
return `D ${path.relative(Instance.worktree, change.filePath)}`
|
||||
}
|
||||
const target = change.movePath ?? change.filePath
|
||||
return `M ${path.relative(Instance.worktree, target)}`
|
||||
})
|
||||
let output = `Success. Updated the following files:\n${summaryLines.join("\n")}`
|
||||
|
||||
// Report LSP errors for changed files
|
||||
const MAX_DIAGNOSTICS_PER_FILE = 20
|
||||
for (const change of fileChanges) {
|
||||
if (change.type === "delete") continue
|
||||
const target = change.movePath ?? change.filePath
|
||||
const normalized = Filesystem.normalizePath(target)
|
||||
const issues = diagnostics[normalized] ?? []
|
||||
const errors = issues.filter((item) => item.severity === 1)
|
||||
if (errors.length > 0) {
|
||||
const limited = errors.slice(0, MAX_DIAGNOSTICS_PER_FILE)
|
||||
const suffix =
|
||||
errors.length > MAX_DIAGNOSTICS_PER_FILE ? `\n... and ${errors.length - MAX_DIAGNOSTICS_PER_FILE} more` : ""
|
||||
output += `\n\nLSP errors detected in ${path.relative(Instance.worktree, target)}, please fix:\n<diagnostics file="${target}">\n${limited.map(LSP.Diagnostic.pretty).join("\n")}${suffix}\n</diagnostics>`
|
||||
}
|
||||
}
|
||||
|
||||
// Build per-file metadata for UI rendering
|
||||
const files = fileChanges.map((change) => ({
|
||||
filePath: change.filePath,
|
||||
relativePath: path.relative(Instance.worktree, change.movePath ?? change.filePath),
|
||||
type: change.type,
|
||||
diff: change.diff,
|
||||
before: change.oldContent,
|
||||
after: change.newContent,
|
||||
additions: change.additions,
|
||||
deletions: change.deletions,
|
||||
movePath: change.movePath,
|
||||
}))
|
||||
|
||||
return {
|
||||
title: output,
|
||||
metadata: {
|
||||
diff: totalDiff,
|
||||
files,
|
||||
diagnostics,
|
||||
},
|
||||
output,
|
||||
}
|
||||
},
|
||||
})
|
||||
1
packages/opencode/src/tool/apply_patch.txt
Normal file
1
packages/opencode/src/tool/apply_patch.txt
Normal file
@@ -0,0 +1 @@
|
||||
Use the `apply_patch` tool to edit files. This is a FREEFORM tool, so do not wrap the patch in JSON.
|
||||
@@ -37,7 +37,7 @@ export const BatchTool = Tool.define("batch", async () => {
|
||||
const discardedCalls = params.tool_calls.slice(10)
|
||||
|
||||
const { ToolRegistry } = await import("./registry")
|
||||
const availableTools = await ToolRegistry.tools("")
|
||||
const availableTools = await ToolRegistry.tools({ modelID: "", providerID: "" })
|
||||
const toolMap = new Map(availableTools.map((t) => [t.id, t]))
|
||||
|
||||
const executeCall = async (call: (typeof toolCalls)[0]) => {
|
||||
|
||||
@@ -1,201 +0,0 @@
|
||||
import z from "zod"
|
||||
import * as path from "path"
|
||||
import * as fs from "fs/promises"
|
||||
import { Tool } from "./tool"
|
||||
import { FileTime } from "../file/time"
|
||||
import { Bus } from "../bus"
|
||||
import { FileWatcher } from "../file/watcher"
|
||||
import { Instance } from "../project/instance"
|
||||
import { Patch } from "../patch"
|
||||
import { createTwoFilesPatch } from "diff"
|
||||
import { assertExternalDirectory } from "./external-directory"
|
||||
|
||||
const PatchParams = z.object({
|
||||
patchText: z.string().describe("The full patch text that describes all changes to be made"),
|
||||
})
|
||||
|
||||
export const PatchTool = Tool.define("patch", {
|
||||
description:
|
||||
"Apply a patch to modify multiple files. Supports adding, updating, and deleting files with context-aware changes.",
|
||||
parameters: PatchParams,
|
||||
async execute(params, ctx) {
|
||||
if (!params.patchText) {
|
||||
throw new Error("patchText is required")
|
||||
}
|
||||
|
||||
// Parse the patch to get hunks
|
||||
let hunks: Patch.Hunk[]
|
||||
try {
|
||||
const parseResult = Patch.parsePatch(params.patchText)
|
||||
hunks = parseResult.hunks
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to parse patch: ${error}`)
|
||||
}
|
||||
|
||||
if (hunks.length === 0) {
|
||||
throw new Error("No file changes found in patch")
|
||||
}
|
||||
|
||||
// Validate file paths and check permissions
|
||||
const fileChanges: Array<{
|
||||
filePath: string
|
||||
oldContent: string
|
||||
newContent: string
|
||||
type: "add" | "update" | "delete" | "move"
|
||||
movePath?: string
|
||||
}> = []
|
||||
|
||||
let totalDiff = ""
|
||||
|
||||
for (const hunk of hunks) {
|
||||
const filePath = path.resolve(Instance.directory, hunk.path)
|
||||
await assertExternalDirectory(ctx, filePath)
|
||||
|
||||
switch (hunk.type) {
|
||||
case "add":
|
||||
if (hunk.type === "add") {
|
||||
const oldContent = ""
|
||||
const newContent = hunk.contents
|
||||
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: "add",
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
}
|
||||
break
|
||||
|
||||
case "update":
|
||||
// Check if file exists for update
|
||||
const stats = await fs.stat(filePath).catch(() => null)
|
||||
if (!stats || stats.isDirectory()) {
|
||||
throw new Error(`File not found or is directory: ${filePath}`)
|
||||
}
|
||||
|
||||
// Read file and update time tracking (like edit tool does)
|
||||
await FileTime.assert(ctx.sessionID, filePath)
|
||||
const oldContent = await fs.readFile(filePath, "utf-8")
|
||||
let newContent = oldContent
|
||||
|
||||
// Apply the update chunks to get new content
|
||||
try {
|
||||
const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks)
|
||||
newContent = fileUpdate.content
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to apply update to ${filePath}: ${error}`)
|
||||
}
|
||||
|
||||
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
|
||||
|
||||
const movePath = hunk.move_path ? path.resolve(Instance.directory, hunk.move_path) : undefined
|
||||
await assertExternalDirectory(ctx, movePath)
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent,
|
||||
newContent,
|
||||
type: hunk.move_path ? "move" : "update",
|
||||
movePath,
|
||||
})
|
||||
|
||||
totalDiff += diff + "\n"
|
||||
break
|
||||
|
||||
case "delete":
|
||||
// Check if file exists for deletion
|
||||
await FileTime.assert(ctx.sessionID, filePath)
|
||||
const contentToDelete = await fs.readFile(filePath, "utf-8")
|
||||
const deleteDiff = createTwoFilesPatch(filePath, filePath, contentToDelete, "")
|
||||
|
||||
fileChanges.push({
|
||||
filePath,
|
||||
oldContent: contentToDelete,
|
||||
newContent: "",
|
||||
type: "delete",
|
||||
})
|
||||
|
||||
totalDiff += deleteDiff + "\n"
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Check permissions if needed
|
||||
await ctx.ask({
|
||||
permission: "edit",
|
||||
patterns: fileChanges.map((c) => path.relative(Instance.worktree, c.filePath)),
|
||||
always: ["*"],
|
||||
metadata: {
|
||||
diff: totalDiff,
|
||||
},
|
||||
})
|
||||
|
||||
// Apply the changes
|
||||
const changedFiles: string[] = []
|
||||
|
||||
for (const change of fileChanges) {
|
||||
switch (change.type) {
|
||||
case "add":
|
||||
// Create parent directories
|
||||
const addDir = path.dirname(change.filePath)
|
||||
if (addDir !== "." && addDir !== "/") {
|
||||
await fs.mkdir(addDir, { recursive: true })
|
||||
}
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "update":
|
||||
await fs.writeFile(change.filePath, change.newContent, "utf-8")
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
|
||||
case "move":
|
||||
if (change.movePath) {
|
||||
// Create parent directories for destination
|
||||
const moveDir = path.dirname(change.movePath)
|
||||
if (moveDir !== "." && moveDir !== "/") {
|
||||
await fs.mkdir(moveDir, { recursive: true })
|
||||
}
|
||||
// Write to new location
|
||||
await fs.writeFile(change.movePath, change.newContent, "utf-8")
|
||||
// Remove original
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.movePath)
|
||||
}
|
||||
break
|
||||
|
||||
case "delete":
|
||||
await fs.unlink(change.filePath)
|
||||
changedFiles.push(change.filePath)
|
||||
break
|
||||
}
|
||||
|
||||
// Update file time tracking
|
||||
FileTime.read(ctx.sessionID, change.filePath)
|
||||
if (change.movePath) {
|
||||
FileTime.read(ctx.sessionID, change.movePath)
|
||||
}
|
||||
}
|
||||
|
||||
// Publish file change events
|
||||
for (const filePath of changedFiles) {
|
||||
await Bus.publish(FileWatcher.Event.Updated, { file: filePath, event: "change" })
|
||||
}
|
||||
|
||||
// Generate output summary
|
||||
const relativePaths = changedFiles.map((filePath) => path.relative(Instance.worktree, filePath))
|
||||
const summary = `${fileChanges.length} files changed`
|
||||
|
||||
return {
|
||||
title: summary,
|
||||
metadata: {
|
||||
diff: totalDiff,
|
||||
},
|
||||
output: `Patch applied successfully. ${summary}:\n${relativePaths.map((p) => ` ${p}`).join("\n")}`,
|
||||
}
|
||||
},
|
||||
})
|
||||
@@ -1 +0,0 @@
|
||||
do not use
|
||||
@@ -26,6 +26,7 @@ import { Log } from "@/util/log"
|
||||
import { LspTool } from "./lsp"
|
||||
import { Truncate } from "./truncation"
|
||||
import { PlanExitTool, PlanEnterTool } from "./plan"
|
||||
import { ApplyPatchTool } from "./apply_patch"
|
||||
|
||||
export namespace ToolRegistry {
|
||||
const log = Log.create({ service: "tool.registry" })
|
||||
@@ -108,6 +109,7 @@ export namespace ToolRegistry {
|
||||
WebSearchTool,
|
||||
CodeSearchTool,
|
||||
SkillTool,
|
||||
ApplyPatchTool,
|
||||
...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [LspTool] : []),
|
||||
...(config.experimental?.batch_tool === true ? [BatchTool] : []),
|
||||
...(Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli" ? [PlanExitTool, PlanEnterTool] : []),
|
||||
@@ -119,15 +121,28 @@ export namespace ToolRegistry {
|
||||
return all().then((x) => x.map((t) => t.id))
|
||||
}
|
||||
|
||||
export async function tools(providerID: string, agent?: Agent.Info) {
|
||||
export async function tools(
|
||||
model: {
|
||||
providerID: string
|
||||
modelID: string
|
||||
},
|
||||
agent?: Agent.Info,
|
||||
) {
|
||||
const tools = await all()
|
||||
const result = await Promise.all(
|
||||
tools
|
||||
.filter((t) => {
|
||||
// Enable websearch/codesearch for zen users OR via enable flag
|
||||
if (t.id === "codesearch" || t.id === "websearch") {
|
||||
return providerID === "opencode" || Flag.OPENCODE_ENABLE_EXA
|
||||
return model.providerID === "opencode" || Flag.OPENCODE_ENABLE_EXA
|
||||
}
|
||||
|
||||
// use apply tool in same format as codex
|
||||
const usePatch =
|
||||
model.modelID.includes("gpt-") && !model.modelID.includes("oss") && !model.modelID.includes("gpt-4")
|
||||
if (t.id === "apply_patch") return usePatch
|
||||
if (t.id === "edit" || t.id === "write") return !usePatch
|
||||
|
||||
return true
|
||||
})
|
||||
.map(async (t) => {
|
||||
|
||||
@@ -1,75 +0,0 @@
|
||||
import { test, expect, describe, afterEach } from "bun:test"
|
||||
import { McpOAuthCallback } from "../../src/mcp/oauth-callback"
|
||||
import { parseRedirectUri } from "../../src/mcp/oauth-provider"
|
||||
|
||||
describe("McpOAuthCallback.ensureRunning", () => {
|
||||
afterEach(async () => {
|
||||
await McpOAuthCallback.stop()
|
||||
})
|
||||
|
||||
test("starts server with default config when no redirectUri provided", async () => {
|
||||
await McpOAuthCallback.ensureRunning()
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
})
|
||||
|
||||
test("starts server with custom redirectUri", async () => {
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18000/custom/callback")
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
})
|
||||
|
||||
test("is idempotent when called with same redirectUri", async () => {
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18001/callback")
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18001/callback")
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
})
|
||||
|
||||
test("restarts server when redirectUri changes", async () => {
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18002/path1")
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
|
||||
await McpOAuthCallback.ensureRunning("http://127.0.0.1:18003/path2")
|
||||
expect(McpOAuthCallback.isRunning()).toBe(true)
|
||||
})
|
||||
|
||||
test("isRunning returns false when not started", async () => {
|
||||
expect(McpOAuthCallback.isRunning()).toBe(false)
|
||||
})
|
||||
|
||||
test("isRunning returns false after stop", async () => {
|
||||
await McpOAuthCallback.ensureRunning()
|
||||
await McpOAuthCallback.stop()
|
||||
expect(McpOAuthCallback.isRunning()).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("parseRedirectUri", () => {
|
||||
test("returns defaults when no URI provided", () => {
|
||||
const result = parseRedirectUri()
|
||||
expect(result.port).toBe(19876)
|
||||
expect(result.path).toBe("/mcp/oauth/callback")
|
||||
})
|
||||
|
||||
test("parses port and path from URI", () => {
|
||||
const result = parseRedirectUri("http://127.0.0.1:8080/oauth/callback")
|
||||
expect(result.port).toBe(8080)
|
||||
expect(result.path).toBe("/oauth/callback")
|
||||
})
|
||||
|
||||
test("defaults to port 80 for http without explicit port", () => {
|
||||
const result = parseRedirectUri("http://127.0.0.1/callback")
|
||||
expect(result.port).toBe(80)
|
||||
expect(result.path).toBe("/callback")
|
||||
})
|
||||
|
||||
test("defaults to port 443 for https without explicit port", () => {
|
||||
const result = parseRedirectUri("https://127.0.0.1/callback")
|
||||
expect(result.port).toBe(443)
|
||||
expect(result.path).toBe("/callback")
|
||||
})
|
||||
|
||||
test("returns defaults for invalid URI", () => {
|
||||
const result = parseRedirectUri("not-a-valid-url")
|
||||
expect(result.port).toBe(19876)
|
||||
expect(result.path).toBe("/mcp/oauth/callback")
|
||||
})
|
||||
})
|
||||
@@ -649,7 +649,7 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
headers: {},
|
||||
} as any
|
||||
|
||||
test("strips itemId and reasoningEncryptedContent when store=false", () => {
|
||||
test("preserves itemId and reasoningEncryptedContent when store=false", () => {
|
||||
const msgs = [
|
||||
{
|
||||
role: "assistant",
|
||||
@@ -680,11 +680,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
const result = ProviderTransform.message(msgs, openaiModel, { store: false }) as any[]
|
||||
|
||||
expect(result).toHaveLength(1)
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[1].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("rs_123")
|
||||
expect(result[0].content[1].providerOptions?.openai?.itemId).toBe("msg_456")
|
||||
})
|
||||
|
||||
test("strips itemId and reasoningEncryptedContent when store=false even when not openai", () => {
|
||||
test("preserves itemId and reasoningEncryptedContent when store=false even when not openai", () => {
|
||||
const zenModel = {
|
||||
...openaiModel,
|
||||
providerID: "zen",
|
||||
@@ -719,11 +719,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
const result = ProviderTransform.message(msgs, zenModel, { store: false }) as any[]
|
||||
|
||||
expect(result).toHaveLength(1)
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[1].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("rs_123")
|
||||
expect(result[0].content[1].providerOptions?.openai?.itemId).toBe("msg_456")
|
||||
})
|
||||
|
||||
test("preserves other openai options when stripping itemId", () => {
|
||||
test("preserves other openai options including itemId", () => {
|
||||
const msgs = [
|
||||
{
|
||||
role: "assistant",
|
||||
@@ -744,11 +744,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
|
||||
const result = ProviderTransform.message(msgs, openaiModel, { store: false }) as any[]
|
||||
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
|
||||
expect(result[0].content[0].providerOptions?.openai?.otherOption).toBe("value")
|
||||
})
|
||||
|
||||
test("strips metadata for openai package even when store is true", () => {
|
||||
test("preserves metadata for openai package when store is true", () => {
|
||||
const msgs = [
|
||||
{
|
||||
role: "assistant",
|
||||
@@ -766,13 +766,13 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
},
|
||||
] as any[]
|
||||
|
||||
// openai package always strips itemId regardless of store value
|
||||
// openai package preserves itemId regardless of store value
|
||||
const result = ProviderTransform.message(msgs, openaiModel, { store: true }) as any[]
|
||||
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
|
||||
})
|
||||
|
||||
test("strips metadata for non-openai packages when store is false", () => {
|
||||
test("preserves metadata for non-openai packages when store is false", () => {
|
||||
const anthropicModel = {
|
||||
...openaiModel,
|
||||
providerID: "anthropic",
|
||||
@@ -799,10 +799,86 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
},
|
||||
] as any[]
|
||||
|
||||
// store=false triggers stripping even for non-openai packages
|
||||
// store=false preserves metadata for non-openai packages
|
||||
const result = ProviderTransform.message(msgs, anthropicModel, { store: false }) as any[]
|
||||
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
|
||||
})
|
||||
|
||||
test("preserves metadata using providerID key when store is false", () => {
|
||||
const opencodeModel = {
|
||||
...openaiModel,
|
||||
providerID: "opencode",
|
||||
api: {
|
||||
id: "opencode-test",
|
||||
url: "https://api.opencode.ai",
|
||||
npm: "@ai-sdk/openai-compatible",
|
||||
},
|
||||
}
|
||||
const msgs = [
|
||||
{
|
||||
role: "assistant",
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: "Hello",
|
||||
providerOptions: {
|
||||
opencode: {
|
||||
itemId: "msg_123",
|
||||
otherOption: "value",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
] as any[]
|
||||
|
||||
const result = ProviderTransform.message(msgs, opencodeModel, { store: false }) as any[]
|
||||
|
||||
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBe("msg_123")
|
||||
expect(result[0].content[0].providerOptions?.opencode?.otherOption).toBe("value")
|
||||
})
|
||||
|
||||
test("preserves itemId across all providerOptions keys", () => {
|
||||
const opencodeModel = {
|
||||
...openaiModel,
|
||||
providerID: "opencode",
|
||||
api: {
|
||||
id: "opencode-test",
|
||||
url: "https://api.opencode.ai",
|
||||
npm: "@ai-sdk/openai-compatible",
|
||||
},
|
||||
}
|
||||
const msgs = [
|
||||
{
|
||||
role: "assistant",
|
||||
providerOptions: {
|
||||
openai: { itemId: "msg_root" },
|
||||
opencode: { itemId: "msg_opencode" },
|
||||
extra: { itemId: "msg_extra" },
|
||||
},
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: "Hello",
|
||||
providerOptions: {
|
||||
openai: { itemId: "msg_openai_part" },
|
||||
opencode: { itemId: "msg_opencode_part" },
|
||||
extra: { itemId: "msg_extra_part" },
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
] as any[]
|
||||
|
||||
const result = ProviderTransform.message(msgs, opencodeModel, { store: false }) as any[]
|
||||
|
||||
expect(result[0].providerOptions?.openai?.itemId).toBe("msg_root")
|
||||
expect(result[0].providerOptions?.opencode?.itemId).toBe("msg_opencode")
|
||||
expect(result[0].providerOptions?.extra?.itemId).toBe("msg_extra")
|
||||
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_openai_part")
|
||||
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBe("msg_opencode_part")
|
||||
expect(result[0].content[0].providerOptions?.extra?.itemId).toBe("msg_extra_part")
|
||||
})
|
||||
|
||||
test("does not strip metadata for non-openai packages when store is not false", () => {
|
||||
@@ -838,6 +914,88 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
|
||||
})
|
||||
})
|
||||
|
||||
describe("ProviderTransform.message - providerOptions key remapping", () => {
|
||||
const createModel = (providerID: string, npm: string) =>
|
||||
({
|
||||
id: `${providerID}/test-model`,
|
||||
providerID,
|
||||
api: {
|
||||
id: "test-model",
|
||||
url: "https://api.test.com",
|
||||
npm,
|
||||
},
|
||||
name: "Test Model",
|
||||
capabilities: {
|
||||
temperature: true,
|
||||
reasoning: false,
|
||||
attachment: true,
|
||||
toolcall: true,
|
||||
input: { text: true, audio: false, image: true, video: false, pdf: true },
|
||||
output: { text: true, audio: false, image: false, video: false, pdf: false },
|
||||
interleaved: false,
|
||||
},
|
||||
cost: { input: 0.001, output: 0.002, cache: { read: 0.0001, write: 0.0002 } },
|
||||
limit: { context: 128000, output: 8192 },
|
||||
status: "active",
|
||||
options: {},
|
||||
headers: {},
|
||||
}) as any
|
||||
|
||||
test("azure keeps 'azure' key and does not remap to 'openai'", () => {
|
||||
const model = createModel("azure", "@ai-sdk/azure")
|
||||
const msgs = [
|
||||
{
|
||||
role: "user",
|
||||
content: "Hello",
|
||||
providerOptions: {
|
||||
azure: { someOption: "value" },
|
||||
},
|
||||
},
|
||||
] as any[]
|
||||
|
||||
const result = ProviderTransform.message(msgs, model, {})
|
||||
|
||||
expect(result[0].providerOptions?.azure).toEqual({ someOption: "value" })
|
||||
expect(result[0].providerOptions?.openai).toBeUndefined()
|
||||
})
|
||||
|
||||
test("openai with github-copilot npm remaps providerID to 'openai'", () => {
|
||||
const model = createModel("github-copilot", "@ai-sdk/github-copilot")
|
||||
const msgs = [
|
||||
{
|
||||
role: "user",
|
||||
content: "Hello",
|
||||
providerOptions: {
|
||||
"github-copilot": { someOption: "value" },
|
||||
},
|
||||
},
|
||||
] as any[]
|
||||
|
||||
const result = ProviderTransform.message(msgs, model, {})
|
||||
|
||||
expect(result[0].providerOptions?.openai).toEqual({ someOption: "value" })
|
||||
expect(result[0].providerOptions?.["github-copilot"]).toBeUndefined()
|
||||
})
|
||||
|
||||
test("bedrock remaps providerID to 'bedrock' key", () => {
|
||||
const model = createModel("my-bedrock", "@ai-sdk/amazon-bedrock")
|
||||
const msgs = [
|
||||
{
|
||||
role: "user",
|
||||
content: "Hello",
|
||||
providerOptions: {
|
||||
"my-bedrock": { someOption: "value" },
|
||||
},
|
||||
},
|
||||
] as any[]
|
||||
|
||||
const result = ProviderTransform.message(msgs, model, {})
|
||||
|
||||
expect(result[0].providerOptions?.bedrock).toEqual({ someOption: "value" })
|
||||
expect(result[0].providerOptions?.["my-bedrock"]).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe("ProviderTransform.variants", () => {
|
||||
const createMockModel = (overrides: Partial<any> = {}): any => ({
|
||||
id: "test/test-model",
|
||||
|
||||
@@ -264,6 +264,18 @@ describe("session.message-v2.toModelMessage", () => {
|
||||
role: "user",
|
||||
content: [{ type: "text", text: "run tool" }],
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: [
|
||||
{ type: "text", text: "The tool bash returned the following attachments:" },
|
||||
{
|
||||
type: "file",
|
||||
mediaType: "image/png",
|
||||
filename: "attachment.png",
|
||||
data: "https://example.com/attachment.png",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
role: "assistant",
|
||||
content: [
|
||||
@@ -285,21 +297,7 @@ describe("session.message-v2.toModelMessage", () => {
|
||||
type: "tool-result",
|
||||
toolCallId: "call-1",
|
||||
toolName: "bash",
|
||||
output: {
|
||||
type: "json",
|
||||
value: {
|
||||
output: "ok",
|
||||
attachments: [
|
||||
{
|
||||
...basePart(assistantID, "file-1"),
|
||||
type: "file",
|
||||
mime: "image/png",
|
||||
filename: "attachment.png",
|
||||
url: "https://example.com/attachment.png",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
output: { type: "text", value: "ok" },
|
||||
providerOptions: { openai: { tool: "meta" } },
|
||||
},
|
||||
],
|
||||
|
||||
515
packages/opencode/test/tool/apply_patch.test.ts
Normal file
515
packages/opencode/test/tool/apply_patch.test.ts
Normal file
@@ -0,0 +1,515 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import path from "path"
|
||||
import * as fs from "fs/promises"
|
||||
import { ApplyPatchTool } from "../../src/tool/apply_patch"
|
||||
import { Instance } from "../../src/project/instance"
|
||||
import { FileTime } from "../../src/file/time"
|
||||
import { tmpdir } from "../fixture/fixture"
|
||||
|
||||
const baseCtx = {
|
||||
sessionID: "test",
|
||||
messageID: "",
|
||||
callID: "",
|
||||
agent: "build",
|
||||
abort: AbortSignal.any([]),
|
||||
metadata: () => {},
|
||||
}
|
||||
|
||||
type AskInput = {
|
||||
permission: string
|
||||
patterns: string[]
|
||||
always: string[]
|
||||
metadata: { diff: string }
|
||||
}
|
||||
|
||||
type ToolCtx = typeof baseCtx & {
|
||||
ask: (input: AskInput) => Promise<void>
|
||||
}
|
||||
|
||||
const execute = async (params: { patchText: string }, ctx: ToolCtx) => {
|
||||
const tool = await ApplyPatchTool.init()
|
||||
return tool.execute(params, ctx)
|
||||
}
|
||||
|
||||
const makeCtx = () => {
|
||||
const calls: AskInput[] = []
|
||||
const ctx: ToolCtx = {
|
||||
...baseCtx,
|
||||
ask: async (input) => {
|
||||
calls.push(input)
|
||||
},
|
||||
}
|
||||
|
||||
return { ctx, calls }
|
||||
}
|
||||
|
||||
describe("tool.apply_patch freeform", () => {
|
||||
test("requires patchText", async () => {
|
||||
const { ctx } = makeCtx()
|
||||
await expect(execute({ patchText: "" }, ctx)).rejects.toThrow("patchText is required")
|
||||
})
|
||||
|
||||
test("rejects invalid patch format", async () => {
|
||||
const { ctx } = makeCtx()
|
||||
await expect(execute({ patchText: "invalid patch" }, ctx)).rejects.toThrow("apply_patch verification failed")
|
||||
})
|
||||
|
||||
test("rejects empty patch", async () => {
|
||||
const { ctx } = makeCtx()
|
||||
const emptyPatch = "*** Begin Patch\n*** End Patch"
|
||||
await expect(execute({ patchText: emptyPatch }, ctx)).rejects.toThrow("patch rejected: empty patch")
|
||||
})
|
||||
|
||||
test("applies add/update/delete in one patch", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx, calls } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const modifyPath = path.join(fixture.path, "modify.txt")
|
||||
const deletePath = path.join(fixture.path, "delete.txt")
|
||||
await fs.writeFile(modifyPath, "line1\nline2\n", "utf-8")
|
||||
await fs.writeFile(deletePath, "obsolete\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, modifyPath)
|
||||
FileTime.read(ctx.sessionID, deletePath)
|
||||
|
||||
const patchText =
|
||||
"*** Begin Patch\n*** Add File: nested/new.txt\n+created\n*** Delete File: delete.txt\n*** Update File: modify.txt\n@@\n-line2\n+changed\n*** End Patch"
|
||||
|
||||
const result = await execute({ patchText }, ctx)
|
||||
|
||||
expect(result.title).toContain("Success. Updated the following files")
|
||||
expect(result.output).toContain("Success. Updated the following files")
|
||||
expect(result.metadata.diff).toContain("Index:")
|
||||
expect(calls.length).toBe(1)
|
||||
|
||||
const added = await fs.readFile(path.join(fixture.path, "nested", "new.txt"), "utf-8")
|
||||
expect(added).toBe("created\n")
|
||||
expect(await fs.readFile(modifyPath, "utf-8")).toBe("line1\nchanged\n")
|
||||
await expect(fs.readFile(deletePath, "utf-8")).rejects.toThrow()
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("applies multiple hunks to one file", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "multi.txt")
|
||||
await fs.writeFile(target, "line1\nline2\nline3\nline4\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
const patchText =
|
||||
"*** Begin Patch\n*** Update File: multi.txt\n@@\n-line2\n+changed2\n@@\n-line4\n+changed4\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
|
||||
expect(await fs.readFile(target, "utf-8")).toBe("line1\nchanged2\nline3\nchanged4\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("inserts lines with insert-only hunk", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "insert_only.txt")
|
||||
await fs.writeFile(target, "alpha\nomega\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
const patchText = "*** Begin Patch\n*** Update File: insert_only.txt\n@@\n alpha\n+beta\n omega\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
|
||||
expect(await fs.readFile(target, "utf-8")).toBe("alpha\nbeta\nomega\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("appends trailing newline on update", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "no_newline.txt")
|
||||
await fs.writeFile(target, "no newline at end", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
const patchText =
|
||||
"*** Begin Patch\n*** Update File: no_newline.txt\n@@\n-no newline at end\n+first line\n+second line\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
|
||||
const contents = await fs.readFile(target, "utf-8")
|
||||
expect(contents.endsWith("\n")).toBe(true)
|
||||
expect(contents).toBe("first line\nsecond line\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("moves file to a new directory", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const original = path.join(fixture.path, "old", "name.txt")
|
||||
await fs.mkdir(path.dirname(original), { recursive: true })
|
||||
await fs.writeFile(original, "old content\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, original)
|
||||
|
||||
const patchText =
|
||||
"*** Begin Patch\n*** Update File: old/name.txt\n*** Move to: renamed/dir/name.txt\n@@\n-old content\n+new content\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
|
||||
const moved = path.join(fixture.path, "renamed", "dir", "name.txt")
|
||||
await expect(fs.readFile(original, "utf-8")).rejects.toThrow()
|
||||
expect(await fs.readFile(moved, "utf-8")).toBe("new content\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("moves file overwriting existing destination", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const original = path.join(fixture.path, "old", "name.txt")
|
||||
const destination = path.join(fixture.path, "renamed", "dir", "name.txt")
|
||||
await fs.mkdir(path.dirname(original), { recursive: true })
|
||||
await fs.mkdir(path.dirname(destination), { recursive: true })
|
||||
await fs.writeFile(original, "from\n", "utf-8")
|
||||
await fs.writeFile(destination, "existing\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, original)
|
||||
|
||||
const patchText =
|
||||
"*** Begin Patch\n*** Update File: old/name.txt\n*** Move to: renamed/dir/name.txt\n@@\n-from\n+new\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
|
||||
await expect(fs.readFile(original, "utf-8")).rejects.toThrow()
|
||||
expect(await fs.readFile(destination, "utf-8")).toBe("new\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("adds file overwriting existing file", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "duplicate.txt")
|
||||
await fs.writeFile(target, "old content\n", "utf-8")
|
||||
|
||||
const patchText = "*** Begin Patch\n*** Add File: duplicate.txt\n+new content\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
expect(await fs.readFile(target, "utf-8")).toBe("new content\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("rejects update when target file is missing", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = "*** Begin Patch\n*** Update File: missing.txt\n@@\n-nope\n+better\n*** End Patch"
|
||||
|
||||
await expect(execute({ patchText }, ctx)).rejects.toThrow(
|
||||
"apply_patch verification failed: Failed to read file to update",
|
||||
)
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("rejects delete when file is missing", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = "*** Begin Patch\n*** Delete File: missing.txt\n*** End Patch"
|
||||
|
||||
await expect(execute({ patchText }, ctx)).rejects.toThrow()
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("rejects delete when target is a directory", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const dirPath = path.join(fixture.path, "dir")
|
||||
await fs.mkdir(dirPath)
|
||||
|
||||
const patchText = "*** Begin Patch\n*** Delete File: dir\n*** End Patch"
|
||||
|
||||
await expect(execute({ patchText }, ctx)).rejects.toThrow()
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("rejects invalid hunk header", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = "*** Begin Patch\n*** Frobnicate File: foo\n*** End Patch"
|
||||
|
||||
await expect(execute({ patchText }, ctx)).rejects.toThrow("apply_patch verification failed")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("rejects update with missing context", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "modify.txt")
|
||||
await fs.writeFile(target, "line1\nline2\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
const patchText = "*** Begin Patch\n*** Update File: modify.txt\n@@\n-missing\n+changed\n*** End Patch"
|
||||
|
||||
await expect(execute({ patchText }, ctx)).rejects.toThrow("apply_patch verification failed")
|
||||
expect(await fs.readFile(target, "utf-8")).toBe("line1\nline2\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("verification failure leaves no side effects", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText =
|
||||
"*** Begin Patch\n*** Add File: created.txt\n+hello\n*** Update File: missing.txt\n@@\n-old\n+new\n*** End Patch"
|
||||
|
||||
await expect(execute({ patchText }, ctx)).rejects.toThrow()
|
||||
|
||||
const createdPath = path.join(fixture.path, "created.txt")
|
||||
await expect(fs.readFile(createdPath, "utf-8")).rejects.toThrow()
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("supports end of file anchor", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "tail.txt")
|
||||
await fs.writeFile(target, "alpha\nlast\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
const patchText = "*** Begin Patch\n*** Update File: tail.txt\n@@\n-last\n+end\n*** End of File\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
expect(await fs.readFile(target, "utf-8")).toBe("alpha\nend\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("rejects missing second chunk context", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "two_chunks.txt")
|
||||
await fs.writeFile(target, "a\nb\nc\nd\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
const patchText = "*** Begin Patch\n*** Update File: two_chunks.txt\n@@\n-b\n+B\n\n-d\n+D\n*** End Patch"
|
||||
|
||||
await expect(execute({ patchText }, ctx)).rejects.toThrow()
|
||||
expect(await fs.readFile(target, "utf-8")).toBe("a\nb\nc\nd\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("disambiguates change context with @@ header", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "multi_ctx.txt")
|
||||
await fs.writeFile(target, "fn a\nx=10\ny=2\nfn b\nx=10\ny=20\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
const patchText = "*** Begin Patch\n*** Update File: multi_ctx.txt\n@@ fn b\n-x=10\n+x=11\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
expect(await fs.readFile(target, "utf-8")).toBe("fn a\nx=10\ny=2\nfn b\nx=11\ny=20\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("EOF anchor matches from end of file first", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "eof_anchor.txt")
|
||||
// File has duplicate "marker" lines - one in middle, one at end
|
||||
await fs.writeFile(target, "start\nmarker\nmiddle\nmarker\nend\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
// With EOF anchor, should match the LAST "marker" line, not the first
|
||||
const patchText =
|
||||
"*** Begin Patch\n*** Update File: eof_anchor.txt\n@@\n-marker\n-end\n+marker-changed\n+end\n*** End of File\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
// First marker unchanged, second marker changed
|
||||
expect(await fs.readFile(target, "utf-8")).toBe("start\nmarker\nmiddle\nmarker-changed\nend\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("parses heredoc-wrapped patch", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = `cat <<'EOF'
|
||||
*** Begin Patch
|
||||
*** Add File: heredoc_test.txt
|
||||
+heredoc content
|
||||
*** End Patch
|
||||
EOF`
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
const content = await fs.readFile(path.join(fixture.path, "heredoc_test.txt"), "utf-8")
|
||||
expect(content).toBe("heredoc content\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("parses heredoc-wrapped patch without cat", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = `<<EOF
|
||||
*** Begin Patch
|
||||
*** Add File: heredoc_no_cat.txt
|
||||
+no cat prefix
|
||||
*** End Patch
|
||||
EOF`
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
const content = await fs.readFile(path.join(fixture.path, "heredoc_no_cat.txt"), "utf-8")
|
||||
expect(content).toBe("no cat prefix\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("matches with trailing whitespace differences", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "trailing_ws.txt")
|
||||
// File has trailing spaces on some lines
|
||||
await fs.writeFile(target, "line1 \nline2\nline3 \n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
// Patch doesn't have trailing spaces - should still match via rstrip pass
|
||||
const patchText = "*** Begin Patch\n*** Update File: trailing_ws.txt\n@@\n-line2\n+changed\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
expect(await fs.readFile(target, "utf-8")).toBe("line1 \nchanged\nline3 \n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("matches with leading whitespace differences", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "leading_ws.txt")
|
||||
// File has leading spaces
|
||||
await fs.writeFile(target, " line1\nline2\n line3\n", "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
// Patch without leading spaces - should match via trim pass
|
||||
const patchText = "*** Begin Patch\n*** Update File: leading_ws.txt\n@@\n-line2\n+changed\n*** End Patch"
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
expect(await fs.readFile(target, "utf-8")).toBe(" line1\nchanged\n line3\n")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("matches with Unicode punctuation differences", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
const { ctx } = makeCtx()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const target = path.join(fixture.path, "unicode.txt")
|
||||
// File has fancy Unicode quotes (U+201C, U+201D) and em-dash (U+2014)
|
||||
const leftQuote = "\u201C"
|
||||
const rightQuote = "\u201D"
|
||||
const emDash = "\u2014"
|
||||
await fs.writeFile(target, `He said ${leftQuote}hello${rightQuote}\nsome${emDash}dash\nend\n`, "utf-8")
|
||||
FileTime.read(ctx.sessionID, target)
|
||||
|
||||
// Patch uses ASCII equivalents - should match via normalized pass
|
||||
// The replacement uses ASCII quotes from the patch (not preserving Unicode)
|
||||
const patchText =
|
||||
'*** Begin Patch\n*** Update File: unicode.txt\n@@\n-He said "hello"\n+He said "hi"\n*** End Patch'
|
||||
|
||||
await execute({ patchText }, ctx)
|
||||
// Result has ASCII quotes because that's what the patch specifies
|
||||
expect(await fs.readFile(target, "utf-8")).toBe(`He said "hi"\nsome${emDash}dash\nend\n`)
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,261 +0,0 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import path from "path"
|
||||
import { PatchTool } from "../../src/tool/patch"
|
||||
import { Instance } from "../../src/project/instance"
|
||||
import { tmpdir } from "../fixture/fixture"
|
||||
import { PermissionNext } from "../../src/permission/next"
|
||||
import * as fs from "fs/promises"
|
||||
|
||||
const ctx = {
|
||||
sessionID: "test",
|
||||
messageID: "",
|
||||
callID: "",
|
||||
agent: "build",
|
||||
abort: AbortSignal.any([]),
|
||||
metadata: () => {},
|
||||
ask: async () => {},
|
||||
}
|
||||
|
||||
const patchTool = await PatchTool.init()
|
||||
|
||||
describe("tool.patch", () => {
|
||||
test("should validate required parameters", async () => {
|
||||
await Instance.provide({
|
||||
directory: "/tmp",
|
||||
fn: async () => {
|
||||
expect(patchTool.execute({ patchText: "" }, ctx)).rejects.toThrow("patchText is required")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("should validate patch format", async () => {
|
||||
await Instance.provide({
|
||||
directory: "/tmp",
|
||||
fn: async () => {
|
||||
expect(patchTool.execute({ patchText: "invalid patch" }, ctx)).rejects.toThrow("Failed to parse patch")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("should handle empty patch", async () => {
|
||||
await Instance.provide({
|
||||
directory: "/tmp",
|
||||
fn: async () => {
|
||||
const emptyPatch = `*** Begin Patch
|
||||
*** End Patch`
|
||||
|
||||
expect(patchTool.execute({ patchText: emptyPatch }, ctx)).rejects.toThrow("No file changes found in patch")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test.skip("should ask permission for files outside working directory", async () => {
|
||||
await Instance.provide({
|
||||
directory: "/tmp",
|
||||
fn: async () => {
|
||||
const maliciousPatch = `*** Begin Patch
|
||||
*** Add File: /etc/passwd
|
||||
+malicious content
|
||||
*** End Patch`
|
||||
patchTool.execute({ patchText: maliciousPatch }, ctx)
|
||||
// TODO: this sucks
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000))
|
||||
const pending = await PermissionNext.list()
|
||||
expect(pending.find((p) => p.sessionID === ctx.sessionID)).toBeDefined()
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("should handle simple add file operation", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: test-file.txt
|
||||
+Hello World
|
||||
+This is a test file
|
||||
*** End Patch`
|
||||
|
||||
const result = await patchTool.execute({ patchText }, ctx)
|
||||
|
||||
expect(result.title).toContain("files changed")
|
||||
expect(result.metadata.diff).toBeDefined()
|
||||
expect(result.output).toContain("Patch applied successfully")
|
||||
|
||||
// Verify file was created
|
||||
const filePath = path.join(fixture.path, "test-file.txt")
|
||||
const content = await fs.readFile(filePath, "utf-8")
|
||||
expect(content).toBe("Hello World\nThis is a test file")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("should handle file with context update", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: config.js
|
||||
+const API_KEY = "test-key"
|
||||
+const DEBUG = false
|
||||
+const VERSION = "1.0"
|
||||
*** End Patch`
|
||||
|
||||
const result = await patchTool.execute({ patchText }, ctx)
|
||||
|
||||
expect(result.title).toContain("files changed")
|
||||
expect(result.metadata.diff).toBeDefined()
|
||||
expect(result.output).toContain("Patch applied successfully")
|
||||
|
||||
// Verify file was created with correct content
|
||||
const filePath = path.join(fixture.path, "config.js")
|
||||
const content = await fs.readFile(filePath, "utf-8")
|
||||
expect(content).toBe('const API_KEY = "test-key"\nconst DEBUG = false\nconst VERSION = "1.0"')
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("should handle multiple file operations", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: file1.txt
|
||||
+Content of file 1
|
||||
*** Add File: file2.txt
|
||||
+Content of file 2
|
||||
*** Add File: file3.txt
|
||||
+Content of file 3
|
||||
*** End Patch`
|
||||
|
||||
const result = await patchTool.execute({ patchText }, ctx)
|
||||
|
||||
expect(result.title).toContain("3 files changed")
|
||||
expect(result.metadata.diff).toBeDefined()
|
||||
expect(result.output).toContain("Patch applied successfully")
|
||||
|
||||
// Verify all files were created
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
const filePath = path.join(fixture.path, `file${i}.txt`)
|
||||
const content = await fs.readFile(filePath, "utf-8")
|
||||
expect(content).toBe(`Content of file ${i}`)
|
||||
}
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("should create parent directories when adding nested files", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: deep/nested/file.txt
|
||||
+Deep nested content
|
||||
*** End Patch`
|
||||
|
||||
const result = await patchTool.execute({ patchText }, ctx)
|
||||
|
||||
expect(result.title).toContain("files changed")
|
||||
expect(result.output).toContain("Patch applied successfully")
|
||||
|
||||
// Verify nested file was created
|
||||
const nestedPath = path.join(fixture.path, "deep", "nested", "file.txt")
|
||||
const exists = await fs
|
||||
.access(nestedPath)
|
||||
.then(() => true)
|
||||
.catch(() => false)
|
||||
expect(exists).toBe(true)
|
||||
|
||||
const content = await fs.readFile(nestedPath, "utf-8")
|
||||
expect(content).toBe("Deep nested content")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("should generate proper unified diff in metadata", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
// First create a file with simple content
|
||||
const patchText1 = `*** Begin Patch
|
||||
*** Add File: test.txt
|
||||
+line 1
|
||||
+line 2
|
||||
+line 3
|
||||
*** End Patch`
|
||||
|
||||
await patchTool.execute({ patchText: patchText1 }, ctx)
|
||||
|
||||
// Now create an update patch
|
||||
const patchText2 = `*** Begin Patch
|
||||
*** Update File: test.txt
|
||||
@@
|
||||
line 1
|
||||
-line 2
|
||||
+line 2 updated
|
||||
line 3
|
||||
*** End Patch`
|
||||
|
||||
const result = await patchTool.execute({ patchText: patchText2 }, ctx)
|
||||
|
||||
expect(result.metadata.diff).toBeDefined()
|
||||
expect(result.metadata.diff).toContain("@@")
|
||||
expect(result.metadata.diff).toContain("-line 2")
|
||||
expect(result.metadata.diff).toContain("+line 2 updated")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("should handle complex patch with multiple operations", async () => {
|
||||
await using fixture = await tmpdir()
|
||||
|
||||
await Instance.provide({
|
||||
directory: fixture.path,
|
||||
fn: async () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: new.txt
|
||||
+This is a new file
|
||||
+with multiple lines
|
||||
*** Add File: existing.txt
|
||||
+old content
|
||||
+new line
|
||||
+more content
|
||||
*** Add File: config.json
|
||||
+{
|
||||
+ "version": "1.0",
|
||||
+ "debug": true
|
||||
+}
|
||||
*** End Patch`
|
||||
|
||||
const result = await patchTool.execute({ patchText }, ctx)
|
||||
|
||||
expect(result.title).toContain("3 files changed")
|
||||
expect(result.metadata.diff).toBeDefined()
|
||||
expect(result.output).toContain("Patch applied successfully")
|
||||
|
||||
// Verify all files were created
|
||||
const newPath = path.join(fixture.path, "new.txt")
|
||||
const newContent = await fs.readFile(newPath, "utf-8")
|
||||
expect(newContent).toBe("This is a new file\nwith multiple lines")
|
||||
|
||||
const existingPath = path.join(fixture.path, "existing.txt")
|
||||
const existingContent = await fs.readFile(existingPath, "utf-8")
|
||||
expect(existingContent).toBe("old content\nnew line\nmore content")
|
||||
|
||||
const configPath = path.join(fixture.path, "config.json")
|
||||
const configContent = await fs.readFile(configPath, "utf-8")
|
||||
expect(configContent).toBe('{\n "version": "1.0",\n "debug": true\n}')
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "@opencode-ai/plugin",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "@opencode-ai/sdk",
|
||||
"version": "1.1.23",
|
||||
"version": "1.1.25",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
@@ -20,7 +20,7 @@
|
||||
"dist"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@hey-api/openapi-ts": "0.88.1",
|
||||
"@hey-api/openapi-ts": "0.90.4",
|
||||
"@tsconfig/node22": "catalog:",
|
||||
"@types/node": "catalog:",
|
||||
"typescript": "catalog:",
|
||||
|
||||
@@ -162,10 +162,16 @@ export const createClient = (config: Config = {}): Client => {
|
||||
case "arrayBuffer":
|
||||
case "blob":
|
||||
case "formData":
|
||||
case "json":
|
||||
case "text":
|
||||
data = await response[parseAs]()
|
||||
break
|
||||
case "json": {
|
||||
// Some servers return 200 with no Content-Length and empty body.
|
||||
// response.json() would throw; read as text and parse if non-empty.
|
||||
const text = await response.text()
|
||||
data = text ? JSON.parse(text) : {}
|
||||
break
|
||||
}
|
||||
case "stream":
|
||||
return opts.responseStyle === "data"
|
||||
? response.body
|
||||
@@ -244,6 +250,7 @@ export const createClient = (config: Config = {}): Client => {
|
||||
}
|
||||
return request
|
||||
},
|
||||
serializedBody: getValidRequestBody(opts) as BodyInit | null | undefined,
|
||||
url,
|
||||
})
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user