Compare commits


15 Commits

Author SHA1 Message Date
Kujtim Hoxha
16103e013c update aur 2025-05-14 23:19:06 +02:00
Kujtim Hoxha
44bf46209e update aur 2025-05-14 23:11:56 +02:00
Kujtim Hoxha
b04faf0a3a update readme 2025-05-14 22:59:28 +02:00
Kujtim Hoxha
90084ce43d Context Window Warning (#152)
* context window warning & compact command

* auto compact

* fix permissions

* update readme

* fix 3.5 context window

* small update

* remove unused interface

* remove unused msg
2025-05-09 19:30:57 +02:00
Nicholas Hamilton
9345830c8a Fix filepicker manual input (#146)
* fix: allows to type i while manual inputting filepath

* fix: file selection in filepicker focus mode

* remove duplicate code
2025-05-09 16:40:06 +02:00
Kujtim Hoxha
5307100f89 small fix 2025-05-09 16:37:02 +02:00
Ed Zynda
a58e607c5f feat: custom commands (#133)
* Implement custom commands

* Add User: prefix

* Reuse var

* Check if the agent is busy and if so report a warning

* Update README

* fix typo

* Implement user and project scoped custom commands

* Allow for $ARGUMENTS

* UI tweaks

* Update internal/tui/components/dialog/arguments.go

Co-authored-by: Kujtim Hoxha <kujtimii.h@gmail.com>

* Also search in $HOME/.opencode/commands

---------

Co-authored-by: Kujtim Hoxha <kujtimii.h@gmail.com>
2025-05-09 16:33:35 +02:00
mineo
cd04c44517 replace github.com/google/generative-ai-go with github.com/googleapis/go-genai (#138)
* replace to github.com/googleapis/go-genai

* fix history logic

* small fixes

---------

Co-authored-by: Kujtim Hoxha <kujtimii.h@gmail.com>
2025-05-09 14:15:38 +02:00
Joshua LaMorey-Salzmann
88711db796 Config fix correcting loose viper string check, default model now set correctly (#147) 2025-05-05 09:40:58 +02:00
phantomreactor
9fec8df7d0 add support for images (#144) 2025-05-02 22:23:58 +02:00
Kujtim Hoxha
603a3e3c71 add xai support (#135) 2025-05-01 14:17:33 +02:00
Aiden Cline
e14de7a211 fix: tweak the logic in config to ensure that env vs file configurations merge properly (#115) 2025-05-01 13:22:48 +02:00
Garrett Ladley
004cfe7e8e feat: test for getContextFromPaths (#105)
* feat: test for getContextFromPaths

* fix: use testify
2025-05-01 12:55:28 +02:00
Adam
58705a1352 fix: more intuitive keybinds (#121) 2025-05-01 12:51:07 +02:00
Adam
82de14371d feat: themes (#113)
* feat: themes

* feat: flexoki theme

* feat: onedark theme

* feat: monokai pro theme

* feat: opencode theme (default)

* feat: dracula theme

* feat: tokyonight theme

* feat: tron theme

* some small fixes

---------

Co-authored-by: Kujtim Hoxha <kujtimii.h@gmail.com>
2025-05-01 12:49:26 +02:00
3080 changed files with 41425 additions and 340391 deletions


@@ -1,9 +0,0 @@
root = true
[*]
charset = utf-8
insert_final_newline = true
end_of_line = lf
indent_style = space
indent_size = 2
max_line_length = 80

.github/CODEOWNERS vendored (4 changed lines)

@@ -1,4 +0,0 @@
# web + desktop packages
packages/app/ @adamdotdevin
packages/tauri/ @adamdotdevin
packages/desktop/ @adamdotdevin


@@ -1,67 +0,0 @@
name: Bug report
description: Report an issue that should be fixed
labels: ["bug"]
body:
- type: textarea
id: description
attributes:
label: Description
description: Describe the bug you encountered
placeholder: What happened?
validations:
required: true
- type: input
id: plugins
attributes:
label: Plugins
description: What plugins are you using?
validations:
required: false
- type: input
id: opencode-version
attributes:
label: OpenCode version
description: What version of OpenCode are you using?
validations:
required: false
- type: textarea
id: reproduce
attributes:
label: Steps to reproduce
description: How can we reproduce this issue?
placeholder: |
1.
2.
3.
validations:
required: false
- type: textarea
id: screenshot-or-link
attributes:
label: Screenshot and/or share link
description: Run `/share` to get a share link, or attach a screenshot
placeholder: Paste link or drag and drop screenshot here
validations:
required: false
- type: input
id: os
attributes:
label: Operating System
description: what OS are you using?
placeholder: e.g., macOS 26.0.1, Ubuntu 22.04, Windows 11
validations:
required: false
- type: input
id: terminal
attributes:
label: Terminal
description: what terminal are you using?
placeholder: e.g., iTerm2, Ghostty, Alacritty, Windows Terminal
validations:
required: false


@@ -1,5 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: 💬 Discord Community
url: https://discord.gg/opencode
about: For quick questions or real-time discussion. Note that issues are searchable and help others with the same question.


@@ -1,20 +0,0 @@
name: 🚀 Feature Request
description: Suggest an idea, feature, or enhancement
labels: [discussion]
title: "[FEATURE]:"
body:
- type: checkboxes
id: verified
attributes:
label: Feature hasn't been suggested before.
options:
- label: I have verified this feature I'm about to request hasn't been suggested before.
required: true
- type: textarea
attributes:
label: Describe the enhancement you want to request
description: What do you want to change or add? What are the benefits of implementing this? Try to be detailed so we can understand your request better :)
validations:
required: true


@@ -1,11 +0,0 @@
name: Question
description: Ask a question
labels: ["question"]
body:
- type: textarea
id: question
attributes:
label: Question
description: What's your question?
validations:
required: true


@@ -1,22 +0,0 @@
name: "Setup Bun"
description: "Setup Bun with caching and install dependencies"
runs:
using: "composite"
steps:
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
bun-version-file: package.json
- name: Cache ~/.bun
id: cache-bun
uses: actions/cache@v4
with:
path: ~/.bun
key: ${{ runner.os }}-bun-${{ hashFiles('package.json') }}-${{ hashFiles('bun.lockb', 'bun.lock') }}
restore-keys: |
${{ runner.os }}-bun-${{ hashFiles('package.json') }}-
- name: Install dependencies
run: bun install
shell: bash


@@ -1,71 +0,0 @@
#
# This file is intentionally in the wrong dir, will move and add later....
#
# name: publish-python-sdk
# on:
# release:
# types: [published]
# workflow_dispatch:
# jobs:
# publish:
# runs-on: ubuntu-latest
# permissions:
# contents: read
# steps:
# - name: Checkout repository
# uses: actions/checkout@v4
# - name: Setup Bun
# uses: oven-sh/setup-bun@v1
# with:
# bun-version: 1.2.21
# - name: Install dependencies (JS/Bun)
# run: bun install
# - name: Install uv
# shell: bash
# run: curl -LsSf https://astral.sh/uv/install.sh | sh
# - name: Generate Python SDK from OpenAPI (CLI)
# shell: bash
# run: |
# ~/.local/bin/uv run --project packages/sdk/python python packages/sdk/python/scripts/generate.py --source cli
# - name: Sync Python dependencies
# shell: bash
# run: |
# ~/.local/bin/uv sync --dev --project packages/sdk/python
# - name: Set version from release tag
# shell: bash
# run: |
# TAG="${GITHUB_REF_NAME:-}"
# if [ -z "$TAG" ]; then
# TAG="$(git describe --tags --abbrev=0 || echo 0.0.0)"
# fi
# echo "Using version: $TAG"
# VERSION="$TAG" ~/.local/bin/uv run --project packages/sdk/python python - <<'PY'
# import os, re, pathlib
# root = pathlib.Path('packages/sdk/python')
# pt = (root / 'pyproject.toml').read_text()
# version = os.environ.get('VERSION','0.0.0').lstrip('v')
# pt = re.sub(r'(?m)^(version\s*=\s*")[^"]+("\s*)$', f"\\1{version}\\2", pt)
# (root / 'pyproject.toml').write_text(pt)
# # Also update generator config override for consistency
# cfgp = root / 'openapi-python-client.yaml'
# if cfgp.exists():
# cfg = cfgp.read_text()
# cfg = re.sub(r'(?m)^(package_version_override:\s*)\S+$', f"\\1{version}", cfg)
# cfgp.write_text(cfg)
# PY
# - name: Build and publish to PyPI
# env:
# PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
# shell: bash
# run: |
# ~/.local/bin/uv run --project packages/sdk/python python packages/sdk/python/scripts/publish.py


@@ -1,3 +0,0 @@
### What does this PR do?
### How did you verify your code works?

.github/workflows/build.yml vendored (new file, 37 lines)

@@ -0,0 +1,37 @@
name: build
on:
workflow_dispatch:
push:
branches:
- main
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
packages: write
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- run: git fetch --force --tags
- uses: actions/setup-go@v5
with:
go-version: ">=1.23.2"
cache: true
cache-dependency-path: go.sum
- run: go mod download
- uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser
version: latest
args: build --snapshot --clean
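The same snapshot build can be reproduced locally; a minimal sketch, assuming GoReleaser and a Go toolchain matching the >=1.23.2 constraint above are installed:

# local equivalent of the workflow steps above (illustrative)
git fetch --force --tags
go mod download
goreleaser build --snapshot --clean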


@@ -1,166 +0,0 @@
name: Daily Issues Recap
on:
schedule:
# Run at 6 PM EST (23:00 UTC, or 22:00 UTC during daylight saving)
- cron: "0 23 * * *"
workflow_dispatch: # Allow manual trigger for testing
jobs:
daily-recap:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
issues: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Generate daily issues recap
id: recap
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh issue*": "allow",
"gh search*": "allow"
},
"webfetch": "deny",
"edit": "deny",
"write": "deny"
}
run: |
# Get today's date range
TODAY=$(date -u +%Y-%m-%d)
opencode run -m opencode/claude-sonnet-4-5 "Generate a daily issues recap for the OpenCode repository.
TODAY'S DATE: ${TODAY}
STEP 1: Gather today's issues
Search for all issues created today (${TODAY}) using:
gh issue list --repo ${{ github.repository }} --state all --search \"created:${TODAY}\" --json number,title,body,labels,state,comments,createdAt,author --limit 500
STEP 2: Analyze and categorize
For each issue created today, categorize it:
**Severity Assessment:**
- CRITICAL: Crashes, data loss, security issues, blocks major functionality
- HIGH: Significant bugs affecting many users, important features broken
- MEDIUM: Bugs with workarounds, minor features broken
- LOW: Minor issues, cosmetic, nice-to-haves
**Activity Assessment:**
- Note issues with high comment counts or engagement
- Note issues from repeat reporters (check if author has filed before)
STEP 3: Cross-reference with existing issues
For issues that seem like feature requests or recurring bugs:
- Search for similar older issues to identify patterns
- Note if this is a frequently requested feature
- Identify any issues that are duplicates of long-standing requests
STEP 4: Generate the recap
Create a structured recap with these sections:
===DISCORD_START===
**Daily Issues Recap - ${TODAY}**
**Summary Stats**
- Total issues opened today: [count]
- By category: [bugs/features/questions]
**Critical/High Priority Issues**
[List any CRITICAL or HIGH severity issues with brief descriptions and issue numbers]
**Most Active/Discussed**
[Issues with significant engagement or from active community members]
**Trending Topics**
[Patterns noticed - e.g., 'Multiple reports about X', 'Continued interest in Y feature']
**Duplicates & Related**
[Issues that relate to existing open issues]
===DISCORD_END===
STEP 5: Format for Discord
Format the recap as a Discord-compatible message:
- Use Discord markdown (**, __, etc.)
- BE EXTREMELY CONCISE - this is an EOD summary, not a detailed report
- Use hyperlinked issue numbers with suppressed embeds: [#1234](<https://github.com/${{ github.repository }}/issues/1234>)
- Group related issues on single lines where possible
- Add emoji sparingly for critical items only
- HARD LIMIT: Keep under 1800 characters total
- Skip sections that have nothing notable (e.g., if no critical issues, omit that section)
- Prioritize signal over completeness - only surface what matters
OUTPUT: Output ONLY the content between ===DISCORD_START=== and ===DISCORD_END=== markers. Include the markers so I can extract it." > /tmp/recap_raw.txt
# Extract only the Discord message between markers
sed -n '/===DISCORD_START===/,/===DISCORD_END===/p' /tmp/recap_raw.txt | grep -v '===DISCORD' > /tmp/recap.txt
echo "recap_file=/tmp/recap.txt" >> $GITHUB_OUTPUT
- name: Post to Discord
env:
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_ISSUES_WEBHOOK_URL }}
run: |
if [ -z "$DISCORD_WEBHOOK_URL" ]; then
echo "Warning: DISCORD_ISSUES_WEBHOOK_URL secret not set, skipping Discord post"
cat /tmp/recap.txt
exit 0
fi
# Read the recap
RECAP_RAW=$(cat /tmp/recap.txt)
RECAP_LENGTH=${#RECAP_RAW}
echo "Recap length: ${RECAP_LENGTH} chars"
# Function to post a message to Discord
post_to_discord() {
local msg="$1"
local content=$(echo "$msg" | jq -Rs '.')
curl -s -H "Content-Type: application/json" \
-X POST \
-d "{\"content\": ${content}}" \
"$DISCORD_WEBHOOK_URL"
sleep 1
}
# If under limit, send as single message
if [ "$RECAP_LENGTH" -le 1950 ]; then
post_to_discord "$RECAP_RAW"
else
echo "Splitting into multiple messages..."
remaining="$RECAP_RAW"
while [ ${#remaining} -gt 0 ]; do
if [ ${#remaining} -le 1950 ]; then
post_to_discord "$remaining"
break
else
chunk="${remaining:0:1900}"
last_newline=$(echo "$chunk" | grep -bo $'\n' | tail -1 | cut -d: -f1)
if [ -n "$last_newline" ] && [ "$last_newline" -gt 500 ]; then
chunk="${remaining:0:$last_newline}"
remaining="${remaining:$((last_newline+1))}"
else
chunk="${remaining:0:1900}"
remaining="${remaining:1900}"
fi
post_to_discord "$chunk"
fi
done
fi
echo "Posted daily recap to Discord"


@@ -1,169 +0,0 @@
name: Daily PR Recap
on:
schedule:
# Run at 5pm EST (22:00 UTC, or 21:00 UTC during daylight saving)
- cron: "0 22 * * *"
workflow_dispatch: # Allow manual trigger for testing
jobs:
pr-recap:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
pull-requests: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Generate daily PR recap
id: recap
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh pr*": "allow",
"gh search*": "allow"
},
"webfetch": "deny",
"edit": "deny",
"write": "deny"
}
run: |
TODAY=$(date -u +%Y-%m-%d)
opencode run -m opencode/claude-sonnet-4-5 "Generate a daily PR activity recap for the OpenCode repository.
TODAY'S DATE: ${TODAY}
STEP 1: Gather PR data
Run these commands to gather PR information. ONLY include PRs created or updated TODAY (${TODAY}):
# PRs created today
gh pr list --repo ${{ github.repository }} --state all --search \"created:${TODAY}\" --json number,title,author,labels,createdAt,updatedAt,reviewDecision,isDraft,additions,deletions --limit 100
# PRs with activity today (updated today)
gh pr list --repo ${{ github.repository }} --state open --search \"updated:${TODAY}\" --json number,title,author,labels,createdAt,updatedAt,reviewDecision,isDraft,additions,deletions --limit 100
STEP 2: For high-activity PRs, check comment counts
For promising PRs, run:
gh pr view [NUMBER] --repo ${{ github.repository }} --json comments --jq '[.comments[] | select(.author.login != \"copilot-pull-request-reviewer\" and .author.login != \"github-actions\")] | length'
IMPORTANT: When counting comments/activity, EXCLUDE these bot accounts:
- copilot-pull-request-reviewer
- github-actions
STEP 3: Identify what matters (ONLY from today's PRs)
**Bug Fixes From Today:**
- PRs with 'fix' or 'bug' in title created/updated today
- Small bug fixes (< 100 lines changed) that are easy to review
- Bug fixes from community contributors
**High Activity Today:**
- PRs with significant human comments today (excluding bots listed above)
- PRs with back-and-forth discussion today
**Quick Wins:**
- Small PRs (< 50 lines) that are approved or nearly approved
- PRs that just need a final review
STEP 4: Generate the recap
Create a structured recap:
===DISCORD_START===
**Daily PR Recap - ${TODAY}**
**New PRs Today**
[PRs opened today - group by type: bug fixes, features, etc.]
**Active PRs Today**
[PRs with activity/updates today - significant discussion]
**Quick Wins**
[Small PRs ready to merge]
===DISCORD_END===
STEP 5: Format for Discord
- Use Discord markdown (**, __, etc.)
- BE EXTREMELY CONCISE - surface what we might miss
- Use hyperlinked PR numbers with suppressed embeds: [#1234](<https://github.com/${{ github.repository }}/pull/1234>)
- Include PR author: [#1234](<url>) (@author)
- For bug fixes, add brief description of what it fixes
- Show line count for quick wins: \"(+15/-3 lines)\"
- HARD LIMIT: Keep under 1800 characters total
- Skip empty sections
- Focus on PRs that need human eyes
OUTPUT: Output ONLY the content between ===DISCORD_START=== and ===DISCORD_END=== markers. Include the markers so I can extract it." > /tmp/pr_recap_raw.txt
# Extract only the Discord message between markers
sed -n '/===DISCORD_START===/,/===DISCORD_END===/p' /tmp/pr_recap_raw.txt | grep -v '===DISCORD' > /tmp/pr_recap.txt
echo "recap_file=/tmp/pr_recap.txt" >> $GITHUB_OUTPUT
- name: Post to Discord
env:
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_ISSUES_WEBHOOK_URL }}
run: |
if [ -z "$DISCORD_WEBHOOK_URL" ]; then
echo "Warning: DISCORD_ISSUES_WEBHOOK_URL secret not set, skipping Discord post"
cat /tmp/pr_recap.txt
exit 0
fi
# Read the recap
RECAP_RAW=$(cat /tmp/pr_recap.txt)
RECAP_LENGTH=${#RECAP_RAW}
echo "Recap length: ${RECAP_LENGTH} chars"
# Function to post a message to Discord
post_to_discord() {
local msg="$1"
local content=$(echo "$msg" | jq -Rs '.')
curl -s -H "Content-Type: application/json" \
-X POST \
-d "{\"content\": ${content}}" \
"$DISCORD_WEBHOOK_URL"
sleep 1
}
# If under limit, send as single message
if [ "$RECAP_LENGTH" -le 1950 ]; then
post_to_discord "$RECAP_RAW"
else
echo "Splitting into multiple messages..."
remaining="$RECAP_RAW"
while [ ${#remaining} -gt 0 ]; do
if [ ${#remaining} -le 1950 ]; then
post_to_discord "$remaining"
break
else
chunk="${remaining:0:1900}"
last_newline=$(echo "$chunk" | grep -bo $'\n' | tail -1 | cut -d: -f1)
if [ -n "$last_newline" ] && [ "$last_newline" -gt 500 ]; then
chunk="${remaining:0:$last_newline}"
remaining="${remaining:$((last_newline+1))}"
else
chunk="${remaining:0:1900}"
remaining="${remaining:1900}"
fi
post_to_discord "$chunk"
fi
done
fi
echo "Posted daily PR recap to Discord"


@@ -1,29 +0,0 @@
name: deploy
on:
push:
branches:
- dev
- production
workflow_dispatch:
concurrency: ${{ github.workflow }}-${{ github.ref }}
jobs:
deploy:
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-bun
- uses: actions/setup-node@v4
with:
node-version: "24"
- run: bun sst deploy --stage=${{ github.ref_name }}
env:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
PLANETSCALE_SERVICE_TOKEN_NAME: ${{ secrets.PLANETSCALE_SERVICE_TOKEN_NAME }}
PLANETSCALE_SERVICE_TOKEN: ${{ secrets.PLANETSCALE_SERVICE_TOKEN }}
STRIPE_SECRET_KEY: ${{ github.ref_name == 'production' && secrets.STRIPE_SECRET_KEY_PROD || secrets.STRIPE_SECRET_KEY_DEV }}


@@ -1,72 +0,0 @@
name: Docs Update
on:
schedule:
- cron: "0 */12 * * *"
workflow_dispatch:
env:
LOOKBACK_HOURS: 4
jobs:
update-docs:
if: github.repository == 'sst/opencode'
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
id-token: write
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch full history to access commits
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Get recent commits
id: commits
run: |
COMMITS=$(git log --since="${{ env.LOOKBACK_HOURS }} hours ago" --pretty=format:"- %h %s" 2>/dev/null || echo "")
if [ -z "$COMMITS" ]; then
echo "No commits in the last ${{ env.LOOKBACK_HOURS }} hours"
echo "has_commits=false" >> $GITHUB_OUTPUT
else
echo "has_commits=true" >> $GITHUB_OUTPUT
{
echo "list<<EOF"
echo "$COMMITS"
echo "EOF"
} >> $GITHUB_OUTPUT
fi
- name: Run opencode
if: steps.commits.outputs.has_commits == 'true'
uses: sst/opencode/github@latest
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
with:
model: opencode/gpt-5.2
agent: docs
prompt: |
Review the following commits from the last ${{ env.LOOKBACK_HOURS }} hours and identify any new features that may need documentation.
<recent_commits>
${{ steps.commits.outputs.list }}
</recent_commits>
Steps:
1. For each commit that looks like a new feature or significant change:
- Read the changed files to understand what was added
- Check if the feature is already documented in packages/web/src/content/docs/*
2. If you find undocumented features:
- Update the relevant documentation files in packages/web/src/content/docs/*
- Follow the existing documentation style and structure
- Make sure to document the feature clearly with examples where appropriate
3. If all new features are already documented, report that no updates are needed
4. If you are creating a new documentation file be sure to update packages/web/astro.config.mjs too.
Focus on user-facing features and API changes. Skip internal refactors, bug fixes, and test updates unless they affect user-facing behavior.
Don't feel the need to document every little thing. It is perfectly okay to make 0 changes at all.
Try to keep documentation only for large features or changes that already have a good spot to be documented.


@@ -1,63 +0,0 @@
name: Duplicate Issue Detection
on:
issues:
types: [opened]
jobs:
check-duplicates:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
issues: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Check for duplicate issues
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh issue*": "allow"
},
"webfetch": "deny"
}
run: |
opencode run -m opencode/claude-haiku-4-5 "A new issue has been created:'
Issue number:
${{ github.event.issue.number }}
Lookup this issue and search through existing issues (excluding #${{ github.event.issue.number }}) in this repository to find any potential duplicates of this new issue.
Consider:
1. Similar titles or descriptions
2. Same error messages or symptoms
3. Related functionality or components
4. Similar feature requests
If you find any potential duplicates, please comment on the new issue with:
- A brief explanation of why it might be a duplicate
- Links to the potentially duplicate issues
- A suggestion to check those issues first
Use this format for the comment:
'This issue might be a duplicate of existing issues. Please check:
- #[issue_number]: [brief description of similarity]
Feel free to ignore if none of these address your specific case.'
Additionally, if the issue mentions keybinds, keyboard shortcuts, or key bindings, please add a comment mentioning the pinned keybinds issue #4997:
'For keybind-related issues, please also check our pinned keybinds documentation: #4997'
If no clear duplicates are found, do not comment."


@@ -1,65 +0,0 @@
name: Duplicate PR Check
on:
pull_request_target:
types: [opened]
jobs:
check-duplicates:
if: |
github.event.pull_request.user.login != 'actions-user' &&
github.event.pull_request.user.login != 'opencode' &&
github.event.pull_request.user.login != 'rekram1-node' &&
github.event.pull_request.user.login != 'thdxr' &&
github.event.pull_request.user.login != 'kommander' &&
github.event.pull_request.user.login != 'jayair' &&
github.event.pull_request.user.login != 'fwang' &&
github.event.pull_request.user.login != 'adamdotdevin' &&
github.event.pull_request.user.login != 'iamdavidhill' &&
github.event.pull_request.user.login != 'opencode-agent[bot]'
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Install dependencies
run: bun install
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Build prompt
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
{
echo "Check for duplicate PRs related to this new PR:"
echo ""
echo "CURRENT_PR_NUMBER: $PR_NUMBER"
echo ""
echo "Title: $(gh pr view "$PR_NUMBER" --json title --jq .title)"
echo ""
echo "Description:"
gh pr view "$PR_NUMBER" --json body --jq .body
} > pr_info.txt
- name: Check for duplicate PRs
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
COMMENT=$(bun script/duplicate-pr.ts -f pr_info.txt "Check the attached file for PR details and search for duplicates")
gh pr comment "$PR_NUMBER" --body "_The following comment was made by an LLM, it may be inaccurate:_
$COMMENT"


@@ -1,51 +0,0 @@
name: generate
on:
push:
branches:
- dev
workflow_dispatch:
jobs:
generate:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
ref: ${{ github.event.pull_request.head.ref || github.ref_name }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Generate
run: ./script/generate.ts
- name: Commit and push
run: |
if [ -z "$(git status --porcelain)" ]; then
echo "No changes to commit"
exit 0
fi
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add -A
git commit -m "chore: generate"
git push origin HEAD:${{ github.ref_name }} --no-verify
# if ! git push origin HEAD:${{ github.event.pull_request.head.ref || github.ref_name }} --no-verify; then
# echo ""
# echo "============================================"
# echo "Failed to push generated code."
# echo "Please run locally and push:"
# echo ""
# echo " ./script/generate.ts"
# echo " git add -A && git commit -m \"chore: generate\" && git push"
# echo ""
# echo "============================================"
# exit 1
# fi


@@ -1,46 +0,0 @@
name: nix desktop
on:
push:
branches: [dev]
paths:
- "flake.nix"
- "flake.lock"
- "nix/**"
- "packages/app/**"
- "packages/desktop/**"
- ".github/workflows/nix-desktop.yml"
pull_request:
paths:
- "flake.nix"
- "flake.lock"
- "nix/**"
- "packages/app/**"
- "packages/desktop/**"
- ".github/workflows/nix-desktop.yml"
workflow_dispatch:
jobs:
build-desktop:
strategy:
fail-fast: false
matrix:
os:
- blacksmith-4vcpu-ubuntu-2404
- blacksmith-4vcpu-ubuntu-2404-arm
- macos-15-intel
- macos-latest
runs-on: ${{ matrix.os }}
timeout-minutes: 60
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Setup Nix
uses: nixbuild/nix-quick-install-action@v34
- name: Build desktop via flake
run: |
set -euo pipefail
nix --version
nix build .#desktop -L


@@ -1,14 +0,0 @@
name: discord
on:
release:
types: [released] # fires when a draft release is published
jobs:
notify:
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- name: Send nicely-formatted embed to Discord
uses: SethCohen/github-releases-to-discord@v1
with:
webhook_url: ${{ secrets.DISCORD_WEBHOOK }}


@@ -1,34 +0,0 @@
name: opencode
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
jobs:
opencode:
if: |
contains(github.event.comment.body, ' /oc') ||
startsWith(github.event.comment.body, '/oc') ||
contains(github.event.comment.body, ' /opencode') ||
startsWith(github.event.comment.body, '/opencode')
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
id-token: write
contents: read
pull-requests: read
issues: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: ./.github/actions/setup-bun
- name: Run opencode
uses: anomalyco/opencode/github@latest
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
OPENCODE_PERMISSION: '{"bash": "deny"}'
with:
model: opencode/claude-opus-4-5
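The trigger condition above fires on comments that start with or contain /oc or /opencode; an illustrative comment body that would start the job:

# illustrative comment that satisfies the trigger condition above
#   /oc summarize the changes in this PR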


@@ -1,139 +0,0 @@
name: PR Standards
on:
pull_request_target:
types: [opened, edited, synchronize]
jobs:
check-standards:
if: |
github.event.pull_request.user.login != 'actions-user' &&
github.event.pull_request.user.login != 'opencode' &&
github.event.pull_request.user.login != 'rekram1-node' &&
github.event.pull_request.user.login != 'thdxr' &&
github.event.pull_request.user.login != 'kommander' &&
github.event.pull_request.user.login != 'jayair' &&
github.event.pull_request.user.login != 'fwang' &&
github.event.pull_request.user.login != 'adamdotdevin' &&
github.event.pull_request.user.login != 'iamdavidhill' &&
github.event.pull_request.user.login != 'opencode-agent[bot]'
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Check PR standards
uses: actions/github-script@v7
with:
script: |
const pr = context.payload.pull_request;
const title = pr.title;
async function addLabel(label) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number,
labels: [label]
});
}
async function removeLabel(label) {
try {
await github.rest.issues.removeLabel({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number,
name: label
});
} catch (e) {
// Label wasn't present, ignore
}
}
async function comment(marker, body) {
const markerText = `<!-- pr-standards:${marker} -->`;
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number
});
const existing = comments.find(c => c.body.includes(markerText));
if (existing) return;
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number,
body: markerText + '\n' + body
});
}
// Step 1: Check title format
// Matches: feat:, feat(scope):, feat (scope):, etc.
const titlePattern = /^(feat|fix|docs|chore|refactor|test)\s*(\([a-zA-Z0-9-]+\))?\s*:/;
const hasValidTitle = titlePattern.test(title);
if (!hasValidTitle) {
await addLabel('needs:title');
await comment('title', `Hey! Your PR title \`${title}\` doesn't follow conventional commit format.
Please update it to start with one of:
- \`feat:\` or \`feat(scope):\` new feature
- \`fix:\` or \`fix(scope):\` bug fix
- \`docs:\` or \`docs(scope):\` documentation changes
- \`chore:\` or \`chore(scope):\` maintenance tasks
- \`refactor:\` or \`refactor(scope):\` code refactoring
- \`test:\` or \`test(scope):\` adding or updating tests
Where \`scope\` is the package name (e.g., \`app\`, \`desktop\`, \`opencode\`).
See [CONTRIBUTING.md](../blob/dev/CONTRIBUTING.md#pr-titles) for details.`);
return;
}
await removeLabel('needs:title');
// Step 2: Check for linked issue (skip for docs/refactor PRs)
const skipIssueCheck = /^(docs|refactor)\s*(\([a-zA-Z0-9-]+\))?\s*:/.test(title);
if (skipIssueCheck) {
await removeLabel('needs:issue');
console.log('Skipping issue check for docs/refactor PR');
return;
}
const query = `
query($owner: String!, $repo: String!, $number: Int!) {
repository(owner: $owner, name: $repo) {
pullRequest(number: $number) {
closingIssuesReferences(first: 1) {
totalCount
}
}
}
}
`;
const result = await github.graphql(query, {
owner: context.repo.owner,
repo: context.repo.repo,
number: pr.number
});
const linkedIssues = result.repository.pullRequest.closingIssuesReferences.totalCount;
if (linkedIssues === 0) {
await addLabel('needs:issue');
await comment('issue', `Thanks for your contribution!
This PR doesn't have a linked issue. All PRs must reference an existing issue.
Please:
1. Open an issue describing the bug/feature (if one doesn't exist)
2. Add \`Fixes #<number>\` or \`Closes #<number>\` to this PR description
See [CONTRIBUTING.md](../blob/dev/CONTRIBUTING.md#issue-first-policy) for details.`);
return;
}
await removeLabel('needs:issue');
console.log('PR meets all standards');
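For reference, titles that satisfy titlePattern above look like the following; the issue number in the linked-issue line is hypothetical:

# titles accepted by the conventional-commit pattern (illustrative)
#   feat(app): add session export
#   fix: handle empty config file
#   docs: clarify install steps
# a PR description line that satisfies the linked-issue check (hypothetical number)
#   Fixes #1234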


@@ -1,30 +0,0 @@
name: publish-github-action
on:
workflow_dispatch:
push:
tags:
- "github-v*.*.*"
- "!github-v1"
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
jobs:
publish:
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- run: git fetch --force --tags
- name: Publish
run: |
git config --global user.email "opencode@sst.dev"
git config --global user.name "opencode"
./script/publish
working-directory: ./github


@@ -1,37 +0,0 @@
name: publish-vscode
on:
workflow_dispatch:
push:
tags:
- "vscode-v*.*.*"
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
jobs:
publish:
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: ./.github/actions/setup-bun
- run: git fetch --force --tags
- run: bun install -g @vscode/vsce
- name: Install extension dependencies
run: bun install
working-directory: ./sdks/vscode
- name: Publish
run: |
./script/publish
working-directory: ./sdks/vscode
env:
VSCE_PAT: ${{ secrets.VSCE_PAT }}
OPENVSX_TOKEN: ${{ secrets.OPENVSX_TOKEN }}


@@ -1,237 +0,0 @@
name: publish
run-name: "${{ format('release {0}', inputs.bump) }}"
on:
push:
branches:
- dev
- snapshot-*
workflow_dispatch:
inputs:
bump:
description: "Bump major, minor, or patch"
required: false
type: choice
options:
- major
- minor
- patch
version:
description: "Override version (optional)"
required: false
type: string
concurrency: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.version || inputs.bump }}
permissions:
id-token: write
contents: write
packages: write
jobs:
publish:
runs-on: blacksmith-4vcpu-ubuntu-2404
if: github.repository == 'anomalyco/opencode'
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- run: git fetch --force --tags
- uses: ./.github/actions/setup-bun
- name: Install OpenCode
if: inputs.bump || inputs.version
run: bun i -g opencode-ai@1.0.169
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- uses: actions/setup-node@v4
with:
node-version: "24"
registry-url: "https://registry.npmjs.org"
- name: Setup Git Identity
run: |
git config --global user.email "opencode@sst.dev"
git config --global user.name "opencode"
git remote set-url origin https://x-access-token:${{ secrets.SST_GITHUB_TOKEN }}@github.com/${{ github.repository }}
- name: Publish
id: publish
run: ./script/publish-start.ts
env:
OPENCODE_BUMP: ${{ inputs.bump }}
OPENCODE_VERSION: ${{ inputs.version }}
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GITHUB_TOKEN: ${{ secrets.SST_GITHUB_TOKEN }}
NPM_CONFIG_PROVENANCE: false
- uses: actions/upload-artifact@v4
with:
name: opencode-cli
path: packages/opencode/dist
outputs:
release: ${{ steps.publish.outputs.release }}
tag: ${{ steps.publish.outputs.tag }}
version: ${{ steps.publish.outputs.version }}
publish-tauri:
needs: publish
continue-on-error: false
strategy:
fail-fast: false
matrix:
settings:
- host: macos-latest
target: x86_64-apple-darwin
- host: macos-latest
target: aarch64-apple-darwin
- host: blacksmith-4vcpu-windows-2025
target: x86_64-pc-windows-msvc
- host: blacksmith-4vcpu-ubuntu-2404
target: x86_64-unknown-linux-gnu
- host: blacksmith-4vcpu-ubuntu-2404-arm
target: aarch64-unknown-linux-gnu
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ needs.publish.outputs.tag }}
- uses: apple-actions/import-codesign-certs@v2
if: ${{ runner.os == 'macOS' }}
with:
keychain: build
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
- name: Verify Certificate
if: ${{ runner.os == 'macOS' }}
run: |
CERT_INFO=$(security find-identity -v -p codesigning build.keychain | grep "Developer ID Application")
CERT_ID=$(echo "$CERT_INFO" | awk -F'"' '{print $2}')
echo "CERT_ID=$CERT_ID" >> $GITHUB_ENV
echo "Certificate imported."
- name: Setup Apple API Key
if: ${{ runner.os == 'macOS' }}
run: |
echo "${{ secrets.APPLE_API_KEY_PATH }}" > $RUNNER_TEMP/apple-api-key.p8
- run: git fetch --force --tags
- uses: ./.github/actions/setup-bun
- name: install dependencies (ubuntu only)
if: contains(matrix.settings.host, 'ubuntu')
run: |
sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
- name: install Rust stable
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.settings.target }}
- uses: Swatinem/rust-cache@v2
with:
workspaces: packages/desktop/src-tauri
shared-key: ${{ matrix.settings.target }}
- name: Prepare
run: |
cd packages/desktop
bun ./scripts/prepare.ts
env:
OPENCODE_VERSION: ${{ needs.publish.outputs.version }}
NPM_CONFIG_TOKEN: ${{ secrets.NPM_TOKEN }}
GITHUB_TOKEN: ${{ secrets.SST_GITHUB_TOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
RUST_TARGET: ${{ matrix.settings.target }}
GH_TOKEN: ${{ github.token }}
GITHUB_RUN_ID: ${{ github.run_id }}
# Fixes AppImage build issues, can be removed when https://github.com/tauri-apps/tauri/pull/12491 is released
- name: Install tauri-cli from portable appimage branch
if: contains(matrix.settings.host, 'ubuntu')
run: |
cargo install tauri-cli --git https://github.com/tauri-apps/tauri --branch feat/truly-portable-appimage --force
echo "Installed tauri-cli version:"
cargo tauri --version
- name: Build and upload artifacts
uses: Wandalen/wretry.action@v3
timeout-minutes: 60
with:
attempt_limit: 3
attempt_delay: 10000
action: tauri-apps/tauri-action@390cbe447412ced1303d35abe75287949e43437a
with: |
projectPath: packages/desktop
uploadWorkflowArtifacts: true
tauriScript: ${{ (contains(matrix.settings.host, 'ubuntu') && 'cargo tauri') || '' }}
args: --target ${{ matrix.settings.target }} --config ./src-tauri/tauri.prod.conf.json --verbose
updaterJsonPreferNsis: true
releaseId: ${{ needs.publish.outputs.release }}
tagName: ${{ needs.publish.outputs.tag }}
releaseAssetNamePattern: opencode-desktop-[platform]-[arch][ext]
releaseDraft: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAURI_BUNDLER_NEW_APPIMAGE_FORMAT: true
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ env.CERT_ID }}
APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }}
APPLE_API_KEY: ${{ secrets.APPLE_API_KEY }}
APPLE_API_KEY_PATH: ${{ runner.temp }}/apple-api-key.p8
publish-release:
needs:
- publish
- publish-tauri
if: needs.publish.outputs.tag
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ needs.publish.outputs.tag }}
- uses: ./.github/actions/setup-bun
- name: Setup SSH for AUR
run: |
sudo apt-get update
sudo apt-get install -y pacman-package-manager
mkdir -p ~/.ssh
echo "${{ secrets.AUR_KEY }}" > ~/.ssh/id_rsa
chmod 600 ~/.ssh/id_rsa
git config --global user.email "opencode@sst.dev"
git config --global user.name "opencode"
ssh-keyscan -H aur.archlinux.org >> ~/.ssh/known_hosts || true
- run: ./script/publish-complete.ts
env:
OPENCODE_VERSION: ${{ needs.publish.outputs.version }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GITHUB_TOKEN: ${{ secrets.SST_GITHUB_TOKEN }}
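The workflow_dispatch inputs above can also be supplied from the command line; a minimal sketch, assuming an authenticated gh CLI and that the workflow file is named publish.yml:

# illustrative manual dispatch with a patch bump
gh workflow run publish.yml -f bump=patch
# or pin an explicit version instead (version shown is hypothetical)
gh workflow run publish.yml -f version=1.2.3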


@@ -1,29 +0,0 @@
name: release-github-action
on:
push:
branches:
- dev
paths:
- "github/**"
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
jobs:
release:
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- run: git fetch --force --tags
- name: Release
run: |
git config --global user.email "opencode@sst.dev"
git config --global user.name "opencode"
./github/script/release

.github/workflows/release.yml vendored (new file, 40 lines)

@@ -0,0 +1,40 @@
name: release
on:
workflow_dispatch:
push:
tags:
- "*"
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
packages: write
jobs:
goreleaser:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- run: git fetch --force --tags
- uses: actions/setup-go@v5
with:
go-version: ">=1.23.2"
cache: true
cache-dependency-path: go.sum
- run: go mod download
- uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.HOMEBREW_GITHUB_TOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}


@@ -1,83 +0,0 @@
name: Guidelines Check
on:
issue_comment:
types: [created]
jobs:
check-guidelines:
if: |
github.event.issue.pull_request &&
startsWith(github.event.comment.body, '/review') &&
contains(fromJson('["OWNER","MEMBER"]'), github.event.comment.author_association)
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
pull-requests: write
steps:
- name: Get PR number
id: pr-number
run: |
if [ "${{ github.event_name }}" = "pull_request_target" ]; then
echo "number=${{ github.event.pull_request.number }}" >> $GITHUB_OUTPUT
else
echo "number=${{ github.event.issue.number }}" >> $GITHUB_OUTPUT
fi
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Get PR details
id: pr-details
run: |
gh api /repos/${{ github.repository }}/pulls/${{ steps.pr-number.outputs.number }} > pr_data.json
echo "title=$(jq -r .title pr_data.json)" >> $GITHUB_OUTPUT
echo "sha=$(jq -r .head.sha pr_data.json)" >> $GITHUB_OUTPUT
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Check PR guidelines compliance
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: '{ "bash": { "*": "deny", "gh*": "allow", "gh pr review*": "deny" } }'
PR_TITLE: ${{ steps.pr-details.outputs.title }}
run: |
PR_BODY=$(jq -r .body pr_data.json)
opencode run -m anthropic/claude-opus-4-5 "A new pull request has been created: '${PR_TITLE}'
<pr-number>
${{ steps.pr-number.outputs.number }}
</pr-number>
<pr-description>
$PR_BODY
</pr-description>
Please check all the code changes in this pull request against the style guide, also look for any bugs if they exist. Diffs are important but make sure you read the entire file to get proper context. Make it clear the suggestions are merely suggestions and the human can decide what to do
When critiquing code against the style guide, be sure that the code is ACTUALLY in violation, don't complain about else statements if they already use early returns there. You may complain about excessive nesting though, regardless of else statement usage.
When critiquing code style don't be a zealot, we don't like "let" statements but sometimes they are the simplest option, if someone does a bunch of nesting with let, they should consider using iife (see packages/opencode/src/util.iife.ts)
Use the gh cli to create comments on the files for the violations. Try to leave the comment on the exact line number. If you have a suggested fix include it in a suggestion code block.
If you are writing suggested fixes, BE SURE THAT the change you are recommending is actually valid typescript, often I have seen missing closing "}" or other syntax errors.
Generally, write a comment instead of writing suggested change if you can help it.
Command MUST be like this.
\`\`\`
gh api \
--method POST \
-H \"Accept: application/vnd.github+json\" \
-H \"X-GitHub-Api-Version: 2022-11-28\" \
/repos/${{ github.repository }}/pulls/${{ steps.pr-number.outputs.number }}/comments \
-f 'body=[summary of issue]' -f 'commit_id=${{ steps.pr-details.outputs.sha }}' -f 'path=[path-to-file]' -F \"line=[line]\" -f 'side=RIGHT'
\`\`\`
Only create comments for actual violations. If the code follows all guidelines, comment on the issue using gh cli: 'lgtm' AND NOTHING ELSE!!!!."


@@ -1,33 +0,0 @@
name: "Auto-close stale issues"
on:
schedule:
- cron: "30 1 * * *" # Daily at 1:30 AM
workflow_dispatch:
env:
DAYS_BEFORE_STALE: 90
DAYS_BEFORE_CLOSE: 7
jobs:
stale:
runs-on: ubuntu-latest
permissions:
issues: write
steps:
- uses: actions/stale@v10
with:
days-before-stale: ${{ env.DAYS_BEFORE_STALE }}
days-before-close: ${{ env.DAYS_BEFORE_CLOSE }}
stale-issue-label: "stale"
close-issue-message: |
[automated] Closing due to ${{ env.DAYS_BEFORE_STALE }}+ days of inactivity.
Feel free to reopen if you still need this!
stale-issue-message: |
[automated] This issue has had no activity for ${{ env.DAYS_BEFORE_STALE }} days.
It will be closed in ${{ env.DAYS_BEFORE_CLOSE }} days if there's no new activity.
remove-stale-when-updated: true
exempt-issue-labels: "pinned,security,feature-request,on-hold"
start-date: "2025-12-27"


@@ -1,35 +0,0 @@
name: stats
on:
schedule:
- cron: "0 12 * * *" # Run daily at 12:00 UTC
workflow_dispatch: # Allow manual trigger
concurrency: ${{ github.workflow }}-${{ github.ref }}
jobs:
stats:
if: github.repository == 'anomalyco/opencode'
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Run stats script
run: bun script/stats.ts
- name: Commit stats
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add STATS.md
git diff --staged --quiet || git commit -m "ignore: update download stats $(date -I)"
git push
env:
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}


@@ -1,35 +0,0 @@
name: "sync-zed-extension"
on:
workflow_dispatch:
release:
types: [published]
jobs:
zed:
name: Release Zed Extension
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: ./.github/actions/setup-bun
- name: Get version tag
id: get_tag
run: |
if [ "${{ github.event_name }}" = "release" ]; then
TAG="${{ github.event.release.tag_name }}"
else
TAG=$(git tag --list 'v[0-9]*.*' --sort=-version:refname | head -n 1)
fi
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "Using tag: ${TAG}"
- name: Sync Zed extension
run: |
./script/sync-zed.ts ${{ steps.get_tag.outputs.tag }}
env:
ZED_EXTENSIONS_PAT: ${{ secrets.ZED_EXTENSIONS_PAT }}
ZED_PR_PAT: ${{ secrets.ZED_PR_PAT }}


@@ -1,147 +0,0 @@
name: test
on:
push:
branches:
- dev
pull_request:
workflow_dispatch:
jobs:
test:
name: test (${{ matrix.settings.name }})
strategy:
fail-fast: false
matrix:
settings:
- name: linux
host: blacksmith-4vcpu-ubuntu-2404
playwright: bunx playwright install --with-deps
workdir: .
command: |
git config --global user.email "bot@opencode.ai"
git config --global user.name "opencode"
bun turbo typecheck
bun turbo test
- name: windows
host: windows-latest
playwright: bunx playwright install
workdir: packages/app
command: bun test:e2e:local
runs-on: ${{ matrix.settings.host }}
defaults:
run:
shell: bash
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Install Playwright browsers
working-directory: packages/app
run: ${{ matrix.settings.playwright }}
- name: Set OS-specific paths
run: |
if [ "${{ runner.os }}" = "Windows" ]; then
printf '%s\n' "OPENCODE_E2E_ROOT=${{ runner.temp }}\\opencode-e2e" >> "$GITHUB_ENV"
printf '%s\n' "OPENCODE_TEST_HOME=${{ runner.temp }}\\opencode-e2e\\home" >> "$GITHUB_ENV"
printf '%s\n' "XDG_DATA_HOME=${{ runner.temp }}\\opencode-e2e\\share" >> "$GITHUB_ENV"
printf '%s\n' "XDG_CACHE_HOME=${{ runner.temp }}\\opencode-e2e\\cache" >> "$GITHUB_ENV"
printf '%s\n' "XDG_CONFIG_HOME=${{ runner.temp }}\\opencode-e2e\\config" >> "$GITHUB_ENV"
printf '%s\n' "XDG_STATE_HOME=${{ runner.temp }}\\opencode-e2e\\state" >> "$GITHUB_ENV"
printf '%s\n' "MODELS_DEV_API_JSON=${{ github.workspace }}\\packages\\opencode\\test\\tool\\fixtures\\models-api.json" >> "$GITHUB_ENV"
else
printf '%s\n' "OPENCODE_E2E_ROOT=${{ runner.temp }}/opencode-e2e" >> "$GITHUB_ENV"
printf '%s\n' "OPENCODE_TEST_HOME=${{ runner.temp }}/opencode-e2e/home" >> "$GITHUB_ENV"
printf '%s\n' "XDG_DATA_HOME=${{ runner.temp }}/opencode-e2e/share" >> "$GITHUB_ENV"
printf '%s\n' "XDG_CACHE_HOME=${{ runner.temp }}/opencode-e2e/cache" >> "$GITHUB_ENV"
printf '%s\n' "XDG_CONFIG_HOME=${{ runner.temp }}/opencode-e2e/config" >> "$GITHUB_ENV"
printf '%s\n' "XDG_STATE_HOME=${{ runner.temp }}/opencode-e2e/state" >> "$GITHUB_ENV"
printf '%s\n' "MODELS_DEV_API_JSON=${{ github.workspace }}/packages/opencode/test/tool/fixtures/models-api.json" >> "$GITHUB_ENV"
fi
- name: Seed opencode data
if: matrix.settings.name != 'windows'
working-directory: packages/opencode
run: bun script/seed-e2e.ts
env:
MODELS_DEV_API_JSON: ${{ env.MODELS_DEV_API_JSON }}
OPENCODE_DISABLE_MODELS_FETCH: "true"
OPENCODE_DISABLE_SHARE: "true"
OPENCODE_DISABLE_LSP_DOWNLOAD: "true"
OPENCODE_DISABLE_DEFAULT_PLUGINS: "true"
OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER: "true"
OPENCODE_TEST_HOME: ${{ env.OPENCODE_TEST_HOME }}
XDG_DATA_HOME: ${{ env.XDG_DATA_HOME }}
XDG_CACHE_HOME: ${{ env.XDG_CACHE_HOME }}
XDG_CONFIG_HOME: ${{ env.XDG_CONFIG_HOME }}
XDG_STATE_HOME: ${{ env.XDG_STATE_HOME }}
OPENCODE_E2E_PROJECT_DIR: ${{ github.workspace }}
OPENCODE_E2E_SESSION_TITLE: "E2E Session"
OPENCODE_E2E_MESSAGE: "Seeded for UI e2e"
OPENCODE_E2E_MODEL: "opencode/gpt-5-nano"
- name: Run opencode server
if: matrix.settings.name != 'windows'
working-directory: packages/opencode
run: bun dev -- --print-logs --log-level WARN serve --port 4096 --hostname 127.0.0.1 &
env:
MODELS_DEV_API_JSON: ${{ env.MODELS_DEV_API_JSON }}
OPENCODE_DISABLE_MODELS_FETCH: "true"
OPENCODE_DISABLE_SHARE: "true"
OPENCODE_DISABLE_LSP_DOWNLOAD: "true"
OPENCODE_DISABLE_DEFAULT_PLUGINS: "true"
OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER: "true"
OPENCODE_TEST_HOME: ${{ env.OPENCODE_TEST_HOME }}
XDG_DATA_HOME: ${{ env.XDG_DATA_HOME }}
XDG_CACHE_HOME: ${{ env.XDG_CACHE_HOME }}
XDG_CONFIG_HOME: ${{ env.XDG_CONFIG_HOME }}
XDG_STATE_HOME: ${{ env.XDG_STATE_HOME }}
OPENCODE_CLIENT: "app"
- name: Wait for opencode server
if: matrix.settings.name != 'windows'
run: |
for i in {1..120}; do
curl -fsS "http://127.0.0.1:4096/global/health" > /dev/null && exit 0
sleep 1
done
exit 1
- name: run
working-directory: ${{ matrix.settings.workdir }}
run: ${{ matrix.settings.command }}
env:
CI: true
MODELS_DEV_API_JSON: ${{ env.MODELS_DEV_API_JSON }}
OPENCODE_DISABLE_MODELS_FETCH: "true"
OPENCODE_DISABLE_SHARE: "true"
OPENCODE_DISABLE_LSP_DOWNLOAD: "true"
OPENCODE_DISABLE_DEFAULT_PLUGINS: "true"
OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER: "true"
OPENCODE_TEST_HOME: ${{ env.OPENCODE_TEST_HOME }}
XDG_DATA_HOME: ${{ env.XDG_DATA_HOME }}
XDG_CACHE_HOME: ${{ env.XDG_CACHE_HOME }}
XDG_CONFIG_HOME: ${{ env.XDG_CONFIG_HOME }}
XDG_STATE_HOME: ${{ env.XDG_STATE_HOME }}
PLAYWRIGHT_SERVER_HOST: "127.0.0.1"
PLAYWRIGHT_SERVER_PORT: "4096"
VITE_OPENCODE_SERVER_HOST: "127.0.0.1"
VITE_OPENCODE_SERVER_PORT: "4096"
OPENCODE_CLIENT: "app"
timeout-minutes: 30
- name: Upload Playwright artifacts
if: failure()
uses: actions/upload-artifact@v4
with:
name: playwright-${{ matrix.settings.name }}-${{ github.run_attempt }}
if-no-files-found: ignore
retention-days: 7
path: |
packages/app/e2e/test-results
packages/app/e2e/playwright-report


@@ -1,37 +0,0 @@
name: Issue Triage
on:
issues:
types: [opened]
jobs:
triage:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
issues: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Triage issue
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ISSUE_NUMBER: ${{ github.event.issue.number }}
ISSUE_TITLE: ${{ github.event.issue.title }}
ISSUE_BODY: ${{ github.event.issue.body }}
run: |
opencode run --agent triage "The following issue was just opened, triage it:
Title: $ISSUE_TITLE
$ISSUE_BODY"


@@ -1,19 +0,0 @@
name: typecheck
on:
pull_request:
branches: [dev]
workflow_dispatch:
jobs:
typecheck:
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Run typecheck
run: bun typecheck


@@ -1,138 +0,0 @@
name: Update Nix Hashes
permissions:
contents: write
on:
workflow_dispatch:
push:
paths:
- "bun.lock"
- "package.json"
- "packages/*/package.json"
- "flake.lock"
- ".github/workflows/update-nix-hashes.yml"
pull_request:
paths:
- "bun.lock"
- "package.json"
- "packages/*/package.json"
- "flake.lock"
- ".github/workflows/update-nix-hashes.yml"
jobs:
update-node-modules-hashes:
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
runs-on: blacksmith-4vcpu-ubuntu-2404
env:
TITLE: node_modules hashes
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0
ref: ${{ github.head_ref || github.ref_name }}
repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
- name: Setup Nix
uses: nixbuild/nix-quick-install-action@v34
- name: Configure git
run: |
git config --global user.email "action@github.com"
git config --global user.name "Github Action"
- name: Pull latest changes
env:
TARGET_BRANCH: ${{ github.head_ref || github.ref_name }}
run: |
BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
git pull --rebase --autostash origin "$BRANCH"
- name: Compute all node_modules hashes
run: |
set -euo pipefail
HASH_FILE="nix/hashes.json"
SYSTEMS="x86_64-linux aarch64-linux x86_64-darwin aarch64-darwin"
if [ ! -f "$HASH_FILE" ]; then
mkdir -p "$(dirname "$HASH_FILE")"
echo '{"nodeModules":{}}' > "$HASH_FILE"
fi
for SYSTEM in $SYSTEMS; do
echo "Computing hash for ${SYSTEM}..."
BUILD_LOG=$(mktemp)
trap 'rm -f "$BUILD_LOG"' EXIT
# The updater derivations use fakeHash, so they will fail and reveal the correct hash
UPDATER_ATTR=".#packages.x86_64-linux.${SYSTEM}_node_modules"
nix build "$UPDATER_ATTR" --no-link 2>&1 | tee "$BUILD_LOG" || true
CORRECT_HASH="$(grep -E 'got:\s+sha256-[A-Za-z0-9+/=]+' "$BUILD_LOG" | awk '{print $2}' | head -n1 || true)"
if [ -z "$CORRECT_HASH" ]; then
CORRECT_HASH="$(grep -A2 'hash mismatch' "$BUILD_LOG" | grep 'got:' | awk '{print $2}' | sed 's/sha256:/sha256-/' || true)"
fi
if [ -z "$CORRECT_HASH" ]; then
echo "Failed to determine correct node_modules hash for ${SYSTEM}."
cat "$BUILD_LOG"
exit 1
fi
echo " ${SYSTEM}: ${CORRECT_HASH}"
jq --arg sys "$SYSTEM" --arg h "$CORRECT_HASH" \
'.nodeModules[$sys] = $h' "$HASH_FILE" > "${HASH_FILE}.tmp"
mv "${HASH_FILE}.tmp" "$HASH_FILE"
done
echo "All hashes computed:"
cat "$HASH_FILE"
- name: Commit ${{ env.TITLE }} changes
env:
TARGET_BRANCH: ${{ github.head_ref || github.ref_name }}
run: |
set -euo pipefail
HASH_FILE="nix/hashes.json"
echo "Checking for changes..."
summarize() {
local status="$1"
{
echo "### Nix $TITLE"
echo ""
echo "- ref: ${GITHUB_REF_NAME}"
echo "- status: ${status}"
} >> "$GITHUB_STEP_SUMMARY"
if [ -n "${GITHUB_SERVER_URL:-}" ] && [ -n "${GITHUB_REPOSITORY:-}" ] && [ -n "${GITHUB_RUN_ID:-}" ]; then
echo "- run: ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" >> "$GITHUB_STEP_SUMMARY"
fi
echo "" >> "$GITHUB_STEP_SUMMARY"
}
FILES=("$HASH_FILE")
STATUS="$(git status --short -- "${FILES[@]}" || true)"
if [ -z "$STATUS" ]; then
echo "No changes detected."
summarize "no changes"
exit 0
fi
echo "Changes detected:"
echo "$STATUS"
git add "${FILES[@]}"
git commit -m "chore: update nix node_modules hashes"
BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
git pull --rebase --autostash origin "$BRANCH"
git push origin HEAD:"$BRANCH"
echo "Changes pushed successfully"
summarize "committed $(git rev-parse --short HEAD)"

.gitignore vendored (71 changed lines)

@@ -1,28 +1,45 @@
.DS_Store
node_modules
.worktrees
.sst
.env
.idea
.vscode
*~
playground
tmp
dist
ts-dist
.turbo
**/.serena
.serena/
/result
refs
Session.vim
opencode.json
a.out
target
.scripts
.direnv/
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/
# Go workspace file
go.work
# IDE specific files
.idea/
.vscode/
*.swp
*.swo
# OS specific files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
*.log
# Binary output directory
/bin/
/dist/
# Local environment variables
.env
.env.local
.opencode/
# Local dev files
opencode-dev
logs/
*.bun-build

.goreleaser.yml

@@ -0,0 +1,76 @@
version: 2
project_name: opencode
before:
hooks:
builds:
- env:
- CGO_ENABLED=0
goos:
- linux
- darwin
goarch:
- amd64
- arm64
ldflags:
- -s -w -X github.com/opencode-ai/opencode/internal/version.Version={{.Version}}
main: ./main.go
archives:
- format: tar.gz
name_template: >-
opencode-
{{- if eq .Os "darwin" }}mac-
{{- else if eq .Os "windows" }}windows-
{{- else if eq .Os "linux" }}linux-{{end}}
{{- if eq .Arch "amd64" }}x86_64
{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end }}
format_overrides:
- goos: windows
format: zip
checksum:
name_template: "checksums.txt"
snapshot:
name_template: "0.0.0-{{ .Timestamp }}"
aurs:
- name: opencode-ai
homepage: "https://github.com/opencode-ai/opencode"
description: "terminal based agent that can build anything"
maintainers:
- "kujtimiihoxha <kujtimii.h@gmail.com>"
license: "MIT"
private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/opencode-ai-bin.git"
provides:
- opencode
conflicts:
- opencode
package: |-
install -Dm755 ./opencode "${pkgdir}/usr/bin/opencode"
brews:
- repository:
owner: opencode-ai
name: homebrew-tap
nfpms:
- maintainer: kujtimiihoxha
description: terminal based agent that can build anything
formats:
- deb
- rpm
file_name_template: >-
{{ .ProjectName }}-
{{- if eq .Os "darwin" }}mac
{{- else }}{{ .Os }}{{ end }}-{{ .Arch }}
changelog:
sort: asc
filters:
exclude:
- "^docs:"
- "^doc:"
- "^test:"
- "^ci:"
- "^ignore:"
- "^example:"
- "^wip:"


@@ -1,20 +0,0 @@
#!/bin/sh
set -e
# Check if bun version matches package.json
# keep in sync with packages/script/src/index.ts semver qualifier
bun -e '
import { semver } from "bun";
const pkg = await Bun.file("package.json").json();
const expectedBunVersion = pkg.packageManager?.split("@")[1];
if (!expectedBunVersion) {
throw new Error("packageManager field not found in root package.json");
}
const expectedBunVersionRange = `^${expectedBunVersion}`;
if (!semver.satisfies(process.versions.bun, expectedBunVersionRange)) {
throw new Error(`This script requires bun@${expectedBunVersionRange}, but you are using bun@${process.versions.bun}`);
}
if (process.versions.bun !== expectedBunVersion) {
console.warn(`Warning: Bun version ${process.versions.bun} differs from expected ${expectedBunVersion}`);
}
'
bun typecheck

.opencode.json

@@ -0,0 +1,8 @@
{
"$schema": "./opencode-schema.json",
"lsp": {
"gopls": {
"command": "gopls"
}
}
}


@@ -1,3 +0,0 @@
plans/
bun.lock
package.json


@@ -1,34 +0,0 @@
---
description: ALWAYS use this when writing docs
color: "#38A3EE"
---
You are an expert technical documentation writer
You are not verbose
Use a relaxed and friendly tone
The title of the page should be a word or a 2-3 word phrase
The description should be one short line, should not start with "The", should
avoid repeating the title of the page, should be 5-10 words long
Chunks of text should not be more than 2 sentences long
Each section is separated by a divider of 3 dashes
The section titles are short with only the first letter of the word capitalized
The section titles are in the imperative mood
The section titles should not repeat the term used in the page title. For
example, if the page title is "Models", avoid using a section title like "Add
new models". This might be unavoidable in some cases, but try to avoid it.
Check out the /packages/web/src/content/docs/docs/index.mdx as an example.
For JS or TS code snippets remove trailing semicolons and any trailing commas
that might not be needed.
If you are making a commit prefix the commit message with `docs:`


@@ -1,26 +0,0 @@
---
mode: primary
hidden: true
model: opencode/claude-haiku-4-5
color: "#E67E22"
tools:
"*": false
"github-pr-search": true
---
You are a duplicate PR detection agent. When a PR is opened, your job is to search for potentially duplicate or related open PRs.
Use the github-pr-search tool to search for PRs that might be addressing the same issue or feature.
IMPORTANT: The input will contain a line `CURRENT_PR_NUMBER: NNNN`. This is the current PR number; you should not mark the current PR as a duplicate of itself.
Search using keywords from the PR title and description. Try multiple searches with different relevant terms.
If you find potential duplicates:
- List them with their titles and URLs
- Briefly explain why they might be related
If no duplicates are found, say so clearly. BUT ONLY SAY "No duplicate PRs found" (don't say anything else if no dups)
Keep your response concise and actionable.


@@ -1,78 +0,0 @@
---
mode: primary
hidden: true
model: opencode/claude-haiku-4-5
color: "#44BA81"
tools:
"*": false
"github-triage": true
---
You are a triage agent responsible for triaging github issues.
Use your github-triage tool to triage issues.
## Labels
### windows
Use for any issue that mentions Windows (the OS). Be sure they are saying that they are on Windows.
- Use if they mention WSL too
#### perf
Performance-related issues:
- Slow performance
- High RAM usage
- High CPU usage
**Only** add if it's likely a RAM or CPU issue. **Do not** add for LLM slowness.
#### desktop
Desktop app issues:
- `opencode web` command
- The desktop app itself
**Only** add if it's specifically about the Desktop application or `opencode web` view. **Do not** add for terminal, TUI, or general opencode issues.
#### nix
**Only** add if the issue explicitly mentions nix.
#### zen
**Only** add if the issue mentions "zen" or "opencode zen" or "opencode black".
If the issue doesn't have "zen" or "opencode black" in it, then don't add the zen label.
#### docs
Add if the issue requests better documentation or docs updates.
#### opentui
TUI issues potentially caused by our underlying TUI library:
- Keybindings not working
- Scroll speed issues (too fast/slow/laggy)
- Screen flickering
- Crashes with opentui in the log
**Do not** add for general TUI bugs.
When assigning issues to people, follow these rules:
adamdotdev:
ONLY assign adam if the issue will have the "desktop" label.
fwang:
ONLY assign fwang if the issue will have the "zen" label.
jayair:
ONLY assign jayair if the issue will have the "docs" label.
In all other cases use best judgment. Avoid assigning to kommander needlessly, when in doubt assign to rekram1-node.


@@ -1,24 +0,0 @@
---
description: "Bump AI sdk dependencies minor / patch versions only"
---
Please read @package.json and @packages/opencode/package.json.
Your job is to look into AI SDK dependencies and figure out if they have versions that can be upgraded (minor or patch versions ONLY; ignore major version changes).
I want a report of every dependency and the version that can be upgraded to.
What would be even better is if you can give me links to the changelog for each dependency, or at least some reference info so I can see what bugs were fixed or new features were added.
Consider using subagents for each dep to save your context window.
Here is a short list of some deps (please be comprehensive tho):
- "ai"
- "@ai-sdk/openai"
- "@ai-sdk/anthropic"
- "@openrouter/ai-sdk-provider"
- etc, etc
DO NOT upgrade the dependencies yet, just make a list of all dependencies and their versions that can be upgraded to minor or patch versions only.
Write up your findings to ai-sdk-updates.md


@@ -1,28 +0,0 @@
---
description: git commit and push
model: opencode/glm-4.6
subtask: true
---
commit and push
make sure it includes a prefix like
docs:
tui:
core:
ci:
ignore:
wip:
For anything in the packages/web use the docs: prefix.
For anything in the packages/app use the ignore: prefix.
prefer to explain WHY something was done from an end user perspective instead of
WHAT was done.
do not do generic messages like "improved agent experience"; be very specific
about what user-facing changes were made
if there are changes do a git pull --rebase
if there are conflicts DO NOT FIX THEM. notify me and I will fix them


@@ -1,23 +0,0 @@
---
description: "find issue(s) on github"
model: opencode/claude-haiku-4-5
---
Search through existing issues in anomalyco/opencode using the gh cli to find issues matching this query:
$ARGUMENTS
Consider:
1. Similar titles or descriptions
2. Same error messages or symptoms
3. Related functionality or components
4. Similar feature requests
Please list any matching issues with:
- Issue number and title
- Brief explanation of why it matches the query
- Link to the issue
If no clear matches are found, say so.


@@ -1,15 +0,0 @@
---
description: Remove AI code slop
---
Check the diff against dev, and remove all AI generated slop introduced in this branch.
This includes:
- Extra comments that a human wouldn't add or that are inconsistent with the rest of the file
- Extra defensive checks or try/catch blocks that are abnormal for that area of the codebase (especially if called by trusted / validated codepaths)
- Casts to any to get around type issues
- Any other style that is inconsistent with the file
- Unnecessary emoji usage
Report at the end with only a 1-3 sentence summary of what you changed


@@ -1,5 +0,0 @@
---
description: spellcheck all markdown file changes
---
Look at all the unstaged changes to markdown (.md, .mdx) files, pull out the lines that have changed, and check for spelling and grammar errors.

.opencode/env.d.ts

@@ -1,4 +0,0 @@
declare module "*.txt" {
const content: string
export default content
}


@@ -1,23 +0,0 @@
{
"$schema": "https://opencode.ai/config.json",
// "plugin": ["opencode-openai-codex-auth"],
// "enterprise": {
// "url": "https://enterprise.dev.opencode.ai",
// },
"instructions": ["STYLE_GUIDE.md"],
"provider": {
"opencode": {
"options": {},
},
},
"mcp": {
"context7": {
"type": "remote",
"url": "https://mcp.context7.com/mcp",
},
},
"tools": {
"github-triage": false,
"github-pr-search": false,
},
}


@@ -1,39 +0,0 @@
---
name: bun-file-io
description: Use this when you are working on file operations like reading, writing, scanning, or deleting files. It summarizes the preferred file APIs and patterns used in this repo. It also notes when to use filesystem helpers for directories.
---
## Use this when
- Editing file I/O or scans in `packages/opencode`
- Handling directory operations or external tools
## Bun file APIs (from Bun docs)
- `Bun.file(path)` is lazy; call `text`, `json`, `stream`, `arrayBuffer`, `bytes`, `exists` to read.
- Metadata: `file.size`, `file.type`, `file.name`.
- `Bun.write(dest, input)` writes strings, buffers, Blobs, Responses, or files.
- `Bun.file(...).delete()` deletes a file.
- `file.writer()` returns a FileSink for incremental writes.
- `Bun.Glob` + `Array.fromAsync(glob.scan({ cwd, absolute, onlyFiles, dot }))` for scans.
- Use `Bun.which` to find a binary, then `Bun.spawn` to run it.
- `Bun.readableStreamToText/Bytes/JSON` for stream output.
## When to use node:fs
- Use `node:fs/promises` for directories (`mkdir`, `readdir`, recursive operations).
## Repo patterns
- Prefer Bun APIs over Node `fs` for file access.
- Check `Bun.file(...).exists()` before reading.
- For binary/large files use `arrayBuffer()` and MIME checks via `file.type`.
- Use `Bun.Glob` + `Array.fromAsync` for scans.
- Decode tool stderr with `Bun.readableStreamToText`.
- For large writes, use `Bun.write(Bun.file(path), text)`.
## Quick checklist
- Use Bun APIs first.
- Use `path.join`/`path.resolve` for paths.
- Prefer promise `.catch(...)` over `try/catch` when possible.
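## Example
A minimal sketch combining these patterns; file names and paths are illustrative, not from the repo:
```ts
import path from "node:path"
import { mkdir } from "node:fs/promises"

// node:fs/promises for directories, Bun APIs for files
const dir = path.join(process.cwd(), "tmp")
await mkdir(dir, { recursive: true })

// Lazy handle; check existence before reading
const file = Bun.file(path.join(dir, "notes.json"))
const existing = (await file.exists()) ? await file.json() : {}
await Bun.write(file, JSON.stringify({ ...existing, updated: Date.now() }))

// Scan with Bun.Glob + Array.fromAsync
const glob = new Bun.Glob("**/*.md")
const matches = await Array.fromAsync(glob.scan({ cwd: process.cwd(), onlyFiles: true }))
console.log(`${matches.length} markdown files`)
```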


@@ -1,223 +0,0 @@
{
"$schema": "https://opencode.ai/theme.json",
"defs": {
"nord0": "#2E3440",
"nord1": "#3B4252",
"nord2": "#434C5E",
"nord3": "#4C566A",
"nord4": "#D8DEE9",
"nord5": "#E5E9F0",
"nord6": "#ECEFF4",
"nord7": "#8FBCBB",
"nord8": "#88C0D0",
"nord9": "#81A1C1",
"nord10": "#5E81AC",
"nord11": "#BF616A",
"nord12": "#D08770",
"nord13": "#EBCB8B",
"nord14": "#A3BE8C",
"nord15": "#B48EAD"
},
"theme": {
"primary": {
"dark": "nord8",
"light": "nord10"
},
"secondary": {
"dark": "nord9",
"light": "nord9"
},
"accent": {
"dark": "nord7",
"light": "nord7"
},
"error": {
"dark": "nord11",
"light": "nord11"
},
"warning": {
"dark": "nord12",
"light": "nord12"
},
"success": {
"dark": "nord14",
"light": "nord14"
},
"info": {
"dark": "nord8",
"light": "nord10"
},
"text": {
"dark": "nord4",
"light": "nord0"
},
"textMuted": {
"dark": "nord3",
"light": "nord1"
},
"background": {
"dark": "nord0",
"light": "nord6"
},
"backgroundPanel": {
"dark": "nord1",
"light": "nord5"
},
"backgroundElement": {
"dark": "nord1",
"light": "nord4"
},
"border": {
"dark": "nord2",
"light": "nord3"
},
"borderActive": {
"dark": "nord3",
"light": "nord2"
},
"borderSubtle": {
"dark": "nord2",
"light": "nord3"
},
"diffAdded": {
"dark": "nord14",
"light": "nord14"
},
"diffRemoved": {
"dark": "nord11",
"light": "nord11"
},
"diffContext": {
"dark": "nord3",
"light": "nord3"
},
"diffHunkHeader": {
"dark": "nord3",
"light": "nord3"
},
"diffHighlightAdded": {
"dark": "nord14",
"light": "nord14"
},
"diffHighlightRemoved": {
"dark": "nord11",
"light": "nord11"
},
"diffAddedBg": {
"dark": "#3B4252",
"light": "#E5E9F0"
},
"diffRemovedBg": {
"dark": "#3B4252",
"light": "#E5E9F0"
},
"diffContextBg": {
"dark": "nord1",
"light": "nord5"
},
"diffLineNumber": {
"dark": "nord2",
"light": "nord4"
},
"diffAddedLineNumberBg": {
"dark": "#3B4252",
"light": "#E5E9F0"
},
"diffRemovedLineNumberBg": {
"dark": "#3B4252",
"light": "#E5E9F0"
},
"markdownText": {
"dark": "nord4",
"light": "nord0"
},
"markdownHeading": {
"dark": "nord8",
"light": "nord10"
},
"markdownLink": {
"dark": "nord9",
"light": "nord9"
},
"markdownLinkText": {
"dark": "nord7",
"light": "nord7"
},
"markdownCode": {
"dark": "nord14",
"light": "nord14"
},
"markdownBlockQuote": {
"dark": "nord3",
"light": "nord3"
},
"markdownEmph": {
"dark": "nord12",
"light": "nord12"
},
"markdownStrong": {
"dark": "nord13",
"light": "nord13"
},
"markdownHorizontalRule": {
"dark": "nord3",
"light": "nord3"
},
"markdownListItem": {
"dark": "nord8",
"light": "nord10"
},
"markdownListEnumeration": {
"dark": "nord7",
"light": "nord7"
},
"markdownImage": {
"dark": "nord9",
"light": "nord9"
},
"markdownImageText": {
"dark": "nord7",
"light": "nord7"
},
"markdownCodeBlock": {
"dark": "nord4",
"light": "nord0"
},
"syntaxComment": {
"dark": "nord3",
"light": "nord3"
},
"syntaxKeyword": {
"dark": "nord9",
"light": "nord9"
},
"syntaxFunction": {
"dark": "nord8",
"light": "nord8"
},
"syntaxVariable": {
"dark": "nord7",
"light": "nord7"
},
"syntaxString": {
"dark": "nord14",
"light": "nord14"
},
"syntaxNumber": {
"dark": "nord15",
"light": "nord15"
},
"syntaxType": {
"dark": "nord7",
"light": "nord7"
},
"syntaxOperator": {
"dark": "nord9",
"light": "nord9"
},
"syntaxPunctuation": {
"dark": "nord4",
"light": "nord0"
}
}
}


@@ -1,57 +0,0 @@
/// <reference path="../env.d.ts" />
import { tool } from "@opencode-ai/plugin"
import DESCRIPTION from "./github-pr-search.txt"
async function githubFetch(endpoint: string, options: RequestInit = {}) {
const response = await fetch(`https://api.github.com${endpoint}`, {
...options,
headers: {
Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
Accept: "application/vnd.github+json",
"Content-Type": "application/json",
...options.headers,
},
})
if (!response.ok) {
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`)
}
return response.json()
}
interface PR {
title: string
html_url: string
}
export default tool({
description: DESCRIPTION,
args: {
query: tool.schema.string().describe("Search query for PR titles and descriptions"),
limit: tool.schema.number().describe("Maximum number of results to return").default(10),
offset: tool.schema.number().describe("Number of results to skip for pagination").default(0),
},
async execute(args) {
const owner = "anomalyco"
const repo = "opencode"
const page = Math.floor(args.offset / args.limit) + 1
const searchQuery = encodeURIComponent(`${args.query} repo:${owner}/${repo} type:pr state:open`)
const result = await githubFetch(
`/search/issues?q=${searchQuery}&per_page=${args.limit}&page=${page}&sort=updated&order=desc`,
)
if (result.total_count === 0) {
return `No PRs found matching "${args.query}"`
}
const prs = result.items as PR[]
if (prs.length === 0) {
return `No other PRs found matching "${args.query}"`
}
const formatted = prs.map((pr) => `${pr.title}\n${pr.html_url}`).join("\n\n")
return `Found ${result.total_count} PRs (showing ${prs.length}):\n\n${formatted}`
},
})


@@ -1,10 +0,0 @@
Use this tool to search GitHub pull requests by title and description.
This tool searches open PRs in the anomalyco/opencode repository and returns LLM-friendly results including:
- PR title
- Link to the PR
Use the query parameter to search for keywords that might appear in PR titles or descriptions.


@@ -1,90 +0,0 @@
/// <reference path="../env.d.ts" />
// import { Octokit } from "@octokit/rest"
import { tool } from "@opencode-ai/plugin"
import DESCRIPTION from "./github-triage.txt"
function getIssueNumber(): number {
const issue = parseInt(process.env.ISSUE_NUMBER ?? "", 10)
if (!issue) throw new Error("ISSUE_NUMBER env var not set")
return issue
}
async function githubFetch(endpoint: string, options: RequestInit = {}) {
const response = await fetch(`https://api.github.com${endpoint}`, {
...options,
headers: {
Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
Accept: "application/vnd.github+json",
"Content-Type": "application/json",
...options.headers,
},
})
if (!response.ok) {
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`)
}
return response.json()
}
export default tool({
description: DESCRIPTION,
args: {
assignee: tool.schema
.enum(["thdxr", "adamdotdevin", "rekram1-node", "fwang", "jayair", "kommander"])
.describe("The username of the assignee")
.default("rekram1-node"),
labels: tool.schema
.array(tool.schema.enum(["nix", "opentui", "perf", "desktop", "zen", "docs", "windows"]))
.describe("The labels(s) to add to the issue")
.default([]),
},
async execute(args) {
const issue = getIssueNumber()
// const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN })
const owner = "anomalyco"
const repo = "opencode"
const results: string[] = []
if (args.assignee === "adamdotdevin" && !args.labels.includes("desktop")) {
throw new Error("Only desktop issues should be assigned to adamdotdevin")
}
if (args.assignee === "fwang" && !args.labels.includes("zen")) {
throw new Error("Only zen issues should be assigned to fwang")
}
if (args.assignee === "kommander" && !args.labels.includes("opentui")) {
throw new Error("Only opentui issues should be assigned to kommander")
}
// await octokit.rest.issues.addAssignees({
// owner,
// repo,
// issue_number: issue,
// assignees: [args.assignee],
// })
await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/assignees`, {
method: "POST",
body: JSON.stringify({ assignees: [args.assignee] }),
})
results.push(`Assigned @${args.assignee} to issue #${issue}`)
const labels: string[] = args.labels.map((label) => (label === "desktop" ? "web" : label))
if (labels.length > 0) {
// await octokit.rest.issues.addLabels({
// owner,
// repo,
// issue_number: issue,
// labels,
// })
await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/labels`, {
method: "POST",
body: JSON.stringify({ labels }),
})
results.push(`Added labels: ${args.labels.join(", ")}`)
}
return results.join("\n")
},
})


@@ -1,88 +0,0 @@
Use this tool to assign and/or label a Github issue.
You can assign the following users:
- thdxr
- adamdotdevin
- fwang
- jayair
- kommander
- rekram1-node
You can use the following labels:
- nix
- opentui
- perf
- web
- zen
- docs
Always try to assign an issue, if in doubt, assign rekram1-node to it.
## Breakdown of responsibilities:
### thdxr
Dax is responsible for managing core parts of the application. For large feature requests, API changes, or things that require significant changes to the codebase, assign him.
This relates primarily to the OpenCode server but has overlap with just about anything.
### adamdotdevin
Adam is responsible for managing the Desktop/Web app. If there is an issue relating to the desktop app or the `opencode web` command, assign him.
### fwang
Frank is responsible for managing Zen. If you see complaints about OpenCode Zen (the dashboard, model quality, billing issues, etc.), assign him to the issue.
### jayair
Jay is responsible for documentation. If there is an issue relating to documentation, assign him.
### kommander
Sebastian is responsible for managing OpenTUI (a library for building terminal user interfaces). OpenCode's TUI is built with OpenTUI. If there are issues about:
- random characters on screen
- keybinds not working on different terminals
- general terminal stuff
Then assign the issue to him.
### rekram1-node
ALL BUGS SHOULD BE assigned to rekram1-node unless they have the `opentui` label.
Assign Aiden to an issue as a catch-all if you can't assign anyone else. Most of the time this will be bugs/polish things.
If no one else makes sense to assign, assign rekram1-node to it.
Always assign to Aiden if the issue mentions "acp", "zed", or model performance issues.
## Breakdown of Labels:
### nix
Any issue that mentions nix or NixOS should have a nix label
### opentui
Anything relating to the TUI itself should have an opentui label
### perf
Anything related to slow performance, high RAM usage, high CPU usage, or any other performance-related issue should have a perf label
### desktop
Anything related to the `opencode web` command or the desktop app should have a desktop label. Never add this label for anything terminal/TUI related
### zen
Anything related to OpenCode Zen, billing, or model quality from Zen should have a zen label
### docs
Anything related to the documentation should have a docs label
### windows
Use for any issue that involves the Windows OS


@@ -1 +0,0 @@
sst-env.d.ts


@@ -1,11 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"type": "bun",
"request": "attach",
"name": "opencode (attach)",
"url": "ws://localhost:6499/"
}
]
}


@@ -1,5 +0,0 @@
{
"recommendations": [
"oven.bun-vscode"
]
}


@@ -1,4 +0,0 @@
- To test opencode in `packages/opencode`, run `bun dev`.
- To regenerate the JavaScript SDK, run `./packages/sdk/js/script/build.ts`.
- ALWAYS USE PARALLEL TOOLS WHEN APPLICABLE.
- The default branch in this repo is `dev`.


@@ -1,260 +0,0 @@
# Contributing to OpenCode
We want to make it easy for you to contribute to OpenCode. Here are the most common types of changes that get merged:
- Bug fixes
- Additional LSPs / Formatters
- Improvements to LLM performance
- Support for new providers
- Fixes for environment-specific quirks
- Missing standard behavior
- Documentation improvements
However, any UI or core product feature must go through a design review with the core team before implementation.
If you are unsure if a PR would be accepted, feel free to ask a maintainer or look for issues with any of the following labels:
- [`help wanted`](https://github.com/anomalyco/opencode/issues?q=is%3Aissue%20state%3Aopen%20label%3Ahelp-wanted)
- [`good first issue`](https://github.com/anomalyco/opencode/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22good%20first%20issue%22)
- [`bug`](https://github.com/anomalyco/opencode/issues?q=is%3Aissue%20state%3Aopen%20label%3Abug)
- [`perf`](https://github.com/anomalyco/opencode/issues?q=is%3Aopen%20is%3Aissue%20label%3A%22perf%22)
> [!NOTE]
> PRs that ignore these guardrails will likely be closed.
Want to take on an issue? Leave a comment and a maintainer may assign it to you unless it is something we are already working on.
## Developing OpenCode
- Requirements: Bun 1.3+
- Install dependencies and start the dev server from the repo root:
```bash
bun install
bun dev
```
### Running against a different directory
By default, `bun dev` runs OpenCode in the `packages/opencode` directory. To run it against a different directory or repository:
```bash
bun dev <directory>
```
To run OpenCode in the root of the opencode repo itself:
```bash
bun dev .
```
### Building a "localcode"
To compile a standalone executable:
```bash
./packages/opencode/script/build.ts --single
```
Then run it with:
```bash
./packages/opencode/dist/opencode-<platform>/bin/opencode
```
Replace `<platform>` with your platform (e.g., `darwin-arm64`, `linux-x64`).
- Core pieces:
- `packages/opencode`: OpenCode core business logic & server.
- `packages/opencode/src/cli/cmd/tui/`: The TUI code, written in SolidJS with [opentui](https://github.com/sst/opentui)
- `packages/app`: The shared web UI components, written in SolidJS
- `packages/desktop`: The native desktop app, built with Tauri (wraps `packages/app`)
- `packages/plugin`: Source for `@opencode-ai/plugin`
### Understanding bun dev vs opencode
During development, `bun dev` is the local equivalent of the built `opencode` command. Both run the same CLI interface:
```bash
# Development (from project root)
bun dev --help # Show all available commands
bun dev serve # Start headless API server
bun dev web # Start server + open web interface
bun dev <directory> # Start TUI in specific directory
# Production
opencode --help # Show all available commands
opencode serve # Start headless API server
opencode web # Start server + open web interface
opencode <directory> # Start TUI in specific directory
```
### Running the API Server
To start the OpenCode headless API server:
```bash
bun dev serve
```
This starts the headless server on port 4096 by default. You can specify a different port:
```bash
bun dev serve --port 8080
```
### Running the Web App
To test UI changes during development:
1. **First, start the OpenCode server** (see [Running the API Server](#running-the-api-server) section above)
2. **Then run the web app:**
```bash
bun run --cwd packages/app dev
```
This starts a local dev server at http://localhost:5173 (or similar port shown in output). Most UI changes can be tested here, but the server must be running for full functionality.
### Running the Desktop App
The desktop app is a native Tauri application that wraps the web UI.
To run the native desktop app:
```bash
bun run --cwd packages/desktop tauri dev
```
This starts the web dev server on http://localhost:1420 and opens the native window.
If you only want the web dev server (no native shell):
```bash
bun run --cwd packages/desktop dev
```
To create a production `dist/` and build the native app bundle:
```bash
bun run --cwd packages/desktop tauri build
```
This runs `bun run --cwd packages/desktop build` automatically via Tauri's `beforeBuildCommand`.
> [!NOTE]
> Running the desktop app requires additional Tauri dependencies (Rust toolchain, platform-specific libraries). See the [Tauri prerequisites](https://v2.tauri.app/start/prerequisites/) for setup instructions.
> [!NOTE]
> If you make changes to the API or SDK (e.g. `packages/opencode/src/server/server.ts`), run `./script/generate.ts` to regenerate the SDK and related files.
Please try to follow the [style guide](./STYLE_GUIDE.md)
### Setting up a Debugger
Bun debugging is currently rough around the edges. We hope this guide helps you get set up and avoid some pain points.
The most reliable way to debug OpenCode is to run it manually in a terminal via `bun run --inspect=<url> dev ...` and attach
your debugger via that URL. Other methods can result in breakpoints being mapped incorrectly, at least in VSCode (YMMV).
Caveats:
- If you want to run the OpenCode TUI and have breakpoints triggered in the server code, you might need to run `bun dev spawn` instead of
the usual `bun dev`. This is because `bun dev` runs the server in a worker thread and breakpoints might not work there.
- If `spawn` does not work for you, you can debug the server separately:
- Debug server: `bun run --inspect=ws://localhost:6499/ --cwd packages/opencode ./src/index.ts serve --port 4096`,
then attach TUI with `opencode attach http://localhost:4096`
- Debug TUI: `bun run --inspect=ws://localhost:6499/ --cwd packages/opencode --conditions=browser ./src/index.ts`
Other tips and tricks:
- You might want to use `--inspect-wait` or `--inspect-brk` instead of `--inspect`, depending on your workflow
- Specifying `--inspect=ws://localhost:6499/` on every invocation can be tiresome, you may want to `export BUN_OPTIONS=--inspect=ws://localhost:6499/` instead
#### VSCode Setup
If you use VSCode, you can use our example configurations [.vscode/settings.example.json](.vscode/settings.example.json) and [.vscode/launch.example.json](.vscode/launch.example.json).
Some debug methods that can be problematic:
- Debug configurations with `"request": "launch"` can have breakpoints incorrectly mapped, making them unusable
- The same problem arises when running OpenCode in the VSCode `JavaScript Debug Terminal`
With that said, you may want to try these methods, as they might work for you.
## Pull Request Expectations
### Issue First Policy
**All PRs must reference an existing issue.** Before opening a PR, open an issue describing the bug or feature. This helps maintainers triage and prevents duplicate work. PRs without a linked issue may be closed without review.
- Use `Fixes #123` or `Closes #123` in your PR description to link the issue
- For small fixes, a brief issue is fine - just enough context for maintainers to understand the problem
### General Requirements
- Keep pull requests small and focused
- Explain the issue and why your change fixes it
- Before adding new functionality, ensure it doesn't already exist elsewhere in the codebase
### UI Changes
If your PR includes UI changes, please include screenshots or videos showing the before and after. This helps maintainers review faster and gives you quicker feedback.
### Logic Changes
For non-UI changes (bug fixes, new features, refactors), explain **how you verified it works**:
- What did you test?
- How can a reviewer reproduce/confirm the fix?
### No AI-Generated Walls of Text
Long, AI-generated PR descriptions and issues are not acceptable and may be ignored. Respect the maintainers' time:
- Write short, focused descriptions
- Explain what changed and why in your own words
- If you can't explain it briefly, your PR might be too large
### PR Titles
PR titles should follow conventional commit standards:
- `feat:` new feature or functionality
- `fix:` bug fix
- `docs:` documentation or README changes
- `chore:` maintenance tasks, dependency updates, etc.
- `refactor:` code refactoring without changing behavior
- `test:` adding or updating tests
You can optionally include a scope to indicate which package is affected:
- `feat(app):` feature in the app package
- `fix(desktop):` bug fix in the desktop package
- `chore(opencode):` maintenance in the opencode package
Examples:
- `docs: update contributing guidelines`
- `fix: resolve crash on startup`
- `feat: add dark mode support`
- `feat(app): add dark mode support`
- `fix(desktop): resolve crash on startup`
- `chore: bump dependency versions`
### Style Preferences
These are not strictly enforced, they are just general guidelines:
- **Functions:** Keep logic within a single function unless breaking it out adds clear reuse or composition benefits.
- **Destructuring:** Do not do unnecessary destructuring of variables.
- **Control flow:** Avoid `else` statements.
- **Error handling:** Prefer `.catch(...)` instead of `try`/`catch` when possible.
- **Types:** Reach for precise types and avoid `any`.
- **Variables:** Stick to immutable patterns and avoid `let`.
- **Naming:** Choose concise single-word identifiers when they remain descriptive.
- **Runtime APIs:** Use Bun helpers such as `Bun.file()` when they fit the use case.
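For illustration, here is a small hypothetical helper written in this style (not actual project code):
```ts
import path from "node:path"

interface Config {
  port: number
}

// Reads an optional JSON config; falls back to the default server port
export async function loadConfig(dir: string): Promise<Config> {
  const fallback: Config = { port: 4096 }
  const file = Bun.file(path.join(dir, "config.json"))
  if (!(await file.exists())) return fallback
  return file
    .json()
    .then((data): Config => ({ port: Number(data.port ?? fallback.port) }))
    .catch(() => fallback)
}
```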
## Feature Requests
For net-new functionality, start with a design conversation. Open an issue describing the problem, your proposed approach (optional), and why it belongs in OpenCode. The core team will help decide whether it should move forward; please wait for that approval instead of opening a feature PR directly.


@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2025 opencode
Copyright (c) 2025 Kujtim Hoxha
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

README.md

@@ -1,115 +1,526 @@
<p align="center">
<a href="https://opencode.ai">
<picture>
<source srcset="packages/console/app/src/asset/logo-ornate-dark.svg" media="(prefers-color-scheme: dark)">
<source srcset="packages/console/app/src/asset/logo-ornate-light.svg" media="(prefers-color-scheme: light)">
<img src="packages/console/app/src/asset/logo-ornate-light.svg" alt="OpenCode logo">
</picture>
</a>
</p>
<p align="center">The open source AI coding agent.</p>
<p align="center">
<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
<a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
<a href="https://github.com/anomalyco/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/anomalyco/opencode/publish.yml?style=flat-square&branch=dev" /></a>
</p>
# ⌬ OpenCode
[![OpenCode Terminal UI](packages/web/src/assets/lander/screenshot.png)](https://opencode.ai)
> **⚠️ Early Development Notice:** This project is in early development and is not yet ready for production use. Features may change, break, or be incomplete. Use at your own risk.
---
A powerful terminal-based AI assistant for developers, providing intelligent coding assistance directly in your terminal.
### Installation
## Overview
OpenCode is a Go-based CLI application that brings AI assistance to your terminal. It provides a TUI (Terminal User Interface) for interacting with various AI models to help with coding tasks, debugging, and more.
## Features
- **Interactive TUI**: Built with [Bubble Tea](https://github.com/charmbracelet/bubbletea) for a smooth terminal experience
- **Multiple AI Providers**: Support for OpenAI, Anthropic Claude, Google Gemini, AWS Bedrock, Groq, Azure OpenAI, and OpenRouter
- **Session Management**: Save and manage multiple conversation sessions
- **Tool Integration**: AI can execute commands, search files, and modify code
- **Vim-like Editor**: Integrated editor with text input capabilities
- **Persistent Storage**: SQLite database for storing conversations and sessions
- **LSP Integration**: Language Server Protocol support for code intelligence
- **File Change Tracking**: Track and visualize file changes during sessions
- **External Editor Support**: Open your preferred editor for composing messages
## Installation
### Using the Install Script
```bash
# YOLO
curl -fsSL https://opencode.ai/install | bash
# Install the latest version
curl -fsSL https://raw.githubusercontent.com/opencode-ai/opencode/refs/heads/main/install | bash
# Package managers
npm i -g opencode-ai@latest # or bun/pnpm/yarn
scoop install opencode # Windows
choco install opencode # Windows
brew install anomalyco/tap/opencode # macOS and Linux (recommended, always up to date)
brew install opencode # macOS and Linux (official brew formula, updated less)
paru -S opencode-bin # Arch Linux
mise use -g opencode # Any OS
nix run nixpkgs#opencode # or github:anomalyco/opencode for latest dev branch
# Install a specific version
curl -fsSL https://raw.githubusercontent.com/opencode-ai/opencode/refs/heads/main/install | VERSION=0.1.0 bash
```
> [!TIP]
> Remove versions older than 0.1.x before installing.
### Desktop App (BETA)
OpenCode is also available as a desktop application. Download directly from the [releases page](https://github.com/anomalyco/opencode/releases) or [opencode.ai/download](https://opencode.ai/download).
| Platform | Download |
| --------------------- | ------------------------------------- |
| macOS (Apple Silicon) | `opencode-desktop-darwin-aarch64.dmg` |
| macOS (Intel) | `opencode-desktop-darwin-x64.dmg` |
| Windows | `opencode-desktop-windows-x64.exe` |
| Linux | `.deb`, `.rpm`, or AppImage |
### Using Homebrew (macOS and Linux)
```bash
# macOS (Homebrew)
brew install --cask opencode-desktop
# Windows (Scoop)
scoop bucket add extras; scoop install extras/opencode-desktop
brew install opencode-ai/tap/opencode
```
#### Installation Directory
The install script respects the following priority order for the installation path:
1. `$OPENCODE_INSTALL_DIR` - Custom installation directory
2. `$XDG_BIN_DIR` - XDG Base Directory Specification compliant path
3. `$HOME/bin` - Standard user binary directory (if exists or can be created)
4. `$HOME/.opencode/bin` - Default fallback
### Using Go
```bash
# Examples
OPENCODE_INSTALL_DIR=/usr/local/bin curl -fsSL https://opencode.ai/install | bash
XDG_BIN_DIR=$HOME/.local/bin curl -fsSL https://opencode.ai/install | bash
go install github.com/opencode-ai/opencode@latest
```
### Agents
## Configuration
OpenCode includes two built-in agents you can switch between with the `Tab` key.
OpenCode looks for configuration in the following locations:
- **build** - Default, full access agent for development work
- **plan** - Read-only agent for analysis and code exploration
- Denies file edits by default
- Asks permission before running bash commands
- Ideal for exploring unfamiliar codebases or planning changes
- `$HOME/.opencode.json`
- `$XDG_CONFIG_HOME/opencode/.opencode.json`
- `./.opencode.json` (local directory)
Also included is a **general** subagent for complex searches and multistep tasks.
This is used internally and can be invoked using `@general` in messages.
### Auto Compact Feature
Learn more about [agents](https://opencode.ai/docs/agents).
OpenCode includes an auto compact feature that automatically summarizes your conversation when it approaches the model's context window limit. When enabled (default setting), this feature:
### Documentation
- Monitors token usage during your conversation
- Automatically triggers summarization when usage reaches 95% of the model's context window
- Creates a new session with the summary, allowing you to continue your work without losing context
- Helps prevent "out of context" errors that can occur with long conversations
For more info on how to configure OpenCode [**head over to our docs**](https://opencode.ai/docs).
You can enable or disable this feature in your configuration file:
### Contributing
```json
{
"autoCompact": true // default is true
}
```
If you're interested in contributing to OpenCode, please read our [contributing docs](./CONTRIBUTING.md) before submitting a pull request.
### Environment Variables
### Building on OpenCode
You can configure OpenCode using environment variables:
If you are working on a project that's related to OpenCode and uses "opencode" as part of its name (for example, "opencode-dashboard" or "opencode-mobile"), please add a note to your README to clarify that it is not built by the OpenCode team and is not affiliated with us in any way.
| Environment Variable | Purpose |
| -------------------------- | ------------------------------------------------------ |
| `ANTHROPIC_API_KEY` | For Claude models |
| `OPENAI_API_KEY` | For OpenAI models |
| `GEMINI_API_KEY` | For Google Gemini models |
| `GROQ_API_KEY` | For Groq models |
| `AWS_ACCESS_KEY_ID` | For AWS Bedrock (Claude) |
| `AWS_SECRET_ACCESS_KEY` | For AWS Bedrock (Claude) |
| `AWS_REGION` | For AWS Bedrock (Claude) |
| `AZURE_OPENAI_ENDPOINT` | For Azure OpenAI models |
| `AZURE_OPENAI_API_KEY` | For Azure OpenAI models (optional when using Entra ID) |
| `AZURE_OPENAI_API_VERSION` | For Azure OpenAI models |
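For example, you can export a key for your provider before launching (placeholder value shown):
```bash
# Placeholder value; any variable from the table above works the same way
export ANTHROPIC_API_KEY="your-api-key"
opencode
```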
### FAQ
### Configuration File Structure
#### How is this different from Claude Code?
```json
{
"data": {
"directory": ".opencode"
},
"providers": {
"openai": {
"apiKey": "your-api-key",
"disabled": false
},
"anthropic": {
"apiKey": "your-api-key",
"disabled": false
},
"groq": {
"apiKey": "your-api-key",
"disabled": false
},
"openrouter": {
"apiKey": "your-api-key",
"disabled": false
}
},
"agents": {
"coder": {
"model": "claude-3.7-sonnet",
"maxTokens": 5000
},
"task": {
"model": "claude-3.7-sonnet",
"maxTokens": 5000
},
"title": {
"model": "claude-3.7-sonnet",
"maxTokens": 80
}
},
"mcpServers": {
"example": {
"type": "stdio",
"command": "path/to/mcp-server",
"env": [],
"args": []
}
},
"lsp": {
"go": {
"disabled": false,
"command": "gopls"
}
},
"debug": false,
"debugLSP": false,
"autoCompact": true
}
```
It's very similar to Claude Code in terms of capability. Here are the key differences:
## Supported AI Models
- 100% open source
- Not coupled to any provider. Although we recommend the models we provide through [OpenCode Zen](https://opencode.ai/zen), OpenCode can be used with Claude, OpenAI, Google or even local models. As models evolve the gaps between them will close and pricing will drop, so being provider-agnostic is important.
- Out of the box LSP support
- A focus on TUI. OpenCode is built by neovim users and the creators of [terminal.shop](https://terminal.shop); we are going to push the limits of what's possible in the terminal.
- A client/server architecture. This, for example, allows OpenCode to run on your computer while you drive it remotely from a mobile app, meaning the TUI frontend is just one of the possible clients.
OpenCode supports a variety of AI models from different providers:
---
### OpenAI
**Join our community** [Discord](https://discord.gg/opencode) | [X.com](https://x.com/opencode)
- GPT-4.1 family (gpt-4.1, gpt-4.1-mini, gpt-4.1-nano)
- GPT-4.5 Preview
- GPT-4o family (gpt-4o, gpt-4o-mini)
- O1 family (o1, o1-pro, o1-mini)
- O3 family (o3, o3-mini)
- O4 Mini
### Anthropic
- Claude 3.5 Sonnet
- Claude 3.5 Haiku
- Claude 3.7 Sonnet
- Claude 3 Haiku
- Claude 3 Opus
### Google
- Gemini 2.5
- Gemini 2.5 Flash
- Gemini 2.0 Flash
- Gemini 2.0 Flash Lite
### AWS Bedrock
- Claude 3.7 Sonnet
### Groq
- Llama 4 Maverick (17b-128e-instruct)
- Llama 4 Scout (17b-16e-instruct)
- QWEN QWQ-32b
- Deepseek R1 distill Llama 70b
- Llama 3.3 70b Versatile
### Azure OpenAI
- GPT-4.1 family (gpt-4.1, gpt-4.1-mini, gpt-4.1-nano)
- GPT-4.5 Preview
- GPT-4o family (gpt-4o, gpt-4o-mini)
- O1 family (o1, o1-mini)
- O3 family (o3, o3-mini)
- O4 Mini
## Usage
```bash
# Start OpenCode
opencode
# Start with debug logging
opencode -d
# Start with a specific working directory
opencode -c /path/to/project
```
## Command-line Flags
| Flag | Short | Description |
| --------- | ----- | ----------------------------- |
| `--help` | `-h` | Display help information |
| `--debug` | `-d` | Enable debug mode |
| `--cwd` | `-c` | Set current working directory |
## Keyboard Shortcuts
### Global Shortcuts
| Shortcut | Action |
| -------- | ------------------------------------------------------- |
| `Ctrl+C` | Quit application |
| `Ctrl+?` | Toggle help dialog |
| `?` | Toggle help dialog (when not in editing mode) |
| `Ctrl+L` | View logs |
| `Ctrl+A` | Switch session |
| `Ctrl+K` | Command dialog |
| `Ctrl+O` | Toggle model selection dialog |
| `Esc` | Close current overlay/dialog or return to previous mode |
### Chat Page Shortcuts
| Shortcut | Action |
| -------- | --------------------------------------- |
| `Ctrl+N` | Create new session |
| `Ctrl+X` | Cancel current operation/generation |
| `i` | Focus editor (when not in writing mode) |
| `Esc` | Exit writing mode and focus messages |
### Editor Shortcuts
| Shortcut | Action |
| ------------------- | ----------------------------------------- |
| `Ctrl+S` | Send message (when editor is focused) |
| `Enter` or `Ctrl+S` | Send message (when editor is not focused) |
| `Ctrl+E` | Open external editor |
| `Esc` | Blur editor and focus messages |
### Session Dialog Shortcuts
| Shortcut | Action |
| ---------- | ---------------- |
| `↑` or `k` | Previous session |
| `↓` or `j` | Next session |
| `Enter` | Select session |
| `Esc` | Close dialog |
### Model Dialog Shortcuts
| Shortcut | Action |
| ---------- | ----------------- |
| `↑` or `k` | Move up |
| `↓` or `j` | Move down |
| `←` or `h` | Previous provider |
| `→` or `l` | Next provider |
| `Esc` | Close dialog |
### Permission Dialog Shortcuts
| Shortcut | Action |
| ----------------------- | ---------------------------- |
| `←` or `left` | Switch options left |
| `→` or `right` or `tab` | Switch options right |
| `Enter` or `space` | Confirm selection |
| `a` | Allow permission |
| `A` | Allow permission for session |
| `d` | Deny permission |
### Logs Page Shortcuts
| Shortcut | Action |
| ------------------ | ------------------- |
| `Backspace` or `q` | Return to chat page |
## AI Assistant Tools
OpenCode's AI assistant has access to various tools to help with coding tasks:
### File and Code Tools
| Tool | Description | Parameters |
| ------------- | --------------------------- | ---------------------------------------------------------------------------------------- |
| `glob` | Find files by pattern | `pattern` (required), `path` (optional) |
| `grep` | Search file contents | `pattern` (required), `path` (optional), `include` (optional), `literal_text` (optional) |
| `ls` | List directory contents | `path` (optional), `ignore` (optional array of patterns) |
| `view` | View file contents | `file_path` (required), `offset` (optional), `limit` (optional) |
| `write` | Write to files | `file_path` (required), `content` (required) |
| `edit` | Edit files | Various parameters for file editing |
| `patch` | Apply patches to files | `file_path` (required), `diff` (required) |
| `diagnostics` | Get diagnostics information | `file_path` (optional) |
### Other Tools
| Tool | Description | Parameters |
| ------------- | -------------------------------------- | ----------------------------------------------------------------------------------------- |
| `bash` | Execute shell commands | `command` (required), `timeout` (optional) |
| `fetch` | Fetch data from URLs | `url` (required), `format` (required), `timeout` (optional) |
| `sourcegraph` | Search code across public repositories | `query` (required), `count` (optional), `context_window` (optional), `timeout` (optional) |
| `agent` | Run sub-tasks with the AI agent | `prompt` (required) |
## Architecture
OpenCode is built with a modular architecture:
- **cmd**: Command-line interface using Cobra
- **internal/app**: Core application services
- **internal/config**: Configuration management
- **internal/db**: Database operations and migrations
- **internal/llm**: LLM providers and tools integration
- **internal/tui**: Terminal UI components and layouts
- **internal/logging**: Logging infrastructure
- **internal/message**: Message handling
- **internal/session**: Session management
- **internal/lsp**: Language Server Protocol integration
## Custom Commands
OpenCode supports custom commands that can be created by users to quickly send predefined prompts to the AI assistant.
### Creating Custom Commands
Custom commands are predefined prompts stored as Markdown files in one of three locations:
1. **User Commands** (prefixed with `user:`):
```
$XDG_CONFIG_HOME/opencode/commands/
```
(typically `~/.config/opencode/commands/` on Linux/macOS)
or
```
$HOME/.opencode/commands/
```
2. **Project Commands** (prefixed with `project:`):
```
<PROJECT DIR>/.opencode/commands/
```
Each `.md` file in these directories becomes a custom command. The file name (without extension) becomes the command ID.
For example, creating a file at `~/.config/opencode/commands/prime-context.md` with content:
```markdown
RUN git ls-files
READ README.md
```
This creates a command called `user:prime-context`.
### Command Arguments
You can create commands that accept arguments by including the `$ARGUMENTS` placeholder in your command file:
```markdown
RUN git show $ARGUMENTS
```
When you run this command, OpenCode will prompt you to enter the text that should replace `$ARGUMENTS`.
### Organizing Commands
You can organize commands in subdirectories:
```
~/.config/opencode/commands/git/commit.md
```
This creates a command with ID `user:git:commit`.
### Using Custom Commands
1. Press `Ctrl+K` to open the command dialog
2. Select your custom command (prefixed with either `user:` or `project:`)
3. Press Enter to execute the command
The content of the command file will be sent as a message to the AI assistant.
### Built-in Commands
OpenCode includes several built-in commands:
| Command | Description |
| ------------------ | --------------------------------------------------------------------------------------------------- |
| Initialize Project | Creates or updates the OpenCode.md memory file with project-specific information |
| Compact Session | Manually triggers the summarization of the current session, creating a new session with the summary |
## MCP (Model Context Protocol)
OpenCode implements the Model Context Protocol (MCP) to extend its capabilities through external tools. MCP provides a standardized way for the AI assistant to interact with external services and tools.
### MCP Features
- **External Tool Integration**: Connect to external tools and services via a standardized protocol
- **Tool Discovery**: Automatically discover available tools from MCP servers
- **Multiple Connection Types**:
- **Stdio**: Communicate with tools via standard input/output
- **SSE**: Communicate with tools via Server-Sent Events
- **Security**: Permission system for controlling access to MCP tools
### Configuring MCP Servers
MCP servers are defined in the configuration file under the `mcpServers` section:
```json
{
"mcpServers": {
"example": {
"type": "stdio",
"command": "path/to/mcp-server",
"env": [],
"args": []
},
"web-example": {
"type": "sse",
"url": "https://example.com/mcp",
"headers": {
"Authorization": "Bearer token"
}
}
}
}
```
### MCP Tool Usage
Once configured, MCP tools are automatically available to the AI assistant alongside built-in tools. They follow the same permission model as other tools, requiring user approval before execution.
## LSP (Language Server Protocol)
OpenCode integrates with Language Server Protocol to provide code intelligence features across multiple programming languages.
### LSP Features
- **Multi-language Support**: Connect to language servers for different programming languages
- **Diagnostics**: Receive error checking and linting information
- **File Watching**: Automatically notify language servers of file changes
### Configuring LSP
Language servers are configured in the configuration file under the `lsp` section:
```json
{
"lsp": {
"go": {
"disabled": false,
"command": "gopls"
},
"typescript": {
"disabled": false,
"command": "typescript-language-server",
"args": ["--stdio"]
}
}
}
```
### LSP Integration with AI
The AI assistant can access LSP features through the `diagnostics` tool, allowing it to:
- Check for errors in your code
- Suggest fixes based on diagnostics
While the LSP client implementation supports the full LSP protocol (including completions, hover, definition, etc.), currently only diagnostics are exposed to the AI assistant.
## Development
### Prerequisites
- Go 1.24.0 or higher
### Building from Source
```bash
# Clone the repository
git clone https://github.com/opencode-ai/opencode.git
cd opencode
# Build
go build -o opencode
# Run
./opencode
```
## Acknowledgments
OpenCode gratefully acknowledges the contributions and support from these key individuals:
- [@isaacphi](https://github.com/isaacphi) - For the [mcp-language-server](https://github.com/isaacphi/mcp-language-server) project which provided the foundation for our LSP client implementation
- [@adamdottv](https://github.com/adamdottv) - For the design direction and UI/UX architecture
Special thanks to the broader open source community whose tools and libraries have made this project possible.
## License
OpenCode is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
## Contributing
Contributions are welcome! Here's how you can contribute:
1. Fork the repository
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
3. Commit your changes (`git commit -m 'Add some amazing feature'`)
4. Push to the branch (`git push origin feature/amazing-feature`)
5. Open a Pull Request
Please make sure to update tests as appropriate and follow the existing code style.


@@ -1,118 +0,0 @@
<p align="center">
<a href="https://opencode.ai">
<picture>
<source srcset="packages/console/app/src/asset/logo-ornate-dark.svg" media="(prefers-color-scheme: dark)">
<source srcset="packages/console/app/src/asset/logo-ornate-light.svg" media="(prefers-color-scheme: light)">
<img src="packages/console/app/src/asset/logo-ornate-light.svg" alt="OpenCode logo">
</picture>
</a>
</p>
<p align="center">开源的 AI Coding Agent。</p>
<p align="center">
<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
<a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
<a href="https://github.com/anomalyco/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/anomalyco/opencode/publish.yml?style=flat-square&branch=dev" /></a>
</p>
[![OpenCode Terminal UI](packages/web/src/assets/lander/screenshot.png)](https://opencode.ai)
---
### 安装
```bash
# 直接安装 (YOLO)
curl -fsSL https://opencode.ai/install | bash
# 软件包管理器
npm i -g opencode-ai@latest # 也可使用 bun/pnpm/yarn
scoop install opencode # Windows
choco install opencode # Windows
brew install anomalyco/tap/opencode # macOS 和 Linux推荐始终保持最新
brew install opencode # macOS 和 Linux官方 brew formula更新频率较低
paru -S opencode-bin # Arch Linux
mise use -g opencode # 任意系统
nix run nixpkgs#opencode # 或用 github:anomalyco/opencode 获取最新 dev 分支
```
> [!TIP]
> 安装前请先移除 0.1.x 之前的旧版本。
### 桌面应用程序 (BETA)
OpenCode 也提供桌面版应用。可直接从 [发布页 (releases page)](https://github.com/anomalyco/opencode/releases) 或 [opencode.ai/download](https://opencode.ai/download) 下载。
| 平台 | 下载文件 |
| --------------------- | ------------------------------------- |
| macOS (Apple Silicon) | `opencode-desktop-darwin-aarch64.dmg` |
| macOS (Intel) | `opencode-desktop-darwin-x64.dmg` |
| Windows | `opencode-desktop-windows-x64.exe` |
| Linux | `.deb``.rpm` 或 AppImage |
```bash
# macOS (Homebrew Cask)
brew install --cask opencode-desktop
# Windows (Scoop)
scoop bucket add extras; scoop install extras/opencode-desktop
```
#### 安装目录
安装脚本按照以下优先级决定安装路径:
1. `$OPENCODE_INSTALL_DIR` - 自定义安装目录
2. `$XDG_BIN_DIR` - 符合 XDG 基础目录规范的路径
3. `$HOME/bin` - 如果存在或可创建的用户二进制目录
4. `$HOME/.opencode/bin` - 默认备用路径
```bash
# 示例
OPENCODE_INSTALL_DIR=/usr/local/bin curl -fsSL https://opencode.ai/install | bash
XDG_BIN_DIR=$HOME/.local/bin curl -fsSL https://opencode.ai/install | bash
```
### Agents
OpenCode 内置两种 Agent可用 `Tab` 键快速切换:
- **build** - 默认模式,具备完整权限,适合开发工作
- **plan** - 只读模式,适合代码分析与探索
- 默认拒绝修改文件
- 运行 bash 命令前会询问
- 便于探索未知代码库或规划改动
另外还包含一个 **general** 子 Agent用于复杂搜索和多步任务内部使用也可在消息中输入 `@general` 调用。
了解更多 [Agents](https://opencode.ai/docs/agents) 相关信息。
### 文档
更多配置说明请查看我们的 [**官方文档**](https://opencode.ai/docs)。
### 参与贡献
如有兴趣贡献代码,请在提交 PR 前阅读 [贡献指南 (Contributing Docs)](./CONTRIBUTING.md)。
### 基于 OpenCode 进行开发
如果你在项目名中使用了 “opencode”如 “opencode-dashboard” 或 “opencode-mobile”请在 README 里注明该项目不是 OpenCode 团队官方开发,且不存在隶属关系。
### 常见问题 (FAQ)
#### 这和 Claude Code 有什么不同?
功能上很相似,关键差异:
- 100% 开源。
- 不绑定特定提供商。推荐使用 [OpenCode Zen](https://opencode.ai/zen) 的模型,但也可搭配 Claude、OpenAI、Google 甚至本地模型。模型迭代会缩小差异、降低成本,因此保持 provider-agnostic 很重要。
- 内置 LSP 支持。
- 聚焦终端界面 (TUI)。OpenCode 由 Neovim 爱好者和 [terminal.shop](https://terminal.shop) 的创建者打造,会持续探索终端的极限。
- 客户端/服务器架构。可在本机运行同时用移动设备远程驱动。TUI 只是众多潜在客户端之一。
#### 另一个同名的仓库是什么?
另一个名字相近的仓库与本项目无关。[点击这里了解背后故事](https://x.com/thdxr/status/1933561254481666466)。
---
**加入我们的社区** [Discord](https://discord.gg/opencode) | [X.com](https://x.com/opencode)


@@ -1,118 +0,0 @@
<p align="center">
<a href="https://opencode.ai">
<picture>
<source srcset="packages/console/app/src/asset/logo-ornate-dark.svg" media="(prefers-color-scheme: dark)">
<source srcset="packages/console/app/src/asset/logo-ornate-light.svg" media="(prefers-color-scheme: light)">
<img src="packages/console/app/src/asset/logo-ornate-light.svg" alt="OpenCode logo">
</picture>
</a>
</p>
<p align="center">開源的 AI Coding Agent。</p>
<p align="center">
<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
<a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
<a href="https://github.com/anomalyco/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/anomalyco/opencode/publish.yml?style=flat-square&branch=dev" /></a>
</p>
[![OpenCode Terminal UI](packages/web/src/assets/lander/screenshot.png)](https://opencode.ai)
---
### Installation
```bash
# Direct install (YOLO)
curl -fsSL https://opencode.ai/install | bash
# Package managers
npm i -g opencode-ai@latest # bun/pnpm/yarn also work
scoop install opencode # Windows
choco install opencode # Windows
brew install anomalyco/tap/opencode # macOS and Linux (recommended, always up to date)
brew install opencode # macOS and Linux (official brew formula, updated less often)
paru -S opencode-bin # Arch Linux
mise use -g github:anomalyco/opencode # any OS
nix run nixpkgs#opencode # or github:anomalyco/opencode for the latest dev branch
```
> [!TIP]
> Remove versions prior to 0.1.x before installing.
### Desktop App (BETA)
OpenCode is also available as a desktop app. You can download it directly from the [releases page](https://github.com/anomalyco/opencode/releases) or from [opencode.ai/download](https://opencode.ai/download).
| Platform | Download |
| --------------------- | ------------------------------------- |
| macOS (Apple Silicon) | `opencode-desktop-darwin-aarch64.dmg` |
| macOS (Intel) | `opencode-desktop-darwin-x64.dmg` |
| Windows | `opencode-desktop-windows-x64.exe` |
| Linux | `.deb`, `.rpm`, or AppImage |
```bash
# macOS (Homebrew Cask)
brew install --cask opencode-desktop
# Windows (Scoop)
scoop bucket add extras; scoop install extras/opencode-desktop
```
#### Install directory
The install script resolves the install location in this order of precedence:
1. `$OPENCODE_INSTALL_DIR` - custom install directory
2. `$XDG_BIN_DIR` - XDG Base Directory compliant path
3. `$HOME/bin` - standard user binary directory (if it exists or can be created)
4. `$HOME/.opencode/bin` - default fallback
```bash
# Examples
OPENCODE_INSTALL_DIR=/usr/local/bin curl -fsSL https://opencode.ai/install | bash
XDG_BIN_DIR=$HOME/.local/bin curl -fsSL https://opencode.ai/install | bash
```
### Agents
OpenCode comes with two built-in agents that you can switch between with `Tab`.
- **build** - the default agent, with full permissions, suited for development work.
- **plan** - a read-only mode for analyzing and exploring code.
  - denies file modifications by default.
  - asks for permission before running bash commands.
  - great for exploring unfamiliar codebases or planning changes.
OpenCode also includes a **general** subagent for complex searches and multi-step tasks. It is used internally and can also be invoked by typing `@general` in a message.
Learn more about [Agents](https://opencode.ai/docs/agents).
### Documentation
For details on configuring OpenCode, see our [**official docs**](https://opencode.ai/docs).
### Contributing
If you are interested in contributing to OpenCode, please read our [Contributing Docs](./CONTRIBUTING.md) before submitting a pull request.
### Building on top of OpenCode
If you are building a project related to OpenCode and use "opencode" in its name (for example "opencode-dashboard" or "opencode-mobile"), please add a note to your README stating that the project is not built by the OpenCode team and is not affiliated with us.
### FAQ
#### How is this different from Claude Code?
Functionally it is very similar to Claude Code. The key differences:
- 100% open source.
- Not tied to a single provider. We recommend the models available through [OpenCode Zen](https://opencode.ai/zen), but OpenCode can also be used with Claude, OpenAI, Google, or even local models. As models evolve, the gaps narrow and prices fall, so being provider-agnostic is essential.
- Built-in LSP (Language Server Protocol) support.
- Focused on the terminal UI (TUI). OpenCode is built by Neovim fans and the creators of [terminal.shop](https://terminal.shop), and we will keep pushing the limits of what a terminal interface can do.
- Client/server architecture. OpenCode can run on your machine while you drive it remotely from a mobile device, which means the TUI frontend is just one of many possible clients.
#### What about the other repo with a similar name?
The other repository with a similar name is unrelated to this project. [Read the story behind it here](https://x.com/thdxr/status/1933561254481666466).
---
**Join our community** [Discord](https://discord.gg/opencode) | [X.com](https://x.com/opencode)

@@ -1,41 +0,0 @@
# Security
## Threat Model
### Overview
OpenCode is an AI-powered coding assistant that runs locally on your machine. It provides an agent system with access to powerful tools including shell execution, file operations, and web access.
### No Sandbox
OpenCode does **not** sandbox the agent. The permission system exists as a UX feature to help users stay aware of what actions the agent is taking - it prompts for confirmation before executing commands, writing files, etc. However, it is not designed to provide security isolation.
If you need true isolation, run OpenCode inside a Docker container or VM.
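For example, a minimal sketch of an isolated run using Docker. The base image, the mount point, and the API key name below are illustrative assumptions, not an official OpenCode image or required setup:
```bash
# Run OpenCode inside a throwaway container so it can only see the mounted project.
docker run --rm -it \
  -v "$PWD":/workspace \
  -w /workspace \
  -e ANTHROPIC_API_KEY \
  node:22 bash -c '
    # install into a directory that is already on PATH inside the container
    export OPENCODE_INSTALL_DIR=/usr/local/bin
    curl -fsSL https://opencode.ai/install | bash
    opencode
  '
```
Anything the agent changes inside `/workspace` still affects the mounted project, so mount a copy if you want a fully disposable environment.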
### Server Mode
Server mode is opt-in only. When enabled, set `OPENCODE_SERVER_PASSWORD` to require HTTP Basic Auth. Without this, the server runs unauthenticated (with a warning). It is the end user's responsibility to secure the server - any functionality it provides is not a vulnerability.
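As a rough sketch of enabling authentication (how the server is started is not covered here, so the `serve` subcommand below is an assumption; check `opencode --help` for the actual entry point):
```bash
# Generate a strong password and export it before starting the server.
# With this variable set, the HTTP API requires Basic Auth instead of running unauthenticated.
export OPENCODE_SERVER_PASSWORD="$(openssl rand -hex 24)"
opencode serve   # assumed subcommand, shown for illustration only
```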
### Out of Scope
| Category | Rationale |
| ------------------------------- | ----------------------------------------------------------------------- |
| **Server access when opted-in** | If you enable server mode, API access is expected behavior |
| **Sandbox escapes** | The permission system is not a sandbox (see above) |
| **LLM provider data handling** | Data sent to your configured LLM provider is governed by their policies |
| **MCP server behavior** | External MCP servers you configure are outside our trust boundary |
| **Malicious config files** | Users control their own config; modifying it is not an attack vector |
---
# Reporting Security Issues
We appreciate your efforts to responsibly disclose your findings, and will make every effort to acknowledge your contributions.
To report a security issue, please use the GitHub Security Advisory ["Report a Vulnerability"](https://github.com/anomalyco/opencode/security/advisories/new) tab.
The team will send a response indicating the next steps in handling your report. After the initial reply to your report, the security team will keep you informed of the progress towards a fix and full announcement, and may ask for additional information or guidance.
## Escalation
If you do not receive an acknowledgement of your report within 6 business days, you may send an email to security@anoma.ly

STATS.md
@@ -1,211 +0,0 @@
# Download Stats
| Date | GitHub Downloads | npm Downloads | Total |
| ---------- | -------------------- | -------------------- | -------------------- |
| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
| 2025-07-09 | 40,924 (+2,872) | 67,935 (+3,467) | 108,859 (+6,339) |
| 2025-07-10 | 43,796 (+2,872) | 71,402 (+3,467) | 115,198 (+6,339) |
| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |
| 2025-07-12 | 49,302 (+2,320) | 82,177 (+4,715) | 131,479 (+7,035) |
| 2025-07-13 | 50,803 (+1,501) | 86,394 (+4,217) | 137,197 (+5,718) |
| 2025-07-14 | 53,283 (+2,480) | 87,860 (+1,466) | 141,143 (+3,946) |
| 2025-07-15 | 57,590 (+4,307) | 91,036 (+3,176) | 148,626 (+7,483) |
| 2025-07-16 | 62,313 (+4,723) | 95,258 (+4,222) | 157,571 (+8,945) |
| 2025-07-17 | 66,684 (+4,371) | 100,048 (+4,790) | 166,732 (+9,161) |
| 2025-07-18 | 70,379 (+3,695) | 102,587 (+2,539) | 172,966 (+6,234) |
| 2025-07-19 | 73,497 (+3,117) | 105,904 (+3,317) | 179,401 (+6,434) |
| 2025-07-20 | 76,453 (+2,956) | 109,044 (+3,140) | 185,497 (+6,096) |
| 2025-07-21 | 80,197 (+3,744) | 113,537 (+4,493) | 193,734 (+8,237) |
| 2025-07-22 | 84,251 (+4,054) | 118,073 (+4,536) | 202,324 (+8,590) |
| 2025-07-23 | 88,589 (+4,338) | 121,436 (+3,363) | 210,025 (+7,701) |
| 2025-07-24 | 92,469 (+3,880) | 124,091 (+2,655) | 216,560 (+6,535) |
| 2025-07-25 | 96,417 (+3,948) | 126,985 (+2,894) | 223,402 (+6,842) |
| 2025-07-26 | 100,646 (+4,229) | 131,411 (+4,426) | 232,057 (+8,655) |
| 2025-07-27 | 102,644 (+1,998) | 134,736 (+3,325) | 237,380 (+5,323) |
| 2025-07-28 | 105,446 (+2,802) | 136,016 (+1,280) | 241,462 (+4,082) |
| 2025-07-29 | 108,998 (+3,552) | 137,542 (+1,526) | 246,540 (+5,078) |
| 2025-07-30 | 113,544 (+4,546) | 140,317 (+2,775) | 253,861 (+7,321) |
| 2025-07-31 | 118,339 (+4,795) | 143,344 (+3,027) | 261,683 (+7,822) |
| 2025-08-01 | 123,539 (+5,200) | 146,680 (+3,336) | 270,219 (+8,536) |
| 2025-08-02 | 127,864 (+4,325) | 149,236 (+2,556) | 277,100 (+6,881) |
| 2025-08-03 | 131,397 (+3,533) | 150,451 (+1,215) | 281,848 (+4,748) |
| 2025-08-04 | 136,266 (+4,869) | 153,260 (+2,809) | 289,526 (+7,678) |
| 2025-08-05 | 141,596 (+5,330) | 155,752 (+2,492) | 297,348 (+7,822) |
| 2025-08-06 | 147,067 (+5,471) | 158,309 (+2,557) | 305,376 (+8,028) |
| 2025-08-07 | 152,591 (+5,524) | 160,889 (+2,580) | 313,480 (+8,104) |
| 2025-08-08 | 158,187 (+5,596) | 163,448 (+2,559) | 321,635 (+8,155) |
| 2025-08-09 | 162,770 (+4,583) | 165,721 (+2,273) | 328,491 (+6,856) |
| 2025-08-10 | 165,695 (+2,925) | 167,109 (+1,388) | 332,804 (+4,313) |
| 2025-08-11 | 169,297 (+3,602) | 167,953 (+844) | 337,250 (+4,446) |
| 2025-08-12 | 176,307 (+7,010) | 171,876 (+3,923) | 348,183 (+10,933) |
| 2025-08-13 | 182,997 (+6,690) | 177,182 (+5,306) | 360,179 (+11,996) |
| 2025-08-14 | 189,063 (+6,066) | 179,741 (+2,559) | 368,804 (+8,625) |
| 2025-08-15 | 193,608 (+4,545) | 181,792 (+2,051) | 375,400 (+6,596) |
| 2025-08-16 | 198,118 (+4,510) | 184,558 (+2,766) | 382,676 (+7,276) |
| 2025-08-17 | 201,299 (+3,181) | 186,269 (+1,711) | 387,568 (+4,892) |
| 2025-08-18 | 204,559 (+3,260) | 187,399 (+1,130) | 391,958 (+4,390) |
| 2025-08-19 | 209,814 (+5,255) | 189,668 (+2,269) | 399,482 (+7,524) |
| 2025-08-20 | 214,497 (+4,683) | 191,481 (+1,813) | 405,978 (+6,496) |
| 2025-08-21 | 220,465 (+5,968) | 194,784 (+3,303) | 415,249 (+9,271) |
| 2025-08-22 | 225,899 (+5,434) | 197,204 (+2,420) | 423,103 (+7,854) |
| 2025-08-23 | 229,005 (+3,106) | 199,238 (+2,034) | 428,243 (+5,140) |
| 2025-08-24 | 232,098 (+3,093) | 201,157 (+1,919) | 433,255 (+5,012) |
| 2025-08-25 | 236,607 (+4,509) | 202,650 (+1,493) | 439,257 (+6,002) |
| 2025-08-26 | 242,783 (+6,176) | 205,242 (+2,592) | 448,025 (+8,768) |
| 2025-08-27 | 248,409 (+5,626) | 205,242 (+0) | 453,651 (+5,626) |
| 2025-08-28 | 252,796 (+4,387) | 205,242 (+0) | 458,038 (+4,387) |
| 2025-08-29 | 256,045 (+3,249) | 211,075 (+5,833) | 467,120 (+9,082) |
| 2025-08-30 | 258,863 (+2,818) | 212,397 (+1,322) | 471,260 (+4,140) |
| 2025-08-31 | 262,004 (+3,141) | 213,944 (+1,547) | 475,948 (+4,688) |
| 2025-09-01 | 265,359 (+3,355) | 215,115 (+1,171) | 480,474 (+4,526) |
| 2025-09-02 | 270,483 (+5,124) | 217,075 (+1,960) | 487,558 (+7,084) |
| 2025-09-03 | 274,793 (+4,310) | 219,755 (+2,680) | 494,548 (+6,990) |
| 2025-09-04 | 280,430 (+5,637) | 222,103 (+2,348) | 502,533 (+7,985) |
| 2025-09-05 | 283,769 (+3,339) | 223,793 (+1,690) | 507,562 (+5,029) |
| 2025-09-06 | 286,245 (+2,476) | 225,036 (+1,243) | 511,281 (+3,719) |
| 2025-09-07 | 288,623 (+2,378) | 225,866 (+830) | 514,489 (+3,208) |
| 2025-09-08 | 293,341 (+4,718) | 227,073 (+1,207) | 520,414 (+5,925) |
| 2025-09-09 | 300,036 (+6,695) | 229,788 (+2,715) | 529,824 (+9,410) |
| 2025-09-10 | 307,287 (+7,251) | 233,435 (+3,647) | 540,722 (+10,898) |
| 2025-09-11 | 314,083 (+6,796) | 237,356 (+3,921) | 551,439 (+10,717) |
| 2025-09-12 | 321,046 (+6,963) | 240,728 (+3,372) | 561,774 (+10,335) |
| 2025-09-13 | 324,894 (+3,848) | 245,539 (+4,811) | 570,433 (+8,659) |
| 2025-09-14 | 328,876 (+3,982) | 248,245 (+2,706) | 577,121 (+6,688) |
| 2025-09-15 | 334,201 (+5,325) | 250,983 (+2,738) | 585,184 (+8,063) |
| 2025-09-16 | 342,609 (+8,408) | 255,264 (+4,281) | 597,873 (+12,689) |
| 2025-09-17 | 351,117 (+8,508) | 260,970 (+5,706) | 612,087 (+14,214) |
| 2025-09-18 | 358,717 (+7,600) | 266,922 (+5,952) | 625,639 (+13,552) |
| 2025-09-19 | 365,401 (+6,684) | 271,859 (+4,937) | 637,260 (+11,621) |
| 2025-09-20 | 372,092 (+6,691) | 276,917 (+5,058) | 649,009 (+11,749) |
| 2025-09-21 | 377,079 (+4,987) | 280,261 (+3,344) | 657,340 (+8,331) |
| 2025-09-22 | 382,492 (+5,413) | 284,009 (+3,748) | 666,501 (+9,161) |
| 2025-09-23 | 387,008 (+4,516) | 289,129 (+5,120) | 676,137 (+9,636) |
| 2025-09-24 | 393,325 (+6,317) | 294,927 (+5,798) | 688,252 (+12,115) |
| 2025-09-25 | 398,879 (+5,554) | 301,663 (+6,736) | 700,542 (+12,290) |
| 2025-09-26 | 404,334 (+5,455) | 306,713 (+5,050) | 711,047 (+10,505) |
| 2025-09-27 | 411,618 (+7,284) | 317,763 (+11,050) | 729,381 (+18,334) |
| 2025-09-28 | 414,910 (+3,292) | 322,522 (+4,759) | 737,432 (+8,051) |
| 2025-09-29 | 419,919 (+5,009) | 328,033 (+5,511) | 747,952 (+10,520) |
| 2025-09-30 | 427,991 (+8,072) | 336,472 (+8,439) | 764,463 (+16,511) |
| 2025-10-01 | 433,591 (+5,600) | 341,742 (+5,270) | 775,333 (+10,870) |
| 2025-10-02 | 440,852 (+7,261) | 348,099 (+6,357) | 788,951 (+13,618) |
| 2025-10-03 | 446,829 (+5,977) | 359,937 (+11,838) | 806,766 (+17,815) |
| 2025-10-04 | 452,561 (+5,732) | 370,386 (+10,449) | 822,947 (+16,181) |
| 2025-10-05 | 455,559 (+2,998) | 374,745 (+4,359) | 830,304 (+7,357) |
| 2025-10-06 | 460,927 (+5,368) | 379,489 (+4,744) | 840,416 (+10,112) |
| 2025-10-07 | 467,336 (+6,409) | 385,438 (+5,949) | 852,774 (+12,358) |
| 2025-10-08 | 474,643 (+7,307) | 394,139 (+8,701) | 868,782 (+16,008) |
| 2025-10-09 | 479,203 (+4,560) | 400,526 (+6,387) | 879,729 (+10,947) |
| 2025-10-10 | 484,374 (+5,171) | 406,015 (+5,489) | 890,389 (+10,660) |
| 2025-10-11 | 488,427 (+4,053) | 414,699 (+8,684) | 903,126 (+12,737) |
| 2025-10-12 | 492,125 (+3,698) | 418,745 (+4,046) | 910,870 (+7,744) |
| 2025-10-14 | 505,130 (+13,005) | 429,286 (+10,541) | 934,416 (+23,546) |
| 2025-10-15 | 512,717 (+7,587) | 439,290 (+10,004) | 952,007 (+17,591) |
| 2025-10-16 | 517,719 (+5,002) | 447,137 (+7,847) | 964,856 (+12,849) |
| 2025-10-17 | 526,239 (+8,520) | 457,467 (+10,330) | 983,706 (+18,850) |
| 2025-10-18 | 531,564 (+5,325) | 465,272 (+7,805) | 996,836 (+13,130) |
| 2025-10-19 | 536,209 (+4,645) | 469,078 (+3,806) | 1,005,287 (+8,451) |
| 2025-10-20 | 541,264 (+5,055) | 472,952 (+3,874) | 1,014,216 (+8,929) |
| 2025-10-21 | 548,721 (+7,457) | 479,703 (+6,751) | 1,028,424 (+14,208) |
| 2025-10-22 | 557,949 (+9,228) | 491,395 (+11,692) | 1,049,344 (+20,920) |
| 2025-10-23 | 564,716 (+6,767) | 498,736 (+7,341) | 1,063,452 (+14,108) |
| 2025-10-24 | 572,692 (+7,976) | 506,905 (+8,169) | 1,079,597 (+16,145) |
| 2025-10-25 | 578,927 (+6,235) | 516,129 (+9,224) | 1,095,056 (+15,459) |
| 2025-10-26 | 584,409 (+5,482) | 521,179 (+5,050) | 1,105,588 (+10,532) |
| 2025-10-27 | 589,999 (+5,590) | 526,001 (+4,822) | 1,116,000 (+10,412) |
| 2025-10-28 | 595,776 (+5,777) | 532,438 (+6,437) | 1,128,214 (+12,214) |
| 2025-10-29 | 606,259 (+10,483) | 542,064 (+9,626) | 1,148,323 (+20,109) |
| 2025-10-30 | 613,746 (+7,487) | 542,064 (+0) | 1,155,810 (+7,487) |
| 2025-10-30 | 617,846 (+4,100) | 555,026 (+12,962) | 1,172,872 (+17,062) |
| 2025-10-31 | 626,612 (+8,766) | 564,579 (+9,553) | 1,191,191 (+18,319) |
| 2025-11-01 | 636,100 (+9,488) | 581,806 (+17,227) | 1,217,906 (+26,715) |
| 2025-11-02 | 644,067 (+7,967) | 590,004 (+8,198) | 1,234,071 (+16,165) |
| 2025-11-03 | 653,130 (+9,063) | 597,139 (+7,135) | 1,250,269 (+16,198) |
| 2025-11-04 | 663,912 (+10,782) | 608,056 (+10,917) | 1,271,968 (+21,699) |
| 2025-11-05 | 675,074 (+11,162) | 619,690 (+11,634) | 1,294,764 (+22,796) |
| 2025-11-06 | 686,252 (+11,178) | 630,885 (+11,195) | 1,317,137 (+22,373) |
| 2025-11-07 | 696,646 (+10,394) | 642,146 (+11,261) | 1,338,792 (+21,655) |
| 2025-11-08 | 706,035 (+9,389) | 653,489 (+11,343) | 1,359,524 (+20,732) |
| 2025-11-09 | 713,462 (+7,427) | 660,459 (+6,970) | 1,373,921 (+14,397) |
| 2025-11-10 | 722,288 (+8,826) | 668,225 (+7,766) | 1,390,513 (+16,592) |
| 2025-11-11 | 729,769 (+7,481) | 677,501 (+9,276) | 1,407,270 (+16,757) |
| 2025-11-12 | 740,180 (+10,411) | 686,454 (+8,953) | 1,426,634 (+19,364) |
| 2025-11-13 | 749,905 (+9,725) | 696,157 (+9,703) | 1,446,062 (+19,428) |
| 2025-11-14 | 759,928 (+10,023) | 705,237 (+9,080) | 1,465,165 (+19,103) |
| 2025-11-15 | 765,955 (+6,027) | 712,870 (+7,633) | 1,478,825 (+13,660) |
| 2025-11-16 | 771,069 (+5,114) | 716,596 (+3,726) | 1,487,665 (+8,840) |
| 2025-11-17 | 780,161 (+9,092) | 723,339 (+6,743) | 1,503,500 (+15,835) |
| 2025-11-18 | 791,563 (+11,402) | 732,544 (+9,205) | 1,524,107 (+20,607) |
| 2025-11-19 | 804,409 (+12,846) | 747,624 (+15,080) | 1,552,033 (+27,926) |
| 2025-11-20 | 814,620 (+10,211) | 757,907 (+10,283) | 1,572,527 (+20,494) |
| 2025-11-21 | 826,309 (+11,689) | 769,307 (+11,400) | 1,595,616 (+23,089) |
| 2025-11-22 | 837,269 (+10,960) | 780,996 (+11,689) | 1,618,265 (+22,649) |
| 2025-11-23 | 846,609 (+9,340) | 795,069 (+14,073) | 1,641,678 (+23,413) |
| 2025-11-24 | 856,733 (+10,124) | 804,033 (+8,964) | 1,660,766 (+19,088) |
| 2025-11-25 | 869,423 (+12,690) | 817,339 (+13,306) | 1,686,762 (+25,996) |
| 2025-11-26 | 881,414 (+11,991) | 832,518 (+15,179) | 1,713,932 (+27,170) |
| 2025-11-27 | 893,960 (+12,546) | 846,180 (+13,662) | 1,740,140 (+26,208) |
| 2025-11-28 | 901,741 (+7,781) | 856,482 (+10,302) | 1,758,223 (+18,083) |
| 2025-11-29 | 908,689 (+6,948) | 863,361 (+6,879) | 1,772,050 (+13,827) |
| 2025-11-30 | 916,116 (+7,427) | 870,194 (+6,833) | 1,786,310 (+14,260) |
| 2025-12-01 | 925,898 (+9,782) | 876,500 (+6,306) | 1,802,398 (+16,088) |
| 2025-12-02 | 939,250 (+13,352) | 890,919 (+14,419) | 1,830,169 (+27,771) |
| 2025-12-03 | 952,249 (+12,999) | 903,713 (+12,794) | 1,855,962 (+25,793) |
| 2025-12-04 | 965,611 (+13,362) | 916,471 (+12,758) | 1,882,082 (+26,120) |
| 2025-12-05 | 977,996 (+12,385) | 930,616 (+14,145) | 1,908,612 (+26,530) |
| 2025-12-06 | 987,884 (+9,888) | 943,773 (+13,157) | 1,931,657 (+23,045) |
| 2025-12-07 | 994,046 (+6,162) | 951,425 (+7,652) | 1,945,471 (+13,814) |
| 2025-12-08 | 1,000,898 (+6,852) | 957,149 (+5,724) | 1,958,047 (+12,576) |
| 2025-12-09 | 1,011,488 (+10,590) | 973,922 (+16,773) | 1,985,410 (+27,363) |
| 2025-12-10 | 1,025,891 (+14,403) | 991,708 (+17,786) | 2,017,599 (+32,189) |
| 2025-12-11 | 1,045,110 (+19,219) | 1,010,559 (+18,851) | 2,055,669 (+38,070) |
| 2025-12-12 | 1,061,340 (+16,230) | 1,030,838 (+20,279) | 2,092,178 (+36,509) |
| 2025-12-13 | 1,073,561 (+12,221) | 1,044,608 (+13,770) | 2,118,169 (+25,991) |
| 2025-12-14 | 1,082,042 (+8,481) | 1,052,425 (+7,817) | 2,134,467 (+16,298) |
| 2025-12-15 | 1,093,632 (+11,590) | 1,059,078 (+6,653) | 2,152,710 (+18,243) |
| 2025-12-16 | 1,120,477 (+26,845) | 1,078,022 (+18,944) | 2,198,499 (+45,789) |
| 2025-12-17 | 1,151,067 (+30,590) | 1,097,661 (+19,639) | 2,248,728 (+50,229) |
| 2025-12-18 | 1,178,658 (+27,591) | 1,113,418 (+15,757) | 2,292,076 (+43,348) |
| 2025-12-19 | 1,203,485 (+24,827) | 1,129,698 (+16,280) | 2,333,183 (+41,107) |
| 2025-12-20 | 1,223,000 (+19,515) | 1,146,258 (+16,560) | 2,369,258 (+36,075) |
| 2025-12-21 | 1,242,675 (+19,675) | 1,158,909 (+12,651) | 2,401,584 (+32,326) |
| 2025-12-22 | 1,262,522 (+19,847) | 1,169,121 (+10,212) | 2,431,643 (+30,059) |
| 2025-12-23 | 1,286,548 (+24,026) | 1,186,439 (+17,318) | 2,472,987 (+41,344) |
| 2025-12-24 | 1,309,323 (+22,775) | 1,203,767 (+17,328) | 2,513,090 (+40,103) |
| 2025-12-25 | 1,333,032 (+23,709) | 1,217,283 (+13,516) | 2,550,315 (+37,225) |
| 2025-12-26 | 1,352,411 (+19,379) | 1,227,615 (+10,332) | 2,580,026 (+29,711) |
| 2025-12-27 | 1,371,771 (+19,360) | 1,238,236 (+10,621) | 2,610,007 (+29,981) |
| 2025-12-28 | 1,390,388 (+18,617) | 1,245,690 (+7,454) | 2,636,078 (+26,071) |
| 2025-12-29 | 1,415,560 (+25,172) | 1,257,101 (+11,411) | 2,672,661 (+36,583) |
| 2025-12-30 | 1,445,450 (+29,890) | 1,272,689 (+15,588) | 2,718,139 (+45,478) |
| 2025-12-31 | 1,479,598 (+34,148) | 1,293,235 (+20,546) | 2,772,833 (+54,694) |
| 2026-01-01 | 1,508,883 (+29,285) | 1,309,874 (+16,639) | 2,818,757 (+45,924) |
| 2026-01-02 | 1,563,474 (+54,591) | 1,320,959 (+11,085) | 2,884,433 (+65,676) |
| 2026-01-03 | 1,618,065 (+54,591) | 1,331,914 (+10,955) | 2,949,979 (+65,546) |
| 2026-01-04 | 1,672,656 (+39,702) | 1,339,883 (+7,969) | 3,012,539 (+62,560) |
| 2026-01-05 | 1,738,171 (+65,515) | 1,353,043 (+13,160) | 3,091,214 (+78,675) |
| 2026-01-06 | 1,960,988 (+222,817) | 1,377,377 (+24,334) | 3,338,365 (+247,151) |
| 2026-01-07 | 2,123,239 (+162,251) | 1,398,648 (+21,271) | 3,521,887 (+183,522) |
| 2026-01-08 | 2,272,630 (+149,391) | 1,432,480 (+33,832) | 3,705,110 (+183,223) |
| 2026-01-09 | 2,443,565 (+170,935) | 1,469,451 (+36,971) | 3,913,016 (+207,906) |
| 2026-01-10 | 2,632,023 (+188,458) | 1,503,670 (+34,219) | 4,135,693 (+222,677) |
| 2026-01-11 | 2,836,394 (+204,371) | 1,530,479 (+26,809) | 4,366,873 (+231,180) |
| 2026-01-12 | 3,053,594 (+217,200) | 1,553,671 (+23,192) | 4,607,265 (+240,392) |
| 2026-01-13 | 3,297,078 (+243,484) | 1,595,062 (+41,391) | 4,892,140 (+284,875) |
| 2026-01-14 | 3,568,928 (+271,850) | 1,645,362 (+50,300) | 5,214,290 (+322,150) |
| 2026-01-16 | 4,121,550 (+552,622) | 1,754,418 (+109,056) | 5,875,968 (+661,678) |
| 2026-01-17 | 4,389,558 (+268,008) | 1,805,315 (+50,897) | 6,194,873 (+318,905) |
| 2026-01-18 | 4,627,623 (+238,065) | 1,839,171 (+33,856) | 6,466,794 (+271,921) |
| 2026-01-19 | 4,861,108 (+233,485) | 1,863,112 (+23,941) | 6,724,220 (+257,426) |
| 2026-01-20 | 5,128,999 (+267,891) | 1,903,665 (+40,553) | 7,032,664 (+308,444) |
| 2026-01-21 | 5,444,842 (+315,843) | 1,962,531 (+58,866) | 7,407,373 (+374,709) |
| 2026-01-22 | 5,766,340 (+321,498) | 2,029,487 (+66,956) | 7,795,827 (+388,454) |
| 2026-01-23 | 6,096,236 (+329,896) | 2,096,235 (+66,748) | 8,192,471 (+396,644) |

@@ -1,71 +0,0 @@
## Style Guide
- Keep things in one function unless composable or reusable
- Avoid unnecessary destructuring. Instead of `const { a, b } = obj`, use `obj.a` and `obj.b` to preserve context
- Avoid `try`/`catch` where possible
- Avoid using the `any` type
- Prefer single word variable names where possible
- Use Bun APIs when possible, like `Bun.file()`
# Avoid let statements
We don't like `let` statements, especially combined with if/else statements.
Prefer `const`.
Good:
```ts
const foo = condition ? 1 : 2
```
Bad:
```ts
let foo
if (condition) foo = 1
else foo = 2
```
# Avoid else statements
Prefer early returns or using an `iife` to avoid else statements.
Good:
```ts
function foo() {
if (condition) return 1
return 2
}
```
Bad:
```ts
function foo() {
if (condition) return 1
else return 2
}
```
# Prefer single word naming
Try your best to find a single word name for your variables, functions, etc.
Only use multiple words if you cannot.
Good:
```ts
const foo = 1
const bar = 2
const baz = 3
```
Bad:
```ts
const fooBar = 1
const barBaz = 2
const bazFoo = 3
```

bun.lock
File diff suppressed because it is too large.
@@ -1,6 +0,0 @@
[install]
exact = true
[test]
root = "./do-not-run-tests-from-root"

cmd/root.go Normal file
@@ -0,0 +1,258 @@
package cmd
import (
"context"
"fmt"
"os"
"sync"
"time"
tea "github.com/charmbracelet/bubbletea"
zone "github.com/lrstanley/bubblezone"
"github.com/opencode-ai/opencode/internal/app"
"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/db"
"github.com/opencode-ai/opencode/internal/llm/agent"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/pubsub"
"github.com/opencode-ai/opencode/internal/tui"
"github.com/opencode-ai/opencode/internal/version"
"github.com/spf13/cobra"
)
var rootCmd = &cobra.Command{
Use: "OpenCode",
Short: "A terminal AI assistant for software development",
Long: `OpenCode is a powerful terminal-based AI assistant that helps with software development tasks.
It provides an interactive chat interface with AI capabilities, code analysis, and LSP integration
to assist developers in writing, debugging, and understanding code directly from the terminal.`,
RunE: func(cmd *cobra.Command, args []string) error {
// If the help flag is set, show the help message
if cmd.Flag("help").Changed {
cmd.Help()
return nil
}
if cmd.Flag("version").Changed {
fmt.Println(version.Version)
return nil
}
// Load the config
debug, _ := cmd.Flags().GetBool("debug")
cwd, _ := cmd.Flags().GetString("cwd")
if cwd != "" {
err := os.Chdir(cwd)
if err != nil {
return fmt.Errorf("failed to change directory: %v", err)
}
}
if cwd == "" {
c, err := os.Getwd()
if err != nil {
return fmt.Errorf("failed to get current working directory: %v", err)
}
cwd = c
}
_, err := config.Load(cwd, debug)
if err != nil {
return err
}
// Connect DB, this will also run migrations
conn, err := db.Connect()
if err != nil {
return err
}
// Create main context for the application
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
app, err := app.New(ctx, conn)
if err != nil {
logging.Error("Failed to create app: %v", err)
return err
}
// Set up the TUI
zone.NewGlobal()
program := tea.NewProgram(
tui.New(app),
tea.WithAltScreen(),
)
// Initialize MCP tools in the background
initMCPTools(ctx, app)
// Setup the subscriptions, this will send services events to the TUI
ch, cancelSubs := setupSubscriptions(app, ctx)
// Create a context for the TUI message handler
tuiCtx, tuiCancel := context.WithCancel(ctx)
var tuiWg sync.WaitGroup
tuiWg.Add(1)
// Set up message handling for the TUI
go func() {
defer tuiWg.Done()
defer logging.RecoverPanic("TUI-message-handler", func() {
attemptTUIRecovery(program)
})
for {
select {
case <-tuiCtx.Done():
logging.Info("TUI message handler shutting down")
return
case msg, ok := <-ch:
if !ok {
logging.Info("TUI message channel closed")
return
}
program.Send(msg)
}
}
}()
// Cleanup function for when the program exits
cleanup := func() {
// Shutdown the app
app.Shutdown()
// Cancel subscriptions first
cancelSubs()
// Then cancel TUI message handler
tuiCancel()
// Wait for TUI message handler to finish
tuiWg.Wait()
logging.Info("All goroutines cleaned up")
}
// Run the TUI
result, err := program.Run()
cleanup()
if err != nil {
logging.Error("TUI error: %v", err)
return fmt.Errorf("TUI error: %v", err)
}
logging.Info("TUI exited with result: %v", result)
return nil
},
}
// attemptTUIRecovery tries to recover the TUI after a panic
func attemptTUIRecovery(program *tea.Program) {
logging.Info("Attempting to recover TUI after panic")
// We could try to restart the TUI or gracefully exit
// For now, we'll just quit the program to avoid further issues
program.Quit()
}
func initMCPTools(ctx context.Context, app *app.App) {
go func() {
defer logging.RecoverPanic("MCP-goroutine", nil)
// Create a context with timeout for the initial MCP tools fetch
ctxWithTimeout, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
// Set this up once with proper error handling
agent.GetMcpTools(ctxWithTimeout, app.Permissions)
logging.Info("MCP message handling goroutine exiting")
}()
}
func setupSubscriber[T any](
ctx context.Context,
wg *sync.WaitGroup,
name string,
subscriber func(context.Context) <-chan pubsub.Event[T],
outputCh chan<- tea.Msg,
) {
wg.Add(1)
go func() {
defer wg.Done()
defer logging.RecoverPanic(fmt.Sprintf("subscription-%s", name), nil)
subCh := subscriber(ctx)
for {
select {
case event, ok := <-subCh:
if !ok {
logging.Info("subscription channel closed", "name", name)
return
}
var msg tea.Msg = event
select {
case outputCh <- msg:
case <-time.After(2 * time.Second):
logging.Warn("message dropped due to slow consumer", "name", name)
case <-ctx.Done():
logging.Info("subscription cancelled", "name", name)
return
}
case <-ctx.Done():
logging.Info("subscription cancelled", "name", name)
return
}
}
}()
}
func setupSubscriptions(app *app.App, parentCtx context.Context) (chan tea.Msg, func()) {
ch := make(chan tea.Msg, 100)
wg := sync.WaitGroup{}
ctx, cancel := context.WithCancel(parentCtx) // Inherit from parent context
setupSubscriber(ctx, &wg, "logging", logging.Subscribe, ch)
setupSubscriber(ctx, &wg, "sessions", app.Sessions.Subscribe, ch)
setupSubscriber(ctx, &wg, "messages", app.Messages.Subscribe, ch)
setupSubscriber(ctx, &wg, "permissions", app.Permissions.Subscribe, ch)
setupSubscriber(ctx, &wg, "coderAgent", app.CoderAgent.Subscribe, ch)
cleanupFunc := func() {
logging.Info("Cancelling all subscriptions")
cancel() // Signal all goroutines to stop
waitCh := make(chan struct{})
go func() {
defer logging.RecoverPanic("subscription-cleanup", nil)
wg.Wait()
close(waitCh)
}()
select {
case <-waitCh:
logging.Info("All subscription goroutines completed successfully")
close(ch) // Only close after all writers are confirmed done
case <-time.After(5 * time.Second):
logging.Warn("Timed out waiting for some subscription goroutines to complete")
close(ch)
}
}
return ch, cleanupFunc
}
func Execute() {
err := rootCmd.Execute()
if err != nil {
os.Exit(1)
}
}
func init() {
rootCmd.Flags().BoolP("help", "h", false, "Help")
rootCmd.Flags().BoolP("version", "v", false, "Version")
rootCmd.Flags().BoolP("debug", "d", false, "Debug")
rootCmd.Flags().StringP("cwd", "c", "", "Current working directory")
}

cmd/schema/README.md Normal file
@@ -0,0 +1,64 @@
# OpenCode Configuration Schema Generator
This tool generates a JSON Schema for the OpenCode configuration file. The schema can be used to validate configuration files and provide autocompletion in editors that support JSON Schema.
## Usage
```bash
go run cmd/schema/main.go > opencode-schema.json
```
This will generate a JSON Schema file that can be used to validate configuration files.
## Schema Features
The generated schema includes:
- All configuration options with descriptions
- Default values where applicable
- Validation for enum values (e.g., model IDs, provider types)
- Required fields
- Type checking
## Using the Schema
You can use the generated schema in several ways:
1. **Editor Integration**: Many editors (VS Code, JetBrains IDEs, etc.) support JSON Schema for validation and autocompletion. You can configure your editor to use the generated schema for `.opencode.json` files.
2. **Validation Tools**: You can use tools like [jsonschema](https://github.com/Julian/jsonschema) to validate your configuration files against the schema (see the example after this list).
3. **Documentation**: The schema serves as documentation for the configuration options.
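As a concrete sketch of option 2, you could validate a config against the generated schema like this. The `jsonschema` CLI below ships with the linked Python package; depending on the installed version the entry point may differ (newer releases recommend `check-jsonschema` instead):
```bash
# Generate the schema, then validate a local config file against it.
go run cmd/schema/main.go > opencode-schema.json
pip install jsonschema
jsonschema -i .opencode.json opencode-schema.json && echo "config is valid"
```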
## Example Configuration
Here's an example configuration that conforms to the schema:
```json
{
"data": {
"directory": ".opencode"
},
"debug": false,
"providers": {
"anthropic": {
"apiKey": "your-api-key"
}
},
"agents": {
"coder": {
"model": "claude-3.7-sonnet",
"maxTokens": 5000,
"reasoningEffort": "medium"
},
"task": {
"model": "claude-3.7-sonnet",
"maxTokens": 5000
},
"title": {
"model": "claude-3.7-sonnet",
"maxTokens": 80
}
}
}
```

cmd/schema/main.go Normal file
@@ -0,0 +1,307 @@
package main
import (
"encoding/json"
"fmt"
"os"
"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/llm/models"
)
// JSONSchemaType represents a JSON Schema type
type JSONSchemaType struct {
Type string `json:"type,omitempty"`
Description string `json:"description,omitempty"`
Properties map[string]any `json:"properties,omitempty"`
Required []string `json:"required,omitempty"`
AdditionalProperties any `json:"additionalProperties,omitempty"`
Enum []any `json:"enum,omitempty"`
Items map[string]any `json:"items,omitempty"`
OneOf []map[string]any `json:"oneOf,omitempty"`
AnyOf []map[string]any `json:"anyOf,omitempty"`
Default any `json:"default,omitempty"`
}
func main() {
schema := generateSchema()
// Pretty print the schema
encoder := json.NewEncoder(os.Stdout)
encoder.SetIndent("", " ")
if err := encoder.Encode(schema); err != nil {
fmt.Fprintf(os.Stderr, "Error encoding schema: %v\n", err)
os.Exit(1)
}
}
func generateSchema() map[string]any {
schema := map[string]any{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "OpenCode Configuration",
"description": "Configuration schema for the OpenCode application",
"type": "object",
"properties": map[string]any{},
}
// Add Data configuration
schema["properties"].(map[string]any)["data"] = map[string]any{
"type": "object",
"description": "Storage configuration",
"properties": map[string]any{
"directory": map[string]any{
"type": "string",
"description": "Directory where application data is stored",
"default": ".opencode",
},
},
"required": []string{"directory"},
}
// Add working directory
schema["properties"].(map[string]any)["wd"] = map[string]any{
"type": "string",
"description": "Working directory for the application",
}
// Add debug flags
schema["properties"].(map[string]any)["debug"] = map[string]any{
"type": "boolean",
"description": "Enable debug mode",
"default": false,
}
schema["properties"].(map[string]any)["debugLSP"] = map[string]any{
"type": "boolean",
"description": "Enable LSP debug mode",
"default": false,
}
schema["properties"].(map[string]any)["contextPaths"] = map[string]any{
"type": "array",
"description": "Context paths for the application",
"items": map[string]any{
"type": "string",
},
"default": []string{
".github/copilot-instructions.md",
".cursorrules",
".cursor/rules/",
"CLAUDE.md",
"CLAUDE.local.md",
"opencode.md",
"opencode.local.md",
"OpenCode.md",
"OpenCode.local.md",
"OPENCODE.md",
"OPENCODE.local.md",
},
}
schema["properties"].(map[string]any)["tui"] = map[string]any{
"type": "object",
"description": "Terminal User Interface configuration",
"properties": map[string]any{
"theme": map[string]any{
"type": "string",
"description": "TUI theme name",
"default": "opencode",
"enum": []string{
"opencode",
"catppuccin",
"dracula",
"flexoki",
"gruvbox",
"monokai",
"onedark",
"tokyonight",
"tron",
},
},
},
}
// Add MCP servers
schema["properties"].(map[string]any)["mcpServers"] = map[string]any{
"type": "object",
"description": "Model Control Protocol server configurations",
"additionalProperties": map[string]any{
"type": "object",
"description": "MCP server configuration",
"properties": map[string]any{
"command": map[string]any{
"type": "string",
"description": "Command to execute for the MCP server",
},
"env": map[string]any{
"type": "array",
"description": "Environment variables for the MCP server",
"items": map[string]any{
"type": "string",
},
},
"args": map[string]any{
"type": "array",
"description": "Command arguments for the MCP server",
"items": map[string]any{
"type": "string",
},
},
"type": map[string]any{
"type": "string",
"description": "Type of MCP server",
"enum": []string{"stdio", "sse"},
"default": "stdio",
},
"url": map[string]any{
"type": "string",
"description": "URL for SSE type MCP servers",
},
"headers": map[string]any{
"type": "object",
"description": "HTTP headers for SSE type MCP servers",
"additionalProperties": map[string]any{
"type": "string",
},
},
},
"required": []string{"command"},
},
}
// Add providers
providerSchema := map[string]any{
"type": "object",
"description": "LLM provider configurations",
"additionalProperties": map[string]any{
"type": "object",
"description": "Provider configuration",
"properties": map[string]any{
"apiKey": map[string]any{
"type": "string",
"description": "API key for the provider",
},
"disabled": map[string]any{
"type": "boolean",
"description": "Whether the provider is disabled",
"default": false,
},
},
},
}
// Add known providers
knownProviders := []string{
string(models.ProviderAnthropic),
string(models.ProviderOpenAI),
string(models.ProviderGemini),
string(models.ProviderGROQ),
string(models.ProviderOpenRouter),
string(models.ProviderBedrock),
string(models.ProviderAzure),
}
providerSchema["additionalProperties"].(map[string]any)["properties"].(map[string]any)["provider"] = map[string]any{
"type": "string",
"description": "Provider type",
"enum": knownProviders,
}
schema["properties"].(map[string]any)["providers"] = providerSchema
// Add agents
agentSchema := map[string]any{
"type": "object",
"description": "Agent configurations",
"additionalProperties": map[string]any{
"type": "object",
"description": "Agent configuration",
"properties": map[string]any{
"model": map[string]any{
"type": "string",
"description": "Model ID for the agent",
},
"maxTokens": map[string]any{
"type": "integer",
"description": "Maximum tokens for the agent",
"minimum": 1,
},
"reasoningEffort": map[string]any{
"type": "string",
"description": "Reasoning effort for models that support it (OpenAI, Anthropic)",
"enum": []string{"low", "medium", "high"},
},
},
"required": []string{"model"},
},
}
// Add model enum
modelEnum := []string{}
for modelID := range models.SupportedModels {
modelEnum = append(modelEnum, string(modelID))
}
agentSchema["additionalProperties"].(map[string]any)["properties"].(map[string]any)["model"].(map[string]any)["enum"] = modelEnum
// Add specific agent properties
agentProperties := map[string]any{}
knownAgents := []string{
string(config.AgentCoder),
string(config.AgentTask),
string(config.AgentTitle),
}
for _, agentName := range knownAgents {
agentProperties[agentName] = map[string]any{
"$ref": "#/definitions/agent",
}
}
// Create a combined schema that allows both specific agents and additional ones
combinedAgentSchema := map[string]any{
"type": "object",
"description": "Agent configurations",
"properties": agentProperties,
"additionalProperties": agentSchema["additionalProperties"],
}
schema["properties"].(map[string]any)["agents"] = combinedAgentSchema
schema["definitions"] = map[string]any{
"agent": agentSchema["additionalProperties"],
}
// Add LSP configuration
schema["properties"].(map[string]any)["lsp"] = map[string]any{
"type": "object",
"description": "Language Server Protocol configurations",
"additionalProperties": map[string]any{
"type": "object",
"description": "LSP configuration for a language",
"properties": map[string]any{
"disabled": map[string]any{
"type": "boolean",
"description": "Whether the LSP is disabled",
"default": false,
},
"command": map[string]any{
"type": "string",
"description": "Command to execute for the LSP server",
},
"args": map[string]any{
"type": "array",
"description": "Command arguments for the LSP server",
"items": map[string]any{
"type": "string",
},
},
"options": map[string]any{
"type": "object",
"description": "Additional options for the LSP server",
},
},
"required": []string{"command"},
},
}
return schema
}

flake.lock generated
@@ -1,27 +0,0 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1768393167,
"narHash": "sha256-n2063BRjHde6DqAz2zavhOOiLUwA3qXt7jQYHyETjX8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2f594d5af95d4fdac67fba60376ec11e482041cb",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}

@@ -1,69 +0,0 @@
{
description = "OpenCode development flake";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
};
outputs =
{ self, nixpkgs, ... }:
let
systems = [
"aarch64-linux"
"x86_64-linux"
"aarch64-darwin"
"x86_64-darwin"
];
forEachSystem = f: nixpkgs.lib.genAttrs systems (system: f nixpkgs.legacyPackages.${system});
rev = self.shortRev or self.dirtyShortRev or "dirty";
in
{
devShells = forEachSystem (pkgs: {
default = pkgs.mkShell {
packages = with pkgs; [
bun
nodejs_20
pkg-config
openssl
git
];
};
});
packages = forEachSystem (
pkgs:
let
node_modules = pkgs.callPackage ./nix/node_modules.nix {
inherit rev;
};
opencode = pkgs.callPackage ./nix/opencode.nix {
inherit node_modules;
};
desktop = pkgs.callPackage ./nix/desktop.nix {
inherit opencode;
};
# nixpkgs cpu naming to bun cpu naming
cpuMap = { x86_64 = "x64"; aarch64 = "arm64"; };
# matrix of node_modules builds - these will always fail due to fakeHash usage
# but allow computation of the correct hash from any build machine for any cpu/os
# see the update-nix-hashes workflow for usage
moduleUpdaters = pkgs.lib.listToAttrs (
pkgs.lib.concatMap (cpu:
map (os: {
name = "${cpu}-${os}_node_modules";
value = node_modules.override {
bunCpu = cpuMap.${cpu};
bunOs = os;
hash = pkgs.lib.fakeHash;
};
}) [ "linux" "darwin" ]
) [ "x86_64" "aarch64" ]
);
in
{
default = opencode;
inherit opencode desktop;
} // moduleUpdaters
);
};
}

github/.gitignore vendored
@@ -1,34 +0,0 @@
# dependencies (bun install)
node_modules
# output
out
dist
*.tgz
# code coverage
coverage
*.lcov
# logs
logs
_.log
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# caches
.eslintcache
.cache
*.tsbuildinfo
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

@@ -1,166 +0,0 @@
# opencode GitHub Action
A GitHub Action that integrates [opencode](https://opencode.ai) directly into your GitHub workflow.
Mention `/opencode` in your comment, and opencode will execute tasks within your GitHub Actions runner.
## Features
#### Explain an issue
Leave the following comment on a GitHub issue. `opencode` will read the entire thread, including all comments, and reply with a clear explanation.
```
/opencode explain this issue
```
#### Fix an issue
Leave the following comment on a GitHub issue. opencode will create a new branch, implement the changes, and open a PR with the changes.
```
/opencode fix this
```
#### Review PRs and make changes
Leave the following comment on a GitHub PR. opencode will implement the requested change and commit it to the same PR.
```
Delete the attachment from S3 when the note is removed /oc
```
#### Review specific code lines
Leave a comment directly on code lines in the PR's "Files" tab. opencode will automatically detect the file, line numbers, and diff context to provide precise responses.
```
[Comment on specific lines in Files tab]
/oc add error handling here
```
When commenting on specific lines, opencode receives:
- The exact file being reviewed
- The specific lines of code
- The surrounding diff context
- Line number information
This allows for more targeted requests without needing to specify file paths or line numbers manually.
## Installation
Run the following command in the terminal from your GitHub repo:
```bash
opencode github install
```
This will walk you through installing the GitHub app, creating the workflow, and setting up secrets.
### Manual Setup
1. Install the GitHub app https://github.com/apps/opencode-agent. Make sure it is installed on the target repository.
2. Add the following workflow file to `.github/workflows/opencode.yml` in your repo. Set the appropriate `model` and required API keys in `env`.
```yml
name: opencode
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
jobs:
opencode:
if: |
contains(github.event.comment.body, '/oc') ||
contains(github.event.comment.body, '/opencode')
runs-on: ubuntu-latest
permissions:
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
fetch-depth: 1
persist-credentials: false
- name: Run opencode
uses: anomalyco/opencode/github@latest
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
model: anthropic/claude-sonnet-4-20250514
use_github_token: true
```
3. Store the API keys in secrets. In your organization or project **settings**, expand **Secrets and variables** on the left and select **Actions**. Add the required API keys.
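If you prefer the command line, the GitHub CLI can set the same secrets; a small sketch, assuming `gh` is authenticated and `your-org/your-repo` is replaced with the target repository:
```bash
# Store the provider key as an Actions secret on the repository.
gh secret set ANTHROPIC_API_KEY --repo your-org/your-repo
# gh prompts for the value; alternatively, pipe it in:
printf '%s' "$ANTHROPIC_API_KEY" | gh secret set ANTHROPIC_API_KEY --repo your-org/your-repo
```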
## Support
This is an early release. If you encounter issues or have feedback, please create an issue at https://github.com/anomalyco/opencode/issues.
## Development
To test locally:
1. Navigate to a test repo (e.g. `hello-world`):
```bash
cd hello-world
```
2. Run:
```bash
MODEL=anthropic/claude-sonnet-4-20250514 \
ANTHROPIC_API_KEY=sk-ant-api03-1234567890 \
GITHUB_RUN_ID=dummy \
MOCK_TOKEN=github_pat_1234567890 \
MOCK_EVENT='{"eventName":"issue_comment",...}' \
bun /path/to/opencode/github/index.ts
```
- `MODEL`: The model used by opencode. Same as the `MODEL` defined in the GitHub workflow.
- `ANTHROPIC_API_KEY`: Your model provider API key. Same as the keys defined in the GitHub workflow.
- `GITHUB_RUN_ID`: Dummy value to emulate GitHub action environment.
- `MOCK_TOKEN`: A GitHub personal access token. This token is used to verify you have `admin` or `write` access to the test repo. Generate a token [here](https://github.com/settings/personal-access-tokens).
- `MOCK_EVENT`: Mock GitHub event payload (see templates below).
- `/path/to/opencode`: Path to your cloned opencode repo. `bun /path/to/opencode/github/index.ts` runs your local version of `opencode`.
### Issue comment event
```
MOCK_EVENT='{"eventName":"issue_comment","repo":{"owner":"sst","repo":"hello-world"},"actor":"fwang","payload":{"issue":{"number":4},"comment":{"id":1,"body":"hey opencode, summarize thread"}}}'
```
Replace:
- `"owner":"sst"` with repo owner
- `"repo":"hello-world"` with repo name
- `"actor":"fwang"` with the GitHub username of commenter
- `"number":4` with the GitHub issue id
- `"body":"hey opencode, summarize thread"` with comment body
### Issue comment with image attachment
```
MOCK_EVENT='{"eventName":"issue_comment","repo":{"owner":"sst","repo":"hello-world"},"actor":"fwang","payload":{"issue":{"number":4},"comment":{"id":1,"body":"hey opencode, what is in my image ![Image](https://github.com/user-attachments/assets/xxxxxxxx)"}}}'
```
Replace the image URL `https://github.com/user-attachments/assets/xxxxxxxx` with a valid GitHub attachment (you can generate one by commenting with an image in any issue).
### PR comment event
```
MOCK_EVENT='{"eventName":"issue_comment","repo":{"owner":"sst","repo":"hello-world"},"actor":"fwang","payload":{"issue":{"number":4,"pull_request":{}},"comment":{"id":1,"body":"hey opencode, summarize thread"}}}'
```
### PR review comment event
```
MOCK_EVENT='{"eventName":"pull_request_review_comment","repo":{"owner":"sst","repo":"hello-world"},"actor":"fwang","payload":{"pull_request":{"number":7},"comment":{"id":1,"body":"hey opencode, add error handling","path":"src/components/Button.tsx","diff_hunk":"@@ -45,8 +45,11 @@\n- const handleClick = () => {\n- console.log('clicked')\n+ const handleClick = useCallback(() => {\n+ console.log('clicked')\n+ doSomething()\n+ }, [doSomething])","line":47,"original_line":45,"position":10,"commit_id":"abc123","original_commit_id":"def456"}}}'
```

@@ -1,74 +0,0 @@
name: "opencode GitHub Action"
description: "Run opencode in GitHub Actions workflows"
branding:
icon: "code"
color: "orange"
inputs:
model:
description: "Model to use"
required: true
agent:
description: "Agent to use. Must be a primary agent. Falls back to default_agent from config or 'build' if not found."
required: false
share:
description: "Share the opencode session (defaults to true for public repos)"
required: false
prompt:
description: "Custom prompt to override the default prompt"
required: false
use_github_token:
description: "Use GITHUB_TOKEN directly instead of OpenCode App token exchange. When true, skips OIDC and uses the GITHUB_TOKEN env var."
required: false
default: "false"
mentions:
description: "Comma-separated list of trigger phrases (case-insensitive). Defaults to '/opencode,/oc'"
required: false
oidc_base_url:
description: "Base URL for OIDC token exchange API. Only required when running a custom GitHub App install. Defaults to https://api.opencode.ai"
required: false
runs:
using: "composite"
steps:
- name: Get opencode version
id: version
shell: bash
run: |
VERSION=$(curl -sf https://api.github.com/repos/anomalyco/opencode/releases/latest | grep -o '"tag_name": *"[^"]*"' | cut -d'"' -f4)
echo "version=${VERSION:-latest}" >> $GITHUB_OUTPUT
- name: Cache opencode
id: cache
uses: actions/cache@v4
with:
path: ~/.opencode/bin
key: opencode-${{ runner.os }}-${{ runner.arch }}-${{ steps.version.outputs.version }}
- name: Install opencode
if: steps.cache.outputs.cache-hit != 'true'
shell: bash
run: curl -fsSL https://opencode.ai/install | bash
- name: Add opencode to PATH
shell: bash
run: echo "$HOME/.opencode/bin" >> $GITHUB_PATH
- name: Run opencode
shell: bash
id: run_opencode
run: opencode github run
env:
MODEL: ${{ inputs.model }}
AGENT: ${{ inputs.agent }}
SHARE: ${{ inputs.share }}
PROMPT: ${{ inputs.prompt }}
USE_GITHUB_TOKEN: ${{ inputs.use_github_token }}
MENTIONS: ${{ inputs.mentions }}
OIDC_BASE_URL: ${{ inputs.oidc_base_url }}

@@ -1,156 +0,0 @@
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "github",
"dependencies": {
"@actions/core": "1.11.1",
"@actions/github": "6.0.1",
"@octokit/graphql": "9.0.1",
"@octokit/rest": "22.0.0",
"@opencode-ai/sdk": "0.5.4",
},
"devDependencies": {
"@types/bun": "latest",
},
"peerDependencies": {
"typescript": "^5",
},
},
},
"packages": {
"@actions/core": ["@actions/core@1.11.1", "", { "dependencies": { "@actions/exec": "^1.1.1", "@actions/http-client": "^2.0.1" } }, "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A=="],
"@actions/exec": ["@actions/exec@1.1.1", "", { "dependencies": { "@actions/io": "^1.0.1" } }, "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w=="],
"@actions/github": ["@actions/github@6.0.1", "", { "dependencies": { "@actions/http-client": "^2.2.0", "@octokit/core": "^5.0.1", "@octokit/plugin-paginate-rest": "^9.2.2", "@octokit/plugin-rest-endpoint-methods": "^10.4.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "undici": "^5.28.5" } }, "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw=="],
"@actions/http-client": ["@actions/http-client@2.2.3", "", { "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" } }, "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA=="],
"@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="],
"@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="],
"@octokit/auth-token": ["@octokit/auth-token@4.0.0", "", {}, "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA=="],
"@octokit/core": ["@octokit/core@5.2.2", "", { "dependencies": { "@octokit/auth-token": "^4.0.0", "@octokit/graphql": "^7.1.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" } }, "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg=="],
"@octokit/endpoint": ["@octokit/endpoint@9.0.6", "", { "dependencies": { "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw=="],
"@octokit/graphql": ["@octokit/graphql@9.0.1", "", { "dependencies": { "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-j1nQNU1ZxNFx2ZtKmL4sMrs4egy5h65OMDmSbVyuCzjOcwsHq6EaYjOTGXPQxgfiN8dJ4CriYHk6zF050WEULg=="],
"@octokit/openapi-types": ["@octokit/openapi-types@25.1.0", "", {}, "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA=="],
"@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@9.2.2", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ=="],
"@octokit/plugin-request-log": ["@octokit/plugin-request-log@6.0.0", "", { "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q=="],
"@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@10.4.1", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg=="],
"@octokit/request": ["@octokit/request@8.4.1", "", { "dependencies": { "@octokit/endpoint": "^9.0.6", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw=="],
"@octokit/request-error": ["@octokit/request-error@5.1.1", "", { "dependencies": { "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" } }, "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g=="],
"@octokit/rest": ["@octokit/rest@22.0.0", "", { "dependencies": { "@octokit/core": "^7.0.2", "@octokit/plugin-paginate-rest": "^13.0.1", "@octokit/plugin-request-log": "^6.0.0", "@octokit/plugin-rest-endpoint-methods": "^16.0.0" } }, "sha512-z6tmTu9BTnw51jYGulxrlernpsQYXpui1RK21vmXn8yF5bp6iX16yfTtJYGK5Mh1qDkvDOmp2n8sRMcQmR8jiA=="],
"@octokit/types": ["@octokit/types@14.1.0", "", { "dependencies": { "@octokit/openapi-types": "^25.1.0" } }, "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g=="],
"@opencode-ai/sdk": ["@opencode-ai/sdk@0.5.4", "", {}, "sha512-bNT9hJgTvmnWGZU4LM90PMy60xOxxCOI5IaGB5voP2EVj+8RdLxmkwuAB4FUHwLo7fNlmxkZp89NVsMYw2Y3Aw=="],
"@types/bun": ["@types/bun@1.2.20", "", { "dependencies": { "bun-types": "1.2.20" } }, "sha512-dX3RGzQ8+KgmMw7CsW4xT5ITBSCrSbfHc36SNT31EOUg/LA9JWq0VDdEXDRSe1InVWpd2yLUM1FUF/kEOyTzYA=="],
"@types/node": ["@types/node@24.3.0", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow=="],
"@types/react": ["@types/react@19.1.10", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-EhBeSYX0Y6ye8pNebpKrwFJq7BoQ8J5SO6NlvNwwHjSj6adXJViPQrKlsyPw7hLBLvckEMO1yxeGdR82YBBlDg=="],
"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],
"bun-types": ["bun-types@1.2.20", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-pxTnQYOrKvdOwyiyd/7sMt9yFOenN004Y6O4lCcCUoKVej48FS5cvTw9geRaEcB9TsDZaJKAxPTVvi8tFsVuXA=="],
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
"deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="],
"fast-content-type-parse": ["fast-content-type-parse@3.0.0", "", {}, "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg=="],
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
"tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="],
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
"undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="],
"undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"universal-user-agent": ["universal-user-agent@7.0.3", "", {}, "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A=="],
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
"@octokit/core/@octokit/graphql": ["@octokit/graphql@7.1.1", "", { "dependencies": { "@octokit/request": "^8.4.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g=="],
"@octokit/core/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
"@octokit/core/universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="],
"@octokit/endpoint/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
"@octokit/endpoint/universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="],
"@octokit/graphql/@octokit/request": ["@octokit/request@10.0.3", "", { "dependencies": { "@octokit/endpoint": "^11.0.0", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA=="],
"@octokit/plugin-paginate-rest/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
"@octokit/plugin-request-log/@octokit/core": ["@octokit/core@7.0.3", "", { "dependencies": { "@octokit/auth-token": "^6.0.0", "@octokit/graphql": "^9.0.1", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "before-after-hook": "^4.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-oNXsh2ywth5aowwIa7RKtawnkdH6LgU1ztfP9AIUCQCvzysB+WeU8o2kyyosDPwBZutPpjZDKPQGIzzrfTWweQ=="],
"@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
"@octokit/request/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
"@octokit/request/universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="],
"@octokit/request-error/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
"@octokit/rest/@octokit/core": ["@octokit/core@7.0.3", "", { "dependencies": { "@octokit/auth-token": "^6.0.0", "@octokit/graphql": "^9.0.1", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "before-after-hook": "^4.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-oNXsh2ywth5aowwIa7RKtawnkdH6LgU1ztfP9AIUCQCvzysB+WeU8o2kyyosDPwBZutPpjZDKPQGIzzrfTWweQ=="],
"@octokit/rest/@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@13.1.1", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-q9iQGlZlxAVNRN2jDNskJW/Cafy7/XE52wjZ5TTvyhyOD904Cvx//DNyoO3J/MXJ0ve3rPoNWKEg5iZrisQSuw=="],
"@octokit/rest/@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@16.0.0", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-kJVUQk6/dx/gRNLWUnAWKFs1kVPn5O5CYZyssyEoNYaFedqZxsfYs7DwI3d67hGz4qOwaJ1dpm07hOAD1BXx6g=="],
"@octokit/core/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],
"@octokit/endpoint/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],
"@octokit/graphql/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@11.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ=="],
"@octokit/graphql/@octokit/request/@octokit/request-error": ["@octokit/request-error@7.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg=="],
"@octokit/plugin-paginate-rest/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
"@octokit/plugin-request-log/@octokit/core/@octokit/auth-token": ["@octokit/auth-token@6.0.0", "", {}, "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w=="],
"@octokit/plugin-request-log/@octokit/core/@octokit/request": ["@octokit/request@10.0.3", "", { "dependencies": { "@octokit/endpoint": "^11.0.0", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA=="],
"@octokit/plugin-request-log/@octokit/core/@octokit/request-error": ["@octokit/request-error@7.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg=="],
"@octokit/plugin-request-log/@octokit/core/before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="],
"@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
"@octokit/request-error/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],
"@octokit/request/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],
"@octokit/rest/@octokit/core/@octokit/auth-token": ["@octokit/auth-token@6.0.0", "", {}, "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w=="],
"@octokit/rest/@octokit/core/@octokit/request": ["@octokit/request@10.0.3", "", { "dependencies": { "@octokit/endpoint": "^11.0.0", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA=="],
"@octokit/rest/@octokit/core/@octokit/request-error": ["@octokit/request-error@7.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg=="],
"@octokit/rest/@octokit/core/before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="],
"@octokit/plugin-request-log/@octokit/core/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@11.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ=="],
"@octokit/rest/@octokit/core/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@11.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ=="],
}
}

File diff suppressed because it is too large

@@ -1,20 +0,0 @@
{
"name": "github",
"module": "index.ts",
"type": "module",
"private": true,
"license": "MIT",
"devDependencies": {
"@types/bun": "catalog:"
},
"peerDependencies": {
"typescript": "^5"
},
"dependencies": {
"@actions/core": "1.11.1",
"@actions/github": "6.0.1",
"@octokit/graphql": "9.0.1",
"@octokit/rest": "catalog:",
"@opencode-ai/sdk": "workspace:*"
}
}


@@ -1,15 +0,0 @@
#!/usr/bin/env bash
# Get the latest Git tag
latest_tag=$(git tag --sort=committerdate | grep -E '^github-v[0-9]+\.[0-9]+\.[0-9]+$' | tail -1)
if [ -z "$latest_tag" ]; then
echo "No tags found"
exit 1
fi
echo "Latest tag: $latest_tag"
# Update latest tag
git tag -d latest
git push origin :refs/tags/latest
git tag -a latest $latest_tag -m "Update latest to $latest_tag"
git push origin latest

github/sst-env.d.ts vendored (9 lines)

@@ -1,9 +0,0 @@
/* This file is auto-generated by SST. Do not edit. */
/* tslint:disable */
/* eslint-disable */
/* deno-fmt-ignore-file */
/// <reference path="../sst-env.d.ts" />
import "sst"
export {}


@@ -1,29 +0,0 @@
{
"compilerOptions": {
// Environment setup & latest features
"lib": ["ESNext"],
"target": "ESNext",
"module": "Preserve",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}

go.mod Normal file (128 lines)

@@ -0,0 +1,128 @@
module github.com/opencode-ai/opencode
go 1.24.0
require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0
github.com/JohannesKaufmann/html-to-markdown v1.6.0
github.com/PuerkitoBio/goquery v1.9.2
github.com/alecthomas/chroma/v2 v2.15.0
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.2
github.com/aymanbagabas/go-udiff v0.2.0
github.com/bmatcuk/doublestar/v4 v4.8.1
github.com/catppuccin/go v0.3.0
github.com/charmbracelet/bubbles v0.20.0
github.com/charmbracelet/bubbletea v1.3.4
github.com/charmbracelet/glamour v0.9.1
github.com/charmbracelet/lipgloss v1.1.0
github.com/charmbracelet/x/ansi v0.8.0
github.com/fsnotify/fsnotify v1.8.0
github.com/go-logfmt/logfmt v0.6.0
github.com/google/uuid v1.6.0
github.com/lrstanley/bubblezone v0.0.0-20250315020633-c249a3fe1231
github.com/mark3labs/mcp-go v0.17.0
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6
github.com/muesli/reflow v0.3.0
github.com/muesli/termenv v0.16.0
github.com/ncruces/go-sqlite3 v0.25.0
github.com/openai/openai-go v0.1.0-beta.2
github.com/pressly/goose/v3 v3.24.2
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3
github.com/spf13/cobra v1.9.1
github.com/spf13/viper v1.20.0
github.com/stretchr/testify v1.10.0
)
require (
cloud.google.com/go v0.116.0 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
cloud.google.com/go/auth v0.13.0 // indirect
cloud.google.com/go/compute/metadata v0.6.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/atotto/clipboard v0.1.4 // indirect
github.com/aws/aws-sdk-go-v2 v1.30.3 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 // indirect
github.com/aws/aws-sdk-go-v2/config v1.27.27 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.17.27 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.22.4 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.30.3 // indirect
github.com/aws/smithy-go v1.20.3 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/disintegration/imaging v1.6.2
github.com/dlclark/regexp2 v1.11.4 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
github.com/golang-jwt/jwt/v5 v5.2.2 // indirect
github.com/google/s2a-go v0.1.8 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
github.com/googleapis/gax-go/v2 v2.14.1 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/ncruces/julianday v1.0.0 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/sagikazarmark/locafero v0.7.0 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.12.0 // indirect
github.com/spf13/cast v1.7.1 // indirect
github.com/spf13/pflag v1.0.6 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/tetratelabs/wazero v1.9.0 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
github.com/yuin/goldmark v1.7.8 // indirect
github.com/yuin/goldmark-emoji v1.0.5 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect
go.opentelemetry.io/otel v1.35.0 // indirect
go.opentelemetry.io/otel/metric v1.35.0 // indirect
go.opentelemetry.io/otel/trace v1.35.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/crypto v0.37.0 // indirect
golang.org/x/image v0.26.0 // indirect
golang.org/x/net v0.39.0 // indirect
golang.org/x/sync v0.13.0 // indirect
golang.org/x/sys v0.32.0 // indirect
golang.org/x/term v0.31.0 // indirect
golang.org/x/text v0.24.0 // indirect
google.golang.org/genai v1.3.0
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect
google.golang.org/grpc v1.71.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

go.sum Normal file (359 lines)

@@ -0,0 +1,359 @@
cloud.google.com/go v0.116.0 h1:B3fRrSDkLRt5qSHWe40ERJvhvnQwdZiHu0bJOpldweE=
cloud.google.com/go v0.116.0/go.mod h1:cEPSRWPzZEswwdr9BxE6ChEn01dWlTaF05LiC2Xs70U=
cloud.google.com/go/auth v0.13.0 h1:8Fu8TZy167JkW8Tj3q7dIkr2v4cndv41ouecJx0PAHs=
cloud.google.com/go/auth v0.13.0/go.mod h1:COOjD9gwfKNKz+IIduatIhYJQIc0mG3H102r/EMxX6Q=
cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I=
cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 h1:tfLQ34V6F7tVSwoTf/4lH5sE0o6eCJuNDTmH09nDpbc=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/JohannesKaufmann/html-to-markdown v1.6.0 h1:04VXMiE50YYfCfLboJCLcgqF5x+rHJnb1ssNmqpLH/k=
github.com/JohannesKaufmann/html-to-markdown v1.6.0/go.mod h1:NUI78lGg/a7vpEJTz/0uOcYMaibytE4BUOQS8k78yPQ=
github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE=
github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk=
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/chroma/v2 v2.15.0 h1:LxXTQHFoYrstG2nnV9y2X5O94sOBzf0CIUpSTbpxvMc=
github.com/alecthomas/chroma/v2 v2.15.0/go.mod h1:gUhVLrPDXPtp/f+L1jo9xepo9gL4eLwRuGAunSZMkio=
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.2 h1:h7qxtumNjKPWFv1QM/HJy60MteeW23iKeEtBoY7bYZk=
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.2/go.mod h1:AapDW22irxK2PSumZiQXYUFvsdQgkwIWlpESweWZI/c=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY=
github.com/aws/aws-sdk-go-v2 v1.30.3/go.mod h1:nIQjQVp5sfpQcTc9mPSr1B0PaWK5ByX9MOoDadSN4lc=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 h1:tW1/Rkad38LA15X4UQtjXZXNKsCgkshC3EbmcUmghTg=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3/go.mod h1:UbnqO+zjqk3uIt9yCACHJ9IVNhyhOCnYk8yA19SAWrM=
github.com/aws/aws-sdk-go-v2/config v1.27.27 h1:HdqgGt1OAP0HkEDDShEl0oSYa9ZZBSOmKpdpsDMdO90=
github.com/aws/aws-sdk-go-v2/config v1.27.27/go.mod h1:MVYamCg76dFNINkZFu4n4RjDixhVr51HLj4ErWzrVwg=
github.com/aws/aws-sdk-go-v2/credentials v1.17.27 h1:2raNba6gr2IfA0eqqiP2XiQ0UVOpGPgDSi0I9iAP+UI=
github.com/aws/aws-sdk-go-v2/credentials v1.17.27/go.mod h1:gniiwbGahQByxan6YjQUMcW4Aov6bLC3m+evgcoN4r4=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11 h1:KreluoV8FZDEtI6Co2xuNk/UqI9iwMrOx/87PBNIKqw=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11/go.mod h1:SeSUYBLsMYFoRvHE0Tjvn7kbxaUhl75CJi1sbfhMxkU=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15 h1:SoNJ4RlFEQEbtDcCEt+QG56MY4fm4W8rYirAmq+/DdU=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15/go.mod h1:U9ke74k1n2bf+RIgoX1SXFed1HLs51OgUSs+Ph0KJP8=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15 h1:C6WHdGnTDIYETAm5iErQUiVNsclNx9qbJVPIt03B6bI=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15/go.mod h1:ZQLZqhcu+JhSrA9/NXRm8SkDvsycE+JkV3WGY41e+IM=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 h1:dT3MqvGhSoaIhRseqw2I0yH81l7wiR2vjs57O51EAm8=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3/go.mod h1:GlAeCkHwugxdHaueRr4nhPuY+WW+gR8UjlcqzPr1SPI=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17 h1:HGErhhrxZlQ044RiM+WdoZxp0p+EGM62y3L6pwA4olE=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17/go.mod h1:RkZEx4l0EHYDJpWppMJ3nD9wZJAa8/0lq9aVC+r2UII=
github.com/aws/aws-sdk-go-v2/service/sso v1.22.4 h1:BXx0ZIxvrJdSgSvKTZ+yRBeSqqgPM89VPlulEcl37tM=
github.com/aws/aws-sdk-go-v2/service/sso v1.22.4/go.mod h1:ooyCOXjvJEsUw7x+ZDHeISPMhtwI3ZCB7ggFMcFfWLU=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4 h1:yiwVzJW2ZxZTurVbYWA7QOrAaCYQR72t0wrSBfoesUE=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4/go.mod h1:0oxfLkpz3rQ/CHlx5hB7H69YUpFiI1tql6Q6Ne+1bCw=
github.com/aws/aws-sdk-go-v2/service/sts v1.30.3 h1:ZsDKRLXGWHk8WdtyYMoGNO7bTudrvuKpDKgMVRlepGE=
github.com/aws/aws-sdk-go-v2/service/sts v1.30.3/go.mod h1:zwySh8fpFyXp9yOr/KVzxOl8SRqgf/IDw5aUt9UKFcQ=
github.com/aws/smithy-go v1.20.3 h1:ryHwveWzPV5BIof6fyDvor6V3iUL7nTfiTKXHiW05nE=
github.com/aws/smithy-go v1.20.3/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8=
github.com/aymanbagabas/go-udiff v0.2.0/go.mod h1:RE4Ex0qsGkTAJoQdQQCA0uG+nAzJO/pI/QwceO5fgrA=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38=
github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/catppuccin/go v0.3.0 h1:d+0/YicIq+hSTo5oPuRi5kOpqkVA5tAsU6dNhvRu+aY=
github.com/catppuccin/go v0.3.0/go.mod h1:8IHJuMGaUUjQM82qBrGNBv7LFq6JI3NnQCF6MOlZjpc=
github.com/charmbracelet/bubbles v0.20.0 h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE=
github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU=
github.com/charmbracelet/bubbletea v1.3.4 h1:kCg7B+jSCFPLYRA52SDZjr51kG/fMUEoPoZrkaDHyoI=
github.com/charmbracelet/bubbletea v1.3.4/go.mod h1:dtcUCyCGEX3g9tosuYiut3MXgY/Jsv9nKVdibKKRRXo=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
github.com/charmbracelet/glamour v0.9.1 h1:11dEfiGP8q1BEqvGoIjivuc2rBk+5qEXdPtaQ2WoiCM=
github.com/charmbracelet/glamour v0.9.1/go.mod h1:+SHvIS8qnwhgTpVMiXwn7OfGomSqff1cHBCI8jLOetk=
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q=
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8=
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
github.com/charmbracelet/x/exp/golden v0.0.0-20240815200342-61de596daa2b h1:MnAMdlwSltxJyULnrYbkZpp4k58Co7Tah3ciKhSNo0Q=
github.com/charmbracelet/x/exp/golden v0.0.0-20240815200342-61de596daa2b/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo=
github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M=
github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM=
github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.4 h1:XYIDZApgAnrN1c855gTgghdIA6Stxb52D5RnLI1SLyw=
github.com/googleapis/enterprise-certificate-proxy v0.3.4/go.mod h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA=
github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q=
github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lrstanley/bubblezone v0.0.0-20250315020633-c249a3fe1231 h1:9rjt7AfnrXKNSZhp36A3/4QAZAwGGCGD/p8Bse26zms=
github.com/lrstanley/bubblezone v0.0.0-20250315020633-c249a3fe1231/go.mod h1:S5etECMx+sZnW0Gm100Ma9J1PgVCTgNyFaqGu2b08b4=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mark3labs/mcp-go v0.17.0 h1:5Ps6T7qXr7De/2QTqs9h6BKeZ/qdeUeGrgM5lPzi930=
github.com/mark3labs/mcp-go v0.17.0/go.mod h1:KmJndYv7GIgcPVwEKJjNcbhVQ+hJGJhrCCB/9xITzpE=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
github.com/ncruces/go-sqlite3 v0.25.0 h1:trugKUs98Zwy9KwRr/EUxZHL92LYt7UqcKqAfpGpK+I=
github.com/ncruces/go-sqlite3 v0.25.0/go.mod h1:n6Z7036yFilJx04yV0mi5JWaF66rUmXn1It9Ux8dx68=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ncruces/julianday v1.0.0 h1:fH0OKwa7NWvniGQtxdJRxAgkBMolni2BjDHaWTxqt7M=
github.com/ncruces/julianday v1.0.0/go.mod h1:Dusn2KvZrrovOMJuOt0TNXL6tB7U2E8kvza5fFc9G7g=
github.com/openai/openai-go v0.1.0-beta.2 h1:Ra5nCFkbEl9w+UJwAciC4kqnIBUCcJazhmMA0/YN894=
github.com/openai/openai-go v0.1.0-beta.2/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pressly/goose/v3 v3.24.2 h1:c/ie0Gm8rnIVKvnDQ/scHErv46jrDv9b4I0WRcFJzYU=
github.com/pressly/goose/v3 v3.24.2/go.mod h1:kjefwFB0eR4w30Td2Gj2Mznyw94vSP+2jJYkOVNbD1k=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo=
github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k=
github.com/sebdah/goldie/v2 v2.5.3 h1:9ES/mNN+HNUbNWpVAlrzuZ7jE+Nrczbj8uFRjM7624Y=
github.com/sebdah/goldie/v2 v2.5.3/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvKI/NNtssI=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.20.0 h1:zrxIyR3RQIOsarIrgL8+sAvALXul9jeEPa06Y0Ph6vY=
github.com/spf13/viper v1.20.0/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
github.com/yuin/goldmark v1.7.8 h1:iERMLn0/QJeHFhxSt3p6PeN9mGnvIKSpG9YYorDMnic=
github.com/yuin/goldmark v1.7.8/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
github.com/yuin/goldmark-emoji v1.0.5 h1:EMVWyCGPlXJfUXBXpuMu+ii3TIaxbVBnEX9uaDC4cIk=
github.com/yuin/goldmark-emoji v1.0.5/go.mod h1:tTkZEbwu5wkPmgTcitqddVxY9osFZiavD+r4AzQrh1U=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8=
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A=
go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU=
go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk=
go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w=
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY=
golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610=
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o=
golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/genai v1.3.0 h1:tXhPJF30skOjnnDY7ZnjK3q7IKy4PuAlEA0fk7uEaEI=
google.golang.org/genai v1.3.0/go.mod h1:TyfOKRz/QyCaj6f/ZDt505x+YreXnY40l2I6k8TvgqY=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec=
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
modernc.org/libc v1.61.13 h1:3LRd6ZO1ezsFiX1y+bHd1ipyEHIJKvuprv0sLTBwLW8=
modernc.org/libc v1.61.13/go.mod h1:8F/uJWL/3nNil0Lgt1Dpz+GgkApWh04N3el3hxJcA6E=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.9.1 h1:V/Z1solwAVmMW1yttq3nDdZPJqV1rM05Ccq6KMSZ34g=
modernc.org/memory v1.9.1/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
modernc.org/sqlite v1.36.2 h1:vjcSazuoFve9Wm0IVNHgmJECoOXLZM1KfMXbcX2axHA=
modernc.org/sqlite v1.36.2/go.mod h1:ADySlx7K4FdY5MaJcEv86hTJ0PjedAloTUuif0YS3ws=


@@ -1,68 +0,0 @@
import { domain } from "./stage"
const GITHUB_APP_ID = new sst.Secret("GITHUB_APP_ID")
const GITHUB_APP_PRIVATE_KEY = new sst.Secret("GITHUB_APP_PRIVATE_KEY")
export const EMAILOCTOPUS_API_KEY = new sst.Secret("EMAILOCTOPUS_API_KEY")
const ADMIN_SECRET = new sst.Secret("ADMIN_SECRET")
const DISCORD_SUPPORT_BOT_TOKEN = new sst.Secret("DISCORD_SUPPORT_BOT_TOKEN")
const DISCORD_SUPPORT_CHANNEL_ID = new sst.Secret("DISCORD_SUPPORT_CHANNEL_ID")
const FEISHU_APP_ID = new sst.Secret("FEISHU_APP_ID")
const FEISHU_APP_SECRET = new sst.Secret("FEISHU_APP_SECRET")
const bucket = new sst.cloudflare.Bucket("Bucket")
export const api = new sst.cloudflare.Worker("Api", {
domain: `api.${domain}`,
handler: "packages/function/src/api.ts",
environment: {
WEB_DOMAIN: domain,
},
url: true,
link: [
bucket,
GITHUB_APP_ID,
GITHUB_APP_PRIVATE_KEY,
ADMIN_SECRET,
DISCORD_SUPPORT_BOT_TOKEN,
DISCORD_SUPPORT_CHANNEL_ID,
FEISHU_APP_ID,
FEISHU_APP_SECRET,
],
transform: {
worker: (args) => {
args.logpush = true
args.bindings = $resolve(args.bindings).apply((bindings) => [
...bindings,
{
name: "SYNC_SERVER",
type: "durable_object_namespace",
className: "SyncServer",
},
])
args.migrations = {
// Note: when releasing the next tag, make sure all stages use tag v2
oldTag: $app.stage === "production" || $app.stage === "thdxr" ? "" : "v1",
newTag: $app.stage === "production" || $app.stage === "thdxr" ? "" : "v1",
//newSqliteClasses: ["SyncServer"],
}
},
},
})
new sst.cloudflare.x.Astro("Web", {
domain: "docs." + domain,
path: "packages/web",
environment: {
// For astro config
SST_STAGE: $app.stage,
VITE_API_URL: api.url.apply((url) => url!),
},
})
new sst.cloudflare.StaticSite("WebApp", {
domain: "app." + domain,
path: "packages/app",
build: {
command: "bun turbo build",
output: "./dist",
},
})


@@ -1,205 +0,0 @@
import { domain } from "./stage"
import { EMAILOCTOPUS_API_KEY } from "./app"
////////////////
// DATABASE
////////////////
const cluster = planetscale.getDatabaseOutput({
name: "opencode",
organization: "anomalyco",
})
const branch =
$app.stage === "production"
? planetscale.getBranchOutput({
name: "production",
organization: cluster.organization,
database: cluster.name,
})
: new planetscale.Branch("DatabaseBranch", {
database: cluster.name,
organization: cluster.organization,
name: $app.stage,
parentBranch: "production",
})
const password = new planetscale.Password("DatabasePassword", {
name: $app.stage,
database: cluster.name,
organization: cluster.organization,
branch: branch.name,
})
export const database = new sst.Linkable("Database", {
properties: {
host: password.accessHostUrl,
database: cluster.name,
username: password.username,
password: password.plaintext,
port: 3306,
},
})
new sst.x.DevCommand("Studio", {
link: [database],
dev: {
command: "bun db studio",
directory: "packages/console/core",
autostart: true,
},
})
////////////////
// AUTH
////////////////
const GITHUB_CLIENT_ID_CONSOLE = new sst.Secret("GITHUB_CLIENT_ID_CONSOLE")
const GITHUB_CLIENT_SECRET_CONSOLE = new sst.Secret("GITHUB_CLIENT_SECRET_CONSOLE")
const GOOGLE_CLIENT_ID = new sst.Secret("GOOGLE_CLIENT_ID")
const authStorage = new sst.cloudflare.Kv("AuthStorage")
export const auth = new sst.cloudflare.Worker("AuthApi", {
domain: `auth.${domain}`,
handler: "packages/console/function/src/auth.ts",
url: true,
link: [database, authStorage, GITHUB_CLIENT_ID_CONSOLE, GITHUB_CLIENT_SECRET_CONSOLE, GOOGLE_CLIENT_ID],
})
////////////////
// GATEWAY
////////////////
export const stripeWebhook = new stripe.WebhookEndpoint("StripeWebhookEndpoint", {
url: $interpolate`https://${domain}/stripe/webhook`,
enabledEvents: [
"checkout.session.async_payment_failed",
"checkout.session.async_payment_succeeded",
"checkout.session.completed",
"checkout.session.expired",
"charge.refunded",
"invoice.payment_succeeded",
"customer.created",
"customer.deleted",
"customer.updated",
"customer.discount.created",
"customer.discount.deleted",
"customer.discount.updated",
"customer.source.created",
"customer.source.deleted",
"customer.source.expiring",
"customer.source.updated",
"customer.subscription.created",
"customer.subscription.deleted",
"customer.subscription.paused",
"customer.subscription.pending_update_applied",
"customer.subscription.pending_update_expired",
"customer.subscription.resumed",
"customer.subscription.trial_will_end",
"customer.subscription.updated",
],
})
const zenProduct = new stripe.Product("ZenBlack", {
name: "OpenCode Black",
})
const zenPriceProps = {
product: zenProduct.id,
currency: "usd",
recurring: {
interval: "month",
intervalCount: 1,
},
}
const zenPrice200 = new stripe.Price("ZenBlackPrice", { ...zenPriceProps, unitAmount: 20000 })
const zenPrice100 = new stripe.Price("ZenBlack100Price", { ...zenPriceProps, unitAmount: 10000 })
const zenPrice20 = new stripe.Price("ZenBlack20Price", { ...zenPriceProps, unitAmount: 2000 })
const ZEN_BLACK_PRICE = new sst.Linkable("ZEN_BLACK_PRICE", {
properties: {
product: zenProduct.id,
plan200: zenPrice200.id,
plan100: zenPrice100.id,
plan20: zenPrice20.id,
},
})
const ZEN_BLACK_LIMITS = new sst.Secret("ZEN_BLACK_LIMITS")
const ZEN_MODELS = [
new sst.Secret("ZEN_MODELS1"),
new sst.Secret("ZEN_MODELS2"),
new sst.Secret("ZEN_MODELS3"),
new sst.Secret("ZEN_MODELS4"),
new sst.Secret("ZEN_MODELS5"),
new sst.Secret("ZEN_MODELS6"),
new sst.Secret("ZEN_MODELS7"),
new sst.Secret("ZEN_MODELS8"),
]
const STRIPE_SECRET_KEY = new sst.Secret("STRIPE_SECRET_KEY")
const STRIPE_PUBLISHABLE_KEY = new sst.Secret("STRIPE_PUBLISHABLE_KEY")
const AUTH_API_URL = new sst.Linkable("AUTH_API_URL", {
properties: { value: auth.url.apply((url) => url!) },
})
const STRIPE_WEBHOOK_SECRET = new sst.Linkable("STRIPE_WEBHOOK_SECRET", {
properties: { value: stripeWebhook.secret },
})
const gatewayKv = new sst.cloudflare.Kv("GatewayKv")
////////////////
// CONSOLE
////////////////
const bucket = new sst.cloudflare.Bucket("ZenData")
const bucketNew = new sst.cloudflare.Bucket("ZenDataNew")
const AWS_SES_ACCESS_KEY_ID = new sst.Secret("AWS_SES_ACCESS_KEY_ID")
const AWS_SES_SECRET_ACCESS_KEY = new sst.Secret("AWS_SES_SECRET_ACCESS_KEY")
let logProcessor
if ($app.stage === "production" || $app.stage === "frank") {
const HONEYCOMB_API_KEY = new sst.Secret("HONEYCOMB_API_KEY")
logProcessor = new sst.cloudflare.Worker("LogProcessor", {
handler: "packages/console/function/src/log-processor.ts",
link: [HONEYCOMB_API_KEY],
})
}
new sst.cloudflare.x.SolidStart("Console", {
domain,
path: "packages/console/app",
link: [
bucket,
bucketNew,
database,
AUTH_API_URL,
STRIPE_WEBHOOK_SECRET,
STRIPE_SECRET_KEY,
EMAILOCTOPUS_API_KEY,
AWS_SES_ACCESS_KEY_ID,
AWS_SES_SECRET_ACCESS_KEY,
ZEN_BLACK_PRICE,
ZEN_BLACK_LIMITS,
new sst.Secret("ZEN_SESSION_SECRET"),
...ZEN_MODELS,
...($dev
? [
new sst.Secret("CLOUDFLARE_DEFAULT_ACCOUNT_ID", process.env.CLOUDFLARE_DEFAULT_ACCOUNT_ID!),
new sst.Secret("CLOUDFLARE_API_TOKEN", process.env.CLOUDFLARE_API_TOKEN!),
]
: []),
gatewayKv,
],
environment: {
//VITE_DOCS_URL: web.url.apply((url) => url!),
//VITE_API_URL: gateway.url.apply((url) => url!),
VITE_AUTH_URL: auth.url.apply((url) => url!),
VITE_STRIPE_PUBLISHABLE_KEY: STRIPE_PUBLISHABLE_KEY.value,
},
transform: {
server: {
transform: {
worker: {
placement: { mode: "smart" },
tailConsumers: logProcessor ? [{ service: logProcessor.nodes.worker.scriptName }] : [],
},
},
},
},
})


@@ -1,17 +0,0 @@
import { SECRET } from "./secret"
import { domain, shortDomain } from "./stage"
const storage = new sst.cloudflare.Bucket("EnterpriseStorage")
const teams = new sst.cloudflare.x.SolidStart("Teams", {
domain: shortDomain,
path: "packages/enterprise",
buildCommand: "bun run build:cloudflare",
environment: {
OPENCODE_STORAGE_ADAPTER: "r2",
OPENCODE_STORAGE_ACCOUNT_ID: sst.cloudflare.DEFAULT_ACCOUNT_ID,
OPENCODE_STORAGE_ACCESS_KEY_ID: SECRET.R2AccessKey.value,
OPENCODE_STORAGE_SECRET_ACCESS_KEY: SECRET.R2SecretKey.value,
OPENCODE_STORAGE_BUCKET: storage.name,
},
})


@@ -1,4 +0,0 @@
export const SECRET = {
R2AccessKey: new sst.Secret("R2AccessKey", "unknown"),
R2SecretKey: new sst.Secret("R2SecretKey", "unknown"),
}


@@ -1,19 +0,0 @@
export const domain = (() => {
if ($app.stage === "production") return "opencode.ai"
if ($app.stage === "dev") return "dev.opencode.ai"
return `${$app.stage}.dev.opencode.ai`
})()
export const zoneID = "430ba34c138cfb5360826c4909f99be8"
new cloudflare.RegionalHostname("RegionalHostname", {
hostname: domain,
regionKey: "us",
zoneId: zoneID,
})
export const shortDomain = (() => {
if ($app.stage === "production") return "opncd.ai"
if ($app.stage === "dev") return "dev.opncd.ai"
return `${$app.stage}.dev.opncd.ai`
})()

install (444 lines)

@@ -2,192 +2,54 @@
set -euo pipefail
APP=opencode
MUTED='\033[0;2m'
RED='\033[0;31m'
ORANGE='\033[38;5;214m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
ORANGE='\033[38;2;255;140;0m'
NC='\033[0m' # No Color
usage() {
cat <<EOF
OpenCode Installer
Usage: install.sh [options]
Options:
-h, --help Display this help message
-v, --version <version> Install a specific version (e.g., 1.0.180)
-b, --binary <path> Install from a local binary instead of downloading
--no-modify-path Don't modify shell config files (.zshrc, .bashrc, etc.)
Examples:
curl -fsSL https://opencode.ai/install | bash
curl -fsSL https://opencode.ai/install | bash -s -- --version 1.0.180
./install --binary /path/to/opencode
EOF
}
requested_version=${VERSION:-}
no_modify_path=false
binary_path=""
while [[ $# -gt 0 ]]; do
case "$1" in
-h|--help)
usage
exit 0
;;
-v|--version)
if [[ -n "${2:-}" ]]; then
requested_version="$2"
shift 2
else
echo -e "${RED}Error: --version requires a version argument${NC}"
exit 1
fi
;;
-b|--binary)
if [[ -n "${2:-}" ]]; then
binary_path="$2"
shift 2
else
echo -e "${RED}Error: --binary requires a path argument${NC}"
exit 1
fi
;;
--no-modify-path)
no_modify_path=true
shift
;;
*)
echo -e "${ORANGE}Warning: Unknown option '$1'${NC}" >&2
shift
;;
esac
done
os=$(uname -s | tr '[:upper:]' '[:lower:]')
if [[ "$os" == "darwin" ]]; then
os="mac"
fi
arch=$(uname -m)
if [[ "$arch" == "aarch64" ]]; then
arch="arm64"
fi
filename="$APP-$os-$arch.tar.gz"
case "$filename" in
*"-linux-"*)
[[ "$arch" == "x86_64" || "$arch" == "arm64" || "$arch" == "i386" ]] || exit 1
;;
*"-mac-"*)
[[ "$arch" == "x86_64" || "$arch" == "arm64" ]] || exit 1
;;
*)
echo "${RED}Unsupported OS/Arch: $os/$arch${NC}"
exit 1
;;
esac
INSTALL_DIR=$HOME/.opencode/bin
mkdir -p "$INSTALL_DIR"
# If --binary is provided, skip all download/detection logic
if [ -n "$binary_path" ]; then
if [ ! -f "$binary_path" ]; then
echo -e "${RED}Error: Binary not found at ${binary_path}${NC}"
if [ -z "$requested_version" ]; then
url="https://github.com/opencode-ai/opencode/releases/latest/download/$filename"
specific_version=$(curl -s https://api.github.com/repos/opencode-ai/opencode/releases/latest | awk -F'"' '/"tag_name": "/ {gsub(/^v/, "", $4); print $4}')
if [[ $? -ne 0 ]]; then
echo "${RED}Failed to fetch version information${NC}"
exit 1
fi
specific_version="local"
else
raw_os=$(uname -s)
os=$(echo "$raw_os" | tr '[:upper:]' '[:lower:]')
case "$raw_os" in
Darwin*) os="darwin" ;;
Linux*) os="linux" ;;
MINGW*|MSYS*|CYGWIN*) os="windows" ;;
esac
arch=$(uname -m)
if [[ "$arch" == "aarch64" ]]; then
arch="arm64"
fi
if [[ "$arch" == "x86_64" ]]; then
arch="x64"
fi
if [ "$os" = "darwin" ] && [ "$arch" = "x64" ]; then
rosetta_flag=$(sysctl -n sysctl.proc_translated 2>/dev/null || echo 0)
if [ "$rosetta_flag" = "1" ]; then
arch="arm64"
fi
fi
combo="$os-$arch"
case "$combo" in
linux-x64|linux-arm64|darwin-x64|darwin-arm64|windows-x64)
;;
*)
echo -e "${RED}Unsupported OS/Arch: $os/$arch${NC}"
exit 1
;;
esac
archive_ext=".zip"
if [ "$os" = "linux" ]; then
archive_ext=".tar.gz"
fi
is_musl=false
if [ "$os" = "linux" ]; then
if [ -f /etc/alpine-release ]; then
is_musl=true
fi
if command -v ldd >/dev/null 2>&1; then
if ldd --version 2>&1 | grep -qi musl; then
is_musl=true
fi
fi
fi
needs_baseline=false
if [ "$arch" = "x64" ]; then
if [ "$os" = "linux" ]; then
if ! grep -qi avx2 /proc/cpuinfo 2>/dev/null; then
needs_baseline=true
fi
fi
if [ "$os" = "darwin" ]; then
avx2=$(sysctl -n hw.optional.avx2_0 2>/dev/null || echo 0)
if [ "$avx2" != "1" ]; then
needs_baseline=true
fi
fi
fi
target="$os-$arch"
if [ "$needs_baseline" = "true" ]; then
target="$target-baseline"
fi
if [ "$is_musl" = "true" ]; then
target="$target-musl"
fi
filename="$APP-$target$archive_ext"
if [ "$os" = "linux" ]; then
if ! command -v tar >/dev/null 2>&1; then
echo -e "${RED}Error: 'tar' is required but not installed.${NC}"
exit 1
fi
else
if ! command -v unzip >/dev/null 2>&1; then
echo -e "${RED}Error: 'unzip' is required but not installed.${NC}"
exit 1
fi
fi
if [ -z "$requested_version" ]; then
url="https://github.com/anomalyco/opencode/releases/latest/download/$filename"
specific_version=$(curl -s https://api.github.com/repos/anomalyco/opencode/releases/latest | sed -n 's/.*"tag_name": *"v\([^"]*\)".*/\1/p')
if [[ $? -ne 0 || -z "$specific_version" ]]; then
echo -e "${RED}Failed to fetch version information${NC}"
exit 1
fi
else
# Strip leading 'v' if present
requested_version="${requested_version#v}"
url="https://github.com/anomalyco/opencode/releases/download/v${requested_version}/$filename"
specific_version=$requested_version
# Verify the release exists before downloading
http_status=$(curl -sI -o /dev/null -w "%{http_code}" "https://github.com/anomalyco/opencode/releases/tag/v${requested_version}")
if [ "$http_status" = "404" ]; then
echo -e "${RED}Error: Release v${requested_version} not found${NC}"
echo -e "${MUTED}Available releases: https://github.com/anomalyco/opencode/releases${NC}"
exit 1
fi
fi
url="https://github.com/opencode-ai/opencode/releases/download/v${requested_version}/$filename"
specific_version=$requested_version
fi
print_message() {
@@ -196,8 +58,8 @@ print_message() {
local color=""
case $level in
info) color="${NC}" ;;
warning) color="${NC}" ;;
info) color="${GREEN}" ;;
warning) color="${YELLOW}" ;;
error) color="${RED}" ;;
esac
@@ -208,153 +70,41 @@ check_version() {
if command -v opencode >/dev/null 2>&1; then
opencode_path=$(which opencode)
## Check the installed version
installed_version=$(opencode --version 2>/dev/null || echo "")
## TODO: check if version is installed
# installed_version=$(opencode version)
installed_version="0.0.1"
installed_version=$(echo $installed_version | awk '{print $2}')
if [[ "$installed_version" != "$specific_version" ]]; then
print_message info "${MUTED}Installed version: ${NC}$installed_version."
print_message info "Installed version: ${YELLOW}$installed_version."
else
print_message info "${MUTED}Version ${NC}$specific_version${MUTED} already installed"
print_message info "Version ${YELLOW}$specific_version${GREEN} already installed"
exit 0
fi
fi
}
unbuffered_sed() {
if echo | sed -u -e "" >/dev/null 2>&1; then
sed -nu "$@"
elif echo | sed -l -e "" >/dev/null 2>&1; then
sed -nl "$@"
else
local pad="$(printf "\n%512s" "")"
sed -ne "s/$/\\${pad}/" "$@"
fi
}
print_progress() {
local bytes="$1"
local length="$2"
[ "$length" -gt 0 ] || return 0
local width=50
local percent=$(( bytes * 100 / length ))
[ "$percent" -gt 100 ] && percent=100
local on=$(( percent * width / 100 ))
local off=$(( width - on ))
local filled=$(printf "%*s" "$on" "")
filled=${filled// /■}
local empty=$(printf "%*s" "$off" "")
empty=${empty// /・}
printf "\r${ORANGE}%s%s %3d%%${NC}" "$filled" "$empty" "$percent" >&4
}
download_with_progress() {
local url="$1"
local output="$2"
if [ -t 2 ]; then
exec 4>&2
else
exec 4>/dev/null
fi
local tmp_dir=${TMPDIR:-/tmp}
local basename="${tmp_dir}/opencode_install_$$"
local tracefile="${basename}.trace"
rm -f "$tracefile"
mkfifo "$tracefile"
# Hide cursor
printf "\033[?25l" >&4
trap "trap - RETURN; rm -f \"$tracefile\"; printf '\033[?25h' >&4; exec 4>&-" RETURN
(
curl --trace-ascii "$tracefile" -s -L -o "$output" "$url"
) &
local curl_pid=$!
unbuffered_sed \
-e 'y/ACDEGHLNORTV/acdeghlnortv/' \
-e '/^0000: content-length:/p' \
-e '/^<= recv data/p' \
"$tracefile" | \
{
local length=0
local bytes=0
while IFS=" " read -r -a line; do
[ "${#line[@]}" -lt 2 ] && continue
local tag="${line[0]} ${line[1]}"
if [ "$tag" = "0000: content-length:" ]; then
length="${line[2]}"
length=$(echo "$length" | tr -d '\r')
bytes=0
elif [ "$tag" = "<= recv" ]; then
local size="${line[3]}"
bytes=$(( bytes + size ))
if [ "$length" -gt 0 ]; then
print_progress "$bytes" "$length"
fi
fi
done
}
wait $curl_pid
local ret=$?
echo "" >&4
return $ret
}
download_and_install() {
print_message info "\n${MUTED}Installing ${NC}opencode ${MUTED}version: ${NC}$specific_version"
local tmp_dir="${TMPDIR:-/tmp}/opencode_install_$$"
mkdir -p "$tmp_dir"
if [[ "$os" == "windows" ]] || ! [ -t 2 ] || ! download_with_progress "$url" "$tmp_dir/$filename"; then
# Fallback to standard curl on Windows, non-TTY environments, or if custom progress fails
curl -# -L -o "$tmp_dir/$filename" "$url"
fi
if [ "$os" = "linux" ]; then
tar -xzf "$tmp_dir/$filename" -C "$tmp_dir"
else
unzip -q "$tmp_dir/$filename" -d "$tmp_dir"
fi
mv "$tmp_dir/opencode" "$INSTALL_DIR"
chmod 755 "${INSTALL_DIR}/opencode"
rm -rf "$tmp_dir"
print_message info "Downloading ${ORANGE}opencode ${GREEN}version: ${YELLOW}$specific_version ${GREEN}..."
mkdir -p opencodetmp && cd opencodetmp
curl -# -L $url | tar xz
mv opencode $INSTALL_DIR
cd .. && rm -rf opencodetmp
}
install_from_binary() {
print_message info "\n${MUTED}Installing ${NC}opencode ${MUTED}from: ${NC}$binary_path"
cp "$binary_path" "${INSTALL_DIR}/opencode"
chmod 755 "${INSTALL_DIR}/opencode"
}
if [ -n "$binary_path" ]; then
install_from_binary
else
check_version
download_and_install
fi
check_version
download_and_install
add_to_path() {
local config_file=$1
local command=$2
if grep -Fxq "$command" "$config_file"; then
print_message info "Command already exists in $config_file, skipping write."
elif [[ -w $config_file ]]; then
if [[ -w $config_file ]]; then
echo -e "\n# opencode" >> "$config_file"
echo "$command" >> "$config_file"
print_message info "${MUTED}Successfully added ${NC}opencode ${MUTED}to \$PATH in ${NC}$config_file"
print_message info "Successfully added ${ORANGE}opencode ${GREEN}to \$PATH in $config_file"
else
print_message warning "Manually add the directory to $config_file (or similar):"
print_message info " $command"
@@ -369,7 +119,7 @@ case $current_shell in
config_files="$HOME/.config/fish/config.fish"
;;
zsh)
config_files="${ZDOTDIR:-$HOME}/.zshrc ${ZDOTDIR:-$HOME}/.zshenv $XDG_CONFIG_HOME/zsh/.zshrc $XDG_CONFIG_HOME/zsh/.zshenv"
config_files="$HOME/.zshrc $HOME/.zshenv $XDG_CONFIG_HOME/zsh/.zshrc $XDG_CONFIG_HOME/zsh/.zshenv"
;;
bash)
config_files="$HOME/.bashrc $HOME/.bash_profile $HOME/.profile $XDG_CONFIG_HOME/bash/.bashrc $XDG_CONFIG_HOME/bash/.bash_profile"
@@ -386,42 +136,41 @@ case $current_shell in
;;
esac
if [[ "$no_modify_path" != "true" ]]; then
config_file=""
for file in $config_files; do
if [[ -f $file ]]; then
config_file=$file
break
fi
done
if [[ -z $config_file ]]; then
print_message warning "No config file found for $current_shell. You may need to manually add to PATH:"
print_message info " export PATH=$INSTALL_DIR:\$PATH"
elif [[ ":$PATH:" != *":$INSTALL_DIR:"* ]]; then
case $current_shell in
fish)
add_to_path "$config_file" "fish_add_path $INSTALL_DIR"
;;
zsh)
add_to_path "$config_file" "export PATH=$INSTALL_DIR:\$PATH"
;;
bash)
add_to_path "$config_file" "export PATH=$INSTALL_DIR:\$PATH"
;;
ash)
add_to_path "$config_file" "export PATH=$INSTALL_DIR:\$PATH"
;;
sh)
add_to_path "$config_file" "export PATH=$INSTALL_DIR:\$PATH"
;;
*)
export PATH=$INSTALL_DIR:$PATH
print_message warning "Manually add the directory to $config_file (or similar):"
print_message info " export PATH=$INSTALL_DIR:\$PATH"
;;
esac
config_file=""
for file in $config_files; do
if [[ -f $file ]]; then
config_file=$file
break
fi
done
if [[ -z $config_file ]]; then
print_message error "No config file found for $current_shell. Checked files: ${config_files[@]}"
exit 1
fi
if [[ ":$PATH:" != *":$INSTALL_DIR:"* ]]; then
case $current_shell in
fish)
add_to_path "$config_file" "fish_add_path $INSTALL_DIR"
;;
zsh)
add_to_path "$config_file" "export PATH=$INSTALL_DIR:\$PATH"
;;
bash)
add_to_path "$config_file" "export PATH=$INSTALL_DIR:\$PATH"
;;
ash)
add_to_path "$config_file" "export PATH=$INSTALL_DIR:\$PATH"
;;
sh)
add_to_path "$config_file" "export PATH=$INSTALL_DIR:\$PATH"
;;
*)
print_message warning "Manually add the directory to $config_file (or similar):"
print_message info " export PATH=$INSTALL_DIR:\$PATH"
;;
esac
fi
if [ -n "${GITHUB_ACTIONS-}" ] && [ "${GITHUB_ACTIONS}" == "true" ]; then
@@ -429,18 +178,3 @@ if [ -n "${GITHUB_ACTIONS-}" ] && [ "${GITHUB_ACTIONS}" == "true" ]; then
print_message info "Added $INSTALL_DIR to \$GITHUB_PATH"
fi
echo -e ""
echo -e "${MUTED}  ${NC} ▄ "
echo -e "${MUTED}█▀▀█ █▀▀█ █▀▀█ █▀▀▄ ${NC}█▀▀▀ █▀▀█ █▀▀█ █▀▀█"
echo -e "${MUTED}█░░█ █░░█ █▀▀▀ █░░█ ${NC}█░░░ █░░█ █░░█ █▀▀▀"
echo -e "${MUTED}▀▀▀▀ █▀▀▀ ▀▀▀▀ ▀ ▀ ${NC}▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀"
echo -e ""
echo -e ""
echo -e "${MUTED}OpenCode includes free models, to start:${NC}"
echo -e ""
echo -e "cd <project> ${MUTED}# Open directory${NC}"
echo -e "opencode ${MUTED}# Run command${NC}"
echo -e ""
echo -e "${MUTED}For more information visit ${NC}https://opencode.ai/docs"
echo -e ""
echo -e ""

119
internal/app/app.go Normal file

@@ -0,0 +1,119 @@
package app
import (
"context"
"database/sql"
"maps"
"sync"
"time"
"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/db"
"github.com/opencode-ai/opencode/internal/history"
"github.com/opencode-ai/opencode/internal/llm/agent"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/lsp"
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/permission"
"github.com/opencode-ai/opencode/internal/session"
"github.com/opencode-ai/opencode/internal/tui/theme"
)
type App struct {
Sessions session.Service
Messages message.Service
History history.Service
Permissions permission.Service
CoderAgent agent.Service
LSPClients map[string]*lsp.Client
clientsMutex sync.RWMutex
watcherCancelFuncs []context.CancelFunc
cancelFuncsMutex sync.Mutex
watcherWG sync.WaitGroup
}
func New(ctx context.Context, conn *sql.DB) (*App, error) {
q := db.New(conn)
sessions := session.NewService(q)
messages := message.NewService(q)
files := history.NewService(q, conn)
app := &App{
Sessions: sessions,
Messages: messages,
History: files,
Permissions: permission.NewPermissionService(),
LSPClients: make(map[string]*lsp.Client),
}
// Initialize theme based on configuration
app.initTheme()
// Initialize LSP clients in the background
go app.initLSPClients(ctx)
var err error
app.CoderAgent, err = agent.NewAgent(
config.AgentCoder,
app.Sessions,
app.Messages,
agent.CoderAgentTools(
app.Permissions,
app.Sessions,
app.Messages,
app.History,
app.LSPClients,
),
)
if err != nil {
logging.Error("Failed to create coder agent", err)
return nil, err
}
return app, nil
}
// initTheme sets the application theme based on the configuration
func (app *App) initTheme() {
cfg := config.Get()
if cfg == nil || cfg.TUI.Theme == "" {
return // Use default theme
}
// Try to set the theme from config
err := theme.SetTheme(cfg.TUI.Theme)
if err != nil {
logging.Warn("Failed to set theme from config, using default theme", "theme", cfg.TUI.Theme, "error", err)
} else {
logging.Debug("Set theme from config", "theme", cfg.TUI.Theme)
}
}
// Shutdown performs a clean shutdown of the application
func (app *App) Shutdown() {
// Cancel all watcher goroutines
app.cancelFuncsMutex.Lock()
for _, cancel := range app.watcherCancelFuncs {
cancel()
}
app.cancelFuncsMutex.Unlock()
app.watcherWG.Wait()
// Perform additional cleanup for LSP clients
app.clientsMutex.RLock()
clients := make(map[string]*lsp.Client, len(app.LSPClients))
maps.Copy(clients, app.LSPClients)
app.clientsMutex.RUnlock()
for name, client := range clients {
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
if err := client.Shutdown(shutdownCtx); err != nil {
logging.Error("Failed to shutdown LSP client", "name", name, "error", err)
}
cancel()
}
}
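
As a rough orientation for how the pieces above fit together, here is a hypothetical caller (not part of this change) that loads configuration, opens the database, builds the App, and defers Shutdown; the package layout and the placement of the TUI loop are assumptions.

package main

import (
	"context"
	"log"
	"os"

	"github.com/opencode-ai/opencode/internal/app"
	"github.com/opencode-ai/opencode/internal/config"
	"github.com/opencode-ai/opencode/internal/db"
)

func main() {
	cwd, err := os.Getwd()
	if err != nil {
		log.Fatal(err)
	}
	// config.Load must run first; db.Connect and app.New read the global config via config.Get().
	if _, err := config.Load(cwd, false); err != nil {
		log.Fatal(err)
	}
	conn, err := db.Connect()
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	application, err := app.New(ctx, conn)
	if err != nil {
		log.Fatal(err)
	}
	// Shutdown cancels the LSP workspace watchers and shuts down each client.
	defer application.Shutdown()
	// ... run the TUI / agent loop against application here ...
}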

126
internal/app/lsp.go Normal file

@@ -0,0 +1,126 @@
package app
import (
"context"
"time"
"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/lsp"
"github.com/opencode-ai/opencode/internal/lsp/watcher"
)
func (app *App) initLSPClients(ctx context.Context) {
cfg := config.Get()
// Initialize LSP clients
for name, clientConfig := range cfg.LSP {
// Start each client initialization in its own goroutine
go app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
}
logging.Info("LSP clients initialization started in background")
}
// createAndStartLSPClient creates a new LSP client, initializes it, and starts its workspace watcher
func (app *App) createAndStartLSPClient(ctx context.Context, name string, command string, args ...string) {
// Create a specific context for initialization with a timeout
logging.Info("Creating LSP client", "name", name, "command", command, "args", args)
// Create the LSP client
lspClient, err := lsp.NewClient(ctx, command, args...)
if err != nil {
logging.Error("Failed to create LSP client for", name, err)
return
}
// Create a longer timeout for initialization (some servers take time to start)
initCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
// Initialize with the initialization context
_, err = lspClient.InitializeLSPClient(initCtx, config.WorkingDirectory())
if err != nil {
logging.Error("Initialize failed", "name", name, "error", err)
// Clean up the client to prevent resource leaks
lspClient.Close()
return
}
// Wait for the server to be ready
if err := lspClient.WaitForServerReady(initCtx); err != nil {
logging.Error("Server failed to become ready", "name", name, "error", err)
// We'll continue anyway, as some functionality might still work
lspClient.SetServerState(lsp.StateError)
} else {
logging.Info("LSP server is ready", "name", name)
lspClient.SetServerState(lsp.StateReady)
}
logging.Info("LSP client initialized", "name", name)
// Create a child context that can be canceled when the app is shutting down
watchCtx, cancelFunc := context.WithCancel(ctx)
// Create a context with the server name for better identification
watchCtx = context.WithValue(watchCtx, "serverName", name)
// Create the workspace watcher
workspaceWatcher := watcher.NewWorkspaceWatcher(lspClient)
// Store the cancel function to be called during cleanup
app.cancelFuncsMutex.Lock()
app.watcherCancelFuncs = append(app.watcherCancelFuncs, cancelFunc)
app.cancelFuncsMutex.Unlock()
// Add the watcher to a WaitGroup to track active goroutines
app.watcherWG.Add(1)
// Add to map with mutex protection before starting goroutine
app.clientsMutex.Lock()
app.LSPClients[name] = lspClient
app.clientsMutex.Unlock()
go app.runWorkspaceWatcher(watchCtx, name, workspaceWatcher)
}
// runWorkspaceWatcher executes the workspace watcher for an LSP client
func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceWatcher *watcher.WorkspaceWatcher) {
defer app.watcherWG.Done()
defer logging.RecoverPanic("LSP-"+name, func() {
// Try to restart the client
app.restartLSPClient(ctx, name)
})
workspaceWatcher.WatchWorkspace(ctx, config.WorkingDirectory())
logging.Info("Workspace watcher stopped", "client", name)
}
// restartLSPClient attempts to restart a crashed or failed LSP client
func (app *App) restartLSPClient(ctx context.Context, name string) {
// Get the original configuration
cfg := config.Get()
clientConfig, exists := cfg.LSP[name]
if !exists {
logging.Error("Cannot restart client, configuration not found", "client", name)
return
}
// Clean up the old client if it exists
app.clientsMutex.Lock()
oldClient, exists := app.LSPClients[name]
if exists {
delete(app.LSPClients, name) // Remove from map before potentially slow shutdown
}
app.clientsMutex.Unlock()
if exists && oldClient != nil {
// Try to shut it down gracefully, but don't block on errors
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
_ = oldClient.Shutdown(shutdownCtx)
cancel()
}
// Create a new client using the shared function
app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
logging.Info("Successfully restarted LSP client", "client", name)
}

813
internal/config/config.go Normal file

@@ -0,0 +1,813 @@
// Package config manages application configuration from various sources.
package config
import (
"encoding/json"
"fmt"
"log/slog"
"os"
"path/filepath"
"strings"
"github.com/opencode-ai/opencode/internal/llm/models"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/spf13/viper"
)
// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string
// Supported MCP types
const (
MCPStdio MCPType = "stdio"
MCPSse MCPType = "sse"
)
// MCPServer defines the configuration for a Model Context Protocol server.
type MCPServer struct {
Command string `json:"command"`
Env []string `json:"env"`
Args []string `json:"args"`
Type MCPType `json:"type"`
URL string `json:"url"`
Headers map[string]string `json:"headers"`
}
type AgentName string
const (
AgentCoder AgentName = "coder"
AgentSummarizer AgentName = "summarizer"
AgentTask AgentName = "task"
AgentTitle AgentName = "title"
)
// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
Model models.ModelID `json:"model"`
MaxTokens int64 `json:"maxTokens"`
ReasoningEffort string `json:"reasoningEffort"` // For OpenAI models: low, medium, high
}
// Provider defines configuration for an LLM provider.
type Provider struct {
APIKey string `json:"apiKey"`
Disabled bool `json:"disabled"`
}
// Data defines storage configuration.
type Data struct {
Directory string `json:"directory"`
}
// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
Disabled bool `json:"enabled"`
Command string `json:"command"`
Args []string `json:"args"`
Options any `json:"options"`
}
// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
Theme string `json:"theme,omitempty"`
}
// Config is the main configuration structure for the application.
type Config struct {
Data Data `json:"data"`
WorkingDir string `json:"wd,omitempty"`
MCPServers map[string]MCPServer `json:"mcpServers,omitempty"`
Providers map[models.ModelProvider]Provider `json:"providers,omitempty"`
LSP map[string]LSPConfig `json:"lsp,omitempty"`
Agents map[AgentName]Agent `json:"agents"`
Debug bool `json:"debug,omitempty"`
DebugLSP bool `json:"debugLSP,omitempty"`
ContextPaths []string `json:"contextPaths,omitempty"`
TUI TUIConfig `json:"tui"`
AutoCompact bool `json:"autoCompact,omitempty"`
}
// Application constants
const (
defaultDataDirectory = ".opencode"
defaultLogLevel = "info"
appName = "opencode"
MaxTokensFallbackDefault = 4096
)
var defaultContextPaths = []string{
".github/copilot-instructions.md",
".cursorrules",
".cursor/rules/",
"CLAUDE.md",
"CLAUDE.local.md",
"opencode.md",
"opencode.local.md",
"OpenCode.md",
"OpenCode.local.md",
"OPENCODE.md",
"OPENCODE.local.md",
}
// Global configuration instance
var cfg *Config
// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
func Load(workingDir string, debug bool) (*Config, error) {
if cfg != nil {
return cfg, nil
}
cfg = &Config{
WorkingDir: workingDir,
MCPServers: make(map[string]MCPServer),
Providers: make(map[models.ModelProvider]Provider),
LSP: make(map[string]LSPConfig),
}
configureViper()
setDefaults(debug)
// Read global config
if err := readConfig(viper.ReadInConfig()); err != nil {
return cfg, err
}
// Load and merge local config
mergeLocalConfig(workingDir)
setProviderDefaults()
// Apply configuration to the struct
if err := viper.Unmarshal(cfg); err != nil {
return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
}
applyDefaultValues()
defaultLevel := slog.LevelInfo
if cfg.Debug {
defaultLevel = slog.LevelDebug
}
if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
loggingFile := fmt.Sprintf("%s/%s", cfg.Data.Directory, "debug.log")
// if file does not exist create it
if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
return cfg, fmt.Errorf("failed to create directory: %w", err)
}
if _, err := os.Create(loggingFile); err != nil {
return cfg, fmt.Errorf("failed to create log file: %w", err)
}
}
sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
if err != nil {
return cfg, fmt.Errorf("failed to open log file: %w", err)
}
// Configure logger
logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
Level: defaultLevel,
}))
slog.SetDefault(logger)
} else {
// Configure logger
logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
Level: defaultLevel,
}))
slog.SetDefault(logger)
}
// Validate configuration
if err := Validate(); err != nil {
return cfg, fmt.Errorf("config validation failed: %w", err)
}
if cfg.Agents == nil {
cfg.Agents = make(map[AgentName]Agent)
}
// Override the max tokens for title agent
cfg.Agents[AgentTitle] = Agent{
Model: cfg.Agents[AgentTitle].Model,
MaxTokens: 80,
}
return cfg, nil
}
// configureViper sets up viper's configuration paths and environment variables.
func configureViper() {
viper.SetConfigName(fmt.Sprintf(".%s", appName))
viper.SetConfigType("json")
viper.AddConfigPath("$HOME")
viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
viper.SetEnvPrefix(strings.ToUpper(appName))
viper.AutomaticEnv()
}
// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
viper.SetDefault("data.directory", defaultDataDirectory)
viper.SetDefault("contextPaths", defaultContextPaths)
viper.SetDefault("tui.theme", "opencode")
viper.SetDefault("autoCompact", true)
if debug {
viper.SetDefault("debug", true)
viper.Set("log.level", "debug")
} else {
viper.SetDefault("debug", false)
viper.SetDefault("log.level", defaultLogLevel)
}
}
// setProviderDefaults configures LLM provider defaults based on providers detected
// from environment variables and the configuration file.
func setProviderDefaults() {
// Set all API keys we can find in the environment
if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
viper.SetDefault("providers.anthropic.apiKey", apiKey)
}
if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
viper.SetDefault("providers.openai.apiKey", apiKey)
}
if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
viper.SetDefault("providers.gemini.apiKey", apiKey)
}
if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
viper.SetDefault("providers.groq.apiKey", apiKey)
}
if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
viper.SetDefault("providers.openrouter.apiKey", apiKey)
}
if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
viper.SetDefault("providers.xai.apiKey", apiKey)
}
if apiKey := os.Getenv("AZURE_OPENAI_ENDPOINT"); apiKey != "" {
// api-key may be empty when using Entra ID credentials; that's okay
viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
}
// Use this order to set the default models
// 1. Anthropic
// 2. OpenAI
// 3. Google Gemini
// 4. Groq
// 5. OpenRouter
// 6. AWS Bedrock
// 7. Azure
// Anthropic configuration
if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
viper.SetDefault("agents.summarizer.model", models.Claude37Sonnet)
viper.SetDefault("agents.task.model", models.Claude37Sonnet)
viper.SetDefault("agents.title.model", models.Claude37Sonnet)
return
}
// OpenAI configuration
if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
viper.SetDefault("agents.coder.model", models.GPT41)
viper.SetDefault("agents.summarizer.model", models.GPT41)
viper.SetDefault("agents.task.model", models.GPT41Mini)
viper.SetDefault("agents.title.model", models.GPT41Mini)
return
}
// Google Gemini configuration
if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
viper.SetDefault("agents.coder.model", models.Gemini25)
viper.SetDefault("agents.summarizer.model", models.Gemini25)
viper.SetDefault("agents.task.model", models.Gemini25Flash)
viper.SetDefault("agents.title.model", models.Gemini25Flash)
return
}
// Groq configuration
if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
viper.SetDefault("agents.coder.model", models.QWENQwq)
viper.SetDefault("agents.summarizer.model", models.QWENQwq)
viper.SetDefault("agents.task.model", models.QWENQwq)
viper.SetDefault("agents.title.model", models.QWENQwq)
return
}
// OpenRouter configuration
if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
viper.SetDefault("agents.summarizer.model", models.OpenRouterClaude37Sonnet)
viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
return
}
// XAI configuration
if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
viper.SetDefault("agents.coder.model", models.XAIGrok3Beta)
viper.SetDefault("agents.summarizer.model", models.XAIGrok3Beta)
viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
return
}
// AWS Bedrock configuration
if hasAWSCredentials() {
viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
viper.SetDefault("agents.summarizer.model", models.BedrockClaude37Sonnet)
viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
return
}
// Azure OpenAI configuration
if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
viper.SetDefault("agents.coder.model", models.AzureGPT41)
viper.SetDefault("agents.summarizer.model", models.AzureGPT41)
viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
return
}
}
// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
// Check for explicit AWS credentials
if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
return true
}
// Check for AWS profile
if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
return true
}
// Check for AWS region
if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
return true
}
// Check if running on EC2 with instance profile
if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
return true
}
return false
}
// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
if err == nil {
return nil
}
// It's okay if the config file doesn't exist
if _, ok := err.(viper.ConfigFileNotFoundError); ok {
return nil
}
return fmt.Errorf("failed to read config: %w", err)
}
// mergeLocalConfig loads and merges configuration from the local directory.
func mergeLocalConfig(workingDir string) {
local := viper.New()
local.SetConfigName(fmt.Sprintf(".%s", appName))
local.SetConfigType("json")
local.AddConfigPath(workingDir)
// Merge local config if it exists
if err := local.ReadInConfig(); err == nil {
viper.MergeConfigMap(local.AllSettings())
}
}
// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
// Set default MCP type if not specified
for k, v := range cfg.MCPServers {
if v.Type == "" {
v.Type = MCPStdio
cfg.MCPServers[k] = v
}
}
}
// validateAgent validates a single agent's configuration.
// It validates model IDs and providers, ensuring they are supported.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
// Check if model exists
model, modelExists := models.SupportedModels[agent.Model]
if !modelExists {
logging.Warn("unsupported model configured, reverting to default",
"agent", name,
"configured_model", agent.Model)
// Set default model based on available providers
if setDefaultModelForAgent(name) {
logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
} else {
return fmt.Errorf("no valid provider available for agent %s", name)
}
return nil
}
// Check if provider for the model is configured
provider := model.Provider
providerCfg, providerExists := cfg.Providers[provider]
if !providerExists {
// Provider not configured, check if we have environment variables
apiKey := getProviderAPIKey(provider)
if apiKey == "" {
logging.Warn("provider not configured for model, reverting to default",
"agent", name,
"model", agent.Model,
"provider", provider)
// Set default model based on available providers
if setDefaultModelForAgent(name) {
logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
} else {
return fmt.Errorf("no valid provider available for agent %s", name)
}
} else {
// Add provider with API key from environment
cfg.Providers[provider] = Provider{
APIKey: apiKey,
}
logging.Info("added provider from environment", "provider", provider)
}
} else if providerCfg.Disabled || providerCfg.APIKey == "" {
// Provider is disabled or has no API key
logging.Warn("provider is disabled or has no API key, reverting to default",
"agent", name,
"model", agent.Model,
"provider", provider)
// Set default model based on available providers
if setDefaultModelForAgent(name) {
logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
} else {
return fmt.Errorf("no valid provider available for agent %s", name)
}
}
// Validate max tokens
if agent.MaxTokens <= 0 {
logging.Warn("invalid max tokens, setting to default",
"agent", name,
"model", agent.Model,
"max_tokens", agent.MaxTokens)
// Update the agent with default max tokens
updatedAgent := cfg.Agents[name]
if model.DefaultMaxTokens > 0 {
updatedAgent.MaxTokens = model.DefaultMaxTokens
} else {
updatedAgent.MaxTokens = MaxTokensFallbackDefault
}
cfg.Agents[name] = updatedAgent
} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
// Ensure max tokens doesn't exceed half the context window (reasonable limit)
logging.Warn("max tokens exceeds half the context window, adjusting",
"agent", name,
"model", agent.Model,
"max_tokens", agent.MaxTokens,
"context_window", model.ContextWindow)
// Update the agent with adjusted max tokens
updatedAgent := cfg.Agents[name]
updatedAgent.MaxTokens = model.ContextWindow / 2
cfg.Agents[name] = updatedAgent
}
// Validate reasoning effort for models that support reasoning
if model.CanReason && provider == models.ProviderOpenAI {
if agent.ReasoningEffort == "" {
// Set default reasoning effort for models that support it
logging.Info("setting default reasoning effort for model that supports reasoning",
"agent", name,
"model", agent.Model)
// Update the agent with default reasoning effort
updatedAgent := cfg.Agents[name]
updatedAgent.ReasoningEffort = "medium"
cfg.Agents[name] = updatedAgent
} else {
// Check if reasoning effort is valid (low, medium, high)
effort := strings.ToLower(agent.ReasoningEffort)
if effort != "low" && effort != "medium" && effort != "high" {
logging.Warn("invalid reasoning effort, setting to medium",
"agent", name,
"model", agent.Model,
"reasoning_effort", agent.ReasoningEffort)
// Update the agent with valid reasoning effort
updatedAgent := cfg.Agents[name]
updatedAgent.ReasoningEffort = "medium"
cfg.Agents[name] = updatedAgent
}
}
} else if !model.CanReason && agent.ReasoningEffort != "" {
// Model doesn't support reasoning but reasoning effort is set
logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
"agent", name,
"model", agent.Model,
"reasoning_effort", agent.ReasoningEffort)
// Update the agent to remove reasoning effort
updatedAgent := cfg.Agents[name]
updatedAgent.ReasoningEffort = ""
cfg.Agents[name] = updatedAgent
}
return nil
}
// Validate checks if the configuration is valid and applies defaults where needed.
func Validate() error {
if cfg == nil {
return fmt.Errorf("config not loaded")
}
// Validate agent models
for name, agent := range cfg.Agents {
if err := validateAgent(cfg, name, agent); err != nil {
return err
}
}
// Validate providers
for provider, providerCfg := range cfg.Providers {
if providerCfg.APIKey == "" && !providerCfg.Disabled {
logging.Warn("provider has no API key, marking as disabled", "provider", provider)
providerCfg.Disabled = true
cfg.Providers[provider] = providerCfg
}
}
// Validate LSP configurations
for language, lspConfig := range cfg.LSP {
if lspConfig.Command == "" && !lspConfig.Disabled {
logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
lspConfig.Disabled = true
cfg.LSP[language] = lspConfig
}
}
return nil
}
// getProviderAPIKey gets the API key for a provider from environment variables
func getProviderAPIKey(provider models.ModelProvider) string {
switch provider {
case models.ProviderAnthropic:
return os.Getenv("ANTHROPIC_API_KEY")
case models.ProviderOpenAI:
return os.Getenv("OPENAI_API_KEY")
case models.ProviderGemini:
return os.Getenv("GEMINI_API_KEY")
case models.ProviderGROQ:
return os.Getenv("GROQ_API_KEY")
case models.ProviderAzure:
return os.Getenv("AZURE_OPENAI_API_KEY")
case models.ProviderOpenRouter:
return os.Getenv("OPENROUTER_API_KEY")
case models.ProviderBedrock:
if hasAWSCredentials() {
return "aws-credentials-available"
}
}
return ""
}
// setDefaultModelForAgent sets a default model for an agent based on available providers
func setDefaultModelForAgent(agent AgentName) bool {
// Check providers in order of preference
if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
maxTokens := int64(5000)
if agent == AgentTitle {
maxTokens = 80
}
cfg.Agents[agent] = Agent{
Model: models.Claude37Sonnet,
MaxTokens: maxTokens,
}
return true
}
if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
var model models.ModelID
maxTokens := int64(5000)
reasoningEffort := ""
switch agent {
case AgentTitle:
model = models.GPT41Mini
maxTokens = 80
case AgentTask:
model = models.GPT41Mini
default:
model = models.GPT41
}
// Check if model supports reasoning
if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
reasoningEffort = "medium"
}
cfg.Agents[agent] = Agent{
Model: model,
MaxTokens: maxTokens,
ReasoningEffort: reasoningEffort,
}
return true
}
if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
var model models.ModelID
maxTokens := int64(5000)
reasoningEffort := ""
switch agent {
case AgentTitle:
model = models.OpenRouterClaude35Haiku
maxTokens = 80
case AgentTask:
model = models.OpenRouterClaude37Sonnet
default:
model = models.OpenRouterClaude37Sonnet
}
// Check if model supports reasoning
if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
reasoningEffort = "medium"
}
cfg.Agents[agent] = Agent{
Model: model,
MaxTokens: maxTokens,
ReasoningEffort: reasoningEffort,
}
return true
}
if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
var model models.ModelID
maxTokens := int64(5000)
if agent == AgentTitle {
model = models.Gemini25Flash
maxTokens = 80
} else {
model = models.Gemini25
}
cfg.Agents[agent] = Agent{
Model: model,
MaxTokens: maxTokens,
}
return true
}
if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
maxTokens := int64(5000)
if agent == AgentTitle {
maxTokens = 80
}
cfg.Agents[agent] = Agent{
Model: models.QWENQwq,
MaxTokens: maxTokens,
}
return true
}
if hasAWSCredentials() {
maxTokens := int64(5000)
if agent == AgentTitle {
maxTokens = 80
}
cfg.Agents[agent] = Agent{
Model: models.BedrockClaude37Sonnet,
MaxTokens: maxTokens,
ReasoningEffort: "medium", // Claude models support reasoning
}
return true
}
return false
}
// Get returns the current configuration.
// It's safe to call this function multiple times.
func Get() *Config {
return cfg
}
// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
if cfg == nil {
panic("config not loaded")
}
return cfg.WorkingDir
}
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
if cfg == nil {
panic("config not loaded")
}
existingAgentCfg := cfg.Agents[agentName]
model, ok := models.SupportedModels[modelID]
if !ok {
return fmt.Errorf("model %s not supported", modelID)
}
maxTokens := existingAgentCfg.MaxTokens
if model.DefaultMaxTokens > 0 {
maxTokens = model.DefaultMaxTokens
}
newAgentCfg := Agent{
Model: modelID,
MaxTokens: maxTokens,
ReasoningEffort: existingAgentCfg.ReasoningEffort,
}
cfg.Agents[agentName] = newAgentCfg
if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
// revert config update on failure
cfg.Agents[agentName] = existingAgentCfg
return fmt.Errorf("failed to update agent model: %w", err)
}
return nil
}
// UpdateTheme updates the theme in the configuration and writes it to the config file.
func UpdateTheme(themeName string) error {
if cfg == nil {
return fmt.Errorf("config not loaded")
}
// Update the in-memory config
cfg.TUI.Theme = themeName
// Get the config file path
configFile := viper.ConfigFileUsed()
var configData []byte
if configFile == "" {
homeDir, err := os.UserHomeDir()
if err != nil {
return fmt.Errorf("failed to get home directory: %w", err)
}
configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
logging.Info("config file not found, creating new one", "path", configFile)
configData = []byte(`{}`)
} else {
// Read the existing config file
data, err := os.ReadFile(configFile)
if err != nil {
return fmt.Errorf("failed to read config file: %w", err)
}
configData = data
}
// Parse the JSON
var configMap map[string]interface{}
if err := json.Unmarshal(configData, &configMap); err != nil {
return fmt.Errorf("failed to parse config file: %w", err)
}
// Update just the theme value
tuiConfig, ok := configMap["tui"].(map[string]interface{})
if !ok {
// TUI config doesn't exist yet, create it
configMap["tui"] = map[string]interface{}{"theme": themeName}
} else {
// Update existing TUI config
tuiConfig["theme"] = themeName
configMap["tui"] = tuiConfig
}
// Write the updated config back to file
updatedData, err := json.MarshalIndent(configMap, "", " ")
if err != nil {
return fmt.Errorf("failed to marshal config: %w", err)
}
if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
return fmt.Errorf("failed to write config file: %w", err)
}
return nil
}
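
To make the shape of the Config struct concrete, here is a hypothetical $HOME/.opencode.json (the file configureViper looks for). Every value is illustrative; the model ID strings come from internal/llm/models, which is not part of this excerpt.

{
  "data": { "directory": ".opencode" },
  "providers": {
    "anthropic": { "apiKey": "sk-ant-example" }
  },
  "agents": {
    "coder": { "model": "claude-3.7-sonnet", "maxTokens": 5000 },
    "title": { "model": "claude-3.7-sonnet", "maxTokens": 80 }
  },
  "lsp": {
    "go": { "command": "gopls" }
  },
  "mcpServers": {
    "example": { "type": "stdio", "command": "example-mcp", "args": [] }
  },
  "tui": { "theme": "opencode" },
  "autoCompact": true
}

Note that Load overrides the title agent's maxTokens to 80 regardless of what the file specifies, and that a missing file is fine: readConfig treats ConfigFileNotFoundError as success and defaults apply.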

61
internal/config/init.go Normal file

@@ -0,0 +1,61 @@
package config
import (
"fmt"
"os"
"path/filepath"
)
const (
// InitFlagFilename is the name of the file that indicates whether the project has been initialized
InitFlagFilename = "init"
)
// ProjectInitFlag represents the initialization status for a project directory
type ProjectInitFlag struct {
Initialized bool `json:"initialized"`
}
// ShouldShowInitDialog checks if the initialization dialog should be shown for the current directory
func ShouldShowInitDialog() (bool, error) {
if cfg == nil {
return false, fmt.Errorf("config not loaded")
}
// Create the flag file path
flagFilePath := filepath.Join(cfg.Data.Directory, InitFlagFilename)
// Check if the flag file exists
_, err := os.Stat(flagFilePath)
if err == nil {
// File exists, don't show the dialog
return false, nil
}
// If the error is not "file not found", return the error
if !os.IsNotExist(err) {
return false, fmt.Errorf("failed to check init flag file: %w", err)
}
// File doesn't exist, show the dialog
return true, nil
}
// MarkProjectInitialized marks the current project as initialized
func MarkProjectInitialized() error {
if cfg == nil {
return fmt.Errorf("config not loaded")
}
// Create the flag file path
flagFilePath := filepath.Join(cfg.Data.Directory, InitFlagFilename)
// Create an empty file to mark the project as initialized
file, err := os.Create(flagFilePath)
if err != nil {
return fmt.Errorf("failed to create init flag file: %w", err)
}
defer file.Close()
return nil
}

68
internal/db/connect.go Normal file

@@ -0,0 +1,68 @@
package db
import (
"database/sql"
"fmt"
"os"
"path/filepath"
_ "github.com/ncruces/go-sqlite3/driver"
_ "github.com/ncruces/go-sqlite3/embed"
"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/pressly/goose/v3"
)
func Connect() (*sql.DB, error) {
dataDir := config.Get().Data.Directory
if dataDir == "" {
return nil, fmt.Errorf("data.dir is not set")
}
if err := os.MkdirAll(dataDir, 0o700); err != nil {
return nil, fmt.Errorf("failed to create data directory: %w", err)
}
dbPath := filepath.Join(dataDir, "opencode.db")
// Open the SQLite database
db, err := sql.Open("sqlite3", dbPath)
if err != nil {
return nil, fmt.Errorf("failed to open database: %w", err)
}
// Verify connection
if err = db.Ping(); err != nil {
db.Close()
return nil, fmt.Errorf("failed to connect to database: %w", err)
}
// Set pragmas for better performance
pragmas := []string{
"PRAGMA foreign_keys = ON;",
"PRAGMA journal_mode = WAL;",
"PRAGMA page_size = 4096;",
"PRAGMA cache_size = -8000;",
"PRAGMA synchronous = NORMAL;",
}
for _, pragma := range pragmas {
if _, err = db.Exec(pragma); err != nil {
logging.Error("Failed to set pragma", pragma, err)
} else {
logging.Debug("Set pragma", "pragma", pragma)
}
}
goose.SetBaseFS(FS)
if err := goose.SetDialect("sqlite3"); err != nil {
logging.Error("Failed to set dialect", "error", err)
return nil, fmt.Errorf("failed to set dialect: %w", err)
}
if err := goose.Up(db, "migrations"); err != nil {
logging.Error("Failed to apply migrations", "error", err)
return nil, fmt.Errorf("failed to apply migrations: %w", err)
}
return db, nil
}

288
internal/db/db.go Normal file

@@ -0,0 +1,288 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package db
import (
"context"
"database/sql"
"fmt"
)
type DBTX interface {
ExecContext(context.Context, string, ...interface{}) (sql.Result, error)
PrepareContext(context.Context, string) (*sql.Stmt, error)
QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error)
QueryRowContext(context.Context, string, ...interface{}) *sql.Row
}
func New(db DBTX) *Queries {
return &Queries{db: db}
}
func Prepare(ctx context.Context, db DBTX) (*Queries, error) {
q := Queries{db: db}
var err error
if q.createFileStmt, err = db.PrepareContext(ctx, createFile); err != nil {
return nil, fmt.Errorf("error preparing query CreateFile: %w", err)
}
if q.createMessageStmt, err = db.PrepareContext(ctx, createMessage); err != nil {
return nil, fmt.Errorf("error preparing query CreateMessage: %w", err)
}
if q.createSessionStmt, err = db.PrepareContext(ctx, createSession); err != nil {
return nil, fmt.Errorf("error preparing query CreateSession: %w", err)
}
if q.deleteFileStmt, err = db.PrepareContext(ctx, deleteFile); err != nil {
return nil, fmt.Errorf("error preparing query DeleteFile: %w", err)
}
if q.deleteMessageStmt, err = db.PrepareContext(ctx, deleteMessage); err != nil {
return nil, fmt.Errorf("error preparing query DeleteMessage: %w", err)
}
if q.deleteSessionStmt, err = db.PrepareContext(ctx, deleteSession); err != nil {
return nil, fmt.Errorf("error preparing query DeleteSession: %w", err)
}
if q.deleteSessionFilesStmt, err = db.PrepareContext(ctx, deleteSessionFiles); err != nil {
return nil, fmt.Errorf("error preparing query DeleteSessionFiles: %w", err)
}
if q.deleteSessionMessagesStmt, err = db.PrepareContext(ctx, deleteSessionMessages); err != nil {
return nil, fmt.Errorf("error preparing query DeleteSessionMessages: %w", err)
}
if q.getFileStmt, err = db.PrepareContext(ctx, getFile); err != nil {
return nil, fmt.Errorf("error preparing query GetFile: %w", err)
}
if q.getFileByPathAndSessionStmt, err = db.PrepareContext(ctx, getFileByPathAndSession); err != nil {
return nil, fmt.Errorf("error preparing query GetFileByPathAndSession: %w", err)
}
if q.getMessageStmt, err = db.PrepareContext(ctx, getMessage); err != nil {
return nil, fmt.Errorf("error preparing query GetMessage: %w", err)
}
if q.getSessionByIDStmt, err = db.PrepareContext(ctx, getSessionByID); err != nil {
return nil, fmt.Errorf("error preparing query GetSessionByID: %w", err)
}
if q.listFilesByPathStmt, err = db.PrepareContext(ctx, listFilesByPath); err != nil {
return nil, fmt.Errorf("error preparing query ListFilesByPath: %w", err)
}
if q.listFilesBySessionStmt, err = db.PrepareContext(ctx, listFilesBySession); err != nil {
return nil, fmt.Errorf("error preparing query ListFilesBySession: %w", err)
}
if q.listLatestSessionFilesStmt, err = db.PrepareContext(ctx, listLatestSessionFiles); err != nil {
return nil, fmt.Errorf("error preparing query ListLatestSessionFiles: %w", err)
}
if q.listMessagesBySessionStmt, err = db.PrepareContext(ctx, listMessagesBySession); err != nil {
return nil, fmt.Errorf("error preparing query ListMessagesBySession: %w", err)
}
if q.listNewFilesStmt, err = db.PrepareContext(ctx, listNewFiles); err != nil {
return nil, fmt.Errorf("error preparing query ListNewFiles: %w", err)
}
if q.listSessionsStmt, err = db.PrepareContext(ctx, listSessions); err != nil {
return nil, fmt.Errorf("error preparing query ListSessions: %w", err)
}
if q.updateFileStmt, err = db.PrepareContext(ctx, updateFile); err != nil {
return nil, fmt.Errorf("error preparing query UpdateFile: %w", err)
}
if q.updateMessageStmt, err = db.PrepareContext(ctx, updateMessage); err != nil {
return nil, fmt.Errorf("error preparing query UpdateMessage: %w", err)
}
if q.updateSessionStmt, err = db.PrepareContext(ctx, updateSession); err != nil {
return nil, fmt.Errorf("error preparing query UpdateSession: %w", err)
}
return &q, nil
}
func (q *Queries) Close() error {
var err error
if q.createFileStmt != nil {
if cerr := q.createFileStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing createFileStmt: %w", cerr)
}
}
if q.createMessageStmt != nil {
if cerr := q.createMessageStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing createMessageStmt: %w", cerr)
}
}
if q.createSessionStmt != nil {
if cerr := q.createSessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing createSessionStmt: %w", cerr)
}
}
if q.deleteFileStmt != nil {
if cerr := q.deleteFileStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteFileStmt: %w", cerr)
}
}
if q.deleteMessageStmt != nil {
if cerr := q.deleteMessageStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteMessageStmt: %w", cerr)
}
}
if q.deleteSessionStmt != nil {
if cerr := q.deleteSessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteSessionStmt: %w", cerr)
}
}
if q.deleteSessionFilesStmt != nil {
if cerr := q.deleteSessionFilesStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteSessionFilesStmt: %w", cerr)
}
}
if q.deleteSessionMessagesStmt != nil {
if cerr := q.deleteSessionMessagesStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing deleteSessionMessagesStmt: %w", cerr)
}
}
if q.getFileStmt != nil {
if cerr := q.getFileStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing getFileStmt: %w", cerr)
}
}
if q.getFileByPathAndSessionStmt != nil {
if cerr := q.getFileByPathAndSessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing getFileByPathAndSessionStmt: %w", cerr)
}
}
if q.getMessageStmt != nil {
if cerr := q.getMessageStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing getMessageStmt: %w", cerr)
}
}
if q.getSessionByIDStmt != nil {
if cerr := q.getSessionByIDStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing getSessionByIDStmt: %w", cerr)
}
}
if q.listFilesByPathStmt != nil {
if cerr := q.listFilesByPathStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listFilesByPathStmt: %w", cerr)
}
}
if q.listFilesBySessionStmt != nil {
if cerr := q.listFilesBySessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listFilesBySessionStmt: %w", cerr)
}
}
if q.listLatestSessionFilesStmt != nil {
if cerr := q.listLatestSessionFilesStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listLatestSessionFilesStmt: %w", cerr)
}
}
if q.listMessagesBySessionStmt != nil {
if cerr := q.listMessagesBySessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listMessagesBySessionStmt: %w", cerr)
}
}
if q.listNewFilesStmt != nil {
if cerr := q.listNewFilesStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listNewFilesStmt: %w", cerr)
}
}
if q.listSessionsStmt != nil {
if cerr := q.listSessionsStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing listSessionsStmt: %w", cerr)
}
}
if q.updateFileStmt != nil {
if cerr := q.updateFileStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing updateFileStmt: %w", cerr)
}
}
if q.updateMessageStmt != nil {
if cerr := q.updateMessageStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing updateMessageStmt: %w", cerr)
}
}
if q.updateSessionStmt != nil {
if cerr := q.updateSessionStmt.Close(); cerr != nil {
err = fmt.Errorf("error closing updateSessionStmt: %w", cerr)
}
}
return err
}
func (q *Queries) exec(ctx context.Context, stmt *sql.Stmt, query string, args ...interface{}) (sql.Result, error) {
switch {
case stmt != nil && q.tx != nil:
return q.tx.StmtContext(ctx, stmt).ExecContext(ctx, args...)
case stmt != nil:
return stmt.ExecContext(ctx, args...)
default:
return q.db.ExecContext(ctx, query, args...)
}
}
func (q *Queries) query(ctx context.Context, stmt *sql.Stmt, query string, args ...interface{}) (*sql.Rows, error) {
switch {
case stmt != nil && q.tx != nil:
return q.tx.StmtContext(ctx, stmt).QueryContext(ctx, args...)
case stmt != nil:
return stmt.QueryContext(ctx, args...)
default:
return q.db.QueryContext(ctx, query, args...)
}
}
func (q *Queries) queryRow(ctx context.Context, stmt *sql.Stmt, query string, args ...interface{}) *sql.Row {
switch {
case stmt != nil && q.tx != nil:
return q.tx.StmtContext(ctx, stmt).QueryRowContext(ctx, args...)
case stmt != nil:
return stmt.QueryRowContext(ctx, args...)
default:
return q.db.QueryRowContext(ctx, query, args...)
}
}
type Queries struct {
db DBTX
tx *sql.Tx
createFileStmt *sql.Stmt
createMessageStmt *sql.Stmt
createSessionStmt *sql.Stmt
deleteFileStmt *sql.Stmt
deleteMessageStmt *sql.Stmt
deleteSessionStmt *sql.Stmt
deleteSessionFilesStmt *sql.Stmt
deleteSessionMessagesStmt *sql.Stmt
getFileStmt *sql.Stmt
getFileByPathAndSessionStmt *sql.Stmt
getMessageStmt *sql.Stmt
getSessionByIDStmt *sql.Stmt
listFilesByPathStmt *sql.Stmt
listFilesBySessionStmt *sql.Stmt
listLatestSessionFilesStmt *sql.Stmt
listMessagesBySessionStmt *sql.Stmt
listNewFilesStmt *sql.Stmt
listSessionsStmt *sql.Stmt
updateFileStmt *sql.Stmt
updateMessageStmt *sql.Stmt
updateSessionStmt *sql.Stmt
}
func (q *Queries) WithTx(tx *sql.Tx) *Queries {
return &Queries{
db: tx,
tx: tx,
createFileStmt: q.createFileStmt,
createMessageStmt: q.createMessageStmt,
createSessionStmt: q.createSessionStmt,
deleteFileStmt: q.deleteFileStmt,
deleteMessageStmt: q.deleteMessageStmt,
deleteSessionStmt: q.deleteSessionStmt,
deleteSessionFilesStmt: q.deleteSessionFilesStmt,
deleteSessionMessagesStmt: q.deleteSessionMessagesStmt,
getFileStmt: q.getFileStmt,
getFileByPathAndSessionStmt: q.getFileByPathAndSessionStmt,
getMessageStmt: q.getMessageStmt,
getSessionByIDStmt: q.getSessionByIDStmt,
listFilesByPathStmt: q.listFilesByPathStmt,
listFilesBySessionStmt: q.listFilesBySessionStmt,
listLatestSessionFilesStmt: q.listLatestSessionFilesStmt,
listMessagesBySessionStmt: q.listMessagesBySessionStmt,
listNewFilesStmt: q.listNewFilesStmt,
listSessionsStmt: q.listSessionsStmt,
updateFileStmt: q.updateFileStmt,
updateMessageStmt: q.updateMessageStmt,
updateSessionStmt: q.updateSessionStmt,
}
}
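
The WithTx helper is what lets the prepared statements above be reused inside a transaction. A minimal hypothetical sketch (not part of this change) using the generated query methods from files.sql.go below; the package name and the ID value are illustrative assumptions.

package example // hypothetical caller

import (
	"context"
	"database/sql"

	"github.com/opencode-ai/opencode/internal/db"
)

// resetSessionFile atomically replaces a session's file snapshots with a single fresh one
// by running both generated queries on the same transaction via WithTx.
func resetSessionFile(ctx context.Context, conn *sql.DB, q *db.Queries, sessionID, path, content string) error {
	tx, err := conn.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer tx.Rollback() // becomes a no-op once Commit succeeds

	qtx := q.WithTx(tx) // reuses q's prepared statements, bound to tx
	if err := qtx.DeleteSessionFiles(ctx, sessionID); err != nil {
		return err
	}
	if _, err := qtx.CreateFile(ctx, db.CreateFileParams{
		ID:        "file_" + sessionID, // illustrative; real callers presumably generate unique IDs
		SessionID: sessionID,
		Path:      path,
		Content:   content,
		Version:   "1",
	}); err != nil {
		return err
	}
	return tx.Commit()
}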

6
internal/db/embed.go Normal file

@@ -0,0 +1,6 @@
package db
import "embed"
//go:embed migrations/*.sql
var FS embed.FS

311
internal/db/files.sql.go Normal file

@@ -0,0 +1,311 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
// source: files.sql
package db
import (
"context"
)
const createFile = `-- name: CreateFile :one
INSERT INTO files (
id,
session_id,
path,
content,
version,
created_at,
updated_at
) VALUES (
?, ?, ?, ?, ?, strftime('%s', 'now'), strftime('%s', 'now')
)
RETURNING id, session_id, path, content, version, created_at, updated_at
`
type CreateFileParams struct {
ID string `json:"id"`
SessionID string `json:"session_id"`
Path string `json:"path"`
Content string `json:"content"`
Version string `json:"version"`
}
func (q *Queries) CreateFile(ctx context.Context, arg CreateFileParams) (File, error) {
row := q.queryRow(ctx, q.createFileStmt, createFile,
arg.ID,
arg.SessionID,
arg.Path,
arg.Content,
arg.Version,
)
var i File
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
)
return i, err
}
const deleteFile = `-- name: DeleteFile :exec
DELETE FROM files
WHERE id = ?
`
func (q *Queries) DeleteFile(ctx context.Context, id string) error {
_, err := q.exec(ctx, q.deleteFileStmt, deleteFile, id)
return err
}
const deleteSessionFiles = `-- name: DeleteSessionFiles :exec
DELETE FROM files
WHERE session_id = ?
`
func (q *Queries) DeleteSessionFiles(ctx context.Context, sessionID string) error {
_, err := q.exec(ctx, q.deleteSessionFilesStmt, deleteSessionFiles, sessionID)
return err
}
const getFile = `-- name: GetFile :one
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE id = ? LIMIT 1
`
func (q *Queries) GetFile(ctx context.Context, id string) (File, error) {
row := q.queryRow(ctx, q.getFileStmt, getFile, id)
var i File
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
)
return i, err
}
const getFileByPathAndSession = `-- name: GetFileByPathAndSession :one
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE path = ? AND session_id = ?
ORDER BY created_at DESC
LIMIT 1
`
type GetFileByPathAndSessionParams struct {
Path string `json:"path"`
SessionID string `json:"session_id"`
}
func (q *Queries) GetFileByPathAndSession(ctx context.Context, arg GetFileByPathAndSessionParams) (File, error) {
row := q.queryRow(ctx, q.getFileByPathAndSessionStmt, getFileByPathAndSession, arg.Path, arg.SessionID)
var i File
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
)
return i, err
}
const listFilesByPath = `-- name: ListFilesByPath :many
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE path = ?
ORDER BY created_at DESC
`
func (q *Queries) ListFilesByPath(ctx context.Context, path string) ([]File, error) {
rows, err := q.query(ctx, q.listFilesByPathStmt, listFilesByPath, path)
if err != nil {
return nil, err
}
defer rows.Close()
items := []File{}
for rows.Next() {
var i File
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const listFilesBySession = `-- name: ListFilesBySession :many
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE session_id = ?
ORDER BY created_at ASC
`
func (q *Queries) ListFilesBySession(ctx context.Context, sessionID string) ([]File, error) {
rows, err := q.query(ctx, q.listFilesBySessionStmt, listFilesBySession, sessionID)
if err != nil {
return nil, err
}
defer rows.Close()
items := []File{}
for rows.Next() {
var i File
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const listLatestSessionFiles = `-- name: ListLatestSessionFiles :many
SELECT f.id, f.session_id, f.path, f.content, f.version, f.created_at, f.updated_at
FROM files f
INNER JOIN (
SELECT path, MAX(created_at) as max_created_at
FROM files
GROUP BY path
) latest ON f.path = latest.path AND f.created_at = latest.max_created_at
WHERE f.session_id = ?
ORDER BY f.path
`
func (q *Queries) ListLatestSessionFiles(ctx context.Context, sessionID string) ([]File, error) {
rows, err := q.query(ctx, q.listLatestSessionFilesStmt, listLatestSessionFiles, sessionID)
if err != nil {
return nil, err
}
defer rows.Close()
items := []File{}
for rows.Next() {
var i File
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const listNewFiles = `-- name: ListNewFiles :many
SELECT id, session_id, path, content, version, created_at, updated_at
FROM files
WHERE is_new = 1
ORDER BY created_at DESC
`
func (q *Queries) ListNewFiles(ctx context.Context) ([]File, error) {
rows, err := q.query(ctx, q.listNewFilesStmt, listNewFiles)
if err != nil {
return nil, err
}
defer rows.Close()
items := []File{}
for rows.Next() {
var i File
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const updateFile = `-- name: UpdateFile :one
UPDATE files
SET
content = ?,
version = ?,
updated_at = strftime('%s', 'now')
WHERE id = ?
RETURNING id, session_id, path, content, version, created_at, updated_at
`
type UpdateFileParams struct {
Content string `json:"content"`
Version string `json:"version"`
ID string `json:"id"`
}
func (q *Queries) UpdateFile(ctx context.Context, arg UpdateFileParams) (File, error) {
row := q.queryRow(ctx, q.updateFileStmt, updateFile, arg.Content, arg.Version, arg.ID)
var i File
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Path,
&i.Content,
&i.Version,
&i.CreatedAt,
&i.UpdatedAt,
)
return i, err
}
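Each write to files inserts a new row rather than overwriting, so a (path, session) pair accumulates versions over time. A usage sketch follows; it is not part of this diff, and the ID and version values are placeholders, not the project's real scheme.

package db // sketch only: illustrative use of the generated file queries

import (
	"context"
	"fmt"
	"time"
)

// snapshotFile is a usage sketch, not part of this diff. It records a new
// version row for a file and then reads back the newest row for that path
// in the session via GetFileByPathAndSession (ordered by created_at DESC).
func snapshotFile(ctx context.Context, q *Queries, sessionID, path, content string) (File, error) {
	_, err := q.CreateFile(ctx, CreateFileParams{
		ID:        fmt.Sprintf("file-%d", time.Now().UnixNano()), // hypothetical ID scheme
		SessionID: sessionID,
		Path:      path,
		Content:   content,
		Version:   "initial", // hypothetical version label
	})
	if err != nil {
		return File{}, err
	}
	return q.GetFileByPathAndSession(ctx, GetFileByPathAndSessionParams{
		Path:      path,
		SessionID: sessionID,
	})
}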

internal/db/messages.sql.go (new file, 157 lines)

@@ -0,0 +1,157 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
// source: messages.sql
package db
import (
"context"
"database/sql"
)
const createMessage = `-- name: CreateMessage :one
INSERT INTO messages (
id,
session_id,
role,
parts,
model,
created_at,
updated_at
) VALUES (
?, ?, ?, ?, ?, strftime('%s', 'now'), strftime('%s', 'now')
)
RETURNING id, session_id, role, parts, model, created_at, updated_at, finished_at
`
type CreateMessageParams struct {
ID string `json:"id"`
SessionID string `json:"session_id"`
Role string `json:"role"`
Parts string `json:"parts"`
Model sql.NullString `json:"model"`
}
func (q *Queries) CreateMessage(ctx context.Context, arg CreateMessageParams) (Message, error) {
row := q.queryRow(ctx, q.createMessageStmt, createMessage,
arg.ID,
arg.SessionID,
arg.Role,
arg.Parts,
arg.Model,
)
var i Message
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Role,
&i.Parts,
&i.Model,
&i.CreatedAt,
&i.UpdatedAt,
&i.FinishedAt,
)
return i, err
}
const deleteMessage = `-- name: DeleteMessage :exec
DELETE FROM messages
WHERE id = ?
`
func (q *Queries) DeleteMessage(ctx context.Context, id string) error {
_, err := q.exec(ctx, q.deleteMessageStmt, deleteMessage, id)
return err
}
const deleteSessionMessages = `-- name: DeleteSessionMessages :exec
DELETE FROM messages
WHERE session_id = ?
`
func (q *Queries) DeleteSessionMessages(ctx context.Context, sessionID string) error {
_, err := q.exec(ctx, q.deleteSessionMessagesStmt, deleteSessionMessages, sessionID)
return err
}
const getMessage = `-- name: GetMessage :one
SELECT id, session_id, role, parts, model, created_at, updated_at, finished_at
FROM messages
WHERE id = ? LIMIT 1
`
func (q *Queries) GetMessage(ctx context.Context, id string) (Message, error) {
row := q.queryRow(ctx, q.getMessageStmt, getMessage, id)
var i Message
err := row.Scan(
&i.ID,
&i.SessionID,
&i.Role,
&i.Parts,
&i.Model,
&i.CreatedAt,
&i.UpdatedAt,
&i.FinishedAt,
)
return i, err
}
const listMessagesBySession = `-- name: ListMessagesBySession :many
SELECT id, session_id, role, parts, model, created_at, updated_at, finished_at
FROM messages
WHERE session_id = ?
ORDER BY created_at ASC
`
func (q *Queries) ListMessagesBySession(ctx context.Context, sessionID string) ([]Message, error) {
rows, err := q.query(ctx, q.listMessagesBySessionStmt, listMessagesBySession, sessionID)
if err != nil {
return nil, err
}
defer rows.Close()
items := []Message{}
for rows.Next() {
var i Message
if err := rows.Scan(
&i.ID,
&i.SessionID,
&i.Role,
&i.Parts,
&i.Model,
&i.CreatedAt,
&i.UpdatedAt,
&i.FinishedAt,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const updateMessage = `-- name: UpdateMessage :exec
UPDATE messages
SET
parts = ?,
finished_at = ?,
updated_at = strftime('%s', 'now')
WHERE id = ?
`
type UpdateMessageParams struct {
Parts string `json:"parts"`
FinishedAt sql.NullInt64 `json:"finished_at"`
ID string `json:"id"`
}
func (q *Queries) UpdateMessage(ctx context.Context, arg UpdateMessageParams) error {
_, err := q.exec(ctx, q.updateMessageStmt, updateMessage, arg.Parts, arg.FinishedAt, arg.ID)
return err
}
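Messages are inserted first and marked finished later, which is why FinishedAt is nullable and UpdateMessage sets it separately. A usage sketch, not part of this diff; the role string and the JSON shape of parts are assumptions.

package db // sketch only: illustrative use of the generated message queries

import (
	"context"
	"database/sql"
	"time"
)

// writeAssistantMessage is a usage sketch, not part of this diff. Model is
// nullable, so an empty string is stored as NULL.
func writeAssistantMessage(ctx context.Context, q *Queries, id, sessionID, parts, model string) error {
	_, err := q.CreateMessage(ctx, CreateMessageParams{
		ID:        id,
		SessionID: sessionID,
		Role:      "assistant",
		Parts:     parts, // e.g. a JSON-encoded slice of message parts
		Model:     sql.NullString{String: model, Valid: model != ""},
	})
	return err
}

// finishMessage stamps the completion time once the message is done.
func finishMessage(ctx context.Context, q *Queries, id, parts string) error {
	return q.UpdateMessage(ctx, UpdateMessageParams{
		ID:         id,
		Parts:      parts,
		FinishedAt: sql.NullInt64{Int64: time.Now().Unix(), Valid: true},
	})
}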

Some files were not shown because too many files have changed in this diff.