Compare commits


1 Commit

Author SHA1 Message Date
opencode  48e12ade76  release: v0.4.13  2025-08-11 05:51:06 +00:00
3191 changed files with 63987 additions and 309004 deletions

.github/CODEOWNERS vendored (4 changes)
View File

@@ -1,4 +0,0 @@
# web + desktop packages
packages/app/ @adamdotdevin
packages/tauri/ @adamdotdevin
packages/desktop/ @adamdotdevin

View File

@@ -1,67 +0,0 @@
name: Bug report
description: Report an issue that should be fixed
labels: ["bug"]
body:
- type: textarea
id: description
attributes:
label: Description
description: Describe the bug you encountered
placeholder: What happened?
validations:
required: true
- type: input
id: plugins
attributes:
label: Plugins
description: What plugins are you using?
validations:
required: false
- type: input
id: opencode-version
attributes:
label: OpenCode version
description: What version of OpenCode are you using?
validations:
required: false
- type: textarea
id: reproduce
attributes:
label: Steps to reproduce
description: How can we reproduce this issue?
placeholder: |
1.
2.
3.
validations:
required: false
- type: textarea
id: screenshot-or-link
attributes:
label: Screenshot and/or share link
description: Run `/share` to get a share link, or attach a screenshot
placeholder: Paste link or drag and drop screenshot here
validations:
required: false
- type: input
id: os
attributes:
label: Operating System
description: What OS are you using?
placeholder: e.g., macOS 26.0.1, Ubuntu 22.04, Windows 11
validations:
required: false
- type: input
id: terminal
attributes:
label: Terminal
description: What terminal are you using?
placeholder: e.g., iTerm2, Ghostty, Alacritty, Windows Terminal
validations:
required: false

View File

@@ -1,5 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: 💬 Discord Community
url: https://discord.gg/opencode
about: For quick questions or real-time discussion. Note that issues are searchable and help others with the same question.

View File

@@ -1,20 +0,0 @@
name: 🚀 Feature Request
description: Suggest an idea, feature, or enhancement
labels: [discussion]
title: "[FEATURE]:"
body:
- type: checkboxes
id: verified
attributes:
label: Feature hasn't been suggested before.
options:
- label: I have verified this feature I'm about to request hasn't been suggested before.
required: true
- type: textarea
attributes:
label: Describe the enhancement you want to request
description: What do you want to change or add? What are the benefits of implementing this? Try to be detailed so we can understand your request better :)
validations:
required: true

View File

@@ -1,11 +0,0 @@
name: Question
description: Ask a question
labels: ["question"]
body:
- type: textarea
id: question
attributes:
label: Question
description: What's your question?
validations:
required: true

View File

@@ -1,22 +0,0 @@
name: "Setup Bun"
description: "Setup Bun with caching and install dependencies"
runs:
using: "composite"
steps:
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
bun-version-file: package.json
- name: Cache ~/.bun
id: cache-bun
uses: actions/cache@v4
with:
path: ~/.bun
key: ${{ runner.os }}-bun-${{ hashFiles('package.json') }}-${{ hashFiles('bun.lockb', 'bun.lock') }}
restore-keys: |
${{ runner.os }}-bun-${{ hashFiles('package.json') }}-
- name: Install dependencies
run: bun install
shell: bash

View File

@@ -1,71 +0,0 @@
#
# This file is intentionally in the wrong dir, will move and add later....
#
# name: publish-python-sdk
# on:
# release:
# types: [published]
# workflow_dispatch:
# jobs:
# publish:
# runs-on: ubuntu-latest
# permissions:
# contents: read
# steps:
# - name: Checkout repository
# uses: actions/checkout@v4
# - name: Setup Bun
# uses: oven-sh/setup-bun@v1
# with:
# bun-version: 1.2.21
# - name: Install dependencies (JS/Bun)
# run: bun install
# - name: Install uv
# shell: bash
# run: curl -LsSf https://astral.sh/uv/install.sh | sh
# - name: Generate Python SDK from OpenAPI (CLI)
# shell: bash
# run: |
# ~/.local/bin/uv run --project packages/sdk/python python packages/sdk/python/scripts/generate.py --source cli
# - name: Sync Python dependencies
# shell: bash
# run: |
# ~/.local/bin/uv sync --dev --project packages/sdk/python
# - name: Set version from release tag
# shell: bash
# run: |
# TAG="${GITHUB_REF_NAME:-}"
# if [ -z "$TAG" ]; then
# TAG="$(git describe --tags --abbrev=0 || echo 0.0.0)"
# fi
# echo "Using version: $TAG"
# VERSION="$TAG" ~/.local/bin/uv run --project packages/sdk/python python - <<'PY'
# import os, re, pathlib
# root = pathlib.Path('packages/sdk/python')
# pt = (root / 'pyproject.toml').read_text()
# version = os.environ.get('VERSION','0.0.0').lstrip('v')
# pt = re.sub(r'(?m)^(version\s*=\s*")[^"]+("\s*)$', f"\\1{version}\\2", pt)
# (root / 'pyproject.toml').write_text(pt)
# # Also update generator config override for consistency
# cfgp = root / 'openapi-python-client.yaml'
# if cfgp.exists():
# cfg = cfgp.read_text()
# cfg = re.sub(r'(?m)^(package_version_override:\s*)\S+$', f"\\1{version}", cfg)
# cfgp.write_text(cfg)
# PY
# - name: Build and publish to PyPI
# env:
# PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
# shell: bash
# run: |
# ~/.local/bin/uv run --project packages/sdk/python python packages/sdk/python/scripts/publish.py

View File

@@ -1,3 +0,0 @@
### What does this PR do?
### How did you verify your code works?

View File

@@ -1,166 +0,0 @@
name: Daily Issues Recap
on:
schedule:
# Run at 6 PM EST (23:00 UTC, or 22:00 UTC during daylight saving)
- cron: "0 23 * * *"
workflow_dispatch: # Allow manual trigger for testing
jobs:
daily-recap:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
issues: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Generate daily issues recap
id: recap
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh issue*": "allow",
"gh search*": "allow"
},
"webfetch": "deny",
"edit": "deny",
"write": "deny"
}
run: |
# Get today's date range
TODAY=$(date -u +%Y-%m-%d)
opencode run -m opencode/claude-sonnet-4-5 "Generate a daily issues recap for the OpenCode repository.
TODAY'S DATE: ${TODAY}
STEP 1: Gather today's issues
Search for all issues created today (${TODAY}) using:
gh issue list --repo ${{ github.repository }} --state all --search \"created:${TODAY}\" --json number,title,body,labels,state,comments,createdAt,author --limit 500
STEP 2: Analyze and categorize
For each issue created today, categorize it:
**Severity Assessment:**
- CRITICAL: Crashes, data loss, security issues, blocks major functionality
- HIGH: Significant bugs affecting many users, important features broken
- MEDIUM: Bugs with workarounds, minor features broken
- LOW: Minor issues, cosmetic, nice-to-haves
**Activity Assessment:**
- Note issues with high comment counts or engagement
- Note issues from repeat reporters (check if author has filed before)
STEP 3: Cross-reference with existing issues
For issues that seem like feature requests or recurring bugs:
- Search for similar older issues to identify patterns
- Note if this is a frequently requested feature
- Identify any issues that are duplicates of long-standing requests
STEP 4: Generate the recap
Create a structured recap with these sections:
===DISCORD_START===
**Daily Issues Recap - ${TODAY}**
**Summary Stats**
- Total issues opened today: [count]
- By category: [bugs/features/questions]
**Critical/High Priority Issues**
[List any CRITICAL or HIGH severity issues with brief descriptions and issue numbers]
**Most Active/Discussed**
[Issues with significant engagement or from active community members]
**Trending Topics**
[Patterns noticed - e.g., 'Multiple reports about X', 'Continued interest in Y feature']
**Duplicates & Related**
[Issues that relate to existing open issues]
===DISCORD_END===
STEP 5: Format for Discord
Format the recap as a Discord-compatible message:
- Use Discord markdown (**, __, etc.)
- BE EXTREMELY CONCISE - this is an EOD summary, not a detailed report
- Use hyperlinked issue numbers with suppressed embeds: [#1234](<https://github.com/${{ github.repository }}/issues/1234>)
- Group related issues on single lines where possible
- Add emoji sparingly for critical items only
- HARD LIMIT: Keep under 1800 characters total
- Skip sections that have nothing notable (e.g., if no critical issues, omit that section)
- Prioritize signal over completeness - only surface what matters
OUTPUT: Output ONLY the content between ===DISCORD_START=== and ===DISCORD_END=== markers. Include the markers so I can extract it." > /tmp/recap_raw.txt
# Extract only the Discord message between markers
sed -n '/===DISCORD_START===/,/===DISCORD_END===/p' /tmp/recap_raw.txt | grep -v '===DISCORD' > /tmp/recap.txt
echo "recap_file=/tmp/recap.txt" >> $GITHUB_OUTPUT
- name: Post to Discord
env:
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_ISSUES_WEBHOOK_URL }}
run: |
if [ -z "$DISCORD_WEBHOOK_URL" ]; then
echo "Warning: DISCORD_ISSUES_WEBHOOK_URL secret not set, skipping Discord post"
cat /tmp/recap.txt
exit 0
fi
# Read the recap
RECAP_RAW=$(cat /tmp/recap.txt)
RECAP_LENGTH=${#RECAP_RAW}
echo "Recap length: ${RECAP_LENGTH} chars"
# Function to post a message to Discord
post_to_discord() {
local msg="$1"
local content=$(echo "$msg" | jq -Rs '.')
curl -s -H "Content-Type: application/json" \
-X POST \
-d "{\"content\": ${content}}" \
"$DISCORD_WEBHOOK_URL"
sleep 1
}
# If under limit, send as single message
if [ "$RECAP_LENGTH" -le 1950 ]; then
post_to_discord "$RECAP_RAW"
else
echo "Splitting into multiple messages..."
remaining="$RECAP_RAW"
while [ ${#remaining} -gt 0 ]; do
if [ ${#remaining} -le 1950 ]; then
post_to_discord "$remaining"
break
else
chunk="${remaining:0:1900}"
last_newline=$(echo "$chunk" | grep -bo $'\n' | tail -1 | cut -d: -f1)
if [ -n "$last_newline" ] && [ "$last_newline" -gt 500 ]; then
chunk="${remaining:0:$last_newline}"
remaining="${remaining:$((last_newline+1))}"
else
chunk="${remaining:0:1900}"
remaining="${remaining:1900}"
fi
post_to_discord "$chunk"
fi
done
fi
echo "Posted daily recap to Discord"

View File

@@ -1,169 +0,0 @@
name: Daily PR Recap
on:
schedule:
# Run at 5pm EST (22:00 UTC, or 21:00 UTC during daylight saving)
- cron: "0 22 * * *"
workflow_dispatch: # Allow manual trigger for testing
jobs:
pr-recap:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
pull-requests: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Generate daily PR recap
id: recap
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh pr*": "allow",
"gh search*": "allow"
},
"webfetch": "deny",
"edit": "deny",
"write": "deny"
}
run: |
TODAY=$(date -u +%Y-%m-%d)
opencode run -m opencode/claude-sonnet-4-5 "Generate a daily PR activity recap for the OpenCode repository.
TODAY'S DATE: ${TODAY}
STEP 1: Gather PR data
Run these commands to gather PR information. ONLY include PRs created or updated TODAY (${TODAY}):
# PRs created today
gh pr list --repo ${{ github.repository }} --state all --search \"created:${TODAY}\" --json number,title,author,labels,createdAt,updatedAt,reviewDecision,isDraft,additions,deletions --limit 100
# PRs with activity today (updated today)
gh pr list --repo ${{ github.repository }} --state open --search \"updated:${TODAY}\" --json number,title,author,labels,createdAt,updatedAt,reviewDecision,isDraft,additions,deletions --limit 100
STEP 2: For high-activity PRs, check comment counts
For promising PRs, run:
gh pr view [NUMBER] --repo ${{ github.repository }} --json comments --jq '[.comments[] | select(.author.login != \"copilot-pull-request-reviewer\" and .author.login != \"github-actions\")] | length'
IMPORTANT: When counting comments/activity, EXCLUDE these bot accounts:
- copilot-pull-request-reviewer
- github-actions
STEP 3: Identify what matters (ONLY from today's PRs)
**Bug Fixes From Today:**
- PRs with 'fix' or 'bug' in title created/updated today
- Small bug fixes (< 100 lines changed) that are easy to review
- Bug fixes from community contributors
**High Activity Today:**
- PRs with significant human comments today (excluding bots listed above)
- PRs with back-and-forth discussion today
**Quick Wins:**
- Small PRs (< 50 lines) that are approved or nearly approved
- PRs that just need a final review
STEP 4: Generate the recap
Create a structured recap:
===DISCORD_START===
**Daily PR Recap - ${TODAY}**
**New PRs Today**
[PRs opened today - group by type: bug fixes, features, etc.]
**Active PRs Today**
[PRs with activity/updates today - significant discussion]
**Quick Wins**
[Small PRs ready to merge]
===DISCORD_END===
STEP 5: Format for Discord
- Use Discord markdown (**, __, etc.)
- BE EXTREMELY CONCISE - surface what we might miss
- Use hyperlinked PR numbers with suppressed embeds: [#1234](<https://github.com/${{ github.repository }}/pull/1234>)
- Include PR author: [#1234](<url>) (@author)
- For bug fixes, add brief description of what it fixes
- Show line count for quick wins: \"(+15/-3 lines)\"
- HARD LIMIT: Keep under 1800 characters total
- Skip empty sections
- Focus on PRs that need human eyes
OUTPUT: Output ONLY the content between ===DISCORD_START=== and ===DISCORD_END=== markers. Include the markers so I can extract it." > /tmp/pr_recap_raw.txt
# Extract only the Discord message between markers
sed -n '/===DISCORD_START===/,/===DISCORD_END===/p' /tmp/pr_recap_raw.txt | grep -v '===DISCORD' > /tmp/pr_recap.txt
echo "recap_file=/tmp/pr_recap.txt" >> $GITHUB_OUTPUT
- name: Post to Discord
env:
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_ISSUES_WEBHOOK_URL }}
run: |
if [ -z "$DISCORD_WEBHOOK_URL" ]; then
echo "Warning: DISCORD_ISSUES_WEBHOOK_URL secret not set, skipping Discord post"
cat /tmp/pr_recap.txt
exit 0
fi
# Read the recap
RECAP_RAW=$(cat /tmp/pr_recap.txt)
RECAP_LENGTH=${#RECAP_RAW}
echo "Recap length: ${RECAP_LENGTH} chars"
# Function to post a message to Discord
post_to_discord() {
local msg="$1"
local content=$(echo "$msg" | jq -Rs '.')
curl -s -H "Content-Type: application/json" \
-X POST \
-d "{\"content\": ${content}}" \
"$DISCORD_WEBHOOK_URL"
sleep 1
}
# If under limit, send as single message
if [ "$RECAP_LENGTH" -le 1950 ]; then
post_to_discord "$RECAP_RAW"
else
echo "Splitting into multiple messages..."
remaining="$RECAP_RAW"
while [ ${#remaining} -gt 0 ]; do
if [ ${#remaining} -le 1950 ]; then
post_to_discord "$remaining"
break
else
chunk="${remaining:0:1900}"
last_newline=$(echo "$chunk" | grep -bo $'\n' | tail -1 | cut -d: -f1)
if [ -n "$last_newline" ] && [ "$last_newline" -gt 500 ]; then
chunk="${remaining:0:$last_newline}"
remaining="${remaining:$((last_newline+1))}"
else
chunk="${remaining:0:1900}"
remaining="${remaining:1900}"
fi
post_to_discord "$chunk"
fi
done
fi
echo "Posted daily PR recap to Discord"

View File

@@ -11,19 +11,17 @@ concurrency: ${{ github.workflow }}-${{ github.ref }}
jobs:
deploy:
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-bun
- uses: actions/setup-node@v4
- uses: oven-sh/setup-bun@v1
with:
node-version: "24"
bun-version: 1.2.17
- run: bun install
- run: bun sst deploy --stage=${{ github.ref_name }}
env:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
PLANETSCALE_SERVICE_TOKEN_NAME: ${{ secrets.PLANETSCALE_SERVICE_TOKEN_NAME }}
PLANETSCALE_SERVICE_TOKEN: ${{ secrets.PLANETSCALE_SERVICE_TOKEN }}
STRIPE_SECRET_KEY: ${{ github.ref_name == 'production' && secrets.STRIPE_SECRET_KEY_PROD || secrets.STRIPE_SECRET_KEY_DEV }}

View File

@@ -1,72 +0,0 @@
name: Docs Update
on:
schedule:
- cron: "0 */12 * * *"
workflow_dispatch:
env:
LOOKBACK_HOURS: 4
jobs:
update-docs:
if: github.repository == 'sst/opencode'
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
id-token: write
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch full history to access commits
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Get recent commits
id: commits
run: |
COMMITS=$(git log --since="${{ env.LOOKBACK_HOURS }} hours ago" --pretty=format:"- %h %s" 2>/dev/null || echo "")
if [ -z "$COMMITS" ]; then
echo "No commits in the last ${{ env.LOOKBACK_HOURS }} hours"
echo "has_commits=false" >> $GITHUB_OUTPUT
else
echo "has_commits=true" >> $GITHUB_OUTPUT
{
echo "list<<EOF"
echo "$COMMITS"
echo "EOF"
} >> $GITHUB_OUTPUT
fi
- name: Run opencode
if: steps.commits.outputs.has_commits == 'true'
uses: sst/opencode/github@latest
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
with:
model: opencode/gpt-5.2
agent: docs
prompt: |
Review the following commits from the last ${{ env.LOOKBACK_HOURS }} hours and identify any new features that may need documentation.
<recent_commits>
${{ steps.commits.outputs.list }}
</recent_commits>
Steps:
1. For each commit that looks like a new feature or significant change:
- Read the changed files to understand what was added
- Check if the feature is already documented in packages/web/src/content/docs/*
2. If you find undocumented features:
- Update the relevant documentation files in packages/web/src/content/docs/*
- Follow the existing documentation style and structure
- Make sure to document the feature clearly with examples where appropriate
3. If all new features are already documented, report that no updates are needed
4. If you are creating a new documentation file be sure to update packages/web/astro.config.mjs too.
Focus on user-facing features and API changes. Skip internal refactors, bug fixes, and test updates unless they affect user-facing behavior.
Don't feel the need to document every little thing. It is perfectly okay to make 0 changes at all.
Try to keep documentation only for large features or changes that already have a good spot to be documented.

View File

@@ -1,63 +0,0 @@
name: Duplicate Issue Detection
on:
issues:
types: [opened]
jobs:
check-duplicates:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
issues: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Check for duplicate issues
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh issue*": "allow"
},
"webfetch": "deny"
}
run: |
opencode run -m opencode/claude-haiku-4-5 "A new issue has been created:
Issue number:
${{ github.event.issue.number }}
Look up this issue and search through existing issues (excluding #${{ github.event.issue.number }}) in this repository to find any potential duplicates of this new issue.
Consider:
1. Similar titles or descriptions
2. Same error messages or symptoms
3. Related functionality or components
4. Similar feature requests
If you find any potential duplicates, please comment on the new issue with:
- A brief explanation of why it might be a duplicate
- Links to the potentially duplicate issues
- A suggestion to check those issues first
Use this format for the comment:
'This issue might be a duplicate of existing issues. Please check:
- #[issue_number]: [brief description of similarity]
Feel free to ignore if none of these address your specific case.'
Additionally, if the issue mentions keybinds, keyboard shortcuts, or key bindings, please add a comment mentioning the pinned keybinds issue #4997:
'For keybind-related issues, please also check our pinned keybinds documentation: #4997'
If no clear duplicates are found, do not comment."

View File

@@ -1,65 +0,0 @@
name: Duplicate PR Check
on:
pull_request_target:
types: [opened]
jobs:
check-duplicates:
if: |
github.event.pull_request.user.login != 'actions-user' &&
github.event.pull_request.user.login != 'opencode' &&
github.event.pull_request.user.login != 'rekram1-node' &&
github.event.pull_request.user.login != 'thdxr' &&
github.event.pull_request.user.login != 'kommander' &&
github.event.pull_request.user.login != 'jayair' &&
github.event.pull_request.user.login != 'fwang' &&
github.event.pull_request.user.login != 'adamdotdevin' &&
github.event.pull_request.user.login != 'iamdavidhill' &&
github.event.pull_request.user.login != 'opencode-agent[bot]'
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Install dependencies
run: bun install
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Build prompt
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
{
echo "Check for duplicate PRs related to this new PR:"
echo ""
echo "CURRENT_PR_NUMBER: $PR_NUMBER"
echo ""
echo "Title: $(gh pr view "$PR_NUMBER" --json title --jq .title)"
echo ""
echo "Description:"
gh pr view "$PR_NUMBER" --json body --jq .body
} > pr_info.txt
- name: Check for duplicate PRs
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
COMMENT=$(bun script/duplicate-pr.ts -f pr_info.txt "Check the attached file for PR details and search for duplicates")
gh pr comment "$PR_NUMBER" --body "_The following comment was made by an LLM, it may be inaccurate:_
$COMMENT"

View File

@@ -1,51 +0,0 @@
name: generate
on:
push:
branches:
- dev
workflow_dispatch:
jobs:
generate:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
ref: ${{ github.event.pull_request.head.ref || github.ref_name }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Generate
run: ./script/generate.ts
- name: Commit and push
run: |
if [ -z "$(git status --porcelain)" ]; then
echo "No changes to commit"
exit 0
fi
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add -A
git commit -m "chore: generate"
git push origin HEAD:${{ github.ref_name }} --no-verify
# if ! git push origin HEAD:${{ github.event.pull_request.head.ref || github.ref_name }} --no-verify; then
# echo ""
# echo "============================================"
# echo "Failed to push generated code."
# echo "Please run locally and push:"
# echo ""
# echo " ./script/generate.ts"
# echo " git add -A && git commit -m \"chore: generate\" && git push"
# echo ""
# echo "============================================"
# exit 1
# fi
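
The "Commit and push" step above only commits when `git status --porcelain` reports changes. A sketch of the same guard using Bun's shell API (the push target is left out; the rest mirrors the step):

```
import { $ } from "bun"

// mirror the "Commit and push" step: bail out when the tree is clean
const status = (await $`git status --porcelain`.text()).trim()
if (status === "") {
  console.log("No changes to commit")
  process.exit(0)
}

await $`git config --local user.email "action@github.com"`
await $`git config --local user.name "GitHub Action"`
await $`git add -A`
await $`git commit -m "chore: generate"`
```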

View File

@@ -1,46 +0,0 @@
name: nix desktop
on:
push:
branches: [dev]
paths:
- "flake.nix"
- "flake.lock"
- "nix/**"
- "packages/app/**"
- "packages/desktop/**"
- ".github/workflows/nix-desktop.yml"
pull_request:
paths:
- "flake.nix"
- "flake.lock"
- "nix/**"
- "packages/app/**"
- "packages/desktop/**"
- ".github/workflows/nix-desktop.yml"
workflow_dispatch:
jobs:
build-desktop:
strategy:
fail-fast: false
matrix:
os:
- blacksmith-4vcpu-ubuntu-2404
- blacksmith-4vcpu-ubuntu-2404-arm
- macos-15-intel
- macos-latest
runs-on: ${{ matrix.os }}
timeout-minutes: 60
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Setup Nix
uses: nixbuild/nix-quick-install-action@v34
- name: Build desktop via flake
run: |
set -euo pipefail
nix --version
nix build .#desktop -L

View File

@@ -2,11 +2,11 @@ name: discord
on:
release:
types: [released] # fires when a draft release is published
types: [published] # fires only when a release is published
jobs:
notify:
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: ubuntu-latest
steps:
- name: Send nicely-formatted embed to Discord
uses: SethCohen/github-releases-to-discord@v1

View File

@@ -3,8 +3,6 @@ name: opencode
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
jobs:
opencode:
@@ -13,22 +11,19 @@ jobs:
startsWith(github.event.comment.body, '/oc') ||
contains(github.event.comment.body, ' /opencode') ||
startsWith(github.event.comment.body, '/opencode')
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
pull-requests: read
issues: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: ./.github/actions/setup-bun
with:
fetch-depth: 1
- name: Run opencode
uses: anomalyco/opencode/github@latest
uses: sst/opencode/github@latest
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
OPENCODE_PERMISSION: '{"bash": "deny"}'
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
with:
model: opencode/claude-opus-4-5
model: anthropic/claude-sonnet-4-20250514

View File

@@ -1,139 +0,0 @@
name: PR Standards
on:
pull_request_target:
types: [opened, edited, synchronize]
jobs:
check-standards:
if: |
github.event.pull_request.user.login != 'actions-user' &&
github.event.pull_request.user.login != 'opencode' &&
github.event.pull_request.user.login != 'rekram1-node' &&
github.event.pull_request.user.login != 'thdxr' &&
github.event.pull_request.user.login != 'kommander' &&
github.event.pull_request.user.login != 'jayair' &&
github.event.pull_request.user.login != 'fwang' &&
github.event.pull_request.user.login != 'adamdotdevin' &&
github.event.pull_request.user.login != 'iamdavidhill' &&
github.event.pull_request.user.login != 'opencode-agent[bot]'
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Check PR standards
uses: actions/github-script@v7
with:
script: |
const pr = context.payload.pull_request;
const title = pr.title;
async function addLabel(label) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number,
labels: [label]
});
}
async function removeLabel(label) {
try {
await github.rest.issues.removeLabel({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number,
name: label
});
} catch (e) {
// Label wasn't present, ignore
}
}
async function comment(marker, body) {
const markerText = `<!-- pr-standards:${marker} -->`;
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number
});
const existing = comments.find(c => c.body.includes(markerText));
if (existing) return;
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number,
body: markerText + '\n' + body
});
}
// Step 1: Check title format
// Matches: feat:, feat(scope):, feat (scope):, etc.
const titlePattern = /^(feat|fix|docs|chore|refactor|test)\s*(\([a-zA-Z0-9-]+\))?\s*:/;
const hasValidTitle = titlePattern.test(title);
if (!hasValidTitle) {
await addLabel('needs:title');
await comment('title', `Hey! Your PR title \`${title}\` doesn't follow conventional commit format.
Please update it to start with one of:
- \`feat:\` or \`feat(scope):\` new feature
- \`fix:\` or \`fix(scope):\` bug fix
- \`docs:\` or \`docs(scope):\` documentation changes
- \`chore:\` or \`chore(scope):\` maintenance tasks
- \`refactor:\` or \`refactor(scope):\` code refactoring
- \`test:\` or \`test(scope):\` adding or updating tests
Where \`scope\` is the package name (e.g., \`app\`, \`desktop\`, \`opencode\`).
See [CONTRIBUTING.md](../blob/dev/CONTRIBUTING.md#pr-titles) for details.`);
return;
}
await removeLabel('needs:title');
// Step 2: Check for linked issue (skip for docs/refactor PRs)
const skipIssueCheck = /^(docs|refactor)\s*(\([a-zA-Z0-9-]+\))?\s*:/.test(title);
if (skipIssueCheck) {
await removeLabel('needs:issue');
console.log('Skipping issue check for docs/refactor PR');
return;
}
const query = `
query($owner: String!, $repo: String!, $number: Int!) {
repository(owner: $owner, name: $repo) {
pullRequest(number: $number) {
closingIssuesReferences(first: 1) {
totalCount
}
}
}
}
`;
const result = await github.graphql(query, {
owner: context.repo.owner,
repo: context.repo.repo,
number: pr.number
});
const linkedIssues = result.repository.pullRequest.closingIssuesReferences.totalCount;
if (linkedIssues === 0) {
await addLabel('needs:issue');
await comment('issue', `Thanks for your contribution!
This PR doesn't have a linked issue. All PRs must reference an existing issue.
Please:
1. Open an issue describing the bug/feature (if one doesn't exist)
2. Add \`Fixes #<number>\` or \`Closes #<number>\` to this PR description
See [CONTRIBUTING.md](../blob/dev/CONTRIBUTING.md#issue-first-policy) for details.`);
return;
}
await removeLabel('needs:issue');
console.log('PR meets all standards');
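
The title check above hinges on the `titlePattern` regex. A standalone sketch that exercises the same pattern; the regex and label name come from the script, while the sample titles are hypothetical:

```
// Same conventional-commit pattern as the github-script step above
const titlePattern = /^(feat|fix|docs|chore|refactor|test)\s*(\([a-zA-Z0-9-]+\))?\s*:/

const samples = [
  "feat(app): add session picker", // hypothetical, passes
  "fix: handle empty recap",       // hypothetical, passes
  "Update readme",                 // hypothetical, would get the needs:title label
]

for (const title of samples) {
  console.log(`${title} -> ${titlePattern.test(title) ? "ok" : "needs:title"}`)
}
```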

View File

@@ -14,7 +14,7 @@ permissions:
jobs:
publish:
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:

View File

@@ -13,23 +13,22 @@ permissions:
jobs:
publish:
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: ./.github/actions/setup-bun
- uses: oven-sh/setup-bun@v2
with:
bun-version: 1.2.17
- run: git fetch --force --tags
- run: bun install -g @vscode/vsce
- name: Install extension dependencies
run: bun install
working-directory: ./sdks/vscode
- name: Publish
run: |
bun install
./script/publish
working-directory: ./sdks/vscode
env:

View File

@@ -1,37 +1,27 @@
name: publish
run-name: "${{ format('release {0}', inputs.bump) }}"
run-name: "${{ format('v{0}', inputs.version) }}"
on:
push:
branches:
- dev
- snapshot-*
workflow_dispatch:
inputs:
bump:
description: "Bump major, minor, or patch"
required: false
type: choice
options:
- major
- minor
- patch
version:
description: "Override version (optional)"
description: "Version to publish"
required: true
type: string
title:
description: "Custom title for this run"
required: false
type: string
concurrency: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.version || inputs.bump }}
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
id-token: write
contents: write
packages: write
jobs:
publish:
runs-on: blacksmith-4vcpu-ubuntu-2404
if: github.repository == 'anomalyco/opencode'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
@@ -39,199 +29,46 @@ jobs:
- run: git fetch --force --tags
- uses: ./.github/actions/setup-bun
- name: Install OpenCode
if: inputs.bump || inputs.version
run: bun i -g opencode-ai@1.0.169
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
- uses: actions/setup-go@v5
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
go-version: ">=1.24.0"
cache: true
cache-dependency-path: go.sum
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- uses: actions/setup-node@v4
- uses: oven-sh/setup-bun@v2
with:
node-version: "24"
registry-url: "https://registry.npmjs.org"
bun-version: 1.2.19
- name: Setup Git Identity
run: |
git config --global user.email "opencode@sst.dev"
git config --global user.name "opencode"
git remote set-url origin https://x-access-token:${{ secrets.SST_GITHUB_TOKEN }}@github.com/${{ github.repository }}
- name: Publish
id: publish
run: ./script/publish-start.ts
env:
OPENCODE_BUMP: ${{ inputs.bump }}
OPENCODE_VERSION: ${{ inputs.version }}
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GITHUB_TOKEN: ${{ secrets.SST_GITHUB_TOKEN }}
NPM_CONFIG_PROVENANCE: false
- uses: actions/upload-artifact@v4
- name: Cache ~/.bun
id: cache-bun
uses: actions/cache@v3
with:
name: opencode-cli
path: packages/opencode/dist
path: ~/.bun
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock') }}
restore-keys: |
${{ runner.os }}-bun-
outputs:
release: ${{ steps.publish.outputs.release }}
tag: ${{ steps.publish.outputs.tag }}
version: ${{ steps.publish.outputs.version }}
publish-tauri:
needs: publish
continue-on-error: false
strategy:
fail-fast: false
matrix:
settings:
- host: macos-latest
target: x86_64-apple-darwin
- host: macos-latest
target: aarch64-apple-darwin
- host: blacksmith-4vcpu-windows-2025
target: x86_64-pc-windows-msvc
- host: blacksmith-4vcpu-ubuntu-2404
target: x86_64-unknown-linux-gnu
- host: blacksmith-4vcpu-ubuntu-2404-arm
target: aarch64-unknown-linux-gnu
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ needs.publish.outputs.tag }}
- uses: apple-actions/import-codesign-certs@v2
if: ${{ runner.os == 'macOS' }}
with:
keychain: build
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
- name: Verify Certificate
if: ${{ runner.os == 'macOS' }}
run: |
CERT_INFO=$(security find-identity -v -p codesigning build.keychain | grep "Developer ID Application")
CERT_ID=$(echo "$CERT_INFO" | awk -F'"' '{print $2}')
echo "CERT_ID=$CERT_ID" >> $GITHUB_ENV
echo "Certificate imported."
- name: Setup Apple API Key
if: ${{ runner.os == 'macOS' }}
run: |
echo "${{ secrets.APPLE_API_KEY_PATH }}" > $RUNNER_TEMP/apple-api-key.p8
- run: git fetch --force --tags
- uses: ./.github/actions/setup-bun
- name: install dependencies (ubuntu only)
if: contains(matrix.settings.host, 'ubuntu')
run: |
sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
- name: install Rust stable
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.settings.target }}
- uses: Swatinem/rust-cache@v2
with:
workspaces: packages/desktop/src-tauri
shared-key: ${{ matrix.settings.target }}
- name: Prepare
run: |
cd packages/desktop
bun ./scripts/prepare.ts
env:
OPENCODE_VERSION: ${{ needs.publish.outputs.version }}
NPM_CONFIG_TOKEN: ${{ secrets.NPM_TOKEN }}
GITHUB_TOKEN: ${{ secrets.SST_GITHUB_TOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
RUST_TARGET: ${{ matrix.settings.target }}
GH_TOKEN: ${{ github.token }}
GITHUB_RUN_ID: ${{ github.run_id }}
# Fixes AppImage build issues, can be removed when https://github.com/tauri-apps/tauri/pull/12491 is released
- name: Install tauri-cli from portable appimage branch
if: contains(matrix.settings.host, 'ubuntu')
run: |
cargo install tauri-cli --git https://github.com/tauri-apps/tauri --branch feat/truly-portable-appimage --force
echo "Installed tauri-cli version:"
cargo tauri --version
- name: Build and upload artifacts
uses: Wandalen/wretry.action@v3
timeout-minutes: 60
with:
attempt_limit: 3
attempt_delay: 10000
action: tauri-apps/tauri-action@390cbe447412ced1303d35abe75287949e43437a
with: |
projectPath: packages/desktop
uploadWorkflowArtifacts: true
tauriScript: ${{ (contains(matrix.settings.host, 'ubuntu') && 'cargo tauri') || '' }}
args: --target ${{ matrix.settings.target }} --config ./src-tauri/tauri.prod.conf.json --verbose
updaterJsonPreferNsis: true
releaseId: ${{ needs.publish.outputs.release }}
tagName: ${{ needs.publish.outputs.tag }}
releaseAssetNamePattern: opencode-desktop-[platform]-[arch][ext]
releaseDraft: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAURI_BUNDLER_NEW_APPIMAGE_FORMAT: true
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ env.CERT_ID }}
APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }}
APPLE_API_KEY: ${{ secrets.APPLE_API_KEY }}
APPLE_API_KEY_PATH: ${{ runner.temp }}/apple-api-key.p8
publish-release:
needs:
- publish
- publish-tauri
if: needs.publish.outputs.tag
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ needs.publish.outputs.tag }}
- uses: ./.github/actions/setup-bun
- name: Setup SSH for AUR
- name: Install makepkg
run: |
sudo apt-get update
sudo apt-get install -y pacman-package-manager
- name: Setup SSH for AUR
run: |
mkdir -p ~/.ssh
echo "${{ secrets.AUR_KEY }}" > ~/.ssh/id_rsa
chmod 600 ~/.ssh/id_rsa
ssh-keyscan -H aur.archlinux.org >> ~/.ssh/known_hosts
git config --global user.email "opencode@sst.dev"
git config --global user.name "opencode"
ssh-keyscan -H aur.archlinux.org >> ~/.ssh/known_hosts || true
- run: ./script/publish-complete.ts
- name: Install dependencies
run: bun install
- name: Publish
run: |
OPENCODE_VERSION=${{ inputs.version }} ./script/publish.ts
env:
OPENCODE_VERSION: ${{ needs.publish.outputs.version }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GITHUB_TOKEN: ${{ secrets.SST_GITHUB_TOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
NPM_CONFIG_TOKEN: ${{ secrets.NPM_TOKEN }}

View File

@@ -1,29 +0,0 @@
name: release-github-action
on:
push:
branches:
- dev
paths:
- "github/**"
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
contents: write
jobs:
release:
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- run: git fetch --force --tags
- name: Release
run: |
git config --global user.email "opencode@sst.dev"
git config --global user.name "opencode"
./github/script/release

View File

@@ -1,83 +0,0 @@
name: Guidelines Check
on:
issue_comment:
types: [created]
jobs:
check-guidelines:
if: |
github.event.issue.pull_request &&
startsWith(github.event.comment.body, '/review') &&
contains(fromJson('["OWNER","MEMBER"]'), github.event.comment.author_association)
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
pull-requests: write
steps:
- name: Get PR number
id: pr-number
run: |
if [ "${{ github.event_name }}" = "pull_request_target" ]; then
echo "number=${{ github.event.pull_request.number }}" >> $GITHUB_OUTPUT
else
echo "number=${{ github.event.issue.number }}" >> $GITHUB_OUTPUT
fi
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Get PR details
id: pr-details
run: |
gh api /repos/${{ github.repository }}/pulls/${{ steps.pr-number.outputs.number }} > pr_data.json
echo "title=$(jq -r .title pr_data.json)" >> $GITHUB_OUTPUT
echo "sha=$(jq -r .head.sha pr_data.json)" >> $GITHUB_OUTPUT
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Check PR guidelines compliance
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: '{ "bash": { "*": "deny", "gh*": "allow", "gh pr review*": "deny" } }'
PR_TITLE: ${{ steps.pr-details.outputs.title }}
run: |
PR_BODY=$(jq -r .body pr_data.json)
opencode run -m anthropic/claude-opus-4-5 "A new pull request has been created: '${PR_TITLE}'
<pr-number>
${{ steps.pr-number.outputs.number }}
</pr-number>
<pr-description>
$PR_BODY
</pr-description>
Please check all the code changes in this pull request against the style guide, and also look for any bugs if they exist. Diffs are important, but make sure you read the entire file to get proper context. Make it clear the suggestions are merely suggestions and the human can decide what to do.
When critiquing code against the style guide, be sure that the code is ACTUALLY in violation; don't complain about else statements if they already use early returns there. You may complain about excessive nesting though, regardless of else statement usage.
When critiquing code style don't be a zealot: we don't like "let" statements but sometimes they are the simplest option; if someone does a bunch of nesting with let, they should consider using an IIFE (see packages/opencode/src/util.iife.ts).
Use the gh cli to create comments on the files for the violations. Try to leave the comment on the exact line number. If you have a suggested fix include it in a suggestion code block.
If you are writing suggested fixes, BE SURE THAT the change you are recommending is actually valid typescript, often I have seen missing closing "}" or other syntax errors.
Generally, write a comment instead of writing suggested change if you can help it.
Command MUST be like this.
\`\`\`
gh api \
--method POST \
-H \"Accept: application/vnd.github+json\" \
-H \"X-GitHub-Api-Version: 2022-11-28\" \
/repos/${{ github.repository }}/pulls/${{ steps.pr-number.outputs.number }}/comments \
-f 'body=[summary of issue]' -f 'commit_id=${{ steps.pr-details.outputs.sha }}' -f 'path=[path-to-file]' -F \"line=[line]\" -f 'side=RIGHT'
\`\`\`
Only create comments for actual violations. If the code follows all guidelines, comment on the issue using gh cli: 'lgtm' AND NOTHING ELSE!!!!."
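
The `gh api` command the prompt prescribes maps to GitHub's "create a review comment for a pull request" endpoint. A sketch of the same request as a direct REST call; the repo, PR number, commit SHA, path, and line below are placeholders:

```
const token = process.env.GITHUB_TOKEN
if (!token) throw new Error("GITHUB_TOKEN is not set")

const repo = "sst/opencode" // placeholder owner/repo
const pull = 1234           // placeholder PR number

// POST /repos/{owner}/{repo}/pulls/{pull_number}/comments - create a review comment
const res = await fetch(`https://api.github.com/repos/${repo}/pulls/${pull}/comments`, {
  method: "POST",
  headers: {
    Accept: "application/vnd.github+json",
    Authorization: `Bearer ${token}`,
    "X-GitHub-Api-Version": "2022-11-28",
  },
  body: JSON.stringify({
    body: "[summary of issue]",                // the violation summary
    commit_id: "<head sha of the PR>",         // placeholder, from the pr-details step
    path: "packages/opencode/src/example.ts",  // placeholder file path
    line: 42,                                  // placeholder line number
    side: "RIGHT",
  }),
})
if (!res.ok) throw new Error(`GitHub API returned ${res.status}`)
```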

View File

@@ -1,33 +0,0 @@
name: "Auto-close stale issues"
on:
schedule:
- cron: "30 1 * * *" # Daily at 1:30 AM
workflow_dispatch:
env:
DAYS_BEFORE_STALE: 90
DAYS_BEFORE_CLOSE: 7
jobs:
stale:
runs-on: ubuntu-latest
permissions:
issues: write
steps:
- uses: actions/stale@v10
with:
days-before-stale: ${{ env.DAYS_BEFORE_STALE }}
days-before-close: ${{ env.DAYS_BEFORE_CLOSE }}
stale-issue-label: "stale"
close-issue-message: |
[automated] Closing due to ${{ env.DAYS_BEFORE_STALE }}+ days of inactivity.
Feel free to reopen if you still need this!
stale-issue-message: |
[automated] This issue has had no activity for ${{ env.DAYS_BEFORE_STALE }} days.
It will be closed in ${{ env.DAYS_BEFORE_CLOSE }} days if there's no new activity.
remove-stale-when-updated: true
exempt-issue-labels: "pinned,security,feature-request,on-hold"
start-date: "2025-12-27"

View File

@@ -5,12 +5,9 @@ on:
- cron: "0 12 * * *" # Run daily at 12:00 UTC
workflow_dispatch: # Allow manual trigger
concurrency: ${{ github.workflow }}-${{ github.ref }}
jobs:
stats:
if: github.repository == 'anomalyco/opencode'
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: ubuntu-latest
permissions:
contents: write
@@ -19,7 +16,9 @@ jobs:
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
uses: oven-sh/setup-bun@v2
with:
bun-version: latest
- name: Run stats script
run: bun script/stats.ts

View File

@@ -1,35 +0,0 @@
name: "sync-zed-extension"
on:
workflow_dispatch:
release:
types: [published]
jobs:
zed:
name: Release Zed Extension
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: ./.github/actions/setup-bun
- name: Get version tag
id: get_tag
run: |
if [ "${{ github.event_name }}" = "release" ]; then
TAG="${{ github.event.release.tag_name }}"
else
TAG=$(git tag --list 'v[0-9]*.*' --sort=-version:refname | head -n 1)
fi
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "Using tag: ${TAG}"
- name: Sync Zed extension
run: |
./script/sync-zed.ts ${{ steps.get_tag.outputs.tag }}
env:
ZED_EXTENSIONS_PAT: ${{ secrets.ZED_EXTENSIONS_PAT }}
ZED_PR_PAT: ${{ secrets.ZED_PR_PAT }}

View File

@@ -1,147 +0,0 @@
name: test
on:
push:
branches:
- dev
pull_request:
workflow_dispatch:
jobs:
test:
name: test (${{ matrix.settings.name }})
strategy:
fail-fast: false
matrix:
settings:
- name: linux
host: blacksmith-4vcpu-ubuntu-2404
playwright: bunx playwright install --with-deps
workdir: .
command: |
git config --global user.email "bot@opencode.ai"
git config --global user.name "opencode"
bun turbo typecheck
bun turbo test
- name: windows
host: windows-latest
playwright: bunx playwright install
workdir: packages/app
command: bun test:e2e:local
runs-on: ${{ matrix.settings.host }}
defaults:
run:
shell: bash
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Install Playwright browsers
working-directory: packages/app
run: ${{ matrix.settings.playwright }}
- name: Set OS-specific paths
run: |
if [ "${{ runner.os }}" = "Windows" ]; then
printf '%s\n' "OPENCODE_E2E_ROOT=${{ runner.temp }}\\opencode-e2e" >> "$GITHUB_ENV"
printf '%s\n' "OPENCODE_TEST_HOME=${{ runner.temp }}\\opencode-e2e\\home" >> "$GITHUB_ENV"
printf '%s\n' "XDG_DATA_HOME=${{ runner.temp }}\\opencode-e2e\\share" >> "$GITHUB_ENV"
printf '%s\n' "XDG_CACHE_HOME=${{ runner.temp }}\\opencode-e2e\\cache" >> "$GITHUB_ENV"
printf '%s\n' "XDG_CONFIG_HOME=${{ runner.temp }}\\opencode-e2e\\config" >> "$GITHUB_ENV"
printf '%s\n' "XDG_STATE_HOME=${{ runner.temp }}\\opencode-e2e\\state" >> "$GITHUB_ENV"
printf '%s\n' "MODELS_DEV_API_JSON=${{ github.workspace }}\\packages\\opencode\\test\\tool\\fixtures\\models-api.json" >> "$GITHUB_ENV"
else
printf '%s\n' "OPENCODE_E2E_ROOT=${{ runner.temp }}/opencode-e2e" >> "$GITHUB_ENV"
printf '%s\n' "OPENCODE_TEST_HOME=${{ runner.temp }}/opencode-e2e/home" >> "$GITHUB_ENV"
printf '%s\n' "XDG_DATA_HOME=${{ runner.temp }}/opencode-e2e/share" >> "$GITHUB_ENV"
printf '%s\n' "XDG_CACHE_HOME=${{ runner.temp }}/opencode-e2e/cache" >> "$GITHUB_ENV"
printf '%s\n' "XDG_CONFIG_HOME=${{ runner.temp }}/opencode-e2e/config" >> "$GITHUB_ENV"
printf '%s\n' "XDG_STATE_HOME=${{ runner.temp }}/opencode-e2e/state" >> "$GITHUB_ENV"
printf '%s\n' "MODELS_DEV_API_JSON=${{ github.workspace }}/packages/opencode/test/tool/fixtures/models-api.json" >> "$GITHUB_ENV"
fi
- name: Seed opencode data
if: matrix.settings.name != 'windows'
working-directory: packages/opencode
run: bun script/seed-e2e.ts
env:
MODELS_DEV_API_JSON: ${{ env.MODELS_DEV_API_JSON }}
OPENCODE_DISABLE_MODELS_FETCH: "true"
OPENCODE_DISABLE_SHARE: "true"
OPENCODE_DISABLE_LSP_DOWNLOAD: "true"
OPENCODE_DISABLE_DEFAULT_PLUGINS: "true"
OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER: "true"
OPENCODE_TEST_HOME: ${{ env.OPENCODE_TEST_HOME }}
XDG_DATA_HOME: ${{ env.XDG_DATA_HOME }}
XDG_CACHE_HOME: ${{ env.XDG_CACHE_HOME }}
XDG_CONFIG_HOME: ${{ env.XDG_CONFIG_HOME }}
XDG_STATE_HOME: ${{ env.XDG_STATE_HOME }}
OPENCODE_E2E_PROJECT_DIR: ${{ github.workspace }}
OPENCODE_E2E_SESSION_TITLE: "E2E Session"
OPENCODE_E2E_MESSAGE: "Seeded for UI e2e"
OPENCODE_E2E_MODEL: "opencode/gpt-5-nano"
- name: Run opencode server
if: matrix.settings.name != 'windows'
working-directory: packages/opencode
run: bun dev -- --print-logs --log-level WARN serve --port 4096 --hostname 127.0.0.1 &
env:
MODELS_DEV_API_JSON: ${{ env.MODELS_DEV_API_JSON }}
OPENCODE_DISABLE_MODELS_FETCH: "true"
OPENCODE_DISABLE_SHARE: "true"
OPENCODE_DISABLE_LSP_DOWNLOAD: "true"
OPENCODE_DISABLE_DEFAULT_PLUGINS: "true"
OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER: "true"
OPENCODE_TEST_HOME: ${{ env.OPENCODE_TEST_HOME }}
XDG_DATA_HOME: ${{ env.XDG_DATA_HOME }}
XDG_CACHE_HOME: ${{ env.XDG_CACHE_HOME }}
XDG_CONFIG_HOME: ${{ env.XDG_CONFIG_HOME }}
XDG_STATE_HOME: ${{ env.XDG_STATE_HOME }}
OPENCODE_CLIENT: "app"
- name: Wait for opencode server
if: matrix.settings.name != 'windows'
run: |
for i in {1..120}; do
curl -fsS "http://127.0.0.1:4096/global/health" > /dev/null && exit 0
sleep 1
done
exit 1
- name: run
working-directory: ${{ matrix.settings.workdir }}
run: ${{ matrix.settings.command }}
env:
CI: true
MODELS_DEV_API_JSON: ${{ env.MODELS_DEV_API_JSON }}
OPENCODE_DISABLE_MODELS_FETCH: "true"
OPENCODE_DISABLE_SHARE: "true"
OPENCODE_DISABLE_LSP_DOWNLOAD: "true"
OPENCODE_DISABLE_DEFAULT_PLUGINS: "true"
OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER: "true"
OPENCODE_TEST_HOME: ${{ env.OPENCODE_TEST_HOME }}
XDG_DATA_HOME: ${{ env.XDG_DATA_HOME }}
XDG_CACHE_HOME: ${{ env.XDG_CACHE_HOME }}
XDG_CONFIG_HOME: ${{ env.XDG_CONFIG_HOME }}
XDG_STATE_HOME: ${{ env.XDG_STATE_HOME }}
PLAYWRIGHT_SERVER_HOST: "127.0.0.1"
PLAYWRIGHT_SERVER_PORT: "4096"
VITE_OPENCODE_SERVER_HOST: "127.0.0.1"
VITE_OPENCODE_SERVER_PORT: "4096"
OPENCODE_CLIENT: "app"
timeout-minutes: 30
- name: Upload Playwright artifacts
if: failure()
uses: actions/upload-artifact@v4
with:
name: playwright-${{ matrix.settings.name }}-${{ github.run_attempt }}
if-no-files-found: ignore
retention-days: 7
path: |
packages/app/e2e/test-results
packages/app/e2e/playwright-report
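
The "Wait for opencode server" step in this workflow polls the health endpoint for up to two minutes before the e2e run starts. The same loop in TypeScript, with the URL and the 120 × 1s budget taken from the workflow:

```
const url = "http://127.0.0.1:4096/global/health"

let healthy = false
for (let attempt = 0; attempt < 120 && !healthy; attempt++) {
  try {
    healthy = (await fetch(url)).ok
  } catch {
    // server not accepting connections yet
  }
  if (!healthy) await new Promise((resolve) => setTimeout(resolve, 1000))
}
if (!healthy) throw new Error(`opencode server never became healthy at ${url}`)
```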

View File

@@ -1,37 +0,0 @@
name: Issue Triage
on:
issues:
types: [opened]
jobs:
triage:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
issues: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Setup Bun
uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Triage issue
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ISSUE_NUMBER: ${{ github.event.issue.number }}
ISSUE_TITLE: ${{ github.event.issue.title }}
ISSUE_BODY: ${{ github.event.issue.body }}
run: |
opencode run --agent triage "The following issue was just opened, triage it:
Title: $ISSUE_TITLE
$ISSUE_BODY"

View File

@@ -1,4 +1,4 @@
name: typecheck
name: Typecheck
on:
pull_request:
@@ -7,13 +7,18 @@ on:
jobs:
typecheck:
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
uses: oven-sh/setup-bun@v1
with:
bun-version: 1.2.19
- name: Install dependencies
run: bun install
- name: Run typecheck
run: bun typecheck

View File

@@ -1,138 +0,0 @@
name: Update Nix Hashes
permissions:
contents: write
on:
workflow_dispatch:
push:
paths:
- "bun.lock"
- "package.json"
- "packages/*/package.json"
- "flake.lock"
- ".github/workflows/update-nix-hashes.yml"
pull_request:
paths:
- "bun.lock"
- "package.json"
- "packages/*/package.json"
- "flake.lock"
- ".github/workflows/update-nix-hashes.yml"
jobs:
update-node-modules-hashes:
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
runs-on: blacksmith-4vcpu-ubuntu-2404
env:
TITLE: node_modules hashes
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0
ref: ${{ github.head_ref || github.ref_name }}
repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
- name: Setup Nix
uses: nixbuild/nix-quick-install-action@v34
- name: Configure git
run: |
git config --global user.email "action@github.com"
git config --global user.name "Github Action"
- name: Pull latest changes
env:
TARGET_BRANCH: ${{ github.head_ref || github.ref_name }}
run: |
BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
git pull --rebase --autostash origin "$BRANCH"
- name: Compute all node_modules hashes
run: |
set -euo pipefail
HASH_FILE="nix/hashes.json"
SYSTEMS="x86_64-linux aarch64-linux x86_64-darwin aarch64-darwin"
if [ ! -f "$HASH_FILE" ]; then
mkdir -p "$(dirname "$HASH_FILE")"
echo '{"nodeModules":{}}' > "$HASH_FILE"
fi
for SYSTEM in $SYSTEMS; do
echo "Computing hash for ${SYSTEM}..."
BUILD_LOG=$(mktemp)
trap 'rm -f "$BUILD_LOG"' EXIT
# The updater derivations use fakeHash, so they will fail and reveal the correct hash
UPDATER_ATTR=".#packages.x86_64-linux.${SYSTEM}_node_modules"
nix build "$UPDATER_ATTR" --no-link 2>&1 | tee "$BUILD_LOG" || true
CORRECT_HASH="$(grep -E 'got:\s+sha256-[A-Za-z0-9+/=]+' "$BUILD_LOG" | awk '{print $2}' | head -n1 || true)"
if [ -z "$CORRECT_HASH" ]; then
CORRECT_HASH="$(grep -A2 'hash mismatch' "$BUILD_LOG" | grep 'got:' | awk '{print $2}' | sed 's/sha256:/sha256-/' || true)"
fi
if [ -z "$CORRECT_HASH" ]; then
echo "Failed to determine correct node_modules hash for ${SYSTEM}."
cat "$BUILD_LOG"
exit 1
fi
echo " ${SYSTEM}: ${CORRECT_HASH}"
jq --arg sys "$SYSTEM" --arg h "$CORRECT_HASH" \
'.nodeModules[$sys] = $h' "$HASH_FILE" > "${HASH_FILE}.tmp"
mv "${HASH_FILE}.tmp" "$HASH_FILE"
done
echo "All hashes computed:"
cat "$HASH_FILE"
- name: Commit ${{ env.TITLE }} changes
env:
TARGET_BRANCH: ${{ github.head_ref || github.ref_name }}
run: |
set -euo pipefail
HASH_FILE="nix/hashes.json"
echo "Checking for changes..."
summarize() {
local status="$1"
{
echo "### Nix $TITLE"
echo ""
echo "- ref: ${GITHUB_REF_NAME}"
echo "- status: ${status}"
} >> "$GITHUB_STEP_SUMMARY"
if [ -n "${GITHUB_SERVER_URL:-}" ] && [ -n "${GITHUB_REPOSITORY:-}" ] && [ -n "${GITHUB_RUN_ID:-}" ]; then
echo "- run: ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" >> "$GITHUB_STEP_SUMMARY"
fi
echo "" >> "$GITHUB_STEP_SUMMARY"
}
FILES=("$HASH_FILE")
STATUS="$(git status --short -- "${FILES[@]}" || true)"
if [ -z "$STATUS" ]; then
echo "No changes detected."
summarize "no changes"
exit 0
fi
echo "Changes detected:"
echo "$STATUS"
git add "${FILES[@]}"
git commit -m "chore: update nix node_modules hashes"
BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
git pull --rebase --autostash origin "$BRANCH"
git push origin HEAD:"$BRANCH"
echo "Changes pushed successfully"
summarize "committed $(git rev-parse --short HEAD)"

.gitignore vendored (22 changes)
View File

@@ -1,28 +1,8 @@
.DS_Store
node_modules
.worktrees
.sst
.env
.idea
.vscode
*~
openapi.json
playground
tmp
dist
ts-dist
.turbo
**/.serena
.serena/
/result
refs
Session.vim
opencode.json
a.out
target
.scripts
.direnv/
# Local dev files
opencode-dev
logs/
*.bun-build

View File

@@ -1,20 +0,0 @@
#!/bin/sh
set -e
# Check if bun version matches package.json
# keep in sync with packages/script/src/index.ts semver qualifier
bun -e '
import { semver } from "bun";
const pkg = await Bun.file("package.json").json();
const expectedBunVersion = pkg.packageManager?.split("@")[1];
if (!expectedBunVersion) {
throw new Error("packageManager field not found in root package.json");
}
const expectedBunVersionRange = `^${expectedBunVersion}`;
if (!semver.satisfies(process.versions.bun, expectedBunVersionRange)) {
throw new Error(`This script requires bun@${expectedBunVersionRange}, but you are using bun@${process.versions.bun}`);
}
if (process.versions.bun !== expectedBunVersion) {
console.warn(`Warning: Bun version ${process.versions.bun} differs from expected ${expectedBunVersion}`);
}
'
bun typecheck

View File

@@ -1,3 +0,0 @@
plans/
bun.lock
package.json

View File

@@ -1,34 +1,14 @@
---
model: openai/gpt-5
reasoningEffort: medium
description: ALWAYS use this when writing docs
color: "#38A3EE"
---
You are an expert technical documentation writer
You are not verbose
Use a relaxed and friendly tone
Every chunk of text should be followed by an example or something besides text
to look at.
The title of the page should be a word or a 2-3 word phrase
The description should be one short line, should not start with "The", should
avoid repeating the title of the page, should be 5-10 words long
Chunks of text should not be more than 2 sentences long
Each section is separated by a divider of 3 dashes
The section titles are short with only the first letter of the word capitalized
The section titles are in the imperative mood
The section titles should not repeat the term used in the page title, for
example, if the page title is "Models", avoid using a section title like "Add
new models". This might be unavoidable in some cases, but try to avoid it.
Check out the /packages/web/src/content/docs/docs/index.mdx as an example.
For JS or TS code snippets remove trailing semicolons and any trailing commas
that might not be needed.
If you are making a commit prefix the commit message with `docs:`
Chunks of text should not be more than 2 sentences long.

View File

@@ -1,26 +0,0 @@
---
mode: primary
hidden: true
model: opencode/claude-haiku-4-5
color: "#E67E22"
tools:
"*": false
"github-pr-search": true
---
You are a duplicate PR detection agent. When a PR is opened, your job is to search for potentially duplicate or related open PRs.
Use the github-pr-search tool to search for PRs that might be addressing the same issue or feature.
IMPORTANT: The input will contain a line `CURRENT_PR_NUMBER: NNNN`. This is the current PR number; you should not mark the current PR as a duplicate of itself.
Search using keywords from the PR title and description. Try multiple searches with different relevant terms.
If you find potential duplicates:
- List them with their titles and URLs
- Briefly explain why they might be related
If no duplicates are found, say so clearly. BUT ONLY SAY "No duplicate PRs found" (don't say anything else if no dups)
Keep your response concise and actionable.

View File

@@ -1,78 +0,0 @@
---
mode: primary
hidden: true
model: opencode/claude-haiku-4-5
color: "#44BA81"
tools:
"*": false
"github-triage": true
---
You are a triage agent responsible for triaging github issues.
Use your github-triage tool to triage issues.
## Labels
### windows
Use for any issue that mentions Windows (the OS). Be sure they are saying that they are on Windows.
- Use if they mention WSL too
#### perf
Performance-related issues:
- Slow performance
- High RAM usage
- High CPU usage
**Only** add if it's likely a RAM or CPU issue. **Do not** add for LLM slowness.
#### desktop
Desktop app issues:
- `opencode web` command
- The desktop app itself
**Only** add if it's specifically about the Desktop application or `opencode web` view. **Do not** add for terminal, TUI, or general opencode issues.
#### nix
**Only** add if the issue explicitly mentions nix.
#### zen
**Only** add if the issue mentions "zen" or "opencode zen" or "opencode black".
If the issue doesn't have "zen" or "opencode black" in it, then don't add the zen label
#### docs
Add if the issue requests better documentation or docs updates.
#### opentui
TUI issues potentially caused by our underlying TUI library:
- Keybindings not working
- Scroll speed issues (too fast/slow/laggy)
- Screen flickering
- Crashes with opentui in the log
**Do not** add for general TUI bugs.
When assigning issues to people, follow these rules:
adamdotdev:
ONLY assign adam if the issue will have the "desktop" label.
fwang:
ONLY assign fwang if the issue will have the "zen" label.
jayair:
ONLY assign jayair if the issue will have the "docs" label.
In all other cases use best judgment. Avoid assigning to kommander needlessly; when in doubt, assign to rekram1-node.

View File

@@ -1,24 +0,0 @@
---
description: "Bump AI sdk dependencies minor / patch versions only"
---
Please read @package.json and @packages/opencode/package.json.
Your job is to look into AI SDK dependencies and figure out if they have versions that can be upgraded (minor or patch versions ONLY; ignore major version changes).
I want a report of every dependency and the version that can be upgraded to.
What would be even better is if you can give me links to the changelog for each dependency, or at least some reference info so I can see what bugs were fixed or new features were added.
Consider using subagents for each dep to save your context window.
Here is a short list of some deps (please be comprehensive tho):
- "ai"
- "@ai-sdk/openai"
- "@ai-sdk/anthropic"
- "@openrouter/ai-sdk-provider"
- etc, etc
DO NOT upgrade the dependencies yet; just make a list of all dependencies and the minor or patch versions they can be upgraded to.
Write up your findings to ai-sdk-updates.md

View File

@@ -1,28 +0,0 @@
---
description: git commit and push
model: opencode/glm-4.6
subtask: true
---
commit and push
make sure it includes a prefix like
docs:
tui:
core:
ci:
ignore:
wip:
For anything in the packages/web use the docs: prefix.
For anything in the packages/app use the ignore: prefix.
prefer to explain WHY something was done from an end user perspective instead of
WHAT was done.
do not write generic messages like "improved agent experience"; be very specific
about what user-facing changes were made
if there are changes do a git pull --rebase
if there are conflicts DO NOT FIX THEM. notify me and I will fix them

View File

@@ -1,23 +0,0 @@
---
description: "find issue(s) on github"
model: opencode/claude-haiku-4-5
---
Search through existing issues in anomalyco/opencode using the gh cli to find issues matching this query:
$ARGUMENTS
Consider:
1. Similar titles or descriptions
2. Same error messages or symptoms
3. Related functionality or components
4. Similar feature requests
Please list any matching issues with:
- Issue number and title
- Brief explanation of why it matches the query
- Link to the issue
If no clear matches are found, say so.

View File

@@ -1,15 +0,0 @@
---
description: Remove AI code slop
---
Check the diff against dev, and remove all AI generated slop introduced in this branch.
This includes:
- Extra comments that a human wouldn't add or is inconsistent with the rest of the file
- Extra defensive checks or try/catch blocks that are abnormal for that area of the codebase (especially if called by trusted / validated codepaths)
- Casts to any to get around type issues
- Any other style that is inconsistent with the file
- Unnecessary emoji usage
Report at the end with only a 1-3 sentence summary of what you changed

View File

@@ -1,5 +0,0 @@
---
description: spellcheck all markdown file changes
---
Look at all the unstaged changes to markdown (.md, .mdx) files, pull out the lines that have changed, and check for spelling and grammar errors.

4
.opencode/env.d.ts vendored
View File

@@ -1,4 +0,0 @@
declare module "*.txt" {
const content: string
export default content
}

View File

@@ -1,23 +0,0 @@
{
"$schema": "https://opencode.ai/config.json",
// "plugin": ["opencode-openai-codex-auth"],
// "enterprise": {
// "url": "https://enterprise.dev.opencode.ai",
// },
"instructions": ["STYLE_GUIDE.md"],
"provider": {
"opencode": {
"options": {},
},
},
"mcp": {
"context7": {
"type": "remote",
"url": "https://mcp.context7.com/mcp",
},
},
"tools": {
"github-triage": false,
"github-pr-search": false,
},
}

View File

@@ -1,39 +0,0 @@
---
name: bun-file-io
description: Use this when you are working on file operations like reading, writing, scanning, or deleting files. It summarizes the preferred file APIs and patterns used in this repo. It also notes when to use filesystem helpers for directories.
---
## Use this when
- Editing file I/O or scans in `packages/opencode`
- Handling directory operations or external tools
## Bun file APIs (from Bun docs)
- `Bun.file(path)` is lazy; call `text`, `json`, `stream`, `arrayBuffer`, `bytes`, `exists` to read.
- Metadata: `file.size`, `file.type`, `file.name`.
- `Bun.write(dest, input)` writes strings, buffers, Blobs, Responses, or files.
- `Bun.file(...).delete()` deletes a file.
- `file.writer()` returns a FileSink for incremental writes.
- `Bun.Glob` + `Array.fromAsync(glob.scan({ cwd, absolute, onlyFiles, dot }))` for scans.
- Use `Bun.which` to find a binary, then `Bun.spawn` to run it.
- `Bun.readableStreamToText/Bytes/JSON` for stream output.
## When to use node:fs
- Use `node:fs/promises` for directories (`mkdir`, `readdir`, recursive operations).
## Repo patterns
- Prefer Bun APIs over Node `fs` for file access.
- Check `Bun.file(...).exists()` before reading.
- For binary/large files use `arrayBuffer()` and MIME checks via `file.type`.
- Use `Bun.Glob` + `Array.fromAsync` for scans.
- Decode tool stderr with `Bun.readableStreamToText`.
- For large writes, use `Bun.write(Bun.file(path), text)`.
## Quick checklist
- Use Bun APIs first.
- Use `path.join`/`path.resolve` for paths.
- Prefer promise `.catch(...)` over `try/catch` when possible.
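A minimal sketch of the patterns above (assumes Bun 1.1+ and a hypothetical `notes.txt` in the working directory; adjust paths to your use case):
```ts
import path from "node:path"
import { mkdir } from "node:fs/promises"

// Lazy handle; nothing is read until a read method is called
const file = Bun.file(path.join(process.cwd(), "notes.txt"))

// Check existence before reading, per the repo pattern
const text = (await file.exists()) ? await file.text() : ""

// Directories still go through node:fs/promises
await mkdir(path.join(process.cwd(), "out"), { recursive: true })

// Bun.write accepts strings, buffers, Blobs, Responses, or files
await Bun.write(path.join(process.cwd(), "out", "notes-copy.txt"), text)

// Scan with Bun.Glob + Array.fromAsync
const glob = new Bun.Glob("**/*.md")
const found = await Array.fromAsync(glob.scan({ cwd: process.cwd(), onlyFiles: true, dot: false }))
console.log(`${found.length} markdown files`)
```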

View File

@@ -1,223 +0,0 @@
{
"$schema": "https://opencode.ai/theme.json",
"defs": {
"nord0": "#2E3440",
"nord1": "#3B4252",
"nord2": "#434C5E",
"nord3": "#4C566A",
"nord4": "#D8DEE9",
"nord5": "#E5E9F0",
"nord6": "#ECEFF4",
"nord7": "#8FBCBB",
"nord8": "#88C0D0",
"nord9": "#81A1C1",
"nord10": "#5E81AC",
"nord11": "#BF616A",
"nord12": "#D08770",
"nord13": "#EBCB8B",
"nord14": "#A3BE8C",
"nord15": "#B48EAD"
},
"theme": {
"primary": {
"dark": "nord8",
"light": "nord10"
},
"secondary": {
"dark": "nord9",
"light": "nord9"
},
"accent": {
"dark": "nord7",
"light": "nord7"
},
"error": {
"dark": "nord11",
"light": "nord11"
},
"warning": {
"dark": "nord12",
"light": "nord12"
},
"success": {
"dark": "nord14",
"light": "nord14"
},
"info": {
"dark": "nord8",
"light": "nord10"
},
"text": {
"dark": "nord4",
"light": "nord0"
},
"textMuted": {
"dark": "nord3",
"light": "nord1"
},
"background": {
"dark": "nord0",
"light": "nord6"
},
"backgroundPanel": {
"dark": "nord1",
"light": "nord5"
},
"backgroundElement": {
"dark": "nord1",
"light": "nord4"
},
"border": {
"dark": "nord2",
"light": "nord3"
},
"borderActive": {
"dark": "nord3",
"light": "nord2"
},
"borderSubtle": {
"dark": "nord2",
"light": "nord3"
},
"diffAdded": {
"dark": "nord14",
"light": "nord14"
},
"diffRemoved": {
"dark": "nord11",
"light": "nord11"
},
"diffContext": {
"dark": "nord3",
"light": "nord3"
},
"diffHunkHeader": {
"dark": "nord3",
"light": "nord3"
},
"diffHighlightAdded": {
"dark": "nord14",
"light": "nord14"
},
"diffHighlightRemoved": {
"dark": "nord11",
"light": "nord11"
},
"diffAddedBg": {
"dark": "#3B4252",
"light": "#E5E9F0"
},
"diffRemovedBg": {
"dark": "#3B4252",
"light": "#E5E9F0"
},
"diffContextBg": {
"dark": "nord1",
"light": "nord5"
},
"diffLineNumber": {
"dark": "nord2",
"light": "nord4"
},
"diffAddedLineNumberBg": {
"dark": "#3B4252",
"light": "#E5E9F0"
},
"diffRemovedLineNumberBg": {
"dark": "#3B4252",
"light": "#E5E9F0"
},
"markdownText": {
"dark": "nord4",
"light": "nord0"
},
"markdownHeading": {
"dark": "nord8",
"light": "nord10"
},
"markdownLink": {
"dark": "nord9",
"light": "nord9"
},
"markdownLinkText": {
"dark": "nord7",
"light": "nord7"
},
"markdownCode": {
"dark": "nord14",
"light": "nord14"
},
"markdownBlockQuote": {
"dark": "nord3",
"light": "nord3"
},
"markdownEmph": {
"dark": "nord12",
"light": "nord12"
},
"markdownStrong": {
"dark": "nord13",
"light": "nord13"
},
"markdownHorizontalRule": {
"dark": "nord3",
"light": "nord3"
},
"markdownListItem": {
"dark": "nord8",
"light": "nord10"
},
"markdownListEnumeration": {
"dark": "nord7",
"light": "nord7"
},
"markdownImage": {
"dark": "nord9",
"light": "nord9"
},
"markdownImageText": {
"dark": "nord7",
"light": "nord7"
},
"markdownCodeBlock": {
"dark": "nord4",
"light": "nord0"
},
"syntaxComment": {
"dark": "nord3",
"light": "nord3"
},
"syntaxKeyword": {
"dark": "nord9",
"light": "nord9"
},
"syntaxFunction": {
"dark": "nord8",
"light": "nord8"
},
"syntaxVariable": {
"dark": "nord7",
"light": "nord7"
},
"syntaxString": {
"dark": "nord14",
"light": "nord14"
},
"syntaxNumber": {
"dark": "nord15",
"light": "nord15"
},
"syntaxType": {
"dark": "nord7",
"light": "nord7"
},
"syntaxOperator": {
"dark": "nord9",
"light": "nord9"
},
"syntaxPunctuation": {
"dark": "nord4",
"light": "nord0"
}
}
}

View File

@@ -1,57 +0,0 @@
/// <reference path="../env.d.ts" />
import { tool } from "@opencode-ai/plugin"
import DESCRIPTION from "./github-pr-search.txt"
async function githubFetch(endpoint: string, options: RequestInit = {}) {
const response = await fetch(`https://api.github.com${endpoint}`, {
...options,
headers: {
Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
Accept: "application/vnd.github+json",
"Content-Type": "application/json",
...options.headers,
},
})
if (!response.ok) {
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`)
}
return response.json()
}
interface PR {
title: string
html_url: string
}
export default tool({
description: DESCRIPTION,
args: {
query: tool.schema.string().describe("Search query for PR titles and descriptions"),
limit: tool.schema.number().describe("Maximum number of results to return").default(10),
offset: tool.schema.number().describe("Number of results to skip for pagination").default(0),
},
async execute(args) {
const owner = "anomalyco"
const repo = "opencode"
const page = Math.floor(args.offset / args.limit) + 1
const searchQuery = encodeURIComponent(`${args.query} repo:${owner}/${repo} type:pr state:open`)
const result = await githubFetch(
`/search/issues?q=${searchQuery}&per_page=${args.limit}&page=${page}&sort=updated&order=desc`,
)
if (result.total_count === 0) {
return `No PRs found matching "${args.query}"`
}
const prs = result.items as PR[]
if (prs.length === 0) {
return `No other PRs found matching "${args.query}"`
}
const formatted = prs.map((pr) => `${pr.title}\n${pr.html_url}`).join("\n\n")
return `Found ${result.total_count} PRs (showing ${prs.length}):\n\n${formatted}`
},
})

View File

@@ -1,10 +0,0 @@
Use this tool to search GitHub pull requests by title and description.
This tool searches PRs in the sst/opencode repository and returns LLM-friendly results including:
- PR number and title
- Author
- State (open/closed/merged)
- Labels
- Description snippet
Use the query parameter to search for keywords that might appear in PR titles or descriptions.

View File

@@ -1,90 +0,0 @@
/// <reference path="../env.d.ts" />
// import { Octokit } from "@octokit/rest"
import { tool } from "@opencode-ai/plugin"
import DESCRIPTION from "./github-triage.txt"
function getIssueNumber(): number {
const issue = parseInt(process.env.ISSUE_NUMBER ?? "", 10)
if (!issue) throw new Error("ISSUE_NUMBER env var not set")
return issue
}
async function githubFetch(endpoint: string, options: RequestInit = {}) {
const response = await fetch(`https://api.github.com${endpoint}`, {
...options,
headers: {
Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
Accept: "application/vnd.github+json",
"Content-Type": "application/json",
...options.headers,
},
})
if (!response.ok) {
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`)
}
return response.json()
}
export default tool({
description: DESCRIPTION,
args: {
assignee: tool.schema
.enum(["thdxr", "adamdotdevin", "rekram1-node", "fwang", "jayair", "kommander"])
.describe("The username of the assignee")
.default("rekram1-node"),
labels: tool.schema
.array(tool.schema.enum(["nix", "opentui", "perf", "desktop", "zen", "docs", "windows"]))
.describe("The labels(s) to add to the issue")
.default([]),
},
async execute(args) {
const issue = getIssueNumber()
// const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN })
const owner = "anomalyco"
const repo = "opencode"
const results: string[] = []
if (args.assignee === "adamdotdevin" && !args.labels.includes("desktop")) {
throw new Error("Only desktop issues should be assigned to adamdotdevin")
}
if (args.assignee === "fwang" && !args.labels.includes("zen")) {
throw new Error("Only zen issues should be assigned to fwang")
}
if (args.assignee === "kommander" && !args.labels.includes("opentui")) {
throw new Error("Only opentui issues should be assigned to kommander")
}
// await octokit.rest.issues.addAssignees({
// owner,
// repo,
// issue_number: issue,
// assignees: [args.assignee],
// })
await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/assignees`, {
method: "POST",
body: JSON.stringify({ assignees: [args.assignee] }),
})
results.push(`Assigned @${args.assignee} to issue #${issue}`)
const labels: string[] = args.labels.map((label) => (label === "desktop" ? "web" : label))
if (labels.length > 0) {
// await octokit.rest.issues.addLabels({
// owner,
// repo,
// issue_number: issue,
// labels,
// })
await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/labels`, {
method: "POST",
body: JSON.stringify({ labels }),
})
results.push(`Added labels: ${args.labels.join(", ")}`)
}
return results.join("\n")
},
})

View File

@@ -1,88 +0,0 @@
Use this tool to assign and/or label a GitHub issue.
You can assign the following users:
- thdxr
- adamdotdevin
- fwang
- jayair
- kommander
- rekram1-node
You can use the following labels:
- nix
- opentui
- perf
- web
- zen
- docs
Always try to assign an issue; if in doubt, assign rekram1-node to it.
## Breakdown of responsibilities:
### thdxr
Dax is responsible for managing core parts of the application. For large feature requests, API changes, or anything that requires significant changes to the codebase, assign him.
This relates primarily to the OpenCode server but overlaps with just about everything.
### adamdotdevin
Adam is responsible for managing the Desktop/Web app. If there is an issue relating to the desktop app or the `opencode web` command, assign him.
### fwang
Frank is responsible for managing Zen. If you see complaints about OpenCode Zen (the dashboard, model quality, billing issues, etc.), assign him to the issue.
### jayair
Jay is responsible for documentation. If there is an issue relating to documentation, assign him.
### kommander
Sebastian is responsible for managing OpenTUI (a library for building terminal user interfaces). OpenCode's TUI is built with OpenTUI. If there are issues about:
- random characters on screen
- keybinds not working on different terminals
- general terminal stuff
Then assign the issue to him.
### rekram1-node
ALL BUGS SHOULD BE assigned to rekram1-node unless they have the `opentui` label.
Assign Aiden to an issue as a catch-all if you can't assign anyone else. Most of the time this will be bugs/polish things.
If no one else makes sense to assign, assign rekram1-node to it.
Always assign to Aiden if the issue mentions "acp", "zed", or model performance issues.
## Breakdown of Labels:
### nix
Any issue that mentions nix or nixos should have the nix label
### opentui
Anything relating to the TUI itself should have an opentui label
### perf
Anything related to slow performance, high ram, high cpu usage, or any other performance related issue should have a perf label
### desktop
Anything related to `opencode web` command or the desktop app should have a desktop label. Never add this label for anything terminal/tui related
### zen
Anything related to OpenCode Zen, billing, or model quality from Zen should have a zen label
### docs
Anything related to the documentation should have a docs label
### windows
Use for any issue that involves the windows OS

View File

@@ -1 +0,0 @@
sst-env.d.ts

View File

@@ -1,11 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"type": "bun",
"request": "attach",
"name": "opencode (attach)",
"url": "ws://localhost:6499/"
}
]
}

View File

@@ -1,5 +0,0 @@
{
"recommendations": [
"oven.bun-vscode"
]
}

View File

@@ -1,4 +1,12 @@
- To test opencode in `packages/opencode`, run `bun dev`.
- To regenerate the JavaScript SDK, run `./packages/sdk/js/script/build.ts`.
- ALWAYS USE PARALLEL TOOLS WHEN APPLICABLE.
- The default branch in this repo is `dev`.
## IMPORTANT
- Try to keep things in one function unless composable or reusable
- DO NOT do unnecessary destructuring of variables
- DO NOT use `else` statements unless necessary
- DO NOT use `try`/`catch` if it can be avoided
- AVOID `try`/`catch` where possible
- AVOID `else` statements
- AVOID using `any` type
- AVOID `let` statements
- PREFER single word variable names where possible
- Use as many bun apis as possible like Bun.file()

View File

@@ -1,260 +0,0 @@
# Contributing to OpenCode
We want to make it easy for you to contribute to OpenCode. Here are the most common types of changes that get merged:
- Bug fixes
- Additional LSPs / Formatters
- Improvements to LLM performance
- Support for new providers
- Fixes for environment-specific quirks
- Missing standard behavior
- Documentation improvements
However, any UI or core product feature must go through a design review with the core team before implementation.
If you are unsure if a PR would be accepted, feel free to ask a maintainer or look for issues with any of the following labels:
- [`help wanted`](https://github.com/anomalyco/opencode/issues?q=is%3Aissue%20state%3Aopen%20label%3Ahelp-wanted)
- [`good first issue`](https://github.com/anomalyco/opencode/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22good%20first%20issue%22)
- [`bug`](https://github.com/anomalyco/opencode/issues?q=is%3Aissue%20state%3Aopen%20label%3Abug)
- [`perf`](https://github.com/anomalyco/opencode/issues?q=is%3Aopen%20is%3Aissue%20label%3A%22perf%22)
> [!NOTE]
> PRs that ignore these guardrails will likely be closed.
Want to take on an issue? Leave a comment and a maintainer may assign it to you unless it is something we are already working on.
## Developing OpenCode
- Requirements: Bun 1.3+
- Install dependencies and start the dev server from the repo root:
```bash
bun install
bun dev
```
### Running against a different directory
By default, `bun dev` runs OpenCode in the `packages/opencode` directory. To run it against a different directory or repository:
```bash
bun dev <directory>
```
To run OpenCode in the root of the opencode repo itself:
```bash
bun dev .
```
### Building a "localcode"
To compile a standalone executable:
```bash
./packages/opencode/script/build.ts --single
```
Then run it with:
```bash
./packages/opencode/dist/opencode-<platform>/bin/opencode
```
Replace `<platform>` with your platform (e.g., `darwin-arm64`, `linux-x64`).
- Core pieces:
- `packages/opencode`: OpenCode core business logic & server.
- `packages/opencode/src/cli/cmd/tui/`: The TUI code, written in SolidJS with [opentui](https://github.com/sst/opentui)
- `packages/app`: The shared web UI components, written in SolidJS
- `packages/desktop`: The native desktop app, built with Tauri (wraps `packages/app`)
- `packages/plugin`: Source for `@opencode-ai/plugin`
### Understanding bun dev vs opencode
During development, `bun dev` is the local equivalent of the built `opencode` command. Both run the same CLI interface:
```bash
# Development (from project root)
bun dev --help # Show all available commands
bun dev serve # Start headless API server
bun dev web # Start server + open web interface
bun dev <directory> # Start TUI in specific directory
# Production
opencode --help # Show all available commands
opencode serve # Start headless API server
opencode web # Start server + open web interface
opencode <directory> # Start TUI in specific directory
```
### Running the API Server
To start the OpenCode headless API server:
```bash
bun dev serve
```
This starts the headless server on port 4096 by default. You can specify a different port:
```bash
bun dev serve --port 8080
```
### Running the Web App
To test UI changes during development:
1. **First, start the OpenCode server** (see [Running the API Server](#running-the-api-server) section above)
2. **Then run the web app:**
```bash
bun run --cwd packages/app dev
```
This starts a local dev server at http://localhost:5173 (or similar port shown in output). Most UI changes can be tested here, but the server must be running for full functionality.
### Running the Desktop App
The desktop app is a native Tauri application that wraps the web UI.
To run the native desktop app:
```bash
bun run --cwd packages/desktop tauri dev
```
This starts the web dev server on http://localhost:1420 and opens the native window.
If you only want the web dev server (no native shell):
```bash
bun run --cwd packages/desktop dev
```
To create a production `dist/` and build the native app bundle:
```bash
bun run --cwd packages/desktop tauri build
```
This runs `bun run --cwd packages/desktop build` automatically via Tauri's `beforeBuildCommand`.
> [!NOTE]
> Running the desktop app requires additional Tauri dependencies (Rust toolchain, platform-specific libraries). See the [Tauri prerequisites](https://v2.tauri.app/start/prerequisites/) for setup instructions.
> [!NOTE]
> If you make changes to the API or SDK (e.g. `packages/opencode/src/server/server.ts`), run `./script/generate.ts` to regenerate the SDK and related files.
Please try to follow the [style guide](./STYLE_GUIDE.md)
### Setting up a Debugger
Bun debugging is currently rough around the edges. We hope this guide helps you get set up and avoid some pain points.
The most reliable way to debug OpenCode is to run it manually in a terminal via `bun run --inspect=<url> dev ...` and attach
your debugger via that URL. Other methods can result in breakpoints being mapped incorrectly, at least in VSCode (YMMV).
Caveats:
- If you want to run the OpenCode TUI and have breakpoints triggered in the server code, you might need to run `bun dev spawn` instead of
the usual `bun dev`. This is because `bun dev` runs the server in a worker thread and breakpoints might not work there.
- If `spawn` does not work for you, you can debug the server separately:
- Debug server: `bun run --inspect=ws://localhost:6499/ --cwd packages/opencode ./src/index.ts serve --port 4096`,
then attach TUI with `opencode attach http://localhost:4096`
- Debug TUI: `bun run --inspect=ws://localhost:6499/ --cwd packages/opencode --conditions=browser ./src/index.ts`
Other tips and tricks:
- You might want to use `--inspect-wait` or `--inspect-brk` instead of `--inspect`, depending on your workflow
- Specifying `--inspect=ws://localhost:6499/` on every invocation can be tiresome; you may want to `export BUN_OPTIONS=--inspect=ws://localhost:6499/` instead
#### VSCode Setup
If you use VSCode, you can use our example configurations [.vscode/settings.example.json](.vscode/settings.example.json) and [.vscode/launch.example.json](.vscode/launch.example.json).
Some debug methods that can be problematic:
- Debug configurations with `"request": "launch"` can have breakpoints mapped incorrectly, making them unusable
- The same problem arises when running OpenCode in the VSCode `JavaScript Debug Terminal`
With that said, you may want to try these methods, as they might work for you.
## Pull Request Expectations
### Issue First Policy
**All PRs must reference an existing issue.** Before opening a PR, open an issue describing the bug or feature. This helps maintainers triage and prevents duplicate work. PRs without a linked issue may be closed without review.
- Use `Fixes #123` or `Closes #123` in your PR description to link the issue
- For small fixes, a brief issue is fine - just enough context for maintainers to understand the problem
### General Requirements
- Keep pull requests small and focused
- Explain the issue and why your change fixes it
- Before adding new functionality, ensure it doesn't already exist elsewhere in the codebase
### UI Changes
If your PR includes UI changes, please include screenshots or videos showing the before and after. This helps maintainers review faster and gives you quicker feedback.
### Logic Changes
For non-UI changes (bug fixes, new features, refactors), explain **how you verified it works**:
- What did you test?
- How can a reviewer reproduce/confirm the fix?
### No AI-Generated Walls of Text
Long, AI-generated PR descriptions and issues are not acceptable and may be ignored. Respect the maintainers' time:
- Write short, focused descriptions
- Explain what changed and why in your own words
- If you can't explain it briefly, your PR might be too large
### PR Titles
PR titles should follow conventional commit standards:
- `feat:` new feature or functionality
- `fix:` bug fix
- `docs:` documentation or README changes
- `chore:` maintenance tasks, dependency updates, etc.
- `refactor:` code refactoring without changing behavior
- `test:` adding or updating tests
You can optionally include a scope to indicate which package is affected:
- `feat(app):` feature in the app package
- `fix(desktop):` bug fix in the desktop package
- `chore(opencode):` maintenance in the opencode package
Examples:
- `docs: update contributing guidelines`
- `fix: resolve crash on startup`
- `feat: add dark mode support`
- `feat(app): add dark mode support`
- `fix(desktop): resolve crash on startup`
- `chore: bump dependency versions`
### Style Preferences
These are not strictly enforced; they are just general guidelines (a small sketch of the error-handling style follows this list):
- **Functions:** Keep logic within a single function unless breaking it out adds clear reuse or composition benefits.
- **Destructuring:** Do not do unnecessary destructuring of variables.
- **Control flow:** Avoid `else` statements.
- **Error handling:** Prefer `.catch(...)` instead of `try`/`catch` when possible.
- **Types:** Reach for precise types and avoid `any`.
- **Variables:** Stick to immutable patterns and avoid `let`.
- **Naming:** Choose concise single-word identifiers when they remain descriptive.
- **Runtime APIs:** Use Bun helpers such as `Bun.file()` when they fit the use case.
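A minimal sketch of the error-handling and runtime API preferences (the config path and fallback value are just illustrative):
```ts
// Prefer .catch over try/catch; fall back to an empty object if the file is missing or invalid
const config = await Bun.file("opencode.json")
  .json()
  .catch(() => ({}))
```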
## Feature Requests
For net-new functionality, start with a design conversation. Open an issue describing the problem, your proposed approach (optional), and why it belongs in OpenCode. The core team will help decide whether it should move forward; please wait for that approval instead of opening a feature PR directly.

105
README.md
View File

@@ -1,20 +1,20 @@
<p align="center">
<a href="https://opencode.ai">
<picture>
<source srcset="packages/console/app/src/asset/logo-ornate-dark.svg" media="(prefers-color-scheme: dark)">
<source srcset="packages/console/app/src/asset/logo-ornate-light.svg" media="(prefers-color-scheme: light)">
<img src="packages/console/app/src/asset/logo-ornate-light.svg" alt="OpenCode logo">
<source srcset="packages/web/src/assets/logo-ornate-dark.svg" media="(prefers-color-scheme: dark)">
<source srcset="packages/web/src/assets/logo-ornate-light.svg" media="(prefers-color-scheme: light)">
<img src="packages/web/src/assets/logo-ornate-light.svg" alt="opencode logo">
</picture>
</a>
</p>
<p align="center">The open source AI coding agent.</p>
<p align="center">AI coding agent, built for the terminal.</p>
<p align="center">
<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
<a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
<a href="https://github.com/anomalyco/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/anomalyco/opencode/publish.yml?style=flat-square&branch=dev" /></a>
<a href="https://github.com/sst/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/sst/opencode/publish.yml?style=flat-square&branch=dev" /></a>
</p>
[![OpenCode Terminal UI](packages/web/src/assets/lander/screenshot.png)](https://opencode.ai)
[![opencode Terminal UI](packages/web/src/assets/lander/screenshot.png)](https://opencode.ai)
---
@@ -26,36 +26,13 @@ curl -fsSL https://opencode.ai/install | bash
# Package managers
npm i -g opencode-ai@latest # or bun/pnpm/yarn
scoop install opencode # Windows
choco install opencode # Windows
brew install anomalyco/tap/opencode # macOS and Linux (recommended, always up to date)
brew install opencode # macOS and Linux (official brew formula, updated less)
brew install sst/tap/opencode # macOS and Linux
paru -S opencode-bin # Arch Linux
mise use -g opencode # Any OS
nix run nixpkgs#opencode # or github:anomalyco/opencode for latest dev branch
```
> [!TIP]
> Remove versions older than 0.1.x before installing.
### Desktop App (BETA)
OpenCode is also available as a desktop application. Download directly from the [releases page](https://github.com/anomalyco/opencode/releases) or [opencode.ai/download](https://opencode.ai/download).
| Platform | Download |
| --------------------- | ------------------------------------- |
| macOS (Apple Silicon) | `opencode-desktop-darwin-aarch64.dmg` |
| macOS (Intel) | `opencode-desktop-darwin-x64.dmg` |
| Windows | `opencode-desktop-windows-x64.exe` |
| Linux | `.deb`, `.rpm`, or AppImage |
```bash
# macOS (Homebrew)
brew install --cask opencode-desktop
# Windows (Scoop)
scoop bucket add extras; scoop install extras/opencode-desktop
```
#### Installation Directory
The install script respects the following priority order for the installation path:
@@ -71,45 +48,63 @@ OPENCODE_INSTALL_DIR=/usr/local/bin curl -fsSL https://opencode.ai/install | bas
XDG_BIN_DIR=$HOME/.local/bin curl -fsSL https://opencode.ai/install | bash
```
### Agents
OpenCode includes two built-in agents you can switch between with the `Tab` key.
- **build** - Default, full access agent for development work
- **plan** - Read-only agent for analysis and code exploration
- Denies file edits by default
- Asks permission before running bash commands
- Ideal for exploring unfamiliar codebases or planning changes
Also included is a **general** subagent for complex searches and multistep tasks.
This is used internally and can be invoked using `@general` in messages.
Learn more about [agents](https://opencode.ai/docs/agents).
### Documentation
For more info on how to configure OpenCode [**head over to our docs**](https://opencode.ai/docs).
For more info on how to configure opencode [**head over to our docs**](https://opencode.ai/docs).
### Contributing
If you're interested in contributing to OpenCode, please read our [contributing docs](./CONTRIBUTING.md) before submitting a pull request.
opencode is an opinionated tool so any fundamental feature needs to go through a
design process with the core team.
### Building on OpenCode
> [!IMPORTANT]
> We do not accept PRs for core features.
If you are working on a project that's related to OpenCode and uses "opencode" as part of its name (for example, "opencode-dashboard" or "opencode-mobile"), please add a note to your README to clarify that it is not built by the OpenCode team and is not affiliated with us in any way.
However we still merge a ton of PRs - you can contribute:
- Bug fixes
- Improvements to LLM performance
- Support for new providers
- Fixes for env specific quirks
- Missing standard behavior
- Documentation
Take a look at the git history to see what kind of PRs we end up merging.
> [!NOTE]
> If you do not follow the above guidelines we might close your PR.
To run opencode locally you need:
- Bun
- Golang 1.24.x
And run:
```bash
$ bun install
$ bun dev
```
#### Development Notes
**API Client**: After making changes to the TypeScript API endpoints in `packages/opencode/src/server/server.ts`, you will need the opencode team to generate a new stainless sdk for the clients.
### FAQ
#### How is this different from Claude Code?
#### How is this different than Claude Code?
It's very similar to Claude Code in terms of capability. Here are the key differences:
- 100% open source
- Not coupled to any provider. Although we recommend the models we provide through [OpenCode Zen](https://opencode.ai/zen), OpenCode can be used with Claude, OpenAI, Google, or even local models. As models evolve the gaps between them will close and pricing will drop so being provider-agnostic is important.
- Out of the box LSP support
- A focus on TUI. OpenCode is built by neovim users and the creators of [terminal.shop](https://terminal.shop); we are going to push the limits of what's possible in the terminal.
- A client/server architecture. This, for example, can allow OpenCode to run on your computer while you drive it remotely from a mobile app. This means the TUI frontend is just one of the possible clients.
- Not coupled to any provider. Although Anthropic is recommended, opencode can be used with OpenAI, Google or even local models. As models evolve the gaps between them will close and pricing will drop so being provider-agnostic is important.
- A focus on TUI. opencode is built by neovim users and the creators of [terminal.shop](https://terminal.shop); we are going to push the limits of what's possible in the terminal.
- A client/server architecture. This for example can allow opencode to run on your computer, while you can drive it remotely from a mobile app. Meaning that the TUI frontend is just one of the possible clients.
#### What's the other repo?
The other confusingly named repo has no relation to this one. You can [read the story behind it here](https://x.com/thdxr/status/1933561254481666466).
---
**Join our community** [Discord](https://discord.gg/opencode) | [X.com](https://x.com/opencode)
**Join our community** [Discord](https://discord.gg/opencode) | [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)

View File

@@ -1,118 +0,0 @@
<p align="center">
<a href="https://opencode.ai">
<picture>
<source srcset="packages/console/app/src/asset/logo-ornate-dark.svg" media="(prefers-color-scheme: dark)">
<source srcset="packages/console/app/src/asset/logo-ornate-light.svg" media="(prefers-color-scheme: light)">
<img src="packages/console/app/src/asset/logo-ornate-light.svg" alt="OpenCode logo">
</picture>
</a>
</p>
<p align="center">开源的 AI Coding Agent。</p>
<p align="center">
<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
<a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
<a href="https://github.com/anomalyco/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/anomalyco/opencode/publish.yml?style=flat-square&branch=dev" /></a>
</p>
[![OpenCode Terminal UI](packages/web/src/assets/lander/screenshot.png)](https://opencode.ai)
---
### 安装
```bash
# 直接安装 (YOLO)
curl -fsSL https://opencode.ai/install | bash
# 软件包管理器
npm i -g opencode-ai@latest # 也可使用 bun/pnpm/yarn
scoop install opencode # Windows
choco install opencode # Windows
brew install anomalyco/tap/opencode # macOS 和 Linux推荐始终保持最新
brew install opencode # macOS 和 Linux官方 brew formula更新频率较低
paru -S opencode-bin # Arch Linux
mise use -g opencode # 任意系统
nix run nixpkgs#opencode # 或用 github:anomalyco/opencode 获取最新 dev 分支
```
> [!TIP]
> 安装前请先移除 0.1.x 之前的旧版本。
### 桌面应用程序 (BETA)
OpenCode 也提供桌面版应用。可直接从 [发布页 (releases page)](https://github.com/anomalyco/opencode/releases) 或 [opencode.ai/download](https://opencode.ai/download) 下载。
| 平台 | 下载文件 |
| --------------------- | ------------------------------------- |
| macOS (Apple Silicon) | `opencode-desktop-darwin-aarch64.dmg` |
| macOS (Intel) | `opencode-desktop-darwin-x64.dmg` |
| Windows | `opencode-desktop-windows-x64.exe` |
| Linux | `.deb`、`.rpm` 或 AppImage |
```bash
# macOS (Homebrew Cask)
brew install --cask opencode-desktop
# Windows (Scoop)
scoop bucket add extras; scoop install extras/opencode-desktop
```
#### 安装目录
安装脚本按照以下优先级决定安装路径:
1. `$OPENCODE_INSTALL_DIR` - 自定义安装目录
2. `$XDG_BIN_DIR` - 符合 XDG 基础目录规范的路径
3. `$HOME/bin` - 如果存在或可创建的用户二进制目录
4. `$HOME/.opencode/bin` - 默认备用路径
```bash
# 示例
OPENCODE_INSTALL_DIR=/usr/local/bin curl -fsSL https://opencode.ai/install | bash
XDG_BIN_DIR=$HOME/.local/bin curl -fsSL https://opencode.ai/install | bash
```
### Agents
OpenCode 内置两种 Agent可用 `Tab` 键快速切换:
- **build** - 默认模式,具备完整权限,适合开发工作
- **plan** - 只读模式,适合代码分析与探索
- 默认拒绝修改文件
- 运行 bash 命令前会询问
- 便于探索未知代码库或规划改动
另外还包含一个 **general** 子 Agent用于复杂搜索和多步任务内部使用也可在消息中输入 `@general` 调用。
了解更多 [Agents](https://opencode.ai/docs/agents) 相关信息。
### 文档
更多配置说明请查看我们的 [**官方文档**](https://opencode.ai/docs)。
### 参与贡献
如有兴趣贡献代码,请在提交 PR 前阅读 [贡献指南 (Contributing Docs)](./CONTRIBUTING.md)。
### 基于 OpenCode 进行开发
如果你在项目名中使用了 “opencode”如 “opencode-dashboard” 或 “opencode-mobile”请在 README 里注明该项目不是 OpenCode 团队官方开发,且不存在隶属关系。
### 常见问题 (FAQ)
#### 这和 Claude Code 有什么不同?
功能上很相似,关键差异:
- 100% 开源。
- 不绑定特定提供商。推荐使用 [OpenCode Zen](https://opencode.ai/zen) 的模型,但也可搭配 Claude、OpenAI、Google 甚至本地模型。模型迭代会缩小差异、降低成本,因此保持 provider-agnostic 很重要。
- 内置 LSP 支持。
- 聚焦终端界面 (TUI)。OpenCode 由 Neovim 爱好者和 [terminal.shop](https://terminal.shop) 的创建者打造,会持续探索终端的极限。
- 客户端/服务器架构。可在本机运行同时用移动设备远程驱动。TUI 只是众多潜在客户端之一。
#### 另一个同名的仓库是什么?
另一个名字相近的仓库与本项目无关。[点击这里了解背后故事](https://x.com/thdxr/status/1933561254481666466)。
---
**加入我们的社区** [Discord](https://discord.gg/opencode) | [X.com](https://x.com/opencode)

View File

@@ -1,118 +0,0 @@
<p align="center">
<a href="https://opencode.ai">
<picture>
<source srcset="packages/console/app/src/asset/logo-ornate-dark.svg" media="(prefers-color-scheme: dark)">
<source srcset="packages/console/app/src/asset/logo-ornate-light.svg" media="(prefers-color-scheme: light)">
<img src="packages/console/app/src/asset/logo-ornate-light.svg" alt="OpenCode logo">
</picture>
</a>
</p>
<p align="center">開源的 AI Coding Agent。</p>
<p align="center">
<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
<a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
<a href="https://github.com/anomalyco/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/anomalyco/opencode/publish.yml?style=flat-square&branch=dev" /></a>
</p>
[![OpenCode Terminal UI](packages/web/src/assets/lander/screenshot.png)](https://opencode.ai)
---
### 安裝
```bash
# 直接安裝 (YOLO)
curl -fsSL https://opencode.ai/install | bash
# 套件管理員
npm i -g opencode-ai@latest # 也可使用 bun/pnpm/yarn
scoop install opencode # Windows
choco install opencode # Windows
brew install anomalyco/tap/opencode # macOS 與 Linux推薦始終保持最新
brew install opencode # macOS 與 Linux官方 brew formula更新頻率較低
paru -S opencode-bin # Arch Linux
mise use -g github:anomalyco/opencode # 任何作業系統
nix run nixpkgs#opencode # 或使用 github:anomalyco/opencode 以取得最新開發分支
```
> [!TIP]
> 安裝前請先移除 0.1.x 以前的舊版本。
### 桌面應用程式 (BETA)
OpenCode 也提供桌面版應用程式。您可以直接從 [發佈頁面 (releases page)](https://github.com/anomalyco/opencode/releases) 或 [opencode.ai/download](https://opencode.ai/download) 下載。
| 平台 | 下載連結 |
| --------------------- | ------------------------------------- |
| macOS (Apple Silicon) | `opencode-desktop-darwin-aarch64.dmg` |
| macOS (Intel) | `opencode-desktop-darwin-x64.dmg` |
| Windows | `opencode-desktop-windows-x64.exe` |
| Linux | `.deb`, `.rpm`, 或 AppImage |
```bash
# macOS (Homebrew Cask)
brew install --cask opencode-desktop
# Windows (Scoop)
scoop bucket add extras; scoop install extras/opencode-desktop
```
#### 安裝目錄
安裝腳本會依據以下優先順序決定安裝路徑:
1. `$OPENCODE_INSTALL_DIR` - 自定義安裝目錄
2. `$XDG_BIN_DIR` - 符合 XDG 基礎目錄規範的路徑
3. `$HOME/bin` - 標準使用者執行檔目錄 (若存在或可建立)
4. `$HOME/.opencode/bin` - 預設備用路徑
```bash
# 範例
OPENCODE_INSTALL_DIR=/usr/local/bin curl -fsSL https://opencode.ai/install | bash
XDG_BIN_DIR=$HOME/.local/bin curl -fsSL https://opencode.ai/install | bash
```
### Agents
OpenCode 內建了兩種 Agent您可以使用 `Tab` 鍵快速切換。
- **build** - 預設模式,具備完整權限的 Agent適用於開發工作。
- **plan** - 唯讀模式,適用於程式碼分析與探索。
- 預設禁止修改檔案。
- 執行 bash 指令前會詢問權限。
- 非常適合用來探索陌生的程式碼庫或規劃變更。
此外OpenCode 還包含一個 **general** 子 Agent用於處理複雜搜尋與多步驟任務。此 Agent 供系統內部使用,亦可透過在訊息中輸入 `@general` 來呼叫。
了解更多關於 [Agents](https://opencode.ai/docs/agents) 的資訊。
### 線上文件
關於如何設定 OpenCode 的詳細資訊,請參閱我們的 [**官方文件**](https://opencode.ai/docs)。
### 參與貢獻
如果您有興趣參與 OpenCode 的開發,請在提交 Pull Request 前先閱讀我們的 [貢獻指南 (Contributing Docs)](./CONTRIBUTING.md)。
### 基於 OpenCode 進行開發
如果您正在開發與 OpenCode 相關的專案,並在名稱中使用了 "opencode"(例如 "opencode-dashboard" 或 "opencode-mobile"),請在您的 README 中加入聲明,說明該專案並非由 OpenCode 團隊開發,且與我們沒有任何隸屬關係。
### 常見問題 (FAQ)
#### 這跟 Claude Code 有什麼不同?
在功能面上與 Claude Code 非常相似。以下是關鍵差異:
- 100% 開源。
- 不綁定特定的服務提供商。雖然我們推薦使用透過 [OpenCode Zen](https://opencode.ai/zen) 提供的模型,但 OpenCode 也可搭配 Claude, OpenAI, Google 甚至本地模型使用。隨著模型不斷演進,彼此間的差距會縮小且價格會下降,因此具備「不限廠商 (provider-agnostic)」的特性至關重要。
- 內建 LSP (語言伺服器協定) 支援。
- 專注於終端機介面 (TUI)。OpenCode 由 Neovim 愛好者與 [terminal.shop](https://terminal.shop) 的創作者打造;我們將不斷挑戰終端機介面的極限。
- 客戶端/伺服器架構 (Client/Server Architecture)。這讓 OpenCode 能夠在您的電腦上運行的同時,由行動裝置進行遠端操控。這意味著 TUI 前端只是眾多可能的客戶端之一。
#### 另一個同名的 Repo 是什麼?
另一個名稱相近的儲存庫與本專案無關。您可以點此[閱讀背後的故事](https://x.com/thdxr/status/1933561254481666466)。
---
**加入我們的社群** [Discord](https://discord.gg/opencode) | [X.com](https://x.com/opencode)

View File

@@ -1,41 +0,0 @@
# Security
## Threat Model
### Overview
OpenCode is an AI-powered coding assistant that runs locally on your machine. It provides an agent system with access to powerful tools including shell execution, file operations, and web access.
### No Sandbox
OpenCode does **not** sandbox the agent. The permission system exists as a UX feature to help users stay aware of what actions the agent is taking - it prompts for confirmation before executing commands, writing files, etc. However, it is not designed to provide security isolation.
If you need true isolation, run OpenCode inside a Docker container or VM.
### Server Mode
Server mode is opt-in only. When enabled, set `OPENCODE_SERVER_PASSWORD` to require HTTP Basic Auth. Without this, the server runs unauthenticated (with a warning). It is the end user's responsibility to secure the server - any functionality it provides is not a vulnerability.
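For example, a minimal sketch of opting in with authentication (assuming the `serve` command described in the contributing guide; flags may differ for your setup):
```bash
# Require HTTP Basic Auth when running the headless server
OPENCODE_SERVER_PASSWORD=change-me opencode serve --port 4096
```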
### Out of Scope
| Category | Rationale |
| ------------------------------- | ----------------------------------------------------------------------- |
| **Server access when opted-in** | If you enable server mode, API access is expected behavior |
| **Sandbox escapes** | The permission system is not a sandbox (see above) |
| **LLM provider data handling** | Data sent to your configured LLM provider is governed by their policies |
| **MCP server behavior** | External MCP servers you configure are outside our trust boundary |
| **Malicious config files** | Users control their own config; modifying it is not an attack vector |
---
# Reporting Security Issues
We appreciate your efforts to responsibly disclose your findings, and will make every effort to acknowledge your contributions.
To report a security issue, please use the GitHub Security Advisory ["Report a Vulnerability"](https://github.com/anomalyco/opencode/security/advisories/new) tab.
The team will send a response indicating the next steps in handling your report. After the initial reply to your report, the security team will keep you informed of the progress towards a fix and full announcement, and may ask for additional information or guidance.
## Escalation
If you do not receive an acknowledgement of your report within 6 business days, you may send an email to security@anoma.ly

253
STATS.md
View File

@@ -1,211 +1,46 @@
# Download Stats
| Date | GitHub Downloads | npm Downloads | Total |
| ---------- | -------------------- | -------------------- | -------------------- |
| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
| 2025-07-09 | 40,924 (+2,872) | 67,935 (+3,467) | 108,859 (+6,339) |
| 2025-07-10 | 43,796 (+2,872) | 71,402 (+3,467) | 115,198 (+6,339) |
| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |
| 2025-07-12 | 49,302 (+2,320) | 82,177 (+4,715) | 131,479 (+7,035) |
| 2025-07-13 | 50,803 (+1,501) | 86,394 (+4,217) | 137,197 (+5,718) |
| 2025-07-14 | 53,283 (+2,480) | 87,860 (+1,466) | 141,143 (+3,946) |
| 2025-07-15 | 57,590 (+4,307) | 91,036 (+3,176) | 148,626 (+7,483) |
| 2025-07-16 | 62,313 (+4,723) | 95,258 (+4,222) | 157,571 (+8,945) |
| 2025-07-17 | 66,684 (+4,371) | 100,048 (+4,790) | 166,732 (+9,161) |
| 2025-07-18 | 70,379 (+3,695) | 102,587 (+2,539) | 172,966 (+6,234) |
| 2025-07-19 | 73,497 (+3,117) | 105,904 (+3,317) | 179,401 (+6,434) |
| 2025-07-20 | 76,453 (+2,956) | 109,044 (+3,140) | 185,497 (+6,096) |
| 2025-07-21 | 80,197 (+3,744) | 113,537 (+4,493) | 193,734 (+8,237) |
| 2025-07-22 | 84,251 (+4,054) | 118,073 (+4,536) | 202,324 (+8,590) |
| 2025-07-23 | 88,589 (+4,338) | 121,436 (+3,363) | 210,025 (+7,701) |
| 2025-07-24 | 92,469 (+3,880) | 124,091 (+2,655) | 216,560 (+6,535) |
| 2025-07-25 | 96,417 (+3,948) | 126,985 (+2,894) | 223,402 (+6,842) |
| 2025-07-26 | 100,646 (+4,229) | 131,411 (+4,426) | 232,057 (+8,655) |
| 2025-07-27 | 102,644 (+1,998) | 134,736 (+3,325) | 237,380 (+5,323) |
| 2025-07-28 | 105,446 (+2,802) | 136,016 (+1,280) | 241,462 (+4,082) |
| 2025-07-29 | 108,998 (+3,552) | 137,542 (+1,526) | 246,540 (+5,078) |
| 2025-07-30 | 113,544 (+4,546) | 140,317 (+2,775) | 253,861 (+7,321) |
| 2025-07-31 | 118,339 (+4,795) | 143,344 (+3,027) | 261,683 (+7,822) |
| 2025-08-01 | 123,539 (+5,200) | 146,680 (+3,336) | 270,219 (+8,536) |
| 2025-08-02 | 127,864 (+4,325) | 149,236 (+2,556) | 277,100 (+6,881) |
| 2025-08-03 | 131,397 (+3,533) | 150,451 (+1,215) | 281,848 (+4,748) |
| 2025-08-04 | 136,266 (+4,869) | 153,260 (+2,809) | 289,526 (+7,678) |
| 2025-08-05 | 141,596 (+5,330) | 155,752 (+2,492) | 297,348 (+7,822) |
| 2025-08-06 | 147,067 (+5,471) | 158,309 (+2,557) | 305,376 (+8,028) |
| 2025-08-07 | 152,591 (+5,524) | 160,889 (+2,580) | 313,480 (+8,104) |
| 2025-08-08 | 158,187 (+5,596) | 163,448 (+2,559) | 321,635 (+8,155) |
| 2025-08-09 | 162,770 (+4,583) | 165,721 (+2,273) | 328,491 (+6,856) |
| 2025-08-10 | 165,695 (+2,925) | 167,109 (+1,388) | 332,804 (+4,313) |
| 2025-08-11 | 169,297 (+3,602) | 167,953 (+844) | 337,250 (+4,446) |
| 2025-08-12 | 176,307 (+7,010) | 171,876 (+3,923) | 348,183 (+10,933) |
| 2025-08-13 | 182,997 (+6,690) | 177,182 (+5,306) | 360,179 (+11,996) |
| 2025-08-14 | 189,063 (+6,066) | 179,741 (+2,559) | 368,804 (+8,625) |
| 2025-08-15 | 193,608 (+4,545) | 181,792 (+2,051) | 375,400 (+6,596) |
| 2025-08-16 | 198,118 (+4,510) | 184,558 (+2,766) | 382,676 (+7,276) |
| 2025-08-17 | 201,299 (+3,181) | 186,269 (+1,711) | 387,568 (+4,892) |
| 2025-08-18 | 204,559 (+3,260) | 187,399 (+1,130) | 391,958 (+4,390) |
| 2025-08-19 | 209,814 (+5,255) | 189,668 (+2,269) | 399,482 (+7,524) |
| 2025-08-20 | 214,497 (+4,683) | 191,481 (+1,813) | 405,978 (+6,496) |
| 2025-08-21 | 220,465 (+5,968) | 194,784 (+3,303) | 415,249 (+9,271) |
| 2025-08-22 | 225,899 (+5,434) | 197,204 (+2,420) | 423,103 (+7,854) |
| 2025-08-23 | 229,005 (+3,106) | 199,238 (+2,034) | 428,243 (+5,140) |
| 2025-08-24 | 232,098 (+3,093) | 201,157 (+1,919) | 433,255 (+5,012) |
| 2025-08-25 | 236,607 (+4,509) | 202,650 (+1,493) | 439,257 (+6,002) |
| 2025-08-26 | 242,783 (+6,176) | 205,242 (+2,592) | 448,025 (+8,768) |
| 2025-08-27 | 248,409 (+5,626) | 205,242 (+0) | 453,651 (+5,626) |
| 2025-08-28 | 252,796 (+4,387) | 205,242 (+0) | 458,038 (+4,387) |
| 2025-08-29 | 256,045 (+3,249) | 211,075 (+5,833) | 467,120 (+9,082) |
| 2025-08-30 | 258,863 (+2,818) | 212,397 (+1,322) | 471,260 (+4,140) |
| 2025-08-31 | 262,004 (+3,141) | 213,944 (+1,547) | 475,948 (+4,688) |
| 2025-09-01 | 265,359 (+3,355) | 215,115 (+1,171) | 480,474 (+4,526) |
| 2025-09-02 | 270,483 (+5,124) | 217,075 (+1,960) | 487,558 (+7,084) |
| 2025-09-03 | 274,793 (+4,310) | 219,755 (+2,680) | 494,548 (+6,990) |
| 2025-09-04 | 280,430 (+5,637) | 222,103 (+2,348) | 502,533 (+7,985) |
| 2025-09-05 | 283,769 (+3,339) | 223,793 (+1,690) | 507,562 (+5,029) |
| 2025-09-06 | 286,245 (+2,476) | 225,036 (+1,243) | 511,281 (+3,719) |
| 2025-09-07 | 288,623 (+2,378) | 225,866 (+830) | 514,489 (+3,208) |
| 2025-09-08 | 293,341 (+4,718) | 227,073 (+1,207) | 520,414 (+5,925) |
| 2025-09-09 | 300,036 (+6,695) | 229,788 (+2,715) | 529,824 (+9,410) |
| 2025-09-10 | 307,287 (+7,251) | 233,435 (+3,647) | 540,722 (+10,898) |
| 2025-09-11 | 314,083 (+6,796) | 237,356 (+3,921) | 551,439 (+10,717) |
| 2025-09-12 | 321,046 (+6,963) | 240,728 (+3,372) | 561,774 (+10,335) |
| 2025-09-13 | 324,894 (+3,848) | 245,539 (+4,811) | 570,433 (+8,659) |
| 2025-09-14 | 328,876 (+3,982) | 248,245 (+2,706) | 577,121 (+6,688) |
| 2025-09-15 | 334,201 (+5,325) | 250,983 (+2,738) | 585,184 (+8,063) |
| 2025-09-16 | 342,609 (+8,408) | 255,264 (+4,281) | 597,873 (+12,689) |
| 2025-09-17 | 351,117 (+8,508) | 260,970 (+5,706) | 612,087 (+14,214) |
| 2025-09-18 | 358,717 (+7,600) | 266,922 (+5,952) | 625,639 (+13,552) |
| 2025-09-19 | 365,401 (+6,684) | 271,859 (+4,937) | 637,260 (+11,621) |
| 2025-09-20 | 372,092 (+6,691) | 276,917 (+5,058) | 649,009 (+11,749) |
| 2025-09-21 | 377,079 (+4,987) | 280,261 (+3,344) | 657,340 (+8,331) |
| 2025-09-22 | 382,492 (+5,413) | 284,009 (+3,748) | 666,501 (+9,161) |
| 2025-09-23 | 387,008 (+4,516) | 289,129 (+5,120) | 676,137 (+9,636) |
| 2025-09-24 | 393,325 (+6,317) | 294,927 (+5,798) | 688,252 (+12,115) |
| 2025-09-25 | 398,879 (+5,554) | 301,663 (+6,736) | 700,542 (+12,290) |
| 2025-09-26 | 404,334 (+5,455) | 306,713 (+5,050) | 711,047 (+10,505) |
| 2025-09-27 | 411,618 (+7,284) | 317,763 (+11,050) | 729,381 (+18,334) |
| 2025-09-28 | 414,910 (+3,292) | 322,522 (+4,759) | 737,432 (+8,051) |
| 2025-09-29 | 419,919 (+5,009) | 328,033 (+5,511) | 747,952 (+10,520) |
| 2025-09-30 | 427,991 (+8,072) | 336,472 (+8,439) | 764,463 (+16,511) |
| 2025-10-01 | 433,591 (+5,600) | 341,742 (+5,270) | 775,333 (+10,870) |
| 2025-10-02 | 440,852 (+7,261) | 348,099 (+6,357) | 788,951 (+13,618) |
| 2025-10-03 | 446,829 (+5,977) | 359,937 (+11,838) | 806,766 (+17,815) |
| 2025-10-04 | 452,561 (+5,732) | 370,386 (+10,449) | 822,947 (+16,181) |
| 2025-10-05 | 455,559 (+2,998) | 374,745 (+4,359) | 830,304 (+7,357) |
| 2025-10-06 | 460,927 (+5,368) | 379,489 (+4,744) | 840,416 (+10,112) |
| 2025-10-07 | 467,336 (+6,409) | 385,438 (+5,949) | 852,774 (+12,358) |
| 2025-10-08 | 474,643 (+7,307) | 394,139 (+8,701) | 868,782 (+16,008) |
| 2025-10-09 | 479,203 (+4,560) | 400,526 (+6,387) | 879,729 (+10,947) |
| 2025-10-10 | 484,374 (+5,171) | 406,015 (+5,489) | 890,389 (+10,660) |
| 2025-10-11 | 488,427 (+4,053) | 414,699 (+8,684) | 903,126 (+12,737) |
| 2025-10-12 | 492,125 (+3,698) | 418,745 (+4,046) | 910,870 (+7,744) |
| 2025-10-14 | 505,130 (+13,005) | 429,286 (+10,541) | 934,416 (+23,546) |
| 2025-10-15 | 512,717 (+7,587) | 439,290 (+10,004) | 952,007 (+17,591) |
| 2025-10-16 | 517,719 (+5,002) | 447,137 (+7,847) | 964,856 (+12,849) |
| 2025-10-17 | 526,239 (+8,520) | 457,467 (+10,330) | 983,706 (+18,850) |
| 2025-10-18 | 531,564 (+5,325) | 465,272 (+7,805) | 996,836 (+13,130) |
| 2025-10-19 | 536,209 (+4,645) | 469,078 (+3,806) | 1,005,287 (+8,451) |
| 2025-10-20 | 541,264 (+5,055) | 472,952 (+3,874) | 1,014,216 (+8,929) |
| 2025-10-21 | 548,721 (+7,457) | 479,703 (+6,751) | 1,028,424 (+14,208) |
| 2025-10-22 | 557,949 (+9,228) | 491,395 (+11,692) | 1,049,344 (+20,920) |
| 2025-10-23 | 564,716 (+6,767) | 498,736 (+7,341) | 1,063,452 (+14,108) |
| 2025-10-24 | 572,692 (+7,976) | 506,905 (+8,169) | 1,079,597 (+16,145) |
| 2025-10-25 | 578,927 (+6,235) | 516,129 (+9,224) | 1,095,056 (+15,459) |
| 2025-10-26 | 584,409 (+5,482) | 521,179 (+5,050) | 1,105,588 (+10,532) |
| 2025-10-27 | 589,999 (+5,590) | 526,001 (+4,822) | 1,116,000 (+10,412) |
| 2025-10-28 | 595,776 (+5,777) | 532,438 (+6,437) | 1,128,214 (+12,214) |
| 2025-10-29 | 606,259 (+10,483) | 542,064 (+9,626) | 1,148,323 (+20,109) |
| 2025-10-30 | 613,746 (+7,487) | 542,064 (+0) | 1,155,810 (+7,487) |
| 2025-10-30 | 617,846 (+4,100) | 555,026 (+12,962) | 1,172,872 (+17,062) |
| 2025-10-31 | 626,612 (+8,766) | 564,579 (+9,553) | 1,191,191 (+18,319) |
| 2025-11-01 | 636,100 (+9,488) | 581,806 (+17,227) | 1,217,906 (+26,715) |
| 2025-11-02 | 644,067 (+7,967) | 590,004 (+8,198) | 1,234,071 (+16,165) |
| 2025-11-03 | 653,130 (+9,063) | 597,139 (+7,135) | 1,250,269 (+16,198) |
| 2025-11-04 | 663,912 (+10,782) | 608,056 (+10,917) | 1,271,968 (+21,699) |
| 2025-11-05 | 675,074 (+11,162) | 619,690 (+11,634) | 1,294,764 (+22,796) |
| 2025-11-06 | 686,252 (+11,178) | 630,885 (+11,195) | 1,317,137 (+22,373) |
| 2025-11-07 | 696,646 (+10,394) | 642,146 (+11,261) | 1,338,792 (+21,655) |
| 2025-11-08 | 706,035 (+9,389) | 653,489 (+11,343) | 1,359,524 (+20,732) |
| 2025-11-09 | 713,462 (+7,427) | 660,459 (+6,970) | 1,373,921 (+14,397) |
| 2025-11-10 | 722,288 (+8,826) | 668,225 (+7,766) | 1,390,513 (+16,592) |
| 2025-11-11 | 729,769 (+7,481) | 677,501 (+9,276) | 1,407,270 (+16,757) |
| 2025-11-12 | 740,180 (+10,411) | 686,454 (+8,953) | 1,426,634 (+19,364) |
| 2025-11-13 | 749,905 (+9,725) | 696,157 (+9,703) | 1,446,062 (+19,428) |
| 2025-11-14 | 759,928 (+10,023) | 705,237 (+9,080) | 1,465,165 (+19,103) |
| 2025-11-15 | 765,955 (+6,027) | 712,870 (+7,633) | 1,478,825 (+13,660) |
| 2025-11-16 | 771,069 (+5,114) | 716,596 (+3,726) | 1,487,665 (+8,840) |
| 2025-11-17 | 780,161 (+9,092) | 723,339 (+6,743) | 1,503,500 (+15,835) |
| 2025-11-18 | 791,563 (+11,402) | 732,544 (+9,205) | 1,524,107 (+20,607) |
| 2025-11-19 | 804,409 (+12,846) | 747,624 (+15,080) | 1,552,033 (+27,926) |
| 2025-11-20 | 814,620 (+10,211) | 757,907 (+10,283) | 1,572,527 (+20,494) |
| 2025-11-21 | 826,309 (+11,689) | 769,307 (+11,400) | 1,595,616 (+23,089) |
| 2025-11-22 | 837,269 (+10,960) | 780,996 (+11,689) | 1,618,265 (+22,649) |
| 2025-11-23 | 846,609 (+9,340) | 795,069 (+14,073) | 1,641,678 (+23,413) |
| 2025-11-24 | 856,733 (+10,124) | 804,033 (+8,964) | 1,660,766 (+19,088) |
| 2025-11-25 | 869,423 (+12,690) | 817,339 (+13,306) | 1,686,762 (+25,996) |
| 2025-11-26 | 881,414 (+11,991) | 832,518 (+15,179) | 1,713,932 (+27,170) |
| 2025-11-27 | 893,960 (+12,546) | 846,180 (+13,662) | 1,740,140 (+26,208) |
| 2025-11-28 | 901,741 (+7,781) | 856,482 (+10,302) | 1,758,223 (+18,083) |
| 2025-11-29 | 908,689 (+6,948) | 863,361 (+6,879) | 1,772,050 (+13,827) |
| 2025-11-30 | 916,116 (+7,427) | 870,194 (+6,833) | 1,786,310 (+14,260) |
| 2025-12-01 | 925,898 (+9,782) | 876,500 (+6,306) | 1,802,398 (+16,088) |
| 2025-12-02 | 939,250 (+13,352) | 890,919 (+14,419) | 1,830,169 (+27,771) |
| 2025-12-03 | 952,249 (+12,999) | 903,713 (+12,794) | 1,855,962 (+25,793) |
| 2025-12-04 | 965,611 (+13,362) | 916,471 (+12,758) | 1,882,082 (+26,120) |
| 2025-12-05 | 977,996 (+12,385) | 930,616 (+14,145) | 1,908,612 (+26,530) |
| 2025-12-06 | 987,884 (+9,888) | 943,773 (+13,157) | 1,931,657 (+23,045) |
| 2025-12-07 | 994,046 (+6,162) | 951,425 (+7,652) | 1,945,471 (+13,814) |
| 2025-12-08 | 1,000,898 (+6,852) | 957,149 (+5,724) | 1,958,047 (+12,576) |
| 2025-12-09 | 1,011,488 (+10,590) | 973,922 (+16,773) | 1,985,410 (+27,363) |
| 2025-12-10 | 1,025,891 (+14,403) | 991,708 (+17,786) | 2,017,599 (+32,189) |
| 2025-12-11 | 1,045,110 (+19,219) | 1,010,559 (+18,851) | 2,055,669 (+38,070) |
| 2025-12-12 | 1,061,340 (+16,230) | 1,030,838 (+20,279) | 2,092,178 (+36,509) |
| 2025-12-13 | 1,073,561 (+12,221) | 1,044,608 (+13,770) | 2,118,169 (+25,991) |
| 2025-12-14 | 1,082,042 (+8,481) | 1,052,425 (+7,817) | 2,134,467 (+16,298) |
| 2025-12-15 | 1,093,632 (+11,590) | 1,059,078 (+6,653) | 2,152,710 (+18,243) |
| 2025-12-16 | 1,120,477 (+26,845) | 1,078,022 (+18,944) | 2,198,499 (+45,789) |
| 2025-12-17 | 1,151,067 (+30,590) | 1,097,661 (+19,639) | 2,248,728 (+50,229) |
| 2025-12-18 | 1,178,658 (+27,591) | 1,113,418 (+15,757) | 2,292,076 (+43,348) |
| 2025-12-19 | 1,203,485 (+24,827) | 1,129,698 (+16,280) | 2,333,183 (+41,107) |
| 2025-12-20 | 1,223,000 (+19,515) | 1,146,258 (+16,560) | 2,369,258 (+36,075) |
| 2025-12-21 | 1,242,675 (+19,675) | 1,158,909 (+12,651) | 2,401,584 (+32,326) |
| 2025-12-22 | 1,262,522 (+19,847) | 1,169,121 (+10,212) | 2,431,643 (+30,059) |
| 2025-12-23 | 1,286,548 (+24,026) | 1,186,439 (+17,318) | 2,472,987 (+41,344) |
| 2025-12-24 | 1,309,323 (+22,775) | 1,203,767 (+17,328) | 2,513,090 (+40,103) |
| 2025-12-25 | 1,333,032 (+23,709) | 1,217,283 (+13,516) | 2,550,315 (+37,225) |
| 2025-12-26 | 1,352,411 (+19,379) | 1,227,615 (+10,332) | 2,580,026 (+29,711) |
| 2025-12-27 | 1,371,771 (+19,360) | 1,238,236 (+10,621) | 2,610,007 (+29,981) |
| 2025-12-28 | 1,390,388 (+18,617) | 1,245,690 (+7,454) | 2,636,078 (+26,071) |
| 2025-12-29 | 1,415,560 (+25,172) | 1,257,101 (+11,411) | 2,672,661 (+36,583) |
| 2025-12-30 | 1,445,450 (+29,890) | 1,272,689 (+15,588) | 2,718,139 (+45,478) |
| 2025-12-31 | 1,479,598 (+34,148) | 1,293,235 (+20,546) | 2,772,833 (+54,694) |
| 2026-01-01 | 1,508,883 (+29,285) | 1,309,874 (+16,639) | 2,818,757 (+45,924) |
| 2026-01-02 | 1,563,474 (+54,591) | 1,320,959 (+11,085) | 2,884,433 (+65,676) |
| 2026-01-03 | 1,618,065 (+54,591) | 1,331,914 (+10,955) | 2,949,979 (+65,546) |
| 2026-01-04 | 1,672,656 (+54,591) | 1,339,883 (+7,969) | 3,012,539 (+62,560) |
| 2026-01-05 | 1,738,171 (+65,515) | 1,353,043 (+13,160) | 3,091,214 (+78,675) |
| 2026-01-06 | 1,960,988 (+222,817) | 1,377,377 (+24,334) | 3,338,365 (+247,151) |
| 2026-01-07 | 2,123,239 (+162,251) | 1,398,648 (+21,271) | 3,521,887 (+183,522) |
| 2026-01-08 | 2,272,630 (+149,391) | 1,432,480 (+33,832) | 3,705,110 (+183,223) |
| 2026-01-09 | 2,443,565 (+170,935) | 1,469,451 (+36,971) | 3,913,016 (+207,906) |
| 2026-01-10 | 2,632,023 (+188,458) | 1,503,670 (+34,219) | 4,135,693 (+222,677) |
| 2026-01-11 | 2,836,394 (+204,371) | 1,530,479 (+26,809) | 4,366,873 (+231,180) |
| 2026-01-12 | 3,053,594 (+217,200) | 1,553,671 (+23,192) | 4,607,265 (+240,392) |
| 2026-01-13 | 3,297,078 (+243,484) | 1,595,062 (+41,391) | 4,892,140 (+284,875) |
| 2026-01-14 | 3,568,928 (+271,850) | 1,645,362 (+50,300) | 5,214,290 (+322,150) |
| 2026-01-16 | 4,121,550 (+552,622) | 1,754,418 (+109,056) | 5,875,968 (+661,678) |
| 2026-01-17 | 4,389,558 (+268,008) | 1,805,315 (+50,897) | 6,194,873 (+318,905) |
| 2026-01-18 | 4,627,623 (+238,065) | 1,839,171 (+33,856) | 6,466,794 (+271,921) |
| 2026-01-19 | 4,861,108 (+233,485) | 1,863,112 (+23,941) | 6,724,220 (+257,426) |
| 2026-01-20 | 5,128,999 (+267,891) | 1,903,665 (+40,553) | 7,032,664 (+308,444) |
| 2026-01-21 | 5,444,842 (+315,843) | 1,962,531 (+58,866) | 7,407,373 (+374,709) |
| 2026-01-22 | 5,766,340 (+321,498) | 2,029,487 (+66,956) | 7,795,827 (+388,454) |
| 2026-01-23 | 6,096,236 (+329,896) | 2,096,235 (+66,748) | 8,192,471 (+396,644) |
| Date | GitHub Downloads | npm Downloads | Total |
| ---------- | ---------------- | ---------------- | ---------------- |
| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
| 2025-07-09 | 40,924 (+2,872) | 67,935 (+3,467) | 108,859 (+6,339) |
| 2025-07-10 | 43,796 (+2,872) | 71,402 (+3,467) | 115,198 (+6,339) |
| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |
| 2025-07-12 | 49,302 (+2,320) | 82,177 (+4,715) | 131,479 (+7,035) |
| 2025-07-13 | 50,803 (+1,501) | 86,394 (+4,217) | 137,197 (+5,718) |
| 2025-07-14 | 53,283 (+2,480) | 87,860 (+1,466) | 141,143 (+3,946) |
| 2025-07-15 | 57,590 (+4,307) | 91,036 (+3,176) | 148,626 (+7,483) |
| 2025-07-16 | 62,313 (+4,723) | 95,258 (+4,222) | 157,571 (+8,945) |
| 2025-07-17 | 66,684 (+4,371) | 100,048 (+4,790) | 166,732 (+9,161) |
| 2025-07-18 | 70,379 (+3,695) | 102,587 (+2,539) | 172,966 (+6,234) |
| 2025-07-19 | 73,497 (+3,117) | 105,904 (+3,317) | 179,401 (+6,434) |
| 2025-07-20 | 76,453 (+2,956) | 109,044 (+3,140) | 185,497 (+6,096) |
| 2025-07-21 | 80,197 (+3,744) | 113,537 (+4,493) | 193,734 (+8,237) |
| 2025-07-22 | 84,251 (+4,054) | 118,073 (+4,536) | 202,324 (+8,590) |
| 2025-07-23 | 88,589 (+4,338) | 121,436 (+3,363) | 210,025 (+7,701) |
| 2025-07-24 | 92,469 (+3,880) | 124,091 (+2,655) | 216,560 (+6,535) |
| 2025-07-25 | 96,417 (+3,948) | 126,985 (+2,894) | 223,402 (+6,842) |
| 2025-07-26 | 100,646 (+4,229) | 131,411 (+4,426) | 232,057 (+8,655) |
| 2025-07-27 | 102,644 (+1,998) | 134,736 (+3,325) | 237,380 (+5,323) |
| 2025-07-28 | 105,446 (+2,802) | 136,016 (+1,280) | 241,462 (+4,082) |
| 2025-07-29 | 108,998 (+3,552) | 137,542 (+1,526) | 246,540 (+5,078) |
| 2025-07-30 | 113,544 (+4,546) | 140,317 (+2,775) | 253,861 (+7,321) |
| 2025-07-31 | 118,339 (+4,795) | 143,344 (+3,027) | 261,683 (+7,822) |
| 2025-08-01 | 123,539 (+5,200) | 146,680 (+3,336) | 270,219 (+8,536) |
| 2025-08-02 | 127,864 (+4,325) | 149,236 (+2,556) | 277,100 (+6,881) |
| 2025-08-03 | 131,397 (+3,533) | 150,451 (+1,215) | 281,848 (+4,748) |
| 2025-08-04 | 136,266 (+4,869) | 153,260 (+2,809) | 289,526 (+7,678) |
| 2025-08-05 | 141,596 (+5,330) | 155,752 (+2,492) | 297,348 (+7,822) |
| 2025-08-06 | 147,067 (+5,471) | 158,309 (+2,557) | 305,376 (+8,028) |
| 2025-08-07 | 152,591 (+5,524) | 160,889 (+2,580) | 313,480 (+8,104) |
| 2025-08-08 | 158,187 (+5,596) | 163,448 (+2,559) | 321,635 (+8,155) |
| 2025-08-09 | 162,770 (+4,583) | 165,721 (+2,273) | 328,491 (+6,856) |
| 2025-08-10 | 165,695 (+2,925) | 167,109 (+1,388) | 332,804 (+4,313) |

View File

@@ -1,71 +0,0 @@
## Style Guide
- Keep things in one function unless composable or reusable
- Avoid unnecessary destructuring. Instead of `const { a, b } = obj`, use `obj.a` and `obj.b` to preserve context (see the sketch after this list)
- Avoid `try`/`catch` where possible
- Avoid using the `any` type
- Prefer single word variable names where possible
- Use Bun APIs when possible, like `Bun.file()`
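A minimal sketch of how the destructuring and Bun rules above might read in practice (the file name and config fields here are hypothetical, not from the original guide):
```ts
// Read a JSON config with Bun.file() and keep property access on the object,
// so every use of `config.` preserves where the value comes from.
const file = Bun.file("opencode.json")
const config = await file.json()

if (config.theme) console.log(config.theme)
if (config.model) console.log(config.model)
```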
# Avoid let statements
We don't like `let` statements, especially combined with if/else statements.
Prefer `const`.
Good:
```ts
const foo = condition ? 1 : 2
```
Bad:
```ts
let foo
if (condition) foo = 1
else foo = 2
```
# Avoid else statements
Prefer early returns or using an `iife` to avoid else statements.
Good:
```ts
function foo() {
if (condition) return 1
return 2
}
```
Bad:
```ts
function foo() {
if (condition) return 1
else return 2
}
```
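The IIFE alternative mentioned above isn't illustrated; a hedged sketch of what it could look like (the `count` and `label` names are made up for the example):
```ts
// Branching happens inside an immediately invoked function,
// so the result stays a single `const` with no `let` and no `else`.
const count = 3
const label = (() => {
  if (count === 0) return "empty"
  if (count === 1) return "single"
  return "many"
})()
console.log(label) // "many"
```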
# Prefer single word naming
Try your best to find a single word name for your variables, functions, etc.
Only use multiple words if you cannot.
Good:
```ts
const foo = 1
const bar = 2
const baz = 3
```
Bad:
```ts
const fooBar = 1
const barBaz = 2
const bazFoo = 3
```

4289
bun.lock

File diff suppressed because it is too large

View File

@@ -1,6 +1,2 @@
[install]
exact = true
[test]
root = "./do-not-run-tests-from-root"
exact = true

View File

@@ -1,12 +1,12 @@
import { Resource } from "sst"
import { defineConfig } from "drizzle-kit"
import { Resource } from "sst"
export default defineConfig({
out: "./migrations/",
strict: true,
schema: ["./src/**/*.sql.ts"],
verbose: true,
dialect: "mysql",
dialect: "postgresql",
dbCredentials: {
database: Resource.Database.database,
host: Resource.Database.host,

View File

@@ -0,0 +1,66 @@
CREATE TABLE "billing" (
"id" varchar(30) NOT NULL,
"workspace_id" varchar(30) NOT NULL,
"time_created" timestamp with time zone DEFAULT now() NOT NULL,
"time_deleted" timestamp with time zone,
"customer_id" varchar(255),
"payment_method_id" varchar(255),
"payment_method_last4" varchar(4),
"balance" bigint NOT NULL,
"reload" boolean,
CONSTRAINT "billing_workspace_id_id_pk" PRIMARY KEY("workspace_id","id")
);
--> statement-breakpoint
CREATE TABLE "payment" (
"id" varchar(30) NOT NULL,
"workspace_id" varchar(30) NOT NULL,
"time_created" timestamp with time zone DEFAULT now() NOT NULL,
"time_deleted" timestamp with time zone,
"customer_id" varchar(255),
"payment_id" varchar(255),
"amount" bigint NOT NULL,
CONSTRAINT "payment_workspace_id_id_pk" PRIMARY KEY("workspace_id","id")
);
--> statement-breakpoint
CREATE TABLE "usage" (
"id" varchar(30) NOT NULL,
"workspace_id" varchar(30) NOT NULL,
"time_created" timestamp with time zone DEFAULT now() NOT NULL,
"time_deleted" timestamp with time zone,
"request_id" varchar(255),
"model" varchar(255) NOT NULL,
"input_tokens" integer NOT NULL,
"output_tokens" integer NOT NULL,
"reasoning_tokens" integer,
"cache_read_tokens" integer,
"cache_write_tokens" integer,
"cost" bigint NOT NULL,
CONSTRAINT "usage_workspace_id_id_pk" PRIMARY KEY("workspace_id","id")
);
--> statement-breakpoint
CREATE TABLE "user" (
"id" varchar(30) NOT NULL,
"workspace_id" varchar(30) NOT NULL,
"time_created" timestamp with time zone DEFAULT now() NOT NULL,
"time_deleted" timestamp with time zone,
"email" text NOT NULL,
"name" varchar(255) NOT NULL,
"time_seen" timestamp with time zone,
"color" integer,
CONSTRAINT "user_workspace_id_id_pk" PRIMARY KEY("workspace_id","id")
);
--> statement-breakpoint
CREATE TABLE "workspace" (
"id" varchar(30) PRIMARY KEY NOT NULL,
"slug" varchar(255),
"name" varchar(255),
"time_created" timestamp with time zone DEFAULT now() NOT NULL,
"time_deleted" timestamp with time zone
);
--> statement-breakpoint
ALTER TABLE "billing" ADD CONSTRAINT "billing_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "payment" ADD CONSTRAINT "payment_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "usage" ADD CONSTRAINT "usage_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "user" ADD CONSTRAINT "user_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
CREATE UNIQUE INDEX "user_email" ON "user" USING btree ("workspace_id","email");--> statement-breakpoint
CREATE UNIQUE INDEX "slug" ON "workspace" USING btree ("slug");

View File

@@ -0,0 +1,8 @@
CREATE TABLE "account" (
"id" varchar(30) NOT NULL,
"time_created" timestamp with time zone DEFAULT now() NOT NULL,
"time_deleted" timestamp with time zone,
"email" varchar(255) NOT NULL
);
--> statement-breakpoint
CREATE UNIQUE INDEX "email" ON "account" USING btree ("email");

View File

@@ -0,0 +1,14 @@
CREATE TABLE "key" (
"id" varchar(30) NOT NULL,
"workspace_id" varchar(30) NOT NULL,
"time_created" timestamp with time zone DEFAULT now() NOT NULL,
"time_deleted" timestamp with time zone,
"user_id" text NOT NULL,
"name" varchar(255) NOT NULL,
"key" varchar(255) NOT NULL,
"time_used" timestamp with time zone,
CONSTRAINT "key_workspace_id_id_pk" PRIMARY KEY("workspace_id","id")
);
--> statement-breakpoint
ALTER TABLE "key" ADD CONSTRAINT "key_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
CREATE UNIQUE INDEX "global_key" ON "key" USING btree ("key");

View File

@@ -0,0 +1 @@
ALTER TABLE "usage" DROP COLUMN "request_id";

View File

@@ -0,0 +1,461 @@
{
"id": "9b5cec8c-8b59-4d7a-bb5c-76ade1c83d6f",
"prevId": "00000000-0000-0000-0000-000000000000",
"version": "7",
"dialect": "postgresql",
"tables": {
"public.billing": {
"name": "billing",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"customer_id": {
"name": "customer_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"payment_method_id": {
"name": "payment_method_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"payment_method_last4": {
"name": "payment_method_last4",
"type": "varchar(4)",
"primaryKey": false,
"notNull": false
},
"balance": {
"name": "balance",
"type": "bigint",
"primaryKey": false,
"notNull": true
},
"reload": {
"name": "reload",
"type": "boolean",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
"foreignKeys": {
"billing_workspace_id_workspace_id_fk": {
"name": "billing_workspace_id_workspace_id_fk",
"tableFrom": "billing",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"billing_workspace_id_id_pk": {
"name": "billing_workspace_id_id_pk",
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.payment": {
"name": "payment",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"customer_id": {
"name": "customer_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"payment_id": {
"name": "payment_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"amount": {
"name": "amount",
"type": "bigint",
"primaryKey": false,
"notNull": true
}
},
"indexes": {},
"foreignKeys": {
"payment_workspace_id_workspace_id_fk": {
"name": "payment_workspace_id_workspace_id_fk",
"tableFrom": "payment",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"payment_workspace_id_id_pk": {
"name": "payment_workspace_id_id_pk",
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.usage": {
"name": "usage",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"request_id": {
"name": "request_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"model": {
"name": "model",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true
},
"input_tokens": {
"name": "input_tokens",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"output_tokens": {
"name": "output_tokens",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"reasoning_tokens": {
"name": "reasoning_tokens",
"type": "integer",
"primaryKey": false,
"notNull": false
},
"cache_read_tokens": {
"name": "cache_read_tokens",
"type": "integer",
"primaryKey": false,
"notNull": false
},
"cache_write_tokens": {
"name": "cache_write_tokens",
"type": "integer",
"primaryKey": false,
"notNull": false
},
"cost": {
"name": "cost",
"type": "bigint",
"primaryKey": false,
"notNull": true
}
},
"indexes": {},
"foreignKeys": {
"usage_workspace_id_workspace_id_fk": {
"name": "usage_workspace_id_workspace_id_fk",
"tableFrom": "usage",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"usage_workspace_id_id_pk": {
"name": "usage_workspace_id_id_pk",
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.user": {
"name": "user",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"email": {
"name": "email",
"type": "text",
"primaryKey": false,
"notNull": true
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true
},
"time_seen": {
"name": "time_seen",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"color": {
"name": "color",
"type": "integer",
"primaryKey": false,
"notNull": false
}
},
"indexes": {
"user_email": {
"name": "user_email",
"columns": [
{
"expression": "workspace_id",
"isExpression": false,
"asc": true,
"nulls": "last"
},
{
"expression": "email",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {
"user_workspace_id_workspace_id_fk": {
"name": "user_workspace_id_workspace_id_fk",
"tableFrom": "user",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"user_workspace_id_id_pk": {
"name": "user_workspace_id_id_pk",
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.workspace": {
"name": "workspace",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": true,
"notNull": true
},
"slug": {
"name": "slug",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
}
},
"indexes": {
"slug": {
"name": "slug",
"columns": [
{
"expression": "slug",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
}
},
"enums": {},
"schemas": {},
"sequences": {},
"roles": {},
"policies": {},
"views": {},
"_meta": {
"columns": {},
"schemas": {},
"tables": {}
}
}

View File

@@ -0,0 +1,515 @@
{
"id": "bf9e9084-4073-4ecb-8e56-5610816c9589",
"prevId": "9b5cec8c-8b59-4d7a-bb5c-76ade1c83d6f",
"version": "7",
"dialect": "postgresql",
"tables": {
"public.account": {
"name": "account",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"email": {
"name": "email",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true
}
},
"indexes": {
"email": {
"name": "email",
"columns": [
{
"expression": "email",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.billing": {
"name": "billing",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"customer_id": {
"name": "customer_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"payment_method_id": {
"name": "payment_method_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"payment_method_last4": {
"name": "payment_method_last4",
"type": "varchar(4)",
"primaryKey": false,
"notNull": false
},
"balance": {
"name": "balance",
"type": "bigint",
"primaryKey": false,
"notNull": true
},
"reload": {
"name": "reload",
"type": "boolean",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
"foreignKeys": {
"billing_workspace_id_workspace_id_fk": {
"name": "billing_workspace_id_workspace_id_fk",
"tableFrom": "billing",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"billing_workspace_id_id_pk": {
"name": "billing_workspace_id_id_pk",
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.payment": {
"name": "payment",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"customer_id": {
"name": "customer_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"payment_id": {
"name": "payment_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"amount": {
"name": "amount",
"type": "bigint",
"primaryKey": false,
"notNull": true
}
},
"indexes": {},
"foreignKeys": {
"payment_workspace_id_workspace_id_fk": {
"name": "payment_workspace_id_workspace_id_fk",
"tableFrom": "payment",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"payment_workspace_id_id_pk": {
"name": "payment_workspace_id_id_pk",
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.usage": {
"name": "usage",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"request_id": {
"name": "request_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"model": {
"name": "model",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true
},
"input_tokens": {
"name": "input_tokens",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"output_tokens": {
"name": "output_tokens",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"reasoning_tokens": {
"name": "reasoning_tokens",
"type": "integer",
"primaryKey": false,
"notNull": false
},
"cache_read_tokens": {
"name": "cache_read_tokens",
"type": "integer",
"primaryKey": false,
"notNull": false
},
"cache_write_tokens": {
"name": "cache_write_tokens",
"type": "integer",
"primaryKey": false,
"notNull": false
},
"cost": {
"name": "cost",
"type": "bigint",
"primaryKey": false,
"notNull": true
}
},
"indexes": {},
"foreignKeys": {
"usage_workspace_id_workspace_id_fk": {
"name": "usage_workspace_id_workspace_id_fk",
"tableFrom": "usage",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"usage_workspace_id_id_pk": {
"name": "usage_workspace_id_id_pk",
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.user": {
"name": "user",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"email": {
"name": "email",
"type": "text",
"primaryKey": false,
"notNull": true
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true
},
"time_seen": {
"name": "time_seen",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"color": {
"name": "color",
"type": "integer",
"primaryKey": false,
"notNull": false
}
},
"indexes": {
"user_email": {
"name": "user_email",
"columns": [
{
"expression": "workspace_id",
"isExpression": false,
"asc": true,
"nulls": "last"
},
{
"expression": "email",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {
"user_workspace_id_workspace_id_fk": {
"name": "user_workspace_id_workspace_id_fk",
"tableFrom": "user",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"user_workspace_id_id_pk": {
"name": "user_workspace_id_id_pk",
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.workspace": {
"name": "workspace",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": true,
"notNull": true
},
"slug": {
"name": "slug",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"time_created": {
"name": "time_created",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
}
},
"indexes": {
"slug": {
"name": "slug",
"columns": [
{
"expression": "slug",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
}
},
"enums": {},
"schemas": {},
"sequences": {},
"roles": {},
"policies": {},
"views": {},
"_meta": {
"columns": {},
"schemas": {},
"tables": {}
}
}

View File

@@ -1,576 +1,615 @@
{
"version": "5",
"dialect": "mysql",
"id": "9f51ef52-31ac-4ace-8b6d-39b35efe9c81",
"prevId": "79b7ee25-1c1c-41ff-9bbf-754af257102b",
"id": "351e4956-74e0-4282-a23b-02f1a73fa38c",
"prevId": "bf9e9084-4073-4ecb-8e56-5610816c9589",
"version": "7",
"dialect": "postgresql",
"tables": {
"account": {
"public.account": {
"name": "account",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"email": {
"name": "email",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
}
},
"indexes": {
"email": {
"name": "email",
"columns": ["email"],
"isUnique": true
"columns": [
{
"expression": "email",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"billing": {
"public.billing": {
"name": "billing",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"customer_id": {
"name": "customer_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"payment_method_id": {
"name": "payment_method_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"payment_method_last4": {
"name": "payment_method_last4",
"type": "varchar(4)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"balance": {
"name": "balance",
"type": "bigint",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"reload": {
"name": "reload",
"type": "boolean",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
}
},
"indexes": {},
"foreignKeys": {},
"foreignKeys": {
"billing_workspace_id_workspace_id_fk": {
"name": "billing_workspace_id_workspace_id_fk",
"tableFrom": "billing",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"billing_workspace_id_id_pk": {
"name": "billing_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"payment": {
"public.payment": {
"name": "payment",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"customer_id": {
"name": "customer_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"payment_id": {
"name": "payment_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"amount": {
"name": "amount",
"type": "bigint",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
}
},
"indexes": {},
"foreignKeys": {},
"foreignKeys": {
"payment_workspace_id_workspace_id_fk": {
"name": "payment_workspace_id_workspace_id_fk",
"tableFrom": "payment",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"payment_workspace_id_id_pk": {
"name": "payment_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"usage": {
"public.usage": {
"name": "usage",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"request_id": {
"name": "request_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false
},
"model": {
"name": "model",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"input_tokens": {
"name": "input_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"output_tokens": {
"name": "output_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"reasoning_tokens": {
"name": "reasoning_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"cache_read_tokens": {
"name": "cache_read_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"cache_write_tokens": {
"name": "cache_write_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"cost": {
"name": "cost",
"type": "bigint",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
}
},
"indexes": {},
"foreignKeys": {},
"foreignKeys": {
"usage_workspace_id_workspace_id_fk": {
"name": "usage_workspace_id_workspace_id_fk",
"tableFrom": "usage",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"usage_workspace_id_id_pk": {
"name": "usage_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"key": {
"public.key": {
"name": "key",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"actor": {
"name": "actor",
"type": "json",
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": true
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"old_name": {
"name": "old_name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": true
},
"key": {
"name": "key",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_used": {
"name": "time_used",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
}
},
"indexes": {
"global_key": {
"name": "global_key",
"columns": ["key"],
"isUnique": true
"columns": [
{
"expression": "key",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {
"key_workspace_id_workspace_id_fk": {
"name": "key_workspace_id_workspace_id_fk",
"tableFrom": "key",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"foreignKeys": {},
"compositePrimaryKeys": {
"key_workspace_id_id_pk": {
"name": "key_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"user": {
"public.user": {
"name": "user",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"email": {
"name": "email",
"type": "varchar(255)",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_seen": {
"name": "time_seen",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"color": {
"name": "color",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
}
},
"indexes": {
"user_email": {
"name": "user_email",
"columns": ["workspace_id", "email"],
"isUnique": true
"columns": [
{
"expression": "workspace_id",
"isExpression": false,
"asc": true,
"nulls": "last"
},
{
"expression": "email",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {
"user_workspace_id_workspace_id_fk": {
"name": "user_workspace_id_workspace_id_fk",
"tableFrom": "user",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"foreignKeys": {},
"compositePrimaryKeys": {
"user_workspace_id_id_pk": {
"name": "user_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"workspace": {
"public.workspace": {
"name": "workspace",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"primaryKey": true,
"notNull": true
},
"slug": {
"name": "slug",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
}
},
"indexes": {
"slug": {
"name": "slug",
"columns": ["slug"],
"isUnique": true
"columns": [
{
"expression": "slug",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {},
"compositePrimaryKeys": {
"workspace_id": {
"name": "workspace_id",
"columns": ["id"]
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
}
},
"enums": {},
"schemas": {},
"sequences": {},
"roles": {},
"policies": {},
"views": {},
"_meta": {
"columns": {},
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"tables": {},
"indexes": {}
"tables": {}
}
}
}

View File

@@ -1,569 +1,609 @@
{
"version": "5",
"dialect": "mysql",
"id": "aee779c5-db1d-4655-95ec-6451c18455be",
"prevId": "00000000-0000-0000-0000-000000000000",
"id": "fa935883-9e51-4811-90c7-8967eefe458c",
"prevId": "351e4956-74e0-4282-a23b-02f1a73fa38c",
"version": "7",
"dialect": "postgresql",
"tables": {
"account": {
"public.account": {
"name": "account",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"email": {
"name": "email",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
}
},
"indexes": {
"email": {
"name": "email",
"columns": ["email"],
"isUnique": true
"columns": [
{
"expression": "email",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"billing": {
"public.billing": {
"name": "billing",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"customer_id": {
"name": "customer_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"payment_method_id": {
"name": "payment_method_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"payment_method_last4": {
"name": "payment_method_last4",
"type": "varchar(4)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"balance": {
"name": "balance",
"type": "bigint",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"reload": {
"name": "reload",
"type": "boolean",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
}
},
"indexes": {},
"foreignKeys": {},
"foreignKeys": {
"billing_workspace_id_workspace_id_fk": {
"name": "billing_workspace_id_workspace_id_fk",
"tableFrom": "billing",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"billing_workspace_id_id_pk": {
"name": "billing_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"payment": {
"public.payment": {
"name": "payment",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"customer_id": {
"name": "customer_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"payment_id": {
"name": "payment_id",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"amount": {
"name": "amount",
"type": "bigint",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
}
},
"indexes": {},
"foreignKeys": {},
"foreignKeys": {
"payment_workspace_id_workspace_id_fk": {
"name": "payment_workspace_id_workspace_id_fk",
"tableFrom": "payment",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"payment_workspace_id_id_pk": {
"name": "payment_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"usage": {
"public.usage": {
"name": "usage",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"model": {
"name": "model",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"input_tokens": {
"name": "input_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"output_tokens": {
"name": "output_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"reasoning_tokens": {
"name": "reasoning_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"cache_read_tokens": {
"name": "cache_read_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"cache_write_tokens": {
"name": "cache_write_tokens",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"cost": {
"name": "cost",
"type": "bigint",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
}
},
"indexes": {},
"foreignKeys": {},
"foreignKeys": {
"usage_workspace_id_workspace_id_fk": {
"name": "usage_workspace_id_workspace_id_fk",
"tableFrom": "usage",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"usage_workspace_id_id_pk": {
"name": "usage_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"key": {
"public.key": {
"name": "key",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"key": {
"name": "key",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_used": {
"name": "time_used",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
}
},
"indexes": {
"global_key": {
"name": "global_key",
"columns": ["key"],
"isUnique": true
"columns": [
{
"expression": "key",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {
"key_workspace_id_workspace_id_fk": {
"name": "key_workspace_id_workspace_id_fk",
"tableFrom": "key",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"foreignKeys": {},
"compositePrimaryKeys": {
"key_workspace_id_id_pk": {
"name": "key_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"user": {
"public.user": {
"name": "user",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"workspace_id": {
"name": "workspace_id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"email": {
"name": "email",
"type": "varchar(255)",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"notNull": true
},
"time_seen": {
"name": "time_seen",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"color": {
"name": "color",
"type": "int",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
}
},
"indexes": {
"user_email": {
"name": "user_email",
"columns": ["workspace_id", "email"],
"isUnique": true
"columns": [
{
"expression": "workspace_id",
"isExpression": false,
"asc": true,
"nulls": "last"
},
{
"expression": "email",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {
"user_workspace_id_workspace_id_fk": {
"name": "user_workspace_id_workspace_id_fk",
"tableFrom": "user",
"tableTo": "workspace",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"foreignKeys": {},
"compositePrimaryKeys": {
"user_workspace_id_id_pk": {
"name": "user_workspace_id_id_pk",
"columns": ["workspace_id", "id"]
"columns": [
"workspace_id",
"id"
]
}
},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"workspace": {
"public.workspace": {
"name": "workspace",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "varchar(30)",
"primaryKey": false,
"notNull": true,
"autoincrement": false
"primaryKey": true,
"notNull": true
},
"slug": {
"name": "slug",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"name": {
"name": "name",
"type": "varchar(255)",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
},
"time_created": {
"name": "time_created",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "(now())"
},
"time_updated": {
"name": "time_updated",
"type": "timestamp(3)",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)"
"default": "now()"
},
"time_deleted": {
"name": "time_deleted",
"type": "timestamp(3)",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false,
"autoincrement": false
"notNull": false
}
},
"indexes": {
"slug": {
"name": "slug",
"columns": ["slug"],
"isUnique": true
"columns": [
{
"expression": "slug",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {},
"compositePrimaryKeys": {
"workspace_id": {
"name": "workspace_id",
"columns": ["id"]
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraint": {}
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
}
},
"enums": {},
"schemas": {},
"sequences": {},
"roles": {},
"policies": {},
"views": {},
"_meta": {
"columns": {},
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"tables": {},
"indexes": {}
"tables": {}
}
}
}

View File

@@ -0,0 +1,34 @@
{
"version": "7",
"dialect": "postgresql",
"entries": [
{
"idx": 0,
"version": "7",
"when": 1754518198186,
"tag": "0000_amused_mojo",
"breakpoints": true
},
{
"idx": 1,
"version": "7",
"when": 1754609655262,
"tag": "0001_thankful_chat",
"breakpoints": true
},
{
"idx": 2,
"version": "7",
"when": 1754627626945,
"tag": "0002_stale_jackal",
"breakpoints": true
},
{
"idx": 3,
"version": "7",
"when": 1754672464106,
"tag": "0003_tranquil_spencer_smythe",
"breakpoints": true
}
]
}

23
cloud/core/package.json Normal file
View File

@@ -0,0 +1,23 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode/cloud-core",
"version": "0.4.13",
"private": true,
"type": "module",
"dependencies": {
"@aws-sdk/client-sts": "3.782.0",
"drizzle-orm": "0.41.0",
"postgres": "3.4.7",
"stripe": "18.0.0",
"ulid": "3.0.0"
},
"exports": {
"./*": "./src/*"
},
"scripts": {
"db": "sst shell drizzle-kit"
},
"devDependencies": {
"drizzle-kit": "0.30.5"
}
}

67
cloud/core/src/account.ts Normal file
View File

@@ -0,0 +1,67 @@
import { z } from "zod"
import { and, eq, getTableColumns, isNull } from "drizzle-orm"
import { fn } from "./util/fn"
import { Database } from "./drizzle"
import { Identifier } from "./identifier"
import { AccountTable } from "./schema/account.sql"
import { Actor } from "./actor"
import { WorkspaceTable } from "./schema/workspace.sql"
import { UserTable } from "./schema/user.sql"
export namespace Account {
export const create = fn(
z.object({
email: z.string().email(),
id: z.string().optional(),
}),
async (input) =>
Database.transaction(async (tx) => {
const id = input.id ?? Identifier.create("account")
await tx.insert(AccountTable).values({
id,
email: input.email,
})
return id
}),
)
export const fromID = fn(z.string(), async (id) =>
Database.transaction(async (tx) => {
return tx
.select()
.from(AccountTable)
.where(eq(AccountTable.id, id))
.execute()
.then((rows) => rows[0])
}),
)
export const fromEmail = fn(z.string().email(), async (email) =>
Database.transaction(async (tx) => {
return tx
.select()
.from(AccountTable)
.where(eq(AccountTable.email, email))
.execute()
.then((rows) => rows[0])
}),
)
export const workspaces = async () => {
const actor = Actor.assert("account")
return Database.transaction(async (tx) =>
tx
.select(getTableColumns(WorkspaceTable))
.from(WorkspaceTable)
.innerJoin(UserTable, eq(UserTable.workspaceID, WorkspaceTable.id))
.where(
and(
eq(UserTable.email, actor.properties.email),
isNull(UserTable.timeDeleted),
isNull(WorkspaceTable.timeDeleted),
),
)
.execute(),
)
}
}
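The Account helpers above are consumed by the auth worker later in this diff. A minimal usage sketch, assuming the "./*": "./src/*" export map from cloud/core/package.json and a configured database connection:

import { Account } from "@opencode/cloud-core/account.js"

// Look up an account by email, creating it on first sight -- the same
// pattern the auth issuer's success() callback uses further down.
async function ensureAccount(email: string): Promise<string> {
  const existing = await Account.fromEmail(email)
  if (existing) return existing.id
  // Account.create validates its input with zod (see util/fn.ts) and
  // returns the generated account id.
  return Account.create({ email })
}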

View File

@@ -1,5 +1,4 @@
import { Context } from "./context"
import { UserRole } from "./schema/user.sql"
import { Log } from "./util/log"
export namespace Actor {
@@ -21,8 +20,7 @@ export namespace Actor {
properties: {
userID: string
workspaceID: string
accountID: string
role: (typeof UserRole)[number]
email: string
}
}
@@ -67,11 +65,6 @@ export namespace Actor {
return actor as Extract<Info, { type: T }>
}
export const assertAdmin = () => {
if (userRole() === "admin") return
throw new Error(`Action not allowed. Ask your workspace admin to perform this action.`)
}
export function workspace() {
const actor = use()
if ("workspaceID" in actor.properties) {
@@ -79,20 +72,4 @@ export namespace Actor {
}
throw new Error(`actor of type "${actor.type}" is not associated with a workspace`)
}
export function account() {
const actor = use()
if ("accountID" in actor.properties) {
return actor.properties.accountID
}
throw new Error(`actor of type "${actor.type}" is not associated with an account`)
}
export function userID() {
return Actor.assert("user").properties.userID
}
export function userRole() {
return Actor.assert("user").properties.role
}
}
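The Actor context is what ties the API routes below to a workspace. A hedged sketch of the pattern, with the Actor.provide signature inferred from its call sites in cloud/function/src/index.ts:

import { Actor } from "@opencode/cloud-core/actor.js"
import { Billing } from "@opencode/cloud-core/billing.js"

// Run a unit of work as the "system" actor for a workspace, the same way
// the gateway endpoint wraps its billing calls.
async function hasCredit(workspaceID: string) {
  return Actor.provide("system", { workspaceID }, async () => {
    const billing = await Billing.get() // resolves Actor.workspace() internally
    return billing.balance > 0
  })
}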

71
cloud/core/src/billing.ts Normal file
View File

@@ -0,0 +1,71 @@
import { Resource } from "sst"
import { Stripe } from "stripe"
import { Database, eq, sql } from "./drizzle"
import { BillingTable, UsageTable } from "./schema/billing.sql"
import { Actor } from "./actor"
import { fn } from "./util/fn"
import { z } from "zod"
import { Identifier } from "./identifier"
import { centsToMicroCents } from "./util/price"
export namespace Billing {
export const stripe = () =>
new Stripe(Resource.STRIPE_SECRET_KEY.value, {
apiVersion: "2025-03-31.basil",
})
export const get = async () => {
return Database.use(async (tx) =>
tx
.select({
customerID: BillingTable.customerID,
paymentMethodID: BillingTable.paymentMethodID,
balance: BillingTable.balance,
reload: BillingTable.reload,
})
.from(BillingTable)
.where(eq(BillingTable.workspaceID, Actor.workspace()))
.then((r) => r[0]),
)
}
export const consume = fn(
z.object({
requestID: z.string().optional(),
model: z.string(),
inputTokens: z.number(),
outputTokens: z.number(),
reasoningTokens: z.number().optional(),
cacheReadTokens: z.number().optional(),
cacheWriteTokens: z.number().optional(),
costInCents: z.number(),
}),
async (input) => {
const workspaceID = Actor.workspace()
const cost = centsToMicroCents(input.costInCents)
return await Database.transaction(async (tx) => {
await tx.insert(UsageTable).values({
workspaceID,
id: Identifier.create("usage"),
requestID: input.requestID,
model: input.model,
inputTokens: input.inputTokens,
outputTokens: input.outputTokens,
reasoningTokens: input.reasoningTokens,
cacheReadTokens: input.cacheReadTokens,
cacheWriteTokens: input.cacheWriteTokens,
cost,
})
const [updated] = await tx
.update(BillingTable)
.set({
balance: sql`${BillingTable.balance} - ${cost}`,
})
.where(eq(BillingTable.workspaceID, workspaceID))
.returning()
return updated.balance
})
},
)
}
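Billing.consume writes a usage row and decrements the workspace balance in one transaction; callers pass the cost in cents and it is stored as micro-cents. A usage sketch under the same system-actor scope the gateway uses:

import { Actor } from "@opencode/cloud-core/actor.js"
import { Billing } from "@opencode/cloud-core/billing.js"

async function recordUsage(workspaceID: string) {
  await Actor.provide("system", { workspaceID }, async () => {
    // Returns the remaining balance (in micro-cents) after the deduction.
    const remaining = await Billing.consume({
      model: "anthropic/claude-sonnet-4",
      inputTokens: 1200,
      outputTokens: 300,
      costInCents: 0.24, // computed by the caller, as trackUsage does below
    })
    console.log("balance left (micro-cents):", remaining)
  })
}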

View File

@@ -14,7 +14,7 @@ export namespace Context {
return result
},
provide<R>(value: T, fn: () => R) {
return storage.run(value, fn)
return storage.run<R>(value, fn)
},
}
}

View File

@@ -1,33 +1,39 @@
import { drizzle } from "drizzle-orm/planetscale-serverless"
import { Resource } from "@opencode-ai/console-resource"
import { drizzle } from "drizzle-orm/postgres-js"
import { Resource } from "sst"
export * from "drizzle-orm"
import { Client } from "@planetscale/database"
import postgres from "postgres"
import { MySqlTransaction, type MySqlTransactionConfig } from "drizzle-orm/mysql-core"
import type { ExtractTablesWithRelations } from "drizzle-orm"
import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from "drizzle-orm/planetscale-serverless"
import { Context } from "../context"
import { memo } from "../util/memo"
export namespace Database {
export type Transaction = MySqlTransaction<
PlanetscaleQueryResultHKT,
PlanetScalePreparedQueryHKT,
Record<string, never>,
ExtractTablesWithRelations<Record<string, never>>
>
const client = memo(() => {
const result = new Client({
host: Resource.Database.host,
username: Resource.Database.username,
password: Resource.Database.password,
})
const db = drizzle(result, {})
return db
function createClient() {
const client = postgres({
idle_timeout: 30000,
connect_timeout: 30000,
host: Resource.Database.host,
database: Resource.Database.database,
user: Resource.Database.username,
password: Resource.Database.password,
port: Resource.Database.port,
ssl: {
rejectUnauthorized: false,
},
max: 1,
})
export type TxOrDb = Transaction | ReturnType<typeof client>
return drizzle(client, {})
}
import { PgTransaction, type PgTransactionConfig } from "drizzle-orm/pg-core"
import type { ExtractTablesWithRelations } from "drizzle-orm"
import type { PostgresJsQueryResultHKT } from "drizzle-orm/postgres-js"
import { Context } from "../context"
export namespace Database {
export type Transaction = PgTransaction<
PostgresJsQueryResultHKT,
Record<string, unknown>,
ExtractTablesWithRelations<Record<string, unknown>>
>
export type TxOrDb = Transaction | ReturnType<typeof createClient>
const TransactionContext = Context.create<{
tx: TxOrDb
@@ -40,13 +46,14 @@ export namespace Database {
return tx.transaction(callback)
} catch (err) {
if (err instanceof Context.NotFound) {
const client = createClient()
const effects: (() => void | Promise<void>)[] = []
const result = await TransactionContext.provide(
{
effects,
tx: client(),
tx: client,
},
() => callback(client()),
() => callback(client),
)
await Promise.all(effects.map((x) => x()))
return result
@@ -67,14 +74,15 @@ export namespace Database {
}
}
export async function transaction<T>(callback: (tx: TxOrDb) => Promise<T>, config?: MySqlTransactionConfig) {
export async function transaction<T>(callback: (tx: TxOrDb) => Promise<T>, config?: PgTransactionConfig) {
try {
const { tx } = TransactionContext.use()
return callback(tx)
} catch (err) {
if (err instanceof Context.NotFound) {
const client = createClient()
const effects: (() => void | Promise<void>)[] = []
const result = await client().transaction(async (tx) => {
const result = await client.transaction(async (tx) => {
return TransactionContext.provide({ tx, effects }, () => callback(tx))
}, config)
await Promise.all(effects.map((x) => x()))
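The helpers above reuse an ambient transaction when one is already open (via Context) and otherwise create a postgres-js client on the fly. A sketch of how the core modules compose on top of Database.transaction:

import { Database, eq } from "@opencode/cloud-core/drizzle/index.js"
import { WorkspaceTable } from "@opencode/cloud-core/schema/workspace.sql.js"

async function renameWorkspace(id: string, name: string) {
  await Database.transaction(async (tx) => {
    // Any Database.use() or Database.transaction() call made further down
    // the stack picks up this same tx from the transaction context.
    await tx.update(WorkspaceTable).set({ name }).where(eq(WorkspaceTable.id, id))
  })
}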

View File

@@ -1,5 +1,4 @@
import { sql } from "drizzle-orm"
import { bigint, timestamp, varchar } from "drizzle-orm/mysql-core"
import { bigint, timestamp, varchar } from "drizzle-orm/pg-core"
export const ulid = (name: string) => varchar(name, { length: 30 })
@@ -16,7 +15,7 @@ export const id = () => ulid("id").notNull()
export const utc = (name: string) =>
timestamp(name, {
fsp: 3,
withTimezone: true,
})
export const currency = (name: string) =>
@@ -26,8 +25,5 @@ export const currency = (name: string) =>
export const timestamps = {
timeCreated: utc("time_created").notNull().defaultNow(),
timeUpdated: utc("time_updated")
.notNull()
.default(sql`CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)`),
timeDeleted: utc("time_deleted"),
}

View File

@@ -4,14 +4,9 @@ import { z } from "zod"
export namespace Identifier {
const prefixes = {
account: "acc",
auth: "aut",
benchmark: "ben",
billing: "bil",
key: "key",
model: "mod",
payment: "pay",
provider: "prv",
subscription: "sub",
usage: "usg",
user: "usr",
workspace: "wrk",

View File

@@ -0,0 +1,12 @@
import { pgTable, uniqueIndex, varchar } from "drizzle-orm/pg-core"
import { id, timestamps } from "../drizzle/types"
export const AccountTable = pgTable(
"account",
{
id: id(),
...timestamps,
email: varchar("email", { length: 255 }).notNull(),
},
(table) => [uniqueIndex("email").on(table.email)],
)

View File

@@ -0,0 +1,45 @@
import { bigint, boolean, integer, pgTable, varchar } from "drizzle-orm/pg-core"
import { timestamps, workspaceColumns } from "../drizzle/types"
import { workspaceIndexes } from "./workspace.sql"
export const BillingTable = pgTable(
"billing",
{
...workspaceColumns,
...timestamps,
customerID: varchar("customer_id", { length: 255 }),
paymentMethodID: varchar("payment_method_id", { length: 255 }),
paymentMethodLast4: varchar("payment_method_last4", { length: 4 }),
balance: bigint("balance", { mode: "number" }).notNull(),
reload: boolean("reload"),
},
(table) => [...workspaceIndexes(table)],
)
export const PaymentTable = pgTable(
"payment",
{
...workspaceColumns,
...timestamps,
customerID: varchar("customer_id", { length: 255 }),
paymentID: varchar("payment_id", { length: 255 }),
amount: bigint("amount", { mode: "number" }).notNull(),
},
(table) => [...workspaceIndexes(table)],
)
export const UsageTable = pgTable(
"usage",
{
...workspaceColumns,
...timestamps,
model: varchar("model", { length: 255 }).notNull(),
inputTokens: integer("input_tokens").notNull(),
outputTokens: integer("output_tokens").notNull(),
reasoningTokens: integer("reasoning_tokens"),
cacheReadTokens: integer("cache_read_tokens"),
cacheWriteTokens: integer("cache_write_tokens"),
cost: bigint("cost", { mode: "number" }).notNull(),
},
(table) => [...workspaceIndexes(table)],
)

View File

@@ -1,15 +1,15 @@
import { mysqlTable, varchar, uniqueIndex } from "drizzle-orm/mysql-core"
import { timestamps, ulid, utc, workspaceColumns } from "../drizzle/types"
import { text, pgTable, varchar, uniqueIndex } from "drizzle-orm/pg-core"
import { timestamps, utc, workspaceColumns } from "../drizzle/types"
import { workspaceIndexes } from "./workspace.sql"
export const KeyTable = mysqlTable(
export const KeyTable = pgTable(
"key",
{
...workspaceColumns,
...timestamps,
userID: text("user_id").notNull(),
name: varchar("name", { length: 255 }).notNull(),
key: varchar("key", { length: 255 }).notNull(),
userID: ulid("user_id").notNull(),
timeUsed: utc("time_used"),
},
(table) => [...workspaceIndexes(table), uniqueIndex("global_key").on(table.key)],

View File

@@ -0,0 +1,16 @@
import { text, pgTable, uniqueIndex, varchar, integer } from "drizzle-orm/pg-core"
import { timestamps, utc, workspaceColumns } from "../drizzle/types"
import { workspaceIndexes } from "./workspace.sql"
export const UserTable = pgTable(
"user",
{
...workspaceColumns,
...timestamps,
email: text("email").notNull(),
name: varchar("name", { length: 255 }).notNull(),
timeSeen: utc("time_seen"),
color: integer("color"),
},
(table) => [...workspaceIndexes(table), uniqueIndex("user_email").on(table.workspaceID, table.email)],
)

View File

@@ -1,12 +1,12 @@
import { primaryKey, mysqlTable, uniqueIndex, varchar } from "drizzle-orm/mysql-core"
import { primaryKey, foreignKey, pgTable, uniqueIndex, varchar } from "drizzle-orm/pg-core"
import { timestamps, ulid } from "../drizzle/types"
export const WorkspaceTable = mysqlTable(
export const WorkspaceTable = pgTable(
"workspace",
{
id: ulid("id").notNull().primaryKey(),
slug: varchar("slug", { length: 255 }),
name: varchar("name", { length: 255 }).notNull(),
name: varchar("name", { length: 255 }),
...timestamps,
},
(table) => [uniqueIndex("slug").on(table.slug)],
@@ -17,5 +17,9 @@ export function workspaceIndexes(table: any) {
primaryKey({
columns: [table.workspaceID, table.id],
}),
foreignKey({
foreignColumns: [WorkspaceTable.id],
columns: [table.workspaceID],
}),
]
}

14
cloud/core/src/util/fn.ts Normal file
View File

@@ -0,0 +1,14 @@
import { z } from "zod"
export function fn<T extends z.ZodType, Result>(
schema: T,
cb: (input: z.output<T>) => Result,
) {
const result = (input: z.input<T>) => {
const parsed = schema.parse(input)
return cb(parsed)
}
result.force = (input: z.input<T>) => cb(input)
result.schema = schema
return result
}
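fn pairs a zod schema with a handler so every caller gets input validation; Account.create, Billing.consume, and Workspace.create above are all built with it. A small standalone example:

import { z } from "zod"
import { fn } from "@opencode/cloud-core/util/fn.js"

const greet = fn(
  z.object({ name: z.string().min(1) }),
  (input) => `hello ${input.name}`,
)

greet({ name: "opencode" }) // parsed by the schema, returns "hello opencode"
greet.force({ name: "" })   // bypasses schema.parse entirely
greet.schema                // the underlying zod schema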

View File

@@ -0,0 +1,48 @@
import { z } from "zod"
import { fn } from "./util/fn"
import { centsToMicroCents } from "./util/price"
import { Actor } from "./actor"
import { Database, eq } from "./drizzle"
import { Identifier } from "./identifier"
import { UserTable } from "./schema/user.sql"
import { BillingTable } from "./schema/billing.sql"
import { WorkspaceTable } from "./schema/workspace.sql"
export namespace Workspace {
export const create = fn(z.void(), async () => {
const account = Actor.assert("account")
const workspaceID = Identifier.create("workspace")
await Database.transaction(async (tx) => {
await tx.insert(WorkspaceTable).values({
id: workspaceID,
})
await tx.insert(UserTable).values({
workspaceID,
id: Identifier.create("user"),
email: account.properties.email,
name: "",
})
await tx.insert(BillingTable).values({
workspaceID,
id: Identifier.create("billing"),
balance: centsToMicroCents(100),
})
})
return workspaceID
})
export async function list() {
const account = Actor.assert("account")
return Database.use(async (tx) => {
return tx
.select({
id: WorkspaceTable.id,
slug: WorkspaceTable.slug,
name: WorkspaceTable.name,
})
.from(UserTable)
.innerJoin(WorkspaceTable, eq(UserTable.workspaceID, WorkspaceTable.id))
.where(eq(UserTable.email, account.properties.email))
})
}
}
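Workspace.create and Workspace.list both assert the account actor; the /rest/account route below pairs them the same way so every account ends up with at least one workspace. A sketch:

import { Actor } from "@opencode/cloud-core/actor.js"
import { Workspace } from "@opencode/cloud-core/workspace.js"

async function firstWorkspace(accountID: string, email: string) {
  return Actor.provide("account", { accountID, email }, async () => {
    let workspaces = await Workspace.list()
    if (workspaces.length === 0) {
      await Workspace.create() // also seeds the initial user and billing rows
      workspaces = await Workspace.list()
    }
    return workspaces[0]
  })
}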

View File

@@ -1,28 +1,20 @@
{
"name": "@opencode-ai/console-function",
"version": "1.1.34",
"name": "@opencode/cloud-function",
"version": "0.4.13",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",
"license": "MIT",
"scripts": {
"typecheck": "tsgo --noEmit"
},
"devDependencies": {
"@cloudflare/workers-types": "catalog:",
"@tsconfig/node22": "22.0.2",
"@cloudflare/workers-types": "4.20250522.0",
"@types/node": "catalog:",
"openai": "5.11.0",
"typescript": "catalog:",
"@typescript/native-preview": "catalog:"
"typescript": "catalog:"
},
"dependencies": {
"@ai-sdk/anthropic": "2.0.0",
"@ai-sdk/openai": "2.0.2",
"@ai-sdk/openai-compatible": "1.0.1",
"@hono/zod-validator": "catalog:",
"@opencode-ai/console-core": "workspace:*",
"@opencode-ai/console-resource": "workspace:*",
"@openauthjs/openauth": "0.0.0-20250322224806",
"ai": "catalog:",
"hono": "catalog:",

124
cloud/function/src/auth.ts Normal file
View File

@@ -0,0 +1,124 @@
import { Resource } from "sst"
import { z } from "zod"
import { issuer } from "@openauthjs/openauth"
import { createSubjects } from "@openauthjs/openauth/subject"
import { CodeProvider } from "@openauthjs/openauth/provider/code"
import { GithubProvider } from "@openauthjs/openauth/provider/github"
import { GoogleOidcProvider } from "@openauthjs/openauth/provider/google"
import { CloudflareStorage } from "@openauthjs/openauth/storage/cloudflare"
import { Account } from "@opencode/cloud-core/account.js"
type Env = {
AuthStorage: KVNamespace
}
export const subjects = createSubjects({
account: z.object({
accountID: z.string(),
email: z.string(),
}),
user: z.object({
userID: z.string(),
workspaceID: z.string(),
}),
})
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext) {
return issuer({
providers: {
github: GithubProvider({
clientID: Resource.GITHUB_CLIENT_ID_CONSOLE.value,
clientSecret: Resource.GITHUB_CLIENT_SECRET_CONSOLE.value,
scopes: ["read:user", "user:email"],
}),
google: GoogleOidcProvider({
clientID: Resource.GOOGLE_CLIENT_ID.value,
scopes: ["openid", "email"],
}),
// email: CodeProvider({
// async request(req, state, form, error) {
// console.log(state)
// const params = new URLSearchParams()
// if (error) {
// params.set("error", error.type)
// }
// if (state.type === "start") {
// return Response.redirect(process.env.AUTH_FRONTEND_URL + "/auth/email?" + params.toString(), 302)
// }
//
// if (state.type === "code") {
// return Response.redirect(process.env.AUTH_FRONTEND_URL + "/auth/code?" + params.toString(), 302)
// }
//
// return new Response("ok")
// },
// async sendCode(claims, code) {
// const email = z.string().email().parse(claims.email)
// const cmd = new SendEmailCommand({
// Destination: {
// ToAddresses: [email],
// },
// FromEmailAddress: `SST <auth@${Resource.Email.sender}>`,
// Content: {
// Simple: {
// Body: {
// Html: {
// Data: `Your pin code is <strong>${code}</strong>`,
// },
// Text: {
// Data: `Your pin code is ${code}`,
// },
// },
// Subject: {
// Data: "SST Console Pin Code: " + code,
// },
// },
// },
// })
// await ses.send(cmd)
// },
// }),
},
storage: CloudflareStorage({
namespace: env.AuthStorage,
}),
subjects,
async success(ctx, response) {
console.log(response)
let email: string | undefined
if (response.provider === "github") {
const userResponse = await fetch("https://api.github.com/user", {
headers: {
Authorization: `Bearer ${response.tokenset.access}`,
"User-Agent": "opencode",
Accept: "application/vnd.github+json",
},
})
const user = (await userResponse.json()) as { email: string }
email = user.email
} else if (response.provider === "google") {
if (!response.id.email_verified) throw new Error("Google email not verified")
email = response.id.email as string
}
//if (response.provider === "email") {
// email = response.claims.email
//}
else throw new Error("Unsupported provider")
if (!email) throw new Error("No email found")
let accountID = await Account.fromEmail(email).then((x) => x?.id)
if (!accountID) {
console.log("creating account for", email)
accountID = await Account.create({
email: email!,
})
}
return ctx.subject("account", accountID, { accountID, email })
},
}).fetch(request, env, ctx)
},
}

View File

@@ -0,0 +1,909 @@
import { z } from "zod"
import { Hono, MiddlewareHandler } from "hono"
import { cors } from "hono/cors"
import { HTTPException } from "hono/http-exception"
import { zValidator } from "@hono/zod-validator"
import { Resource } from "sst"
import { type ProviderMetadata, type LanguageModelUsage, generateText, streamText } from "ai"
import { createAnthropic } from "@ai-sdk/anthropic"
import { createOpenAI } from "@ai-sdk/openai"
import { createOpenAICompatible } from "@ai-sdk/openai-compatible"
import type { LanguageModelV2Prompt } from "@ai-sdk/provider"
import { type ChatCompletionCreateParamsBase } from "openai/resources/chat/completions"
import { Actor } from "@opencode/cloud-core/actor.js"
import { and, Database, eq, sql } from "@opencode/cloud-core/drizzle/index.js"
import { UserTable } from "@opencode/cloud-core/schema/user.sql.js"
import { KeyTable } from "@opencode/cloud-core/schema/key.sql.js"
import { createClient } from "@openauthjs/openauth/client"
import { Log } from "@opencode/cloud-core/util/log.js"
import { Billing } from "@opencode/cloud-core/billing.js"
import { Workspace } from "@opencode/cloud-core/workspace.js"
import { BillingTable, PaymentTable, UsageTable } from "@opencode/cloud-core/schema/billing.sql.js"
import { centsToMicroCents } from "@opencode/cloud-core/util/price.js"
import { Identifier } from "../../core/src/identifier"
type Env = {}
let _client: ReturnType<typeof createClient>
const client = () => {
if (_client) return _client
_client = createClient({
clientID: "api",
issuer: Resource.AUTH_API_URL.value,
})
return _client
}
const SUPPORTED_MODELS = {
"anthropic/claude-sonnet-4": {
input: 0.0000015,
output: 0.000006,
reasoning: 0.0000015,
cacheRead: 0.0000001,
cacheWrite: 0.0000001,
model: () =>
createAnthropic({
apiKey: Resource.ANTHROPIC_API_KEY.value,
})("claude-sonnet-4-20250514"),
},
"openai/gpt-4.1": {
input: 0.0000015,
output: 0.000006,
reasoning: 0.0000015,
cacheRead: 0.0000001,
cacheWrite: 0.0000001,
model: () =>
createOpenAI({
apiKey: Resource.OPENAI_API_KEY.value,
})("gpt-4.1"),
},
"zhipuai/glm-4.5-flash": {
input: 0,
output: 0,
reasoning: 0,
cacheRead: 0,
cacheWrite: 0,
model: () =>
createOpenAICompatible({
name: "Zhipu AI",
baseURL: "https://api.z.ai/api/paas/v4",
apiKey: Resource.ZHIPU_API_KEY.value,
})("glm-4.5-flash"),
},
}
const log = Log.create({
namespace: "api",
})
const GatewayAuth: MiddlewareHandler = async (c, next) => {
const authHeader = c.req.header("authorization")
if (!authHeader || !authHeader.startsWith("Bearer ")) {
return c.json(
{
error: {
message: "Missing API key.",
type: "invalid_request_error",
param: null,
code: "unauthorized",
},
},
401,
)
}
const apiKey = authHeader.split(" ")[1]
// Check against KeyTable
const keyRecord = await Database.use((tx) =>
tx
.select({
id: KeyTable.id,
workspaceID: KeyTable.workspaceID,
})
.from(KeyTable)
.where(eq(KeyTable.key, apiKey))
.then((rows) => rows[0]),
)
if (!keyRecord) {
return c.json(
{
error: {
message: "Invalid API key.",
type: "invalid_request_error",
param: null,
code: "unauthorized",
},
},
401,
)
}
c.set("keyRecord", keyRecord)
await next()
}
const RestAuth: MiddlewareHandler = async (c, next) => {
const authorization = c.req.header("authorization")
if (!authorization) {
return Actor.provide("public", {}, next)
}
const token = authorization.split(" ")[1]
if (!token)
throw new HTTPException(403, {
message: "Bearer token is required.",
})
const verified = await client().verify(token)
if (verified.err) {
throw new HTTPException(403, {
message: "Invalid token.",
})
}
let subject = verified.subject as Actor.Info
if (subject.type === "account") {
const workspaceID = c.req.header("x-opencode-workspace")
const email = subject.properties.email
if (workspaceID) {
const user = await Database.use((tx) =>
tx
.select({
id: UserTable.id,
workspaceID: UserTable.workspaceID,
email: UserTable.email,
})
.from(UserTable)
.where(and(eq(UserTable.email, email), eq(UserTable.workspaceID, workspaceID)))
.then((rows) => rows[0]),
)
if (!user)
throw new HTTPException(403, {
message: "You do not have access to this workspace.",
})
subject = {
type: "user",
properties: {
userID: user.id,
workspaceID: workspaceID,
email: user.email,
},
}
}
}
await Actor.provide(subject.type, subject.properties, next)
}
const app = new Hono<{ Bindings: Env; Variables: { keyRecord?: { id: string; workspaceID: string } } }>()
.get("/", (c) => c.text("Hello, world!"))
.post("/v1/chat/completions", GatewayAuth, async (c) => {
const keyRecord = c.get("keyRecord")!
return await Actor.provide("system", { workspaceID: keyRecord.workspaceID }, async () => {
try {
// Check balance
const customer = await Billing.get()
if (customer.balance <= 0) {
return c.json(
{
error: {
message: "Insufficient balance",
type: "insufficient_quota",
param: null,
code: "insufficient_quota",
},
},
401,
)
}
const body = await c.req.json<ChatCompletionCreateParamsBase>()
const model = SUPPORTED_MODELS[body.model as keyof typeof SUPPORTED_MODELS]?.model()
if (!model) throw new Error(`Unsupported model: ${body.model}`)
const requestBody = transformOpenAIRequestToAiSDK()
return body.stream ? await handleStream() : await handleGenerate()
async function handleStream() {
const result = await model.doStream({
...requestBody,
})
const encoder = new TextEncoder()
const stream = new ReadableStream({
async start(controller) {
const id = `chatcmpl-${Date.now()}`
const created = Math.floor(Date.now() / 1000)
try {
for await (const chunk of result.stream) {
console.log("!!! CHUNK !!! : " + chunk.type)
switch (chunk.type) {
case "text-delta": {
const data = {
id,
object: "chat.completion.chunk",
created,
model: body.model,
choices: [
{
index: 0,
delta: {
content: chunk.delta,
},
finish_reason: null,
},
],
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`))
break
}
case "reasoning-delta": {
const data = {
id,
object: "chat.completion.chunk",
created,
model: body.model,
choices: [
{
index: 0,
delta: {
reasoning_content: chunk.delta,
},
finish_reason: null,
},
],
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`))
break
}
case "tool-call": {
const data = {
id,
object: "chat.completion.chunk",
created,
model: body.model,
choices: [
{
index: 0,
delta: {
tool_calls: [
{
index: 0,
id: chunk.toolCallId,
type: "function",
function: {
name: chunk.toolName,
arguments: chunk.input,
},
},
],
},
finish_reason: null,
},
],
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`))
break
}
case "error": {
const data = {
id,
object: "chat.completion.chunk",
created,
model: body.model,
choices: [
{
index: 0,
delta: {},
finish_reason: "stop",
},
],
error: {
message: typeof chunk.error === "string" ? chunk.error : JSON.stringify(chunk.error),
type: "server_error",
},
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`))
controller.enqueue(encoder.encode("data: [DONE]\n\n"))
controller.close()
break
}
case "finish": {
const data = {
id,
object: "chat.completion.chunk",
created,
model: body.model,
choices: [
{
index: 0,
delta: {},
finish_reason:
{
stop: "stop",
length: "length",
"content-filter": "content_filter",
"tool-calls": "tool_calls",
error: "stop",
other: "stop",
unknown: "stop",
}[chunk.finishReason] || "stop",
},
],
usage: {
prompt_tokens: chunk.usage.inputTokens,
completion_tokens: chunk.usage.outputTokens,
total_tokens: chunk.usage.totalTokens,
completion_tokens_details: {
reasoning_tokens: chunk.usage.reasoningTokens,
},
prompt_tokens_details: {
cached_tokens: chunk.usage.cachedInputTokens,
},
},
}
await trackUsage(body.model, chunk.usage, chunk.providerMetadata)
controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`))
controller.enqueue(encoder.encode("data: [DONE]\n\n"))
controller.close()
break
}
//case "stream-start":
//case "response-metadata":
case "text-start":
case "text-end":
case "reasoning-start":
case "reasoning-end":
case "tool-input-start":
case "tool-input-delta":
case "tool-input-end":
case "raw":
default:
// Log unknown chunk types for debugging
console.warn(`Unknown chunk type: ${(chunk as any).type}`)
break
}
}
} catch (error) {
controller.error(error)
}
},
})
return new Response(stream, {
headers: {
"Content-Type": "text/plain; charset=utf-8",
"Cache-Control": "no-cache",
Connection: "keep-alive",
},
})
}
async function handleGenerate() {
const response = await model.doGenerate({
...requestBody,
})
await trackUsage(body.model, response.usage, response.providerMetadata)
return c.json({
id: `chatcmpl-${Date.now()}`,
object: "chat.completion" as const,
created: Math.floor(Date.now() / 1000),
model: body.model,
choices: [
{
index: 0,
message: {
role: "assistant" as const,
content: response.content?.find((c) => c.type === "text")?.text ?? "",
reasoning_content: response.content?.find((c) => c.type === "reasoning")?.text,
tool_calls: response.content
?.filter((c) => c.type === "tool-call")
.map((toolCall) => ({
id: toolCall.toolCallId,
type: "function" as const,
function: {
name: toolCall.toolName,
arguments: toolCall.input,
},
})),
},
finish_reason:
(
{
stop: "stop",
length: "length",
"content-filter": "content_filter",
"tool-calls": "tool_calls",
error: "stop",
other: "stop",
unknown: "stop",
} as const
)[response.finishReason] || "stop",
},
],
usage: {
prompt_tokens: response.usage?.inputTokens,
completion_tokens: response.usage?.outputTokens,
total_tokens: response.usage?.totalTokens,
completion_tokens_details: {
reasoning_tokens: response.usage?.reasoningTokens,
},
prompt_tokens_details: {
cached_tokens: response.usage?.cachedInputTokens,
},
},
})
}
function transformOpenAIRequestToAiSDK() {
const prompt = transformMessages()
const tools = transformTools()
return {
prompt,
maxOutputTokens: body.max_tokens ?? body.max_completion_tokens ?? undefined,
temperature: body.temperature ?? undefined,
topP: body.top_p ?? undefined,
frequencyPenalty: body.frequency_penalty ?? undefined,
presencePenalty: body.presence_penalty ?? undefined,
providerOptions: body.reasoning_effort
? {
anthropic: {
reasoningEffort: body.reasoning_effort,
},
}
: undefined,
stopSequences: (typeof body.stop === "string" ? [body.stop] : body.stop) ?? undefined,
responseFormat: (() => {
if (!body.response_format) return { type: "text" as const }
if (body.response_format.type === "json_schema")
return {
type: "json" as const,
schema: body.response_format.json_schema.schema,
name: body.response_format.json_schema.name,
description: body.response_format.json_schema.description,
}
if (body.response_format.type === "json_object") return { type: "json" as const }
throw new Error("Unsupported response format")
})(),
seed: body.seed ?? undefined,
tools: tools.tools,
toolChoice: tools.toolChoice,
}
function transformTools() {
const { tools, tool_choice } = body
if (!tools || tools.length === 0) {
return { tools: undefined, toolChoice: undefined }
}
const aiSdkTools = tools.map((tool) => {
return {
type: tool.type,
name: tool.function.name,
description: tool.function.description,
inputSchema: tool.function.parameters!,
}
})
let aiSdkToolChoice
if (tool_choice == null) {
aiSdkToolChoice = undefined
} else if (tool_choice === "auto") {
aiSdkToolChoice = { type: "auto" as const }
} else if (tool_choice === "none") {
aiSdkToolChoice = { type: "none" as const }
} else if (tool_choice === "required") {
aiSdkToolChoice = { type: "required" as const }
} else if (tool_choice.type === "function") {
aiSdkToolChoice = {
type: "tool" as const,
toolName: tool_choice.function.name,
}
}
return { tools: aiSdkTools, toolChoice: aiSdkToolChoice }
}
function transformMessages() {
const { messages } = body
const prompt: LanguageModelV2Prompt = []
for (const message of messages) {
switch (message.role) {
case "system": {
prompt.push({
role: "system",
content: message.content as string,
})
break
}
case "user": {
if (typeof message.content === "string") {
prompt.push({
role: "user",
content: [{ type: "text", text: message.content }],
})
} else {
const content = message.content.map((part) => {
switch (part.type) {
case "text":
return { type: "text" as const, text: part.text }
case "image_url":
return {
type: "file" as const,
mediaType: "image/jpeg" as const,
data: part.image_url.url,
}
default:
throw new Error(`Unsupported content part type: ${(part as any).type}`)
}
})
prompt.push({
role: "user",
content,
})
}
break
}
case "assistant": {
const content: Array<
| { type: "text"; text: string }
| {
type: "tool-call"
toolCallId: string
toolName: string
input: any
}
> = []
if (message.content) {
content.push({
type: "text",
text: message.content as string,
})
}
if (message.tool_calls) {
for (const toolCall of message.tool_calls) {
content.push({
type: "tool-call",
toolCallId: toolCall.id,
toolName: toolCall.function.name,
input: JSON.parse(toolCall.function.arguments),
})
}
}
prompt.push({
role: "assistant",
content,
})
break
}
case "tool": {
prompt.push({
role: "tool",
content: [
{
type: "tool-result",
toolName: "placeholder",
toolCallId: message.tool_call_id,
output: {
type: "text",
value: message.content as string,
},
},
],
})
break
}
default: {
throw new Error(`Unsupported message role: ${message.role}`)
}
}
}
return prompt
}
}
async function trackUsage(model: string, usage: LanguageModelUsage, providerMetadata?: ProviderMetadata) {
const modelData = SUPPORTED_MODELS[model as keyof typeof SUPPORTED_MODELS]
if (!modelData) throw new Error(`Unsupported model: ${model}`)
const inputTokens = usage.inputTokens ?? 0
const outputTokens = usage.outputTokens ?? 0
const reasoningTokens = usage.reasoningTokens ?? 0
const cacheReadTokens = usage.cachedInputTokens ?? 0
const cacheWriteTokens =
providerMetadata?.["anthropic"]?.["cacheCreationInputTokens"] ??
// @ts-expect-error
providerMetadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ??
0
const inputCost = modelData.input * inputTokens
const outputCost = modelData.output * outputTokens
const reasoningCost = modelData.reasoning * reasoningTokens
const cacheReadCost = modelData.cacheRead * cacheReadTokens
const cacheWriteCost = modelData.cacheWrite * cacheWriteTokens
const costInCents = (inputCost + outputCost + reasoningCost + cacheReadCost + cacheWriteCost) * 100
await Billing.consume({
model,
inputTokens,
outputTokens,
reasoningTokens,
cacheReadTokens,
cacheWriteTokens,
costInCents,
})
await Database.use((tx) =>
tx
.update(KeyTable)
.set({ timeUsed: sql`now()` })
.where(eq(KeyTable.id, keyRecord.id)),
)
}
} catch (error: any) {
return c.json({ error: { message: error.message } }, 500)
}
})
})
.use("/*", cors())
.use(RestAuth)
.get("/rest/account", async (c) => {
const account = Actor.assert("account")
let workspaces = await Workspace.list()
if (workspaces.length === 0) {
await Workspace.create()
workspaces = await Workspace.list()
}
return c.json({
id: account.properties.accountID,
email: account.properties.email,
workspaces,
})
})
.get("/billing/info", async (c) => {
const billing = await Billing.get()
const payments = await Database.use((tx) =>
tx
.select()
.from(PaymentTable)
.where(eq(PaymentTable.workspaceID, Actor.workspace()))
.orderBy(sql`${PaymentTable.timeCreated} DESC`)
.limit(100),
)
const usage = await Database.use((tx) =>
tx
.select()
.from(UsageTable)
.where(eq(UsageTable.workspaceID, Actor.workspace()))
.orderBy(sql`${UsageTable.timeCreated} DESC`)
.limit(100),
)
return c.json({ billing, payments, usage })
})
.post(
"/billing/checkout",
zValidator(
"json",
z.custom<{
success_url: string
cancel_url: string
}>(),
),
async (c) => {
const account = Actor.assert("user")
const body = await c.req.json()
const customer = await Billing.get()
const session = await Billing.stripe().checkout.sessions.create({
mode: "payment",
line_items: [
{
price_data: {
currency: "usd",
product_data: {
name: "opencode credits",
},
unit_amount: 2000, // $20 minimum
},
quantity: 1,
},
],
payment_intent_data: {
setup_future_usage: "on_session",
},
...(customer.customerID
? { customer: customer.customerID }
: {
customer_email: account.properties.email,
customer_creation: "always",
}),
metadata: {
workspaceID: Actor.workspace(),
},
currency: "usd",
payment_method_types: ["card"],
success_url: body.success_url,
cancel_url: body.cancel_url,
})
return c.json({
url: session.url,
})
},
)
.post("/billing/portal", async (c) => {
const body = await c.req.json()
const customer = await Billing.get()
if (!customer?.customerID) {
throw new Error("No stripe customer ID")
}
const session = await Billing.stripe().billingPortal.sessions.create({
customer: customer.customerID,
return_url: body.return_url,
})
return c.json({
url: session.url,
})
})
.post("/stripe/webhook", async (c) => {
const body = await Billing.stripe().webhooks.constructEventAsync(
await c.req.text(),
c.req.header("stripe-signature")!,
Resource.STRIPE_WEBHOOK_SECRET.value,
)
console.log(body.type, JSON.stringify(body, null, 2))
if (body.type === "checkout.session.completed") {
const workspaceID = body.data.object.metadata?.workspaceID
const customerID = body.data.object.customer as string
const paymentID = body.data.object.payment_intent as string
const amount = body.data.object.amount_total
if (!workspaceID) throw new Error("Workspace ID not found")
if (!customerID) throw new Error("Customer ID not found")
if (!amount) throw new Error("Amount not found")
if (!paymentID) throw new Error("Payment ID not found")
await Actor.provide("system", { workspaceID }, async () => {
const customer = await Billing.get()
if (customer?.customerID && customer.customerID !== customerID) throw new Error("Customer ID mismatch")
// set customer metadata
if (!customer?.customerID) {
await Billing.stripe().customers.update(customerID, {
metadata: {
workspaceID,
},
})
}
// get payment method for the payment intent
const paymentIntent = await Billing.stripe().paymentIntents.retrieve(paymentID, {
expand: ["payment_method"],
})
const paymentMethod = paymentIntent.payment_method
if (!paymentMethod || typeof paymentMethod === "string") throw new Error("Payment method not expanded")
await Database.transaction(async (tx) => {
await tx
.update(BillingTable)
.set({
balance: sql`${BillingTable.balance} + ${centsToMicroCents(amount)}`,
customerID,
paymentMethodID: paymentMethod.id,
paymentMethodLast4: paymentMethod.card!.last4,
})
.where(eq(BillingTable.workspaceID, workspaceID))
await tx.insert(PaymentTable).values({
workspaceID,
id: Identifier.create("payment"),
amount: centsToMicroCents(amount),
paymentID,
customerID,
})
})
})
}
console.log("finished handling")
return c.json("ok", 200)
})
.get("/keys", async (c) => {
const user = Actor.assert("user")
const keys = await Database.use((tx) =>
tx
.select({
id: KeyTable.id,
name: KeyTable.name,
key: KeyTable.key,
userID: KeyTable.userID,
timeCreated: KeyTable.timeCreated,
timeUsed: KeyTable.timeUsed,
})
.from(KeyTable)
.where(eq(KeyTable.workspaceID, user.properties.workspaceID))
.orderBy(sql`${KeyTable.timeCreated} DESC`),
)
return c.json({ keys })
})
.post("/keys", zValidator("json", z.object({ name: z.string().min(1).max(255) })), async (c) => {
const user = Actor.assert("user")
const { name } = c.req.valid("json")
// Generate secret key: sk- + 64 random characters (upper, lower, numbers)
const chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
let randomPart = ""
for (let i = 0; i < 64; i++) {
randomPart += chars.charAt(Math.floor(Math.random() * chars.length))
}
const secretKey = `sk-${randomPart}`
const keyRecord = await Database.use((tx) =>
tx
.insert(KeyTable)
.values({
id: Identifier.create("key"),
workspaceID: user.properties.workspaceID,
userID: user.properties.userID,
name,
key: secretKey,
timeUsed: null,
})
.returning(),
)
return c.json({
key: secretKey,
id: keyRecord[0].id,
name: keyRecord[0].name,
created: keyRecord[0].timeCreated,
})
})
.delete("/keys/:id", async (c) => {
const user = Actor.assert("user")
const keyId = c.req.param("id")
const result = await Database.use((tx) =>
tx
.delete(KeyTable)
.where(and(eq(KeyTable.id, keyId), eq(KeyTable.workspaceID, user.properties.workspaceID)))
.returning({ id: KeyTable.id }),
)
if (result.length === 0) {
return c.json({ error: "Key not found" }, 404)
}
return c.json({ success: true, id: result[0].id })
})
.all("*", (c) => c.text("Not Found"))
export type ApiType = typeof app
export default app
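From a client's point of view the gateway route is OpenAI-compatible: a Bearer key minted via POST /keys, one of the SUPPORTED_MODELS ids, and a standard chat-completions payload. A hedged sketch against a hypothetical deployment URL:

// Hypothetical base URL -- substitute the deployed GatewayApi endpoint.
const BASE_URL = "https://gateway.example.com"

async function complete(apiKey: string) {
  const res = await fetch(`${BASE_URL}/v1/chat/completions`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${apiKey}`, // sk-... key from the /keys endpoint
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      model: "zhipuai/glm-4.5-flash",
      messages: [{ role: "user", content: "hello" }],
      stream: false,
    }),
  })
  if (!res.ok) throw new Error(`gateway error: ${res.status}`)
  return res.json()
}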

92
cloud/function/sst-env.d.ts vendored Normal file
View File

@@ -0,0 +1,92 @@
/* This file is auto-generated by SST. Do not edit. */
/* tslint:disable */
/* eslint-disable */
/* deno-fmt-ignore-file */
import "sst"
declare module "sst" {
export interface Resource {
"ANTHROPIC_API_KEY": {
"type": "sst.sst.Secret"
"value": string
}
"AUTH_API_URL": {
"type": "sst.sst.Linkable"
"value": string
}
"Console": {
"type": "sst.cloudflare.StaticSite"
"url": string
}
"DATABASE_PASSWORD": {
"type": "sst.sst.Secret"
"value": string
}
"DATABASE_USERNAME": {
"type": "sst.sst.Secret"
"value": string
}
"Database": {
"database": string
"host": string
"password": string
"port": number
"type": "sst.sst.Linkable"
"username": string
}
"GITHUB_APP_ID": {
"type": "sst.sst.Secret"
"value": string
}
"GITHUB_APP_PRIVATE_KEY": {
"type": "sst.sst.Secret"
"value": string
}
"GITHUB_CLIENT_ID_CONSOLE": {
"type": "sst.sst.Secret"
"value": string
}
"GITHUB_CLIENT_SECRET_CONSOLE": {
"type": "sst.sst.Secret"
"value": string
}
"GOOGLE_CLIENT_ID": {
"type": "sst.sst.Secret"
"value": string
}
"OPENAI_API_KEY": {
"type": "sst.sst.Secret"
"value": string
}
"STRIPE_SECRET_KEY": {
"type": "sst.sst.Secret"
"value": string
}
"STRIPE_WEBHOOK_SECRET": {
"type": "sst.sst.Linkable"
"value": string
}
"Web": {
"type": "sst.cloudflare.Astro"
"url": string
}
"ZHIPU_API_KEY": {
"type": "sst.sst.Secret"
"value": string
}
}
}
// cloudflare
import * as cloudflare from "@cloudflare/workers-types";
declare module "sst" {
export interface Resource {
"Api": cloudflare.Service
"AuthApi": cloudflare.Service
"AuthStorage": cloudflare.KVNamespace
"Bucket": cloudflare.R2Bucket
"GatewayApi": cloudflare.Service
}
}
import "sst"
export {}

View File

@@ -4,8 +4,6 @@
"compilerOptions": {
"module": "ESNext",
"moduleResolution": "bundler",
"jsx": "preserve",
"jsxImportSource": "react",
"types": ["@cloudflare/workers-types", "node"]
}
}

2
cloud/web/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
node_modules
dist

38
cloud/web/index.html Normal file
View File

@@ -0,0 +1,38 @@
<!doctype html>
<html lang="en" data-color-mode="dark">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>OpenControl</title>
<link rel="shortcut icon" type="image/ico" href="/favicon.ico" />
<link rel="icon" href="/favicon.ico" sizes="48x48">
<link rel="icon" href="/favicon.svg" media="(prefers-color-scheme: light)">
<link rel="icon" href="/favicon-dark.svg" media="(prefers-color-scheme: dark)">
<link rel="shortcut icon" href="/favicon.svg" type="image/svg+xml">
<meta property="twitter:image" content="%BASE_URL%/social-share.png">
<meta property="og:title" content="OpenControl">
<meta property="og:url" content="%BASE_URL%">
<meta property="og:locale" content="en">
<meta property="og:description" content="Control your infrastructure with AI.">
<meta property="og:site_name" content="OpenControl">
<meta name="twitter:card" content="summary_large_image">
<meta name="description" content="Control your infrastructure with AI.">
<meta property="og:image" content="%BASE_URL%/social-share.png">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=IBM+Plex+Mono:wght@400;500;600;700&family=Rubik:wght@300..900&display=swap" rel="stylesheet">
<!--ssr-head-->
<!--ssr-assets-->
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root">
<!--ssr-outlet-->
</div>
<script type="module" src="/src/entry-client.tsx"></script>
</body>
</html>

29
cloud/web/npm-debug.log Normal file
View File

@@ -0,0 +1,29 @@
0 info it worked if it ends with ok
1 verbose cli [
1 verbose cli '/usr/local/bin/node',
1 verbose cli '/Users/frank/Sites/opencode/node_modules/.bin/npm',
1 verbose cli 'run',
1 verbose cli 'dev'
1 verbose cli ]
2 info using npm@2.15.12
3 info using node@v20.18.1
4 verbose stack Error: Invalid name: "@opencode/cloud/web"
4 verbose stack at ensureValidName (/Users/frank/Sites/opencode/node_modules/npm/node_modules/normalize-package-data/lib/fixer.js:336:15)
4 verbose stack at Object.fixNameField (/Users/frank/Sites/opencode/node_modules/npm/node_modules/normalize-package-data/lib/fixer.js:215:5)
4 verbose stack at /Users/frank/Sites/opencode/node_modules/npm/node_modules/normalize-package-data/lib/normalize.js:32:38
4 verbose stack at Array.forEach (<anonymous>)
4 verbose stack at normalize (/Users/frank/Sites/opencode/node_modules/npm/node_modules/normalize-package-data/lib/normalize.js:31:15)
4 verbose stack at final (/Users/frank/Sites/opencode/node_modules/npm/node_modules/read-package-json/read-json.js:349:5)
4 verbose stack at then (/Users/frank/Sites/opencode/node_modules/npm/node_modules/read-package-json/read-json.js:124:5)
4 verbose stack at ReadFileContext.<anonymous> (/Users/frank/Sites/opencode/node_modules/npm/node_modules/read-package-json/read-json.js:295:20)
4 verbose stack at ReadFileContext.callback (/Users/frank/Sites/opencode/node_modules/npm/node_modules/graceful-fs/graceful-fs.js:78:16)
4 verbose stack at FSReqCallback.readFileAfterOpen [as oncomplete] (node:fs:299:13)
5 verbose cwd /Users/frank/Sites/opencode/cloud/web
6 error Darwin 24.5.0
7 error argv "/usr/local/bin/node" "/Users/frank/Sites/opencode/node_modules/.bin/npm" "run" "dev"
8 error node v20.18.1
9 error npm v2.15.12
10 error Invalid name: "@opencode/cloud/web"
11 error If you need help, you may report this error at:
11 error <https://github.com/npm/npm/issues>
12 verbose exit [ 1, true ]

Some files were not shown because too many files have changed in this diff.