Mirror of https://github.com/anomalyco/opencode.git
Synced 2026-02-12 11:54:29 +00:00

Compare commits: sqlite2...fixup-pr-a (14 commits)
| Author | SHA1 | Date |
|---|---|---|
| | fb1ae386ba | |
| | aea68c386a | |
| | 8eea53a41e | |
| | 3befd0c6c5 | |
| | 8577eb8ec9 | |
| | c856f875a1 | |
| | 264dd213f9 | |
| | 125727d09c | |
| | 8c7b35ad05 | |
| | e2a33f75e1 | |
| | 006d673ed2 | |
| | 6b4d617df0 | |
| | e3471526f4 | |
| | 6b30e0b752 | |
.github/VOUCHED.td (vendored): 1 line changed
@@ -8,6 +8,7 @@
# - Denounce with minus prefix: -username or -platform:username.
# - Optional details after a space following the handle.
adamdotdevin
-florianleibert
fwang
iamdavidhill
jayair
.github/workflows/docs-locale-sync.yml (vendored): 11 lines changed
@@ -64,10 +64,13 @@ jobs:
Requirements:
1. Update all relevant locale docs under packages/web/src/content/docs/<locale>/ so they reflect these English page changes.
2. Preserve frontmatter keys, internal links, code blocks, and existing locale-specific metadata unless the English change requires an update.
3. Keep locale docs structure aligned with their corresponding English pages.
4. Do not modify English source docs in packages/web/src/content/docs/*.mdx.
5. If no locale updates are needed, make no changes.
2. You MUST use the Task tool for translation work and launch subagents with subagent_type `translator` (defined in .opencode/agent/translator.md).
3. Do not translate directly in the primary agent. Use translator subagent output as the source for locale text updates.
4. Run translator subagent Task calls in parallel whenever file/locale translation work is independent.
5. Preserve frontmatter keys, internal links, code blocks, and existing locale-specific metadata unless the English change requires an update.
6. Keep locale docs structure aligned with their corresponding English pages.
7. Do not modify English source docs in packages/web/src/content/docs/*.mdx.
8. If no locale updates are needed, make no changes.
- name: Commit and push locale docs updates
if: steps.changes.outputs.has_changes == 'true'
.github/workflows/pr-management.yml (vendored): 4 lines changed
@@ -60,9 +60,11 @@ jobs:
run: |
COMMENT=$(bun script/duplicate-pr.ts -f pr_info.txt "Check the attached file for PR details and search for duplicates")
gh pr comment "$PR_NUMBER" --body "_The following comment was made by an LLM, it may be inaccurate:_
if [ "$COMMENT" != "No duplicate PRs found" ]; then
gh pr comment "$PR_NUMBER" --body "_The following comment was made by an LLM, it may be inaccurate:_
$COMMENT"
fi
add-contributor-label:
runs-on: ubuntu-latest
@@ -16,12 +16,15 @@ wip:
For anything in the packages/web use the docs: prefix.
For anything in the packages/app use the ignore: prefix.
prefer to explain WHY something was done from an end user perspective instead of
WHAT was done.
do not do generic messages like "improved agent experience" be very specific
about what user facing changes were made
if there are changes do a git pull --rebase
if there are conflicts DO NOT FIX THEM. notify me and I will fix them
## GIT DIFF
@@ -32,6 +32,9 @@ description: Use this when you are working on file operations like reading, writ
- Decode tool stderr with `Bun.readableStreamToText`.
- For large writes, use `Bun.write(Bun.file(path), text)`.
NOTE: Bun.file(...).exists() will return `false` if the value is a directory.
Use Filesystem.exists(...) instead if path can be file or directory
## Quick checklist
- Use Bun APIs first.
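The note above about `Bun.file(...).exists()` returning false for directories can be illustrated with a short sketch; `existsAny` is a hypothetical stand-in for the project's `Filesystem.exists(...)` helper, not its actual implementation:

// Sketch (assumption): Bun.file().exists() is false for directories, so fall back to fs.stat.
import { stat } from "node:fs/promises"

// Hypothetical helper illustrating what a directory-safe existence check looks like.
async function existsAny(path: string): Promise<boolean> {
  if (await Bun.file(path).exists()) return true // regular file
  return await stat(path).then(() => true).catch(() => false) // directory or other entry
}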
@@ -110,4 +110,3 @@ const table = sqliteTable("session", {
|
||||
|
||||
- Avoid mocks as much as possible
|
||||
- Test actual implementation, do not duplicate logic into tests
|
||||
- Tests cannot run from repo root (guard: `do-not-run-tests-from-root`); run from package dirs like `packages/opencode`.
|
||||
|
||||
@@ -1,8 +1,8 @@
{
"nodeModules": {
"x86_64-linux": "sha256-cvRBvHRuunNjF07c4GVHl5rRgoTn1qfI/HdJWtOV63M=",
"aarch64-linux": "sha256-DJUI4pMZ7wQTnyOiuDHALmZz7FZtrTbzRzCuNOShmWE=",
"aarch64-darwin": "sha256-JnkqDwuC7lNsjafV+jOGfvs8K1xC8rk5CTOW+spjiCA=",
"x86_64-darwin": "sha256-GBeTqq2vDn/mXplYNglrAT2xajjFVzB4ATHnMS0j7z4="
"x86_64-linux": "sha256-pp2gb4nxiIT3VltB6Xli2wZPH32JfnMsI+BbihyU1+E=",
"aarch64-linux": "sha256-hJwxhBICZz/pbIxQsF/sIpZTlFIgLpcAyF44O8wxMdU=",
"aarch64-darwin": "sha256-DPONXP52XOg/ApdSnLp32a+K5XCOnDGhbTUto2Rme0g=",
"x86_64-darwin": "sha256-KX1h5LRJSgthpbOPqWlbM/sPf8cvQrdRJvxtrz/FzBQ="
}
}
@@ -4,7 +4,7 @@
"description": "AI-powered development tool",
"private": true,
"type": "module",
"packageManager": "bun@1.3.8",
"packageManager": "bun@1.3.9",
"scripts": {
"dev": "bun run --cwd packages/opencode --conditions=browser src/index.ts",
"dev:desktop": "bun --cwd packages/desktop tauri dev",
@@ -23,7 +23,7 @@
"packages/slack"
],
"catalog": {
"@types/bun": "1.3.8",
"@types/bun": "1.3.9",
"@octokit/rest": "22.0.0",
"@hono/zod-validator": "0.4.2",
"ulid": "3.0.1",
@@ -40,8 +40,6 @@
"@tailwindcss/vite": "4.1.11",
"diff": "8.0.2",
"dompurify": "3.3.1",
"drizzle-kit": "1.0.0-beta.12-a5629fb",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"ai": "5.0.124",
"hono": "4.10.7",
"hono-openapi": "1.1.2",
@@ -231,24 +231,6 @@ export function applyDirectoryEvent(input: {
}
break
}
case "message.part.delta": {
const props = event.properties as { messageID: string; partID: string; field: string; delta: string }
const parts = input.store.part[props.messageID]
if (!parts) break
const result = Binary.search(parts, props.partID, (p) => p.id)
if (!result.found) break
input.setStore(
"part",
props.messageID,
produce((draft) => {
const part = draft[result.index]
const field = props.field as keyof typeof part
const existing = part[field] as string | undefined
;(part[field] as string) = (existing ?? "") + props.delta
}),
)
break
}
case "vcs.branch.updated": {
const props = event.properties as { branch: string }
const next = { branch: props.branch }
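The removed handler relies on a project-internal `Binary.search(parts, id, key)` helper. As a rough sketch, assuming it performs a plain binary search over an array kept sorted by the key and reports the match index or insertion point:

// Assumed shape of Binary.search: binary search over items sorted ascending by key(item).
function binarySearch<T>(items: T[], target: string, key: (item: T) => string): { found: boolean; index: number } {
  let lo = 0
  let hi = items.length - 1
  while (lo <= hi) {
    const mid = (lo + hi) >> 1
    const value = key(items[mid])
    if (value === target) return { found: true, index: mid }
    if (value < target) lo = mid + 1
    else hi = mid - 1
  }
  return { found: false, index: lo } // lo is the insertion point when not found
}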
Binary image file added (697 B); preview not shown.
@@ -0,0 +1,18 @@
<svg width="300" height="300" viewBox="0 0 300 300" fill="none" xmlns="http://www.w3.org/2000/svg">
<g transform="translate(30, 0)">
<g clip-path="url(#clip0_1401_86283)">
<mask id="mask0_1401_86283" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="0" y="0" width="240" height="300">
<path d="M240 0H0V300H240V0Z" fill="white"/>
</mask>
<g mask="url(#mask0_1401_86283)">
<path d="M180 240H60V120H180V240Z" fill="#4B4646"/>
<path d="M180 60H60V240H180V60ZM240 300H0V0H240V300Z" fill="#F1ECEC"/>
</g>
</g>
</g>
<defs>
<clipPath id="clip0_1401_86283">
<rect width="240" height="300" fill="white"/>
</clipPath>
</defs>
</svg>
(SVG size: 631 B)
Binary image file added (697 B); preview not shown.
@@ -0,0 +1,18 @@
<svg width="300" height="300" viewBox="0 0 300 300" fill="none" xmlns="http://www.w3.org/2000/svg">
<g transform="translate(30, 0)">
<g clip-path="url(#clip0_1401_86274)">
<mask id="mask0_1401_86274" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="0" y="0" width="240" height="300">
<path d="M240 0H0V300H240V0Z" fill="white"/>
</mask>
<g mask="url(#mask0_1401_86274)">
<path d="M180 240H60V120H180V240Z" fill="#CFCECD"/>
<path d="M180 60H60V240H180V60ZM240 300H0V0H240V300Z" fill="#211E1E"/>
</g>
</g>
</g>
<defs>
<clipPath id="clip0_1401_86274">
<rect width="240" height="300" fill="white"/>
</clipPath>
</defs>
</svg>
(SVG size: 631 B)
Binary image file added (1.4 KiB); preview not shown.
Binary image file added (1.4 KiB); preview not shown.
@@ -7,18 +7,24 @@ import { useI18n } from "~/context/i18n"
|
||||
import { LocaleLinks } from "~/component/locale-links"
|
||||
import previewLogoLight from "../../asset/brand/preview-opencode-logo-light.png"
|
||||
import previewLogoDark from "../../asset/brand/preview-opencode-logo-dark.png"
|
||||
import previewLogoLightSquare from "../../asset/brand/preview-opencode-logo-light-square.png"
|
||||
import previewLogoDarkSquare from "../../asset/brand/preview-opencode-logo-dark-square.png"
|
||||
import previewWordmarkLight from "../../asset/brand/preview-opencode-wordmark-light.png"
|
||||
import previewWordmarkDark from "../../asset/brand/preview-opencode-wordmark-dark.png"
|
||||
import previewWordmarkSimpleLight from "../../asset/brand/preview-opencode-wordmark-simple-light.png"
|
||||
import previewWordmarkSimpleDark from "../../asset/brand/preview-opencode-wordmark-simple-dark.png"
|
||||
import logoLightPng from "../../asset/brand/opencode-logo-light.png"
|
||||
import logoDarkPng from "../../asset/brand/opencode-logo-dark.png"
|
||||
import logoLightSquarePng from "../../asset/brand/opencode-logo-light-square.png"
|
||||
import logoDarkSquarePng from "../../asset/brand/opencode-logo-dark-square.png"
|
||||
import wordmarkLightPng from "../../asset/brand/opencode-wordmark-light.png"
|
||||
import wordmarkDarkPng from "../../asset/brand/opencode-wordmark-dark.png"
|
||||
import wordmarkSimpleLightPng from "../../asset/brand/opencode-wordmark-simple-light.png"
|
||||
import wordmarkSimpleDarkPng from "../../asset/brand/opencode-wordmark-simple-dark.png"
|
||||
import logoLightSvg from "../../asset/brand/opencode-logo-light.svg"
|
||||
import logoDarkSvg from "../../asset/brand/opencode-logo-dark.svg"
|
||||
import logoLightSquareSvg from "../../asset/brand/opencode-logo-light-square.svg"
|
||||
import logoDarkSquareSvg from "../../asset/brand/opencode-logo-dark-square.svg"
|
||||
import wordmarkLightSvg from "../../asset/brand/opencode-wordmark-light.svg"
|
||||
import wordmarkDarkSvg from "../../asset/brand/opencode-wordmark-dark.svg"
|
||||
import wordmarkSimpleLightSvg from "../../asset/brand/opencode-wordmark-simple-light.svg"
|
||||
@@ -135,6 +141,60 @@ export default function Brand() {
</button>
</div>
</div>
<div>
<img src={previewLogoLightSquare} alt="OpenCode brand guidelines" />
<div data-component="actions">
<button onClick={() => downloadFile(logoLightSquarePng, "opencode-logo-light-square.png")}>
PNG
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M13.9583 10.6247L10 14.583L6.04167 10.6247M10 2.08301V13.958M16.25 17.9163H3.75"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="square"
/>
</svg>
</button>
<button onClick={() => downloadFile(logoLightSquareSvg, "opencode-logo-light-square.svg")}>
SVG
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M13.9583 10.6247L10 14.583L6.04167 10.6247M10 2.08301V13.958M16.25 17.9163H3.75"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="square"
/>
</svg>
</button>
</div>
</div>
<div>
<img src={previewLogoDarkSquare} alt="OpenCode brand guidelines" />
<div data-component="actions">
<button onClick={() => downloadFile(logoDarkSquarePng, "opencode-logo-dark-square.png")}>
PNG
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M13.9583 10.6247L10 14.583L6.04167 10.6247M10 2.08301V13.958M16.25 17.9163H3.75"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="square"
/>
</svg>
</button>
<button onClick={() => downloadFile(logoDarkSquareSvg, "opencode-logo-dark-square.svg")}>
SVG
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M13.9583 10.6247L10 14.583L6.04167 10.6247M10 2.08301V13.958M16.25 17.9163H3.75"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="square"
/>
</svg>
</button>
</div>
</div>
<div>
<img src={previewWordmarkLight} alt="OpenCode brand guidelines" />
<div data-component="actions">
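Each button above passes an asset URL and a target filename to a `downloadFile` helper defined elsewhere in the page; a minimal sketch of what such a helper typically does in the browser, offered as an assumption rather than the repo's actual implementation:

// Assumed behaviour of downloadFile(url, filename): trigger a client-side download via a temporary anchor.
function downloadFile(url: string, filename: string): void {
  const anchor = document.createElement("a")
  anchor.href = url
  anchor.download = filename // suggested filename for the saved asset
  document.body.appendChild(anchor)
  anchor.click()
  anchor.remove()
}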
@@ -12,7 +12,7 @@
"@opencode-ai/console-resource": "workspace:*",
"@planetscale/database": "1.19.0",
"aws4fetch": "1.0.20",
"drizzle-orm": "catalog:",
"drizzle-orm": "0.41.0",
"postgres": "3.4.7",
"stripe": "18.0.0",
"ulid": "catalog:",
@@ -44,7 +44,7 @@
"@tsconfig/node22": "22.0.2",
"@types/bun": "1.3.0",
"@types/node": "catalog:",
"drizzle-kit": "catalog:",
"drizzle-kit": "0.30.5",
"mysql2": "3.14.4",
"typescript": "catalog:",
"@typescript/native-preview": "catalog:"
@@ -4,6 +4,7 @@ export * from "drizzle-orm"
|
||||
import { Client } from "@planetscale/database"
|
||||
|
||||
import { MySqlTransaction, type MySqlTransactionConfig } from "drizzle-orm/mysql-core"
|
||||
import type { ExtractTablesWithRelations } from "drizzle-orm"
|
||||
import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from "drizzle-orm/planetscale-serverless"
|
||||
import { Context } from "../context"
|
||||
import { memo } from "../util/memo"
|
||||
@@ -13,7 +14,7 @@ export namespace Database {
|
||||
PlanetscaleQueryResultHKT,
|
||||
PlanetScalePreparedQueryHKT,
|
||||
Record<string, never>,
|
||||
any
|
||||
ExtractTablesWithRelations<Record<string, never>>
|
||||
>
|
||||
|
||||
const client = memo(() => {
|
||||
@@ -22,7 +23,7 @@ export namespace Database {
|
||||
username: Resource.Database.username,
|
||||
password: Resource.Database.password,
|
||||
})
|
||||
const db = drizzle({ client: result })
|
||||
const db = drizzle(result, {})
|
||||
return db
|
||||
})
|
||||
|
||||
|
||||
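The change above moves from the object-style constructor to the positional form, which lines up with pinning `drizzle-orm` to 0.41.0 in the console package.json hunk earlier in this diff. A minimal sketch, assuming the 0.41.x planetscale-serverless driver and placeholder credentials:

// Sketch (assumption: drizzle-orm 0.41.x API for the PlanetScale serverless driver).
import { Client } from "@planetscale/database"
import { drizzle } from "drizzle-orm/planetscale-serverless"

// Placeholder credentials for illustration only.
const client = new Client({ host: "example.host", username: "user", password: "pass" })
const db = drizzle(client, {}) // positional client first, options object second, as in the diff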
@@ -1,10 +1,27 @@
# opencode database guide
# opencode agent guidelines
## Database
## Build/Test Commands
- **Schema**: Drizzle schema lives in `src/**/*.sql.ts`.
- **Naming**: tables and columns use snake_case; join columns are `<entity>_id`; indexes are `<table>_<column>_idx` (see the sketch after this section).
- **Migrations**: generated by Drizzle Kit using `drizzle.config.ts` (schema: `./src/**/*.sql.ts`, output: `./migration`).
- **Command**: `bun run db generate --name <slug>`.
- **Output**: creates `migration/<timestamp>_<slug>/migration.sql` and `snapshot.json`.
- **Tests**: migration tests should read the per-folder layout (no `_journal.json`).
- **Install**: `bun install`
- **Run**: `bun run --conditions=browser ./src/index.ts`
- **Typecheck**: `bun run typecheck` (npm run typecheck)
- **Test**: `bun test` (runs all tests)
- **Single test**: `bun test test/tool/tool.test.ts` (specific test file)
## Code Style
- **Runtime**: Bun with TypeScript ESM modules
- **Imports**: Use relative imports for local modules, named imports preferred
- **Types**: Zod schemas for validation, TypeScript interfaces for structure
- **Naming**: camelCase for variables/functions, PascalCase for classes/namespaces
- **Error handling**: Use Result patterns, avoid throwing exceptions in tools
- **File structure**: Namespace-based organization (e.g., `Tool.define()`, `Session.create()`)
## Architecture
- **Tools**: Implement `Tool.Info` interface with `execute()` method
- **Context**: Pass `sessionID` in tool context, use `App.provide()` for DI
- **Validation**: All inputs validated with Zod schemas
- **Logging**: Use `Log.create({ service: "name" })` pattern
- **Storage**: Use `Storage` namespace for persistence
- **API Client**: The TypeScript TUI (built with SolidJS + OpenTUI) communicates with the OpenCode server using `@opencode-ai/sdk`. When adding/modifying server endpoints in `packages/opencode/src/server/server.ts`, run `./script/generate.ts` to regenerate the SDK and related files.
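A small sketch of the schema conventions listed above (snake_case columns, `<entity>_id` join columns, `<table>_<column>_idx` indexes); the table mirrors the `message` table from the deleted migration later in this diff, and the helpers are the standard `drizzle-orm/sqlite-core` ones:

// Illustrative only: column and index names follow the conventions from the guide above.
import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core"

export const message = sqliteTable(
  "message",
  {
    id: text("id").primaryKey(),
    sessionID: text("session_id").notNull(), // join column: <entity>_id
    timeCreated: integer("time_created").notNull(),
    data: text("data").notNull(),
  },
  (table) => [index("message_session_idx").on(table.sessionID)], // index: <table>_<column>_idx
)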
@@ -1,10 +0,0 @@
import { defineConfig } from "drizzle-kit"

export default defineConfig({
dialect: "sqlite",
schema: "./src/**/*.sql.ts",
out: "./migration",
dbCredentials: {
url: "/home/thdxr/.local/share/opencode/opencode.db",
},
})
@@ -1,90 +0,0 @@
CREATE TABLE `project` (
`id` text PRIMARY KEY,
`worktree` text NOT NULL,
`vcs` text,
`name` text,
`icon_url` text,
`icon_color` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_initialized` integer,
`sandboxes` text NOT NULL
);
--> statement-breakpoint
CREATE TABLE `message` (
`id` text PRIMARY KEY,
`session_id` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_message_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `part` (
`id` text PRIMARY KEY,
`message_id` text NOT NULL,
`session_id` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_part_message_id_message_id_fk` FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `permission` (
`project_id` text PRIMARY KEY,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_permission_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session` (
`id` text PRIMARY KEY,
`project_id` text NOT NULL,
`parent_id` text,
`slug` text NOT NULL,
`directory` text NOT NULL,
`title` text NOT NULL,
`version` text NOT NULL,
`share_url` text,
`summary_additions` integer,
`summary_deletions` integer,
`summary_files` integer,
`summary_diffs` text,
`revert` text,
`permission` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_compacting` integer,
`time_archived` integer,
CONSTRAINT `fk_session_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `todo` (
`session_id` text NOT NULL,
`content` text NOT NULL,
`status` text NOT NULL,
`priority` text NOT NULL,
`position` integer NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
CONSTRAINT `todo_pk` PRIMARY KEY(`session_id`, `position`),
CONSTRAINT `fk_todo_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session_share` (
`session_id` text PRIMARY KEY,
`id` text NOT NULL,
`secret` text NOT NULL,
`url` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
CONSTRAINT `fk_session_share_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-breakpoint
CREATE INDEX `part_message_idx` ON `part` (`message_id`);--> statement-breakpoint
CREATE INDEX `part_session_idx` ON `part` (`session_id`);--> statement-breakpoint
CREATE INDEX `session_project_idx` ON `session` (`project_id`);--> statement-breakpoint
CREATE INDEX `session_parent_idx` ON `session` (`parent_id`);--> statement-breakpoint
CREATE INDEX `todo_session_idx` ON `todo` (`session_id`);
@@ -1,796 +0,0 @@
|
||||
{
|
||||
"version": "7",
|
||||
"dialect": "sqlite",
|
||||
"id": "068758ed-a97a-46f6-8a59-6c639ae7c20c",
|
||||
"prevIds": ["00000000-0000-0000-0000-000000000000"],
|
||||
"ddl": [
|
||||
{
|
||||
"name": "project",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "message",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "part",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "permission",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "session",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "todo",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "session_share",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "worktree",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "vcs",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "name",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "icon_url",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "icon_color",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_initialized",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "sandboxes",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "data",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "message_id",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "data",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "project_id",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "data",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "project_id",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "parent_id",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "slug",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "directory",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "title",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "version",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "share_url",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_additions",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_deletions",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_files",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_diffs",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "revert",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "permission",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_compacting",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_archived",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "content",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "status",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "priority",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "position",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "secret",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "url",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id"],
|
||||
"tableTo": "session",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_message_session_id_session_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"columns": ["message_id"],
|
||||
"tableTo": "message",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_part_message_id_message_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"columns": ["project_id"],
|
||||
"tableTo": "project",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_permission_project_id_project_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"columns": ["project_id"],
|
||||
"tableTo": "project",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_session_project_id_project_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id"],
|
||||
"tableTo": "session",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_todo_session_id_session_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id"],
|
||||
"tableTo": "session",
|
||||
"columnsTo": ["id"],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_session_share_session_id_session_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id", "position"],
|
||||
"nameExplicit": false,
|
||||
"name": "todo_pk",
|
||||
"entityType": "pks",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"columns": ["id"],
|
||||
"nameExplicit": false,
|
||||
"name": "project_pk",
|
||||
"table": "project",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["id"],
|
||||
"nameExplicit": false,
|
||||
"name": "message_pk",
|
||||
"table": "message",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["id"],
|
||||
"nameExplicit": false,
|
||||
"name": "part_pk",
|
||||
"table": "part",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["project_id"],
|
||||
"nameExplicit": false,
|
||||
"name": "permission_pk",
|
||||
"table": "permission",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["id"],
|
||||
"nameExplicit": false,
|
||||
"name": "session_pk",
|
||||
"table": "session",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": ["session_id"],
|
||||
"nameExplicit": false,
|
||||
"name": "session_share_pk",
|
||||
"table": "session_share",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "session_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "message_session_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "message_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "part_message_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "session_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "part_session_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "project_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "session_project_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "parent_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "session_parent_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "session_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "todo_session_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "todo"
|
||||
}
|
||||
],
|
||||
"renames": []
|
||||
}
|
||||
@@ -1 +0,0 @@
ALTER TABLE `project` ADD `commands` text;
@@ -1,847 +0,0 @@
|
||||
{
|
||||
"version": "7",
|
||||
"dialect": "sqlite",
|
||||
"id": "8bc2d11d-97fa-4ba8-8bfa-6c5956c49aeb",
|
||||
"prevIds": [
|
||||
"068758ed-a97a-46f6-8a59-6c639ae7c20c"
|
||||
],
|
||||
"ddl": [
|
||||
{
|
||||
"name": "project",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "message",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "part",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "permission",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "session",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "todo",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"name": "session_share",
|
||||
"entityType": "tables"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "worktree",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "vcs",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "name",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "icon_url",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "icon_color",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_initialized",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "sandboxes",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "commands",
|
||||
"entityType": "columns",
|
||||
"table": "project"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "data",
|
||||
"entityType": "columns",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "message_id",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "data",
|
||||
"entityType": "columns",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "project_id",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "data",
|
||||
"entityType": "columns",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "project_id",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "parent_id",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "slug",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "directory",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "title",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "version",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "share_url",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_additions",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_deletions",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_files",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "summary_diffs",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "revert",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "permission",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_compacting",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_archived",
|
||||
"entityType": "columns",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "content",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "status",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "priority",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "position",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "session_id",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "id",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "secret",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "url",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_created",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": null,
|
||||
"generated": null,
|
||||
"name": "time_updated",
|
||||
"entityType": "columns",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"session_id"
|
||||
],
|
||||
"tableTo": "session",
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_message_session_id_session_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"message_id"
|
||||
],
|
||||
"tableTo": "message",
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_part_message_id_message_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"project_id"
|
||||
],
|
||||
"tableTo": "project",
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_permission_project_id_project_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "permission"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"project_id"
|
||||
],
|
||||
"tableTo": "project",
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_session_project_id_project_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"session_id"
|
||||
],
|
||||
"tableTo": "session",
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_todo_session_id_session_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"session_id"
|
||||
],
|
||||
"tableTo": "session",
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onUpdate": "NO ACTION",
|
||||
"onDelete": "CASCADE",
|
||||
"nameExplicit": false,
|
||||
"name": "fk_session_share_session_id_session_id_fk",
|
||||
"entityType": "fks",
|
||||
"table": "session_share"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"session_id",
|
||||
"position"
|
||||
],
|
||||
"nameExplicit": false,
|
||||
"name": "todo_pk",
|
||||
"entityType": "pks",
|
||||
"table": "todo"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"id"
|
||||
],
|
||||
"nameExplicit": false,
|
||||
"name": "project_pk",
|
||||
"table": "project",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"id"
|
||||
],
|
||||
"nameExplicit": false,
|
||||
"name": "message_pk",
|
||||
"table": "message",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"id"
|
||||
],
|
||||
"nameExplicit": false,
|
||||
"name": "part_pk",
|
||||
"table": "part",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"project_id"
|
||||
],
|
||||
"nameExplicit": false,
|
||||
"name": "permission_pk",
|
||||
"table": "permission",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"id"
|
||||
],
|
||||
"nameExplicit": false,
|
||||
"name": "session_pk",
|
||||
"table": "session",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
"session_id"
|
||||
],
|
||||
"nameExplicit": false,
|
||||
"name": "session_share_pk",
|
||||
"table": "session_share",
|
||||
"entityType": "pks"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "session_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "message_session_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "message"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "message_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "part_message_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "session_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "part_session_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "part"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "project_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "session_project_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "parent_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "session_parent_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "session"
|
||||
},
|
||||
{
|
||||
"columns": [
|
||||
{
|
||||
"value": "session_id",
|
||||
"isExpression": false
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"where": null,
|
||||
"origin": "manual",
|
||||
"name": "todo_session_idx",
|
||||
"entityType": "indexes",
|
||||
"table": "todo"
|
||||
}
|
||||
],
|
||||
"renames": []
|
||||
}
|
||||
@@ -15,8 +15,7 @@
    "lint": "echo 'Running lint checks...' && bun test --coverage",
    "format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts",
    "docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;",
    "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'",
    "db": "bun drizzle-kit"
    "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'"
  },
  "bin": {
    "opencode": "./bin/opencode"
@@ -43,8 +42,6 @@
    "@types/turndown": "5.0.5",
    "@types/yargs": "17.0.33",
    "@typescript/native-preview": "catalog:",
    "drizzle-kit": "1.0.0-beta.12-a5629fb",
    "drizzle-orm": "1.0.0-beta.12-a5629fb",
    "typescript": "catalog:",
    "vscode-languageserver-types": "3.17.5",
    "why-is-node-running": "3.2.2",
@@ -87,8 +84,8 @@
    "@opencode-ai/sdk": "workspace:*",
    "@opencode-ai/util": "workspace:*",
    "@openrouter/ai-sdk-provider": "1.5.4",
    "@opentui/core": "0.1.77",
    "@opentui/solid": "0.1.77",
    "@opentui/core": "0.1.79",
    "@opentui/solid": "0.1.79",
    "@parcel/watcher": "2.5.1",
    "@pierre/diffs": "catalog:",
    "@solid-primitives/event-bus": "1.1.2",
@@ -103,7 +100,6 @@
    "clipboardy": "4.0.0",
    "decimal.js": "10.5.0",
    "diff": "catalog:",
    "drizzle-orm": "1.0.0-beta.12-a5629fb",
    "fuzzysort": "3.1.0",
    "gray-matter": "4.0.3",
    "hono": "catalog:",
@@ -126,8 +122,5 @@
    "yargs": "18.0.0",
    "zod": "catalog:",
    "zod-to-json-schema": "3.24.5"
  },
  "overrides": {
    "drizzle-orm": "1.0.0-beta.12-a5629fb"
  }
}

@@ -25,32 +25,6 @@ await Bun.write(
)
console.log("Generated models-snapshot.ts")

// Load migrations from migration directories
const migrationDirs = (await fs.promises.readdir(path.join(dir, "migration"), { withFileTypes: true }))
  .filter((entry) => entry.isDirectory() && /^\d{4}\d{2}\d{2}\d{2}\d{2}\d{2}/.test(entry.name))
  .map((entry) => entry.name)
  .sort()

const migrations = await Promise.all(
  migrationDirs.map(async (name) => {
    const file = path.join(dir, "migration", name, "migration.sql")
    const sql = await Bun.file(file).text()
    const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(name)
    const timestamp = match
      ? Date.UTC(
          Number(match[1]),
          Number(match[2]) - 1,
          Number(match[3]),
          Number(match[4]),
          Number(match[5]),
          Number(match[6]),
        )
      : 0
    return { sql, timestamp }
  }),
)
console.log(`Loaded ${migrations.length} migrations`)

const singleFlag = process.argv.includes("--single")
const baselineFlag = process.argv.includes("--baseline")
const skipInstall = process.argv.includes("--skip-install")
@@ -182,7 +156,6 @@ for (const item of targets) {
  entrypoints: ["./src/index.ts", parserWorker, workerPath],
  define: {
    OPENCODE_VERSION: `'${Script.version}'`,
    OPENCODE_MIGRATIONS: JSON.stringify(migrations),
    OTUI_TREE_SITTER_WORKER_PATH: bunfsRoot + workerRelativePath,
    OPENCODE_WORKER_PATH: workerPath,
    OPENCODE_CHANNEL: `'${Script.channel}'`,

@@ -1,16 +0,0 @@
#!/usr/bin/env bun

import { $ } from "bun"

// drizzle-kit check compares schema to migrations, exits non-zero if drift
const result = await $`bun drizzle-kit check`.quiet().nothrow()

if (result.exitCode !== 0) {
  console.error("Schema has changes not captured in migrations!")
  console.error("Run: bun drizzle-kit generate")
  console.error("")
  console.error(result.stderr.toString())
  process.exit(1)
}

console.log("Migrations are up to date")
@@ -435,68 +435,46 @@ export namespace ACP {
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
case "message.part.delta": {
|
||||
const props = event.properties
|
||||
const session = this.sessionManager.tryGet(props.sessionID)
|
||||
if (!session) return
|
||||
const sessionId = session.id
|
||||
|
||||
const message = await this.sdk.session
|
||||
.message(
|
||||
{
|
||||
sessionID: props.sessionID,
|
||||
messageID: props.messageID,
|
||||
directory: session.cwd,
|
||||
},
|
||||
{ throwOnError: true },
|
||||
)
|
||||
.then((x) => x.data)
|
||||
.catch((error) => {
|
||||
log.error("unexpected error when fetching message", { error })
|
||||
return undefined
|
||||
})
|
||||
|
||||
if (!message || message.info.role !== "assistant") return
|
||||
|
||||
const part = message.parts.find((p) => p.id === props.partID)
|
||||
if (!part) return
|
||||
|
||||
if (part.type === "text" && props.field === "text" && part.ignored !== true) {
|
||||
await this.connection
|
||||
.sessionUpdate({
|
||||
sessionId,
|
||||
update: {
|
||||
sessionUpdate: "agent_message_chunk",
|
||||
content: {
|
||||
type: "text",
|
||||
text: props.delta,
|
||||
if (part.type === "text") {
|
||||
const delta = props.delta
|
||||
if (delta && part.ignored !== true) {
|
||||
await this.connection
|
||||
.sessionUpdate({
|
||||
sessionId,
|
||||
update: {
|
||||
sessionUpdate: "agent_message_chunk",
|
||||
content: {
|
||||
type: "text",
|
||||
text: delta,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
.catch((error) => {
|
||||
log.error("failed to send text delta to ACP", { error })
|
||||
})
|
||||
})
|
||||
.catch((error) => {
|
||||
log.error("failed to send text to ACP", { error })
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (part.type === "reasoning" && props.field === "text") {
|
||||
await this.connection
|
||||
.sessionUpdate({
|
||||
sessionId,
|
||||
update: {
|
||||
sessionUpdate: "agent_thought_chunk",
|
||||
content: {
|
||||
type: "text",
|
||||
text: props.delta,
|
||||
if (part.type === "reasoning") {
|
||||
const delta = props.delta
|
||||
if (delta) {
|
||||
await this.connection
|
||||
.sessionUpdate({
|
||||
sessionId,
|
||||
update: {
|
||||
sessionUpdate: "agent_thought_chunk",
|
||||
content: {
|
||||
type: "text",
|
||||
text: delta,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
.catch((error) => {
|
||||
log.error("failed to send reasoning delta to ACP", { error })
|
||||
})
|
||||
})
|
||||
.catch((error) => {
|
||||
log.error("failed to send reasoning to ACP", { error })
|
||||
})
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -3,8 +3,7 @@ import type { Session as SDKSession, Message, Part } from "@opencode-ai/sdk/v2"
import { Session } from "../../session"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
import { Database } from "../../storage/db"
import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
import { Storage } from "../../storage/storage"
import { Instance } from "../../project/instance"
import { ShareNext } from "../../share/share-next"
import { EOL } from "os"
@@ -131,35 +130,13 @@ export const ImportCommand = cmd({
      return
    }

    Database.use((db) => db.insert(SessionTable).values(Session.toRow(exportData.info)).onConflictDoNothing().run())
    await Storage.write(["session", Instance.project.id, exportData.info.id], exportData.info)

    for (const msg of exportData.messages) {
      Database.use((db) =>
        db
          .insert(MessageTable)
          .values({
            id: msg.info.id,
            session_id: exportData.info.id,
            time_created: msg.info.time?.created ?? Date.now(),
            data: msg.info,
          })
          .onConflictDoNothing()
          .run(),
      )
      await Storage.write(["message", exportData.info.id, msg.info.id], msg.info)

      for (const part of msg.parts) {
        Database.use((db) =>
          db
            .insert(PartTable)
            .values({
              id: part.id,
              message_id: msg.info.id,
              session_id: exportData.info.id,
              data: part,
            })
            .onConflictDoNothing()
            .run(),
        )
        await Storage.write(["part", msg.info.id, part.id], part)
      }
    }

@@ -2,8 +2,7 @@ import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { Session } from "../../session"
import { bootstrap } from "../bootstrap"
import { Database } from "../../storage/db"
import { SessionTable } from "../../session/session.sql"
import { Storage } from "../../storage/storage"
import { Project } from "../../project/project"
import { Instance } from "../../project/instance"

@@ -88,8 +87,25 @@ async function getCurrentProject(): Promise<Project.Info> {
}

async function getAllSessions(): Promise<Session.Info[]> {
  const rows = Database.use((db) => db.select().from(SessionTable).all())
  return rows.map((row) => Session.fromRow(row))
  const sessions: Session.Info[] = []

  const projectKeys = await Storage.list(["project"])
  const projects = await Promise.all(projectKeys.map((key) => Storage.read<Project.Info>(key)))

  for (const project of projects) {
    if (!project) continue

    const sessionKeys = await Storage.list(["session", project.id])
    const projectSessions = await Promise.all(sessionKeys.map((key) => Storage.read<Session.Info>(key)))

    for (const session of projectSessions) {
      if (session) {
        sessions.push(session)
      }
    }
  }

  return sessions
}

export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {

@@ -299,24 +299,6 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
        break
      }

      case "message.part.delta": {
        const parts = store.part[event.properties.messageID]
        if (!parts) break
        const result = Binary.search(parts, event.properties.partID, (p) => p.id)
        if (!result.found) break
        setStore(
          "part",
          event.properties.messageID,
          produce((draft) => {
            const part = draft[result.index]
            const field = event.properties.field as keyof typeof part
            const existing = part[field] as string | undefined
            ;(part[field] as string) = (existing ?? "") + event.properties.delta
          }),
        )
        break
      }

      case "message.part.removed": {
        const parts = store.part[event.properties.messageID]
        const result = Binary.search(parts, event.properties.partID, (p) => p.id)

@@ -2032,8 +2032,8 @@ function ApplyPatch(props: ToolProps<typeof ApplyPatchTool>) {
      </For>
    </Match>
    <Match when={true}>
      <InlineTool icon="%" pending="Preparing patch..." complete={false} part={props.part}>
        Patch
      <InlineTool icon="%" pending="Preparing apply_patch..." complete={false} part={props.part}>
        apply_patch
      </InlineTool>
    </Match>
  </Switch>

@@ -26,10 +26,6 @@ import { EOL } from "os"
|
||||
import { WebCommand } from "./cli/cmd/web"
|
||||
import { PrCommand } from "./cli/cmd/pr"
|
||||
import { SessionCommand } from "./cli/cmd/session"
|
||||
import path from "path"
|
||||
import { Global } from "./global"
|
||||
import { JsonMigration } from "./storage/json-migration"
|
||||
import { Database } from "./storage/db"
|
||||
|
||||
process.on("unhandledRejection", (e) => {
|
||||
Log.Default.error("rejection", {
|
||||
@@ -78,44 +74,6 @@ const cli = yargs(hideBin(process.argv))
|
||||
version: Installation.VERSION,
|
||||
args: process.argv.slice(2),
|
||||
})
|
||||
|
||||
const marker = path.join(Global.Path.data, "opencode.db")
|
||||
if (!(await Bun.file(marker).exists())) {
|
||||
console.log("Performing one time database migration, may take a few minutes...")
|
||||
const tty = process.stdout.isTTY
|
||||
const width = 36
|
||||
const orange = "\x1b[38;5;214m"
|
||||
const muted = "\x1b[0;2m"
|
||||
const reset = "\x1b[0m"
|
||||
let last = -1
|
||||
if (tty) process.stdout.write("\x1b[?25l")
|
||||
try {
|
||||
await JsonMigration.run(Database.Client().$client, {
|
||||
progress: (event) => {
|
||||
const percent = Math.floor((event.current / event.total) * 100)
|
||||
if (percent === last && event.current !== event.total) return
|
||||
last = percent
|
||||
if (tty) {
|
||||
const fill = Math.round((percent / 100) * width)
|
||||
const bar = `${"■".repeat(fill)}${"・".repeat(width - fill)}`
|
||||
process.stdout.write(
|
||||
`\r${orange}${bar} ${percent.toString().padStart(3)}%${reset} ${muted}${event.label.padEnd(12)} ${event.current}/${event.total}${reset}`,
|
||||
)
|
||||
} else {
|
||||
process.stdout.write(`\rsqlite-migration:${percent}`)
|
||||
}
|
||||
|
||||
if (event.current === event.total) process.stdout.write("\n")
|
||||
},
|
||||
})
|
||||
} finally {
|
||||
if (tty) process.stdout.write("\x1b[?25h")
|
||||
else {
|
||||
console.log("sqlite-migration:done")
|
||||
}
|
||||
}
|
||||
console.log("Database migration complete.")
|
||||
}
|
||||
})
|
||||
.usage("\n" + UI.logo())
|
||||
.completion("completion", "generate shell completion script")
|
||||
|
||||
@@ -571,25 +571,28 @@ export namespace MCP {
|
||||
const clientsSnapshot = await clients()
|
||||
const defaultTimeout = cfg.experimental?.mcp_timeout
|
||||
|
||||
for (const [clientName, client] of Object.entries(clientsSnapshot)) {
|
||||
// Only include tools from connected MCPs (skip disabled ones)
|
||||
if (s.status[clientName]?.status !== "connected") {
|
||||
continue
|
||||
}
|
||||
const connectedClients = Object.entries(clientsSnapshot).filter(
|
||||
([clientName]) => s.status[clientName]?.status === "connected",
|
||||
)
|
||||
|
||||
const toolsResult = await client.listTools().catch((e) => {
|
||||
log.error("failed to get tools", { clientName, error: e.message })
|
||||
const failedStatus = {
|
||||
status: "failed" as const,
|
||||
error: e instanceof Error ? e.message : String(e),
|
||||
}
|
||||
s.status[clientName] = failedStatus
|
||||
delete s.clients[clientName]
|
||||
return undefined
|
||||
})
|
||||
if (!toolsResult) {
|
||||
continue
|
||||
}
|
||||
const toolsResults = await Promise.all(
|
||||
connectedClients.map(async ([clientName, client]) => {
|
||||
const toolsResult = await client.listTools().catch((e) => {
|
||||
log.error("failed to get tools", { clientName, error: e.message })
|
||||
const failedStatus = {
|
||||
status: "failed" as const,
|
||||
error: e instanceof Error ? e.message : String(e),
|
||||
}
|
||||
s.status[clientName] = failedStatus
|
||||
delete s.clients[clientName]
|
||||
return undefined
|
||||
})
|
||||
return { clientName, client, toolsResult }
|
||||
}),
|
||||
)
|
||||
|
||||
for (const { clientName, client, toolsResult } of toolsResults) {
|
||||
if (!toolsResult) continue
|
||||
const mcpConfig = config[clientName]
|
||||
const entry = isMcpConfigured(mcpConfig) ? mcpConfig : undefined
|
||||
const timeout = entry?.timeout ?? defaultTimeout
|
||||
|
||||
@@ -3,8 +3,7 @@ import { BusEvent } from "@/bus/bus-event"
|
||||
import { Config } from "@/config/config"
|
||||
import { Identifier } from "@/id/id"
|
||||
import { Instance } from "@/project/instance"
|
||||
import { Database, eq } from "@/storage/db"
|
||||
import { PermissionTable } from "@/session/session.sql"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { fn } from "@/util/fn"
|
||||
import { Log } from "@/util/log"
|
||||
import { Wildcard } from "@/util/wildcard"
|
||||
@@ -106,12 +105,9 @@ export namespace PermissionNext {
|
||||
),
|
||||
}
|
||||
|
||||
const state = Instance.state(() => {
|
||||
const state = Instance.state(async () => {
|
||||
const projectID = Instance.project.id
|
||||
const row = Database.use((db) =>
|
||||
db.select().from(PermissionTable).where(eq(PermissionTable.project_id, projectID)).get(),
|
||||
)
|
||||
const stored = row?.data ?? ([] as Ruleset)
|
||||
const stored = await Storage.read<Ruleset>(["permission", projectID]).catch(() => [] as Ruleset)
|
||||
|
||||
const pending: Record<
|
||||
string,
|
||||
@@ -226,8 +222,7 @@ export namespace PermissionNext {
|
||||
|
||||
// TODO: we don't save the permission ruleset to disk yet until there's
|
||||
// UI to manage it
|
||||
// db().insert(PermissionTable).values({ projectID: Instance.project.id, data: s.approved })
|
||||
// .onConflictDoUpdate({ target: PermissionTable.projectID, set: { data: s.approved } }).run()
|
||||
// await Storage.write(["permission", Instance.project.id], s.approved)
|
||||
return
|
||||
}
|
||||
},
|
||||
@@ -280,7 +275,6 @@ export namespace PermissionNext {
|
||||
}
|
||||
|
||||
export async function list() {
|
||||
const s = await state()
|
||||
return Object.values(s.pending).map((x) => x.info)
|
||||
return state().then((x) => Object.values(x.pending).map((x) => x.info))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
import { Plugin } from "../plugin"
import { Share } from "../share/share"
import { Format } from "../format"
import { LSP } from "../lsp"
import { FileWatcher } from "../file/watcher"
@@ -16,6 +17,7 @@ import { Truncate } from "../tool/truncation"
export async function InstanceBootstrap() {
  Log.Default.info("bootstrapping", { directory: Instance.directory })
  await Plugin.init()
  Share.init()
  ShareNext.init()
  Format.init()
  await LSP.init()

@@ -1,15 +0,0 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"
import { Timestamps } from "@/storage/schema.sql"

export const ProjectTable = sqliteTable("project", {
  id: text().primaryKey(),
  worktree: text().notNull(),
  vcs: text(),
  name: text(),
  icon_url: text(),
  icon_color: text(),
  ...Timestamps,
  time_initialized: integer(),
  sandboxes: text({ mode: "json" }).notNull().$type<string[]>(),
  commands: text({ mode: "json" }).$type<{ start?: string }>(),
})
@@ -1,17 +1,18 @@
|
||||
import z from "zod"
|
||||
import fs from "fs/promises"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import path from "path"
|
||||
import { $ } from "bun"
|
||||
import { Database, eq } from "../storage/db"
|
||||
import { ProjectTable } from "./project.sql"
|
||||
import { SessionTable } from "../session/session.sql"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { Log } from "../util/log"
|
||||
import { Flag } from "@/flag/flag"
|
||||
import { Session } from "../session"
|
||||
import { work } from "../util/queue"
|
||||
import { fn } from "@opencode-ai/util/fn"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { iife } from "@/util/iife"
|
||||
import { GlobalBus } from "@/bus/global"
|
||||
import { existsSync } from "fs"
|
||||
|
||||
export namespace Project {
|
||||
const log = Log.create({ service: "project" })
|
||||
@@ -49,84 +50,64 @@ export namespace Project {
|
||||
Updated: BusEvent.define("project.updated", Info),
|
||||
}
|
||||
|
||||
type Row = typeof ProjectTable.$inferSelect
|
||||
|
||||
export function fromRow(row: Row): Info {
|
||||
const icon =
|
||||
row.icon_url || row.icon_color
|
||||
? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined }
|
||||
: undefined
|
||||
return {
|
||||
id: row.id,
|
||||
worktree: row.worktree,
|
||||
vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined,
|
||||
name: row.name ?? undefined,
|
||||
icon,
|
||||
time: {
|
||||
created: row.time_created,
|
||||
updated: row.time_updated,
|
||||
initialized: row.time_initialized ?? undefined,
|
||||
},
|
||||
sandboxes: row.sandboxes,
|
||||
commands: row.commands ?? undefined,
|
||||
}
|
||||
}
|
||||
|
||||
export async function fromDirectory(directory: string) {
|
||||
log.info("fromDirectory", { directory })
|
||||
|
||||
const data = await iife(async () => {
|
||||
const { id, sandbox, worktree, vcs } = await iife(async () => {
|
||||
const matches = Filesystem.up({ targets: [".git"], start: directory })
|
||||
const git = await matches.next().then((x) => x.value)
|
||||
await matches.return()
|
||||
if (git) {
|
||||
const sandbox = path.dirname(git)
|
||||
const bin = Bun.which("git")
|
||||
let sandbox = path.dirname(git)
|
||||
|
||||
const cached = await Bun.file(path.join(git, "opencode"))
|
||||
const gitBinary = Bun.which("git")
|
||||
|
||||
// cached id calculation
|
||||
let id = await Bun.file(path.join(git, "opencode"))
|
||||
.text()
|
||||
.then((x) => x.trim())
|
||||
.catch(() => undefined)
|
||||
|
||||
if (!bin) {
|
||||
if (!gitBinary) {
|
||||
return {
|
||||
id: cached ?? "global",
|
||||
id: id ?? "global",
|
||||
worktree: sandbox,
|
||||
sandbox: sandbox,
|
||||
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
|
||||
}
|
||||
}
|
||||
|
||||
const roots = cached
|
||||
? undefined
|
||||
: await $`git rev-list --max-parents=0 --all`
|
||||
.quiet()
|
||||
.nothrow()
|
||||
.cwd(sandbox)
|
||||
.text()
|
||||
.then((x) =>
|
||||
x
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((x) => x.trim())
|
||||
.toSorted(),
|
||||
)
|
||||
.catch(() => undefined)
|
||||
|
||||
if (!cached && !roots) {
|
||||
return {
|
||||
id: "global",
|
||||
worktree: sandbox,
|
||||
sandbox: sandbox,
|
||||
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
|
||||
}
|
||||
}
|
||||
|
||||
const id = cached ?? roots?.[0]
|
||||
if (!cached && id) {
|
||||
void Bun.file(path.join(git, "opencode"))
|
||||
.write(id)
|
||||
// generate id from root commit
|
||||
if (!id) {
|
||||
const roots = await $`git rev-list --max-parents=0 --all`
|
||||
.quiet()
|
||||
.nothrow()
|
||||
.cwd(sandbox)
|
||||
.text()
|
||||
.then((x) =>
|
||||
x
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((x) => x.trim())
|
||||
.toSorted(),
|
||||
)
|
||||
.catch(() => undefined)
|
||||
|
||||
if (!roots) {
|
||||
return {
|
||||
id: "global",
|
||||
worktree: sandbox,
|
||||
sandbox: sandbox,
|
||||
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
|
||||
}
|
||||
}
|
||||
|
||||
id = roots[0]
|
||||
if (id) {
|
||||
void Bun.file(path.join(git, "opencode"))
|
||||
.write(id)
|
||||
.catch(() => undefined)
|
||||
}
|
||||
}
|
||||
|
||||
if (!id) {
|
||||
@@ -155,31 +136,33 @@ export namespace Project {
|
||||
}
|
||||
}
|
||||
|
||||
const tree = await $`git rev-parse --git-common-dir`
|
||||
sandbox = top
|
||||
|
||||
const worktree = await $`git rev-parse --git-common-dir`
|
||||
.quiet()
|
||||
.nothrow()
|
||||
.cwd(top)
|
||||
.cwd(sandbox)
|
||||
.text()
|
||||
.then((x) => {
|
||||
const dirname = path.dirname(x.trim())
|
||||
if (dirname === ".") return top
|
||||
if (dirname === ".") return sandbox
|
||||
return dirname
|
||||
})
|
||||
.catch(() => undefined)
|
||||
|
||||
if (!tree) {
|
||||
if (!worktree) {
|
||||
return {
|
||||
id,
|
||||
sandbox: top,
|
||||
worktree: top,
|
||||
sandbox,
|
||||
worktree: sandbox,
|
||||
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id,
|
||||
sandbox: top,
|
||||
worktree: tree,
|
||||
sandbox,
|
||||
worktree,
|
||||
vcs: "git",
|
||||
}
|
||||
}
|
||||
@@ -192,80 +175,47 @@ export namespace Project {
|
||||
}
|
||||
})
|
||||
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, data.id)).get())
|
||||
const existing = await iife(async () => {
|
||||
if (row) return fromRow(row)
|
||||
const fresh: Info = {
|
||||
id: data.id,
|
||||
worktree: data.worktree,
|
||||
vcs: data.vcs as Info["vcs"],
|
||||
let existing = await Storage.read<Info>(["project", id]).catch(() => undefined)
|
||||
if (!existing) {
|
||||
existing = {
|
||||
id,
|
||||
worktree,
|
||||
vcs: vcs as Info["vcs"],
|
||||
sandboxes: [],
|
||||
time: {
|
||||
created: Date.now(),
|
||||
updated: Date.now(),
|
||||
},
|
||||
}
|
||||
if (data.id !== "global") {
|
||||
await migrateFromGlobal(data.id, data.worktree)
|
||||
if (id !== "global") {
|
||||
await migrateFromGlobal(id, worktree)
|
||||
}
|
||||
return fresh
|
||||
})
|
||||
}
|
||||
|
||||
// migrate old projects before sandboxes
|
||||
if (!existing.sandboxes) existing.sandboxes = []
|
||||
|
||||
if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing)
|
||||
|
||||
const result: Info = {
|
||||
...existing,
|
||||
worktree: data.worktree,
|
||||
vcs: data.vcs as Info["vcs"],
|
||||
worktree,
|
||||
vcs: vcs as Info["vcs"],
|
||||
time: {
|
||||
...existing.time,
|
||||
updated: Date.now(),
|
||||
},
|
||||
}
|
||||
if (data.sandbox !== result.worktree && !result.sandboxes.includes(data.sandbox))
|
||||
result.sandboxes.push(data.sandbox)
|
||||
const sandboxes: string[] = []
|
||||
for (const x of result.sandboxes) {
|
||||
const stat = await Bun.file(x)
|
||||
.stat()
|
||||
.catch(() => undefined)
|
||||
if (stat) sandboxes.push(x)
|
||||
}
|
||||
result.sandboxes = sandboxes
|
||||
const insert = {
|
||||
id: result.id,
|
||||
worktree: result.worktree,
|
||||
vcs: result.vcs ?? null,
|
||||
name: result.name,
|
||||
icon_url: result.icon?.url,
|
||||
icon_color: result.icon?.color,
|
||||
time_created: result.time.created,
|
||||
time_updated: result.time.updated,
|
||||
time_initialized: result.time.initialized,
|
||||
sandboxes: result.sandboxes,
|
||||
commands: result.commands,
|
||||
}
|
||||
const updateSet = {
|
||||
worktree: result.worktree,
|
||||
vcs: result.vcs ?? null,
|
||||
name: result.name,
|
||||
icon_url: result.icon?.url,
|
||||
icon_color: result.icon?.color,
|
||||
time_updated: result.time.updated,
|
||||
time_initialized: result.time.initialized,
|
||||
sandboxes: result.sandboxes,
|
||||
commands: result.commands,
|
||||
}
|
||||
Database.use((db) =>
|
||||
db.insert(ProjectTable).values(insert).onConflictDoUpdate({ target: ProjectTable.id, set: updateSet }).run(),
|
||||
)
|
||||
if (sandbox !== result.worktree && !result.sandboxes.includes(sandbox)) result.sandboxes.push(sandbox)
|
||||
result.sandboxes = result.sandboxes.filter((x) => existsSync(x))
|
||||
await Storage.write<Info>(["project", id], result)
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: result,
|
||||
},
|
||||
})
|
||||
return { project: result, sandbox: data.sandbox }
|
||||
return { project: result, sandbox }
|
||||
}
|
||||
|
||||
export async function discover(input: Info) {
|
||||
@@ -298,54 +248,43 @@ export namespace Project {
|
||||
return
|
||||
}
|
||||
|
||||
async function migrateFromGlobal(id: string, worktree: string) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get())
|
||||
if (!row) return
|
||||
async function migrateFromGlobal(newProjectID: string, worktree: string) {
|
||||
const globalProject = await Storage.read<Info>(["project", "global"]).catch(() => undefined)
|
||||
if (!globalProject) return
|
||||
|
||||
const sessions = Database.use((db) =>
|
||||
db.select().from(SessionTable).where(eq(SessionTable.project_id, "global")).all(),
|
||||
)
|
||||
if (sessions.length === 0) return
|
||||
const globalSessions = await Storage.list(["session", "global"]).catch(() => [])
|
||||
if (globalSessions.length === 0) return
|
||||
|
||||
log.info("migrating sessions from global", { newProjectID: id, worktree, count: sessions.length })
|
||||
log.info("migrating sessions from global", { newProjectID, worktree, count: globalSessions.length })
|
||||
|
||||
await work(10, sessions, async (row) => {
|
||||
// Skip sessions that belong to a different directory
|
||||
if (row.directory && row.directory !== worktree) return
|
||||
await work(10, globalSessions, async (key) => {
|
||||
const sessionID = key[key.length - 1]
|
||||
const session = await Storage.read<Session.Info>(key).catch(() => undefined)
|
||||
if (!session) return
|
||||
if (session.directory && session.directory !== worktree) return
|
||||
|
||||
log.info("migrating session", { sessionID: row.id, from: "global", to: id })
|
||||
Database.use((db) => db.update(SessionTable).set({ project_id: id }).where(eq(SessionTable.id, row.id)).run())
|
||||
session.projectID = newProjectID
|
||||
log.info("migrating session", { sessionID, from: "global", to: newProjectID })
|
||||
await Storage.write(["session", newProjectID, sessionID], session)
|
||||
await Storage.remove(key)
|
||||
}).catch((error) => {
|
||||
log.error("failed to migrate sessions from global to project", { error, projectId: id })
|
||||
log.error("failed to migrate sessions from global to project", { error, projectId: newProjectID })
|
||||
})
|
||||
}
|
||||
|
||||
export function setInitialized(id: string) {
|
||||
Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({
|
||||
time_initialized: Date.now(),
|
||||
})
|
||||
.where(eq(ProjectTable.id, id))
|
||||
.run(),
|
||||
)
|
||||
export async function setInitialized(projectID: string) {
|
||||
await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
draft.time.initialized = Date.now()
|
||||
})
|
||||
}
|
||||
|
||||
export function list() {
|
||||
return Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(ProjectTable)
|
||||
.all()
|
||||
.map((row) => fromRow(row)),
|
||||
)
|
||||
}
|
||||
|
||||
export function get(id: string): Info | undefined {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
if (!row) return undefined
|
||||
return fromRow(row)
|
||||
export async function list() {
|
||||
const keys = await Storage.list(["project"])
|
||||
const projects = await Promise.all(keys.map((x) => Storage.read<Info>(x)))
|
||||
return projects.map((project) => ({
|
||||
...project,
|
||||
sandboxes: project.sandboxes?.filter((x) => existsSync(x)),
|
||||
}))
|
||||
}
|
||||
|
||||
export const update = fn(
|
||||
@@ -356,90 +295,77 @@ export namespace Project {
|
||||
commands: Info.shape.commands.optional(),
|
||||
}),
|
||||
async (input) => {
|
||||
const result = Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({
|
||||
name: input.name,
|
||||
icon_url: input.icon?.url,
|
||||
icon_color: input.icon?.color,
|
||||
commands: input.commands,
|
||||
time_updated: Date.now(),
|
||||
})
|
||||
.where(eq(ProjectTable.id, input.projectID))
|
||||
.returning()
|
||||
.get(),
|
||||
)
|
||||
if (!result) throw new Error(`Project not found: ${input.projectID}`)
|
||||
const data = fromRow(result)
|
||||
const result = await Storage.update<Info>(["project", input.projectID], (draft) => {
|
||||
if (input.name !== undefined) draft.name = input.name
|
||||
if (input.icon !== undefined) {
|
||||
draft.icon = {
|
||||
...draft.icon,
|
||||
}
|
||||
if (input.icon.url !== undefined) draft.icon.url = input.icon.url
|
||||
if (input.icon.override !== undefined) draft.icon.override = input.icon.override || undefined
|
||||
if (input.icon.color !== undefined) draft.icon.color = input.icon.color
|
||||
}
|
||||
|
||||
if (input.commands?.start !== undefined) {
|
||||
const start = input.commands.start || undefined
|
||||
draft.commands = {
|
||||
...(draft.commands ?? {}),
|
||||
}
|
||||
draft.commands.start = start
|
||||
if (!draft.commands.start) draft.commands = undefined
|
||||
}
|
||||
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: data,
|
||||
properties: result,
|
||||
},
|
||||
})
|
||||
return data
|
||||
return result
|
||||
},
|
||||
)
|
||||
|
||||
export async function sandboxes(id: string) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
if (!row) return []
|
||||
const data = fromRow(row)
|
||||
export async function sandboxes(projectID: string) {
|
||||
const project = await Storage.read<Info>(["project", projectID]).catch(() => undefined)
|
||||
if (!project?.sandboxes) return []
|
||||
const valid: string[] = []
|
||||
for (const dir of data.sandboxes) {
|
||||
const stat = await Bun.file(dir)
|
||||
.stat()
|
||||
.catch(() => undefined)
|
||||
for (const dir of project.sandboxes) {
|
||||
const stat = await fs.stat(dir).catch(() => undefined)
|
||||
if (stat?.isDirectory()) valid.push(dir)
|
||||
}
|
||||
return valid
|
||||
}
|
||||
|
||||
export async function addSandbox(id: string, directory: string) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
if (!row) throw new Error(`Project not found: ${id}`)
|
||||
const sandboxes = [...row.sandboxes]
|
||||
if (!sandboxes.includes(directory)) sandboxes.push(directory)
|
||||
const result = Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({ sandboxes, time_updated: Date.now() })
|
||||
.where(eq(ProjectTable.id, id))
|
||||
.returning()
|
||||
.get(),
|
||||
)
|
||||
if (!result) throw new Error(`Project not found: ${id}`)
|
||||
const data = fromRow(result)
|
||||
export async function addSandbox(projectID: string, directory: string) {
|
||||
const result = await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
const sandboxes = draft.sandboxes ?? []
|
||||
if (!sandboxes.includes(directory)) sandboxes.push(directory)
|
||||
draft.sandboxes = sandboxes
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: data,
|
||||
properties: result,
|
||||
},
|
||||
})
|
||||
return data
|
||||
return result
|
||||
}
|
||||
|
||||
export async function removeSandbox(id: string, directory: string) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
|
||||
if (!row) throw new Error(`Project not found: ${id}`)
|
||||
const sandboxes = row.sandboxes.filter((s) => s !== directory)
|
||||
const result = Database.use((db) =>
|
||||
db
|
||||
.update(ProjectTable)
|
||||
.set({ sandboxes, time_updated: Date.now() })
|
||||
.where(eq(ProjectTable.id, id))
|
||||
.returning()
|
||||
.get(),
|
||||
)
|
||||
if (!result) throw new Error(`Project not found: ${id}`)
|
||||
const data = fromRow(result)
|
||||
export async function removeSandbox(projectID: string, directory: string) {
|
||||
const result = await Storage.update<Info>(["project", projectID], (draft) => {
|
||||
const sandboxes = draft.sandboxes ?? []
|
||||
draft.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory)
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
GlobalBus.emit("event", {
|
||||
payload: {
|
||||
type: Event.Updated.type,
|
||||
properties: data,
|
||||
properties: result,
|
||||
},
|
||||
})
|
||||
return data
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
import { resolver } from "hono-openapi"
import z from "zod"
import { NotFoundError } from "../storage/db"
import { Storage } from "../storage/storage"

export const ERRORS = {
  400: {
@@ -25,7 +25,7 @@ export const ERRORS = {
    description: "Not found",
    content: {
      "application/json": {
        schema: resolver(NotFoundError.Schema),
        schema: resolver(Storage.NotFoundError.Schema),
      },
    },
  },

@@ -3,7 +3,7 @@ import { describeRoute, validator, resolver } from "hono-openapi"
import { upgradeWebSocket } from "hono/bun"
import z from "zod"
import { Pty } from "@/pty"
import { NotFoundError } from "../../storage/db"
import { Storage } from "../../storage/storage"
import { errors } from "../error"
import { lazy } from "../../util/lazy"

@@ -76,7 +76,7 @@ export const PtyRoutes = lazy(() =>
    async (c) => {
      const info = Pty.get(c.req.valid("param").ptyID)
      if (!info) {
        throw new NotFoundError({ message: "Session not found" })
        throw new Storage.NotFoundError({ message: "Session not found" })
      }
      return c.json(info)
    },

@@ -276,15 +276,18 @@ export const SessionRoutes = lazy(() =>
      const sessionID = c.req.valid("param").sessionID
      const updates = c.req.valid("json")

      let session = await Session.get(sessionID)
      if (updates.title !== undefined) {
        session = await Session.setTitle({ sessionID, title: updates.title })
      }
      if (updates.time?.archived !== undefined) {
        session = await Session.setArchived({ sessionID, time: updates.time.archived })
      }
      const updatedSession = await Session.update(
        sessionID,
        (session) => {
          if (updates.title !== undefined) {
            session.title = updates.title
          }
          if (updates.time?.archived !== undefined) session.time.archived = updates.time.archived
        },
        { touch: false },
      )

      return c.json(session)
      return c.json(updatedSession)
    },
  )
  .post(

@@ -31,7 +31,7 @@ import { ExperimentalRoutes } from "./routes/experimental"
import { ProviderRoutes } from "./routes/provider"
import { lazy } from "../util/lazy"
import { InstanceBootstrap } from "../project/bootstrap"
import { NotFoundError } from "../storage/db"
import { Storage } from "../storage/storage"
import type { ContentfulStatusCode } from "hono/utils/http-status"
import { websocket } from "hono/bun"
import { HTTPException } from "hono/http-exception"
@@ -65,7 +65,7 @@ export namespace Server {
  })
  if (err instanceof NamedError) {
    let status: ContentfulStatusCode
    if (err instanceof NotFoundError) status = 404
    if (err instanceof Storage.NotFoundError) status = 404
    else if (err instanceof Provider.ModelNotFoundError) status = 400
    else if (err.name.startsWith("Worktree")) status = 400
    else status = 500

@@ -41,7 +41,9 @@ export namespace SessionCompaction {

  const reserved =
    config.compaction?.reserved ?? Math.min(COMPACTION_BUFFER, ProviderTransform.maxOutputTokens(input.model))
  const usable = input.model.limit.input ? input.model.limit.input - reserved : context - reserved
  const usable = input.model.limit.input
    ? input.model.limit.input - reserved
    : context - ProviderTransform.maxOutputTokens(input.model)
  return count >= usable
}

@@ -10,9 +10,7 @@ import { Flag } from "../flag/flag"
|
||||
import { Identifier } from "../id/id"
|
||||
import { Installation } from "../installation"
|
||||
|
||||
import { Database, NotFoundError, eq, and, or, like } from "../storage/db"
|
||||
import { SessionTable, MessageTable, PartTable } from "./session.sql"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { Log } from "../util/log"
|
||||
import { MessageV2 } from "./message-v2"
|
||||
import { Instance } from "../project/instance"
|
||||
@@ -43,64 +41,6 @@ export namespace Session {
|
||||
).test(title)
|
||||
}
|
||||
|
||||
type SessionRow = typeof SessionTable.$inferSelect
|
||||
|
||||
export function fromRow(row: SessionRow): Info {
|
||||
const summary =
|
||||
row.summary_additions !== null || row.summary_deletions !== null || row.summary_files !== null
|
||||
? {
|
||||
additions: row.summary_additions ?? 0,
|
||||
deletions: row.summary_deletions ?? 0,
|
||||
files: row.summary_files ?? 0,
|
||||
diffs: row.summary_diffs ?? undefined,
|
||||
}
|
||||
: undefined
|
||||
const share = row.share_url ? { url: row.share_url } : undefined
|
||||
const revert = row.revert ?? undefined
|
||||
return {
|
||||
id: row.id,
|
||||
slug: row.slug,
|
||||
projectID: row.project_id,
|
||||
directory: row.directory,
|
||||
parentID: row.parent_id ?? undefined,
|
||||
title: row.title,
|
||||
version: row.version,
|
||||
summary,
|
||||
share,
|
||||
revert,
|
||||
permission: row.permission ?? undefined,
|
||||
time: {
|
||||
created: row.time_created,
|
||||
updated: row.time_updated,
|
||||
compacting: row.time_compacting ?? undefined,
|
||||
archived: row.time_archived ?? undefined,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function toRow(info: Info) {
|
||||
return {
|
||||
id: info.id,
|
||||
project_id: info.projectID,
|
||||
parent_id: info.parentID,
|
||||
slug: info.slug,
|
||||
directory: info.directory,
|
||||
title: info.title,
|
||||
version: info.version,
|
||||
share_url: info.share?.url,
|
||||
summary_additions: info.summary?.additions,
|
||||
summary_deletions: info.summary?.deletions,
|
||||
summary_files: info.summary?.files,
|
||||
summary_diffs: info.summary?.diffs,
|
||||
revert: info.revert ?? null,
|
||||
permission: info.permission,
|
||||
time_created: info.time.created,
|
||||
time_updated: info.time.updated,
|
||||
time_compacting: info.time.compacting,
|
||||
time_archived: info.time.archived,
|
||||
}
|
||||
}
|
||||
|
||||
function getForkedTitle(title: string): string {
|
||||
const match = title.match(/^(.+) \(fork #(\d+)\)$/)
|
||||
if (match) {
|
||||
@@ -154,6 +94,16 @@ export namespace Session {
|
||||
})
|
||||
export type Info = z.output<typeof Info>
|
||||
|
||||
export const ShareInfo = z
|
||||
.object({
|
||||
secret: z.string(),
|
||||
url: z.string(),
|
||||
})
|
||||
.meta({
|
||||
ref: "SessionShare",
|
||||
})
|
||||
export type ShareInfo = z.output<typeof ShareInfo>
|
||||
|
||||
export const Event = {
|
||||
Created: BusEvent.define(
|
||||
"session.created",
|
||||
@@ -250,17 +200,8 @@ export namespace Session {
|
||||
)
|
||||
|
||||
export const touch = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
const now = Date.now()
|
||||
Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({ time_updated: now })
|
||||
.where(eq(SessionTable.id, sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
await update(sessionID, (draft) => {
|
||||
draft.time.updated = Date.now()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -286,19 +227,21 @@ export namespace Session {
|
||||
},
|
||||
}
|
||||
log.info("created", result)
|
||||
Database.use((db) => {
|
||||
db.insert(SessionTable).values(toRow(result)).run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(Event.Created, {
|
||||
info: result,
|
||||
}),
|
||||
)
|
||||
await Storage.write(["session", Instance.project.id, result.id], result)
|
||||
Bus.publish(Event.Created, {
|
||||
info: result,
|
||||
})
|
||||
const cfg = await Config.get()
|
||||
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
|
||||
share(result.id).catch(() => {
|
||||
// Silently ignore sharing errors during session creation
|
||||
})
|
||||
share(result.id)
|
||||
.then((share) => {
|
||||
update(result.id, (draft) => {
|
||||
draft.share = share
|
||||
})
|
||||
})
|
||||
.catch(() => {
|
||||
// Silently ignore sharing errors during session creation
|
||||
})
|
||||
Bus.publish(Event.Updated, {
|
||||
info: result,
|
||||
})
|
||||
@@ -313,9 +256,12 @@ export namespace Session {
|
||||
}
|
||||
|
||||
export const get = fn(Identifier.schema("session"), async (id) => {
|
||||
const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get())
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
|
||||
return fromRow(row)
|
||||
const read = await Storage.read<Info>(["session", Instance.project.id, id])
|
||||
return read as Info
|
||||
})
|
||||
|
||||
export const getShare = fn(Identifier.schema("session"), async (id) => {
|
||||
return Storage.read<ShareInfo>(["share", id])
|
||||
})
|
||||
|
||||
export const share = fn(Identifier.schema("session"), async (id) => {
|
||||
@@ -325,12 +271,15 @@ export namespace Session {
|
||||
}
|
||||
const { ShareNext } = await import("@/share/share-next")
|
||||
const share = await ShareNext.create(id)
|
||||
Database.use((db) => {
|
||||
const row = db.update(SessionTable).set({ share_url: share.url }).where(eq(SessionTable.id, id)).returning().get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
})
|
||||
await update(
|
||||
id,
|
||||
(draft) => {
|
||||
draft.share = {
|
||||
url: share.url,
|
||||
}
|
||||
},
|
||||
{ touch: false },
|
||||
)
|
||||
return share
|
||||
})
|
||||
|
||||
@@ -338,155 +287,32 @@ export namespace Session {
|
||||
// Use ShareNext to remove the share (same as share function uses ShareNext to create)
|
||||
const { ShareNext } = await import("@/share/share-next")
|
||||
await ShareNext.remove(id)
|
||||
Database.use((db) => {
|
||||
const row = db.update(SessionTable).set({ share_url: null }).where(eq(SessionTable.id, id)).returning().get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
})
|
||||
await update(
|
||||
id,
|
||||
(draft) => {
|
||||
draft.share = undefined
|
||||
},
|
||||
{ touch: false },
|
||||
)
|
||||
})
|
||||
|
||||
export const setTitle = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
title: z.string(),
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({ title: input.title })
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
export const setArchived = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
time: z.number().optional(),
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({ time_archived: input.time })
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
export const setPermission = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
permission: PermissionNext.Ruleset,
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({ permission: input.permission, time_updated: Date.now() })
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
export const setRevert = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
revert: Info.shape.revert,
|
||||
summary: Info.shape.summary,
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({
|
||||
revert: input.revert ?? null,
|
||||
summary_additions: input.summary?.additions,
|
||||
summary_deletions: input.summary?.deletions,
|
||||
summary_files: input.summary?.files,
|
||||
time_updated: Date.now(),
|
||||
})
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
export const clearRevert = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({
|
||||
revert: null,
|
||||
time_updated: Date.now(),
|
||||
})
|
||||
.where(eq(SessionTable.id, sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
export async function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
|
||||
const project = Instance.project
|
||||
const result = await Storage.update<Info>(["session", project.id, id], (draft) => {
|
||||
editor(draft)
|
||||
if (options?.touch !== false) {
|
||||
draft.time.updated = Date.now()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
export const setSummary = fn(
|
||||
z.object({
|
||||
sessionID: Identifier.schema("session"),
|
||||
summary: Info.shape.summary,
|
||||
}),
|
||||
async (input) => {
|
||||
return Database.use((db) => {
|
||||
const row = db
|
||||
.update(SessionTable)
|
||||
.set({
|
||||
summary_additions: input.summary?.additions,
|
||||
summary_deletions: input.summary?.deletions,
|
||||
summary_files: input.summary?.files,
|
||||
time_updated: Date.now(),
|
||||
})
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.returning()
|
||||
.get()
|
||||
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
|
||||
const info = fromRow(row)
|
||||
Database.effect(() => Bus.publish(Event.Updated, { info }))
|
||||
return info
|
||||
})
|
||||
},
|
||||
)
|
||||
Bus.publish(Event.Updated, {
|
||||
info: result,
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
export const diff = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
try {
|
||||
return await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
|
||||
return diffs ?? []
|
||||
})
|
||||
|
||||
export const messages = fn(
|
||||
@@ -505,37 +331,25 @@ export namespace Session {
|
||||
},
|
||||
)
|
||||
|
||||
export function* list() {
|
||||
export async function* list() {
|
||||
const project = Instance.project
|
||||
const rel = path.relative(Instance.worktree, Instance.directory)
|
||||
const suffix = path.sep + rel
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(SessionTable)
|
||||
.where(
|
||||
and(
|
||||
eq(SessionTable.project_id, project.id),
|
||||
or(eq(SessionTable.directory, Instance.directory), like(SessionTable.directory, `%${suffix}`)),
|
||||
),
|
||||
)
|
||||
.all(),
|
||||
)
|
||||
for (const row of rows) {
|
||||
yield fromRow(row)
|
||||
for (const item of await Storage.list(["session", project.id])) {
|
||||
const session = await Storage.read<Info>(item).catch(() => undefined)
|
||||
if (!session) continue
|
||||
yield session
|
||||
}
|
||||
}
|
||||
|
||||
export const children = fn(Identifier.schema("session"), async (parentID) => {
|
||||
const project = Instance.project
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(SessionTable)
|
||||
.where(and(eq(SessionTable.project_id, project.id), eq(SessionTable.parent_id, parentID)))
|
||||
.all(),
|
||||
)
|
||||
return rows.map(fromRow)
|
||||
const result = [] as Session.Info[]
|
||||
for (const item of await Storage.list(["session", project.id])) {
|
||||
const session = await Storage.read<Info>(item).catch(() => undefined)
|
||||
if (!session) continue
|
||||
if (session.parentID !== parentID) continue
|
||||
result.push(session)
|
||||
}
|
||||
return result
|
||||
})
|
||||
|
||||
export const remove = fn(Identifier.schema("session"), async (sessionID) => {
|
||||
@@ -546,14 +360,15 @@ export namespace Session {
|
||||
await remove(child.id)
|
||||
}
|
||||
await unshare(sessionID).catch(() => {})
|
||||
// CASCADE delete handles messages and parts automatically
|
||||
Database.use((db) => {
|
||||
db.delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(Event.Deleted, {
|
||||
info: session,
|
||||
}),
|
||||
)
|
||||
for (const msg of await Storage.list(["message", sessionID])) {
|
||||
for (const part of await Storage.list(["part", msg.at(-1)!])) {
|
||||
await Storage.remove(part)
|
||||
}
|
||||
await Storage.remove(msg)
|
||||
}
|
||||
await Storage.remove(["session", project.id, sessionID])
|
||||
Bus.publish(Event.Deleted, {
|
||||
info: session,
|
||||
})
|
||||
} catch (e) {
|
||||
log.error(e)
|
||||
@@ -561,23 +376,9 @@ export namespace Session {
|
||||
})
|
||||
|
||||
export const updateMessage = fn(MessageV2.Info, async (msg) => {
|
||||
const time_created = msg.role === "user" ? msg.time.created : msg.time.created
|
||||
const { id, sessionID, ...data } = msg
|
||||
Database.use((db) => {
|
||||
db.insert(MessageTable)
|
||||
.values({
|
||||
id,
|
||||
session_id: sessionID,
|
||||
time_created,
|
||||
data,
|
||||
})
|
||||
.onConflictDoUpdate({ target: MessageTable.id, set: { data } })
|
||||
.run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(MessageV2.Event.Updated, {
|
||||
info: msg,
|
||||
}),
|
||||
)
|
||||
await Storage.write(["message", msg.sessionID, msg.id], msg)
|
||||
Bus.publish(MessageV2.Event.Updated, {
|
||||
info: msg,
|
||||
})
|
||||
return msg
|
||||
})
|
||||
@@ -588,15 +389,10 @@ export namespace Session {
|
||||
messageID: Identifier.schema("message"),
|
||||
}),
|
||||
async (input) => {
|
||||
// CASCADE delete handles parts automatically
|
||||
Database.use((db) => {
|
||||
db.delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(MessageV2.Event.Removed, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
}),
|
||||
)
|
||||
await Storage.remove(["message", input.sessionID, input.messageID])
|
||||
Bus.publish(MessageV2.Event.Removed, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
})
|
||||
return input.messageID
|
||||
},
|
||||
@@ -609,58 +405,39 @@ export namespace Session {
|
||||
partID: Identifier.schema("part"),
|
||||
}),
|
||||
async (input) => {
|
||||
Database.use((db) => {
|
||||
db.delete(PartTable).where(eq(PartTable.id, input.partID)).run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
partID: input.partID,
|
||||
}),
|
||||
)
|
||||
await Storage.remove(["part", input.messageID, input.partID])
|
||||
Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: input.sessionID,
|
||||
messageID: input.messageID,
|
||||
partID: input.partID,
|
||||
})
|
||||
return input.partID
|
||||
},
|
||||
)
|
||||
|
||||
const UpdatePartInput = MessageV2.Part
|
||||
const UpdatePartInput = z.union([
|
||||
MessageV2.Part,
|
||||
z.object({
|
||||
part: MessageV2.TextPart,
|
||||
delta: z.string(),
|
||||
}),
|
||||
z.object({
|
||||
part: MessageV2.ReasoningPart,
|
||||
delta: z.string(),
|
||||
}),
|
||||
])
|
||||
|
||||
export const updatePart = fn(UpdatePartInput, async (part) => {
|
||||
const { id, messageID, sessionID, ...data } = part
|
||||
const time = Date.now()
|
||||
Database.use((db) => {
|
||||
db.insert(PartTable)
|
||||
.values({
|
||||
id,
|
||||
message_id: messageID,
|
||||
session_id: sessionID,
|
||||
time_created: time,
|
||||
data,
|
||||
})
|
||||
.onConflictDoUpdate({ target: PartTable.id, set: { data } })
|
||||
.run()
|
||||
Database.effect(() =>
|
||||
Bus.publish(MessageV2.Event.PartUpdated, {
|
||||
part,
|
||||
}),
|
||||
)
|
||||
export const updatePart = fn(UpdatePartInput, async (input) => {
|
||||
const part = "delta" in input ? input.part : input
|
||||
const delta = "delta" in input ? input.delta : undefined
|
||||
await Storage.write(["part", part.messageID, part.id], part)
|
||||
Bus.publish(MessageV2.Event.PartUpdated, {
|
||||
part,
|
||||
delta,
|
||||
})
|
||||
return part
|
||||
})
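A minimal sketch of how a caller might use the two accepted shapes of UpdatePartInput above: a bare part for a full upsert, or a { part, delta } wrapper when streaming text. The IDs and part fields here are invented for illustration and are not taken from this diff.

// Sketch only: hypothetical IDs on a TextPart-shaped object.
const part = {
  id: "prt_example",
  messageID: "msg_example",
  sessionID: "ses_example",
  type: "text" as const,
  text: "",
  time: { start: Date.now() },
}

// First shape: upsert the whole part.
await Session.updatePart(part)

// Second shape: append a chunk and publish it together with the part,
// mirroring how the text-delta and reasoning-delta handlers stream output.
part.text += "Hello"
await Session.updatePart({ part, delta: "Hello" })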
|
||||
|
||||
export const updatePartDelta = fn(
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
messageID: z.string(),
|
||||
partID: z.string(),
|
||||
field: z.string(),
|
||||
delta: z.string(),
|
||||
}),
|
||||
async (input) => {
|
||||
Bus.publish(MessageV2.Event.PartDelta, input)
|
||||
},
|
||||
)
|
||||
|
||||
export const getUsage = fn(
|
||||
z.object({
|
||||
model: z.custom<Provider.Model>(),
|
||||
|
||||
@@ -6,10 +6,6 @@ import { Identifier } from "../id/id"
|
||||
import { LSP } from "../lsp"
|
||||
import { Snapshot } from "@/snapshot"
|
||||
import { fn } from "@/util/fn"
|
||||
import { Database, eq, desc, inArray } from "@/storage/db"
|
||||
import { MessageTable, PartTable } from "./session.sql"
|
||||
import { ProviderTransform } from "@/provider/transform"
|
||||
import { STATUS_CODES } from "http"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { ProviderError } from "@/provider/error"
|
||||
import { iife } from "@/util/iife"
|
||||
@@ -427,16 +423,7 @@ export namespace MessageV2 {
|
||||
"message.part.updated",
|
||||
z.object({
|
||||
part: Part,
|
||||
}),
|
||||
),
|
||||
PartDelta: BusEvent.define(
|
||||
"message.part.delta",
|
||||
z.object({
|
||||
sessionID: z.string(),
|
||||
messageID: z.string(),
|
||||
partID: z.string(),
|
||||
field: z.string(),
|
||||
delta: z.string(),
|
||||
delta: z.string().optional(),
|
||||
}),
|
||||
),
|
||||
PartRemoved: BusEvent.define(
|
||||
@@ -681,65 +668,23 @@ export namespace MessageV2 {
|
||||
}
|
||||
|
||||
export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
|
||||
const size = 50
|
||||
let offset = 0
|
||||
while (true) {
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(MessageTable)
|
||||
.where(eq(MessageTable.session_id, sessionID))
|
||||
.orderBy(desc(MessageTable.time_created))
|
||||
.limit(size)
|
||||
.offset(offset)
|
||||
.all(),
|
||||
)
|
||||
if (rows.length === 0) break
|
||||
|
||||
const ids = rows.map((row) => row.id)
|
||||
const partsByMessage = new Map<string, MessageV2.Part[]>()
|
||||
if (ids.length > 0) {
|
||||
const partRows = Database.use((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(PartTable)
|
||||
.where(inArray(PartTable.message_id, ids))
|
||||
.orderBy(PartTable.message_id, PartTable.id)
|
||||
.all(),
|
||||
)
|
||||
for (const row of partRows) {
|
||||
const part = {
|
||||
...row.data,
|
||||
id: row.id,
|
||||
sessionID: row.session_id,
|
||||
messageID: row.message_id,
|
||||
} as MessageV2.Part
|
||||
const list = partsByMessage.get(row.message_id)
|
||||
if (list) list.push(part)
|
||||
else partsByMessage.set(row.message_id, [part])
|
||||
}
|
||||
}
|
||||
|
||||
for (const row of rows) {
|
||||
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
|
||||
yield {
|
||||
info,
|
||||
parts: partsByMessage.get(row.id) ?? [],
|
||||
}
|
||||
}
|
||||
|
||||
offset += rows.length
|
||||
if (rows.length < size) break
|
||||
const list = await Array.fromAsync(await Storage.list(["message", sessionID]))
|
||||
for (let i = list.length - 1; i >= 0; i--) {
|
||||
yield await get({
|
||||
sessionID,
|
||||
messageID: list[i][2],
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
export const parts = fn(Identifier.schema("message"), async (message_id) => {
|
||||
const rows = Database.use((db) =>
|
||||
db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(),
|
||||
)
|
||||
return rows.map(
|
||||
(row) => ({ ...row.data, id: row.id, sessionID: row.session_id, messageID: row.message_id }) as MessageV2.Part,
|
||||
)
|
||||
export const parts = fn(Identifier.schema("message"), async (messageID) => {
|
||||
const result = [] as MessageV2.Part[]
|
||||
for (const item of await Storage.list(["part", messageID])) {
|
||||
const read = await Storage.read<MessageV2.Part>(item)
|
||||
result.push(read)
|
||||
}
|
||||
result.sort((a, b) => (a.id > b.id ? 1 : -1))
|
||||
return result
|
||||
})
|
||||
|
||||
export const get = fn(
|
||||
@@ -748,11 +693,8 @@ export namespace MessageV2 {
|
||||
messageID: Identifier.schema("message"),
|
||||
}),
|
||||
async (input): Promise<WithParts> => {
|
||||
const row = Database.use((db) => db.select().from(MessageTable).where(eq(MessageTable.id, input.messageID)).get())
|
||||
if (!row) throw new Error(`Message not found: ${input.messageID}`)
|
||||
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
|
||||
return {
|
||||
info,
|
||||
info: await Storage.read<MessageV2.Info>(["message", input.sessionID, input.messageID]),
|
||||
parts: await parts(input.messageID),
|
||||
}
|
||||
},
|
||||
|
||||
@@ -63,19 +63,17 @@ export namespace SessionProcessor {
|
||||
if (value.id in reasoningMap) {
|
||||
continue
|
||||
}
|
||||
const reasoningPart = {
|
||||
reasoningMap[value.id] = {
|
||||
id: Identifier.ascending("part"),
|
||||
messageID: input.assistantMessage.id,
|
||||
sessionID: input.assistantMessage.sessionID,
|
||||
type: "reasoning" as const,
|
||||
type: "reasoning",
|
||||
text: "",
|
||||
time: {
|
||||
start: Date.now(),
|
||||
},
|
||||
metadata: value.providerMetadata,
|
||||
}
|
||||
reasoningMap[value.id] = reasoningPart
|
||||
await Session.updatePart(reasoningPart)
|
||||
break
|
||||
|
||||
case "reasoning-delta":
|
||||
@@ -83,13 +81,7 @@ export namespace SessionProcessor {
|
||||
const part = reasoningMap[value.id]
|
||||
part.text += value.text
|
||||
if (value.providerMetadata) part.metadata = value.providerMetadata
|
||||
await Session.updatePartDelta({
|
||||
sessionID: part.sessionID,
|
||||
messageID: part.messageID,
|
||||
partID: part.id,
|
||||
field: "text",
|
||||
delta: value.text,
|
||||
})
|
||||
if (part.text) await Session.updatePart({ part, delta: value.text })
|
||||
}
|
||||
break
|
||||
|
||||
@@ -296,20 +288,17 @@ export namespace SessionProcessor {
|
||||
},
|
||||
metadata: value.providerMetadata,
|
||||
}
|
||||
await Session.updatePart(currentText)
|
||||
break
|
||||
|
||||
case "text-delta":
|
||||
if (currentText) {
|
||||
currentText.text += value.text
|
||||
if (value.providerMetadata) currentText.metadata = value.providerMetadata
|
||||
await Session.updatePartDelta({
|
||||
sessionID: currentText.sessionID,
|
||||
messageID: currentText.messageID,
|
||||
partID: currentText.id,
|
||||
field: "text",
|
||||
delta: value.text,
|
||||
})
|
||||
if (currentText.text)
|
||||
await Session.updatePart({
|
||||
part: currentText,
|
||||
delta: value.text,
|
||||
})
|
||||
}
|
||||
break
|
||||
|
||||
|
||||
@@ -164,7 +164,9 @@ export namespace SessionPrompt {
|
||||
}
|
||||
if (permissions.length > 0) {
|
||||
session.permission = permissions
|
||||
await Session.setPermission({ sessionID: session.id, permission: permissions })
|
||||
await Session.update(session.id, (draft) => {
|
||||
draft.permission = permissions
|
||||
})
|
||||
}
|
||||
|
||||
if (input.noReply === true) {
|
||||
@@ -1020,9 +1022,9 @@ export namespace SessionPrompt {
|
||||
}
|
||||
}
|
||||
}
|
||||
offset = Math.max(start - 1, 0)
|
||||
offset = Math.max(start, 1)
|
||||
if (end) {
|
||||
limit = end - offset
|
||||
limit = end - (offset - 1)
|
||||
}
|
||||
}
|
||||
const args = { filePath: filepath, offset, limit }
|
||||
@@ -1851,16 +1853,21 @@ NOTE: At any point in time through this workflow you should feel free to ask the
|
||||
],
|
||||
})
|
||||
const text = await result.text.catch((err) => log.error("failed to generate title", { error: err }))
|
||||
if (text) {
|
||||
const cleaned = text
|
||||
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.find((line) => line.length > 0)
|
||||
if (!cleaned) return
|
||||
if (text)
|
||||
return Session.update(
|
||||
input.session.id,
|
||||
(draft) => {
|
||||
const cleaned = text
|
||||
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.find((line) => line.length > 0)
|
||||
if (!cleaned) return
|
||||
|
||||
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
|
||||
return Session.setTitle({ sessionID: input.session.id, title })
|
||||
}
|
||||
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
|
||||
draft.title = title
|
||||
},
|
||||
{ touch: false },
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,9 +4,8 @@ import { Snapshot } from "../snapshot"
|
||||
import { MessageV2 } from "./message-v2"
|
||||
import { Session } from "."
|
||||
import { Log } from "../util/log"
|
||||
import { Database, eq } from "../storage/db"
|
||||
import { MessageTable, PartTable } from "./session.sql"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { splitWhen } from "remeda"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { Bus } from "../bus"
|
||||
import { SessionPrompt } from "./prompt"
|
||||
import { SessionSummary } from "./summary"
|
||||
@@ -66,14 +65,13 @@ export namespace SessionRevert {
|
||||
sessionID: input.sessionID,
|
||||
diff: diffs,
|
||||
})
|
||||
return Session.setRevert({
|
||||
sessionID: input.sessionID,
|
||||
revert,
|
||||
summary: {
|
||||
return Session.update(input.sessionID, (draft) => {
|
||||
draft.revert = revert
|
||||
draft.summary = {
|
||||
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
|
||||
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
|
||||
files: diffs.length,
|
||||
},
|
||||
}
|
||||
})
|
||||
}
|
||||
return session
|
||||
@@ -85,54 +83,39 @@ export namespace SessionRevert {
|
||||
const session = await Session.get(input.sessionID)
|
||||
if (!session.revert) return session
|
||||
if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot)
|
||||
return Session.clearRevert(input.sessionID)
|
||||
const next = await Session.update(input.sessionID, (draft) => {
|
||||
draft.revert = undefined
|
||||
})
|
||||
return next
|
||||
}
|
||||
|
||||
export async function cleanup(session: Session.Info) {
|
||||
if (!session.revert) return
|
||||
const sessionID = session.id
|
||||
const msgs = await Session.messages({ sessionID })
|
||||
let msgs = await Session.messages({ sessionID })
|
||||
const messageID = session.revert.messageID
|
||||
const preserve = [] as MessageV2.WithParts[]
|
||||
const remove = [] as MessageV2.WithParts[]
|
||||
let target: MessageV2.WithParts | undefined
|
||||
for (const msg of msgs) {
|
||||
if (msg.info.id < messageID) {
|
||||
preserve.push(msg)
|
||||
continue
|
||||
}
|
||||
if (msg.info.id > messageID) {
|
||||
remove.push(msg)
|
||||
continue
|
||||
}
|
||||
if (session.revert.partID) {
|
||||
preserve.push(msg)
|
||||
target = msg
|
||||
continue
|
||||
}
|
||||
remove.push(msg)
|
||||
}
|
||||
const [preserve, remove] = splitWhen(msgs, (x) => x.info.id === messageID)
|
||||
msgs = preserve
|
||||
for (const msg of remove) {
|
||||
Database.use((db) => db.delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run())
|
||||
await Storage.remove(["message", sessionID, msg.info.id])
|
||||
await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id })
|
||||
}
|
||||
if (session.revert.partID && target) {
|
||||
const last = preserve.at(-1)
|
||||
if (session.revert.partID && last) {
|
||||
const partID = session.revert.partID
|
||||
const removeStart = target.parts.findIndex((part) => part.id === partID)
|
||||
if (removeStart >= 0) {
|
||||
const preserveParts = target.parts.slice(0, removeStart)
|
||||
const removeParts = target.parts.slice(removeStart)
|
||||
target.parts = preserveParts
|
||||
for (const part of removeParts) {
|
||||
Database.use((db) => db.delete(PartTable).where(eq(PartTable.id, part.id)).run())
|
||||
await Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: sessionID,
|
||||
messageID: target.info.id,
|
||||
partID: part.id,
|
||||
})
|
||||
}
|
||||
const [preserveParts, removeParts] = splitWhen(last.parts, (x) => x.id === partID)
|
||||
last.parts = preserveParts
|
||||
for (const part of removeParts) {
|
||||
await Storage.remove(["part", last.info.id, part.id])
|
||||
await Bus.publish(MessageV2.Event.PartRemoved, {
|
||||
sessionID: sessionID,
|
||||
messageID: last.info.id,
|
||||
partID: part.id,
|
||||
})
|
||||
}
|
||||
}
|
||||
await Session.clearRevert(sessionID)
|
||||
await Session.update(sessionID, (draft) => {
|
||||
draft.revert = undefined
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,88 +0,0 @@
|
||||
import { sqliteTable, text, integer, index, primaryKey } from "drizzle-orm/sqlite-core"
|
||||
import { ProjectTable } from "../project/project.sql"
|
||||
import type { MessageV2 } from "./message-v2"
|
||||
import type { Snapshot } from "@/snapshot"
|
||||
import type { PermissionNext } from "@/permission/next"
|
||||
import { Timestamps } from "@/storage/schema.sql"
|
||||
|
||||
type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID">
|
||||
type InfoData = Omit<MessageV2.Info, "id" | "sessionID">
|
||||
|
||||
export const SessionTable = sqliteTable(
|
||||
"session",
|
||||
{
|
||||
id: text().primaryKey(),
|
||||
project_id: text()
|
||||
.notNull()
|
||||
.references(() => ProjectTable.id, { onDelete: "cascade" }),
|
||||
parent_id: text(),
|
||||
slug: text().notNull(),
|
||||
directory: text().notNull(),
|
||||
title: text().notNull(),
|
||||
version: text().notNull(),
|
||||
share_url: text(),
|
||||
summary_additions: integer(),
|
||||
summary_deletions: integer(),
|
||||
summary_files: integer(),
|
||||
summary_diffs: text({ mode: "json" }).$type<Snapshot.FileDiff[]>(),
|
||||
revert: text({ mode: "json" }).$type<{ messageID: string; partID?: string; snapshot?: string; diff?: string }>(),
|
||||
permission: text({ mode: "json" }).$type<PermissionNext.Ruleset>(),
|
||||
...Timestamps,
|
||||
time_compacting: integer(),
|
||||
time_archived: integer(),
|
||||
},
|
||||
(table) => [index("session_project_idx").on(table.project_id), index("session_parent_idx").on(table.parent_id)],
|
||||
)
|
||||
|
||||
export const MessageTable = sqliteTable(
|
||||
"message",
|
||||
{
|
||||
id: text().primaryKey(),
|
||||
session_id: text()
|
||||
.notNull()
|
||||
.references(() => SessionTable.id, { onDelete: "cascade" }),
|
||||
...Timestamps,
|
||||
data: text({ mode: "json" }).notNull().$type<InfoData>(),
|
||||
},
|
||||
(table) => [index("message_session_idx").on(table.session_id)],
|
||||
)
|
||||
|
||||
export const PartTable = sqliteTable(
|
||||
"part",
|
||||
{
|
||||
id: text().primaryKey(),
|
||||
message_id: text()
|
||||
.notNull()
|
||||
.references(() => MessageTable.id, { onDelete: "cascade" }),
|
||||
session_id: text().notNull(),
|
||||
...Timestamps,
|
||||
data: text({ mode: "json" }).notNull().$type<PartData>(),
|
||||
},
|
||||
(table) => [index("part_message_idx").on(table.message_id), index("part_session_idx").on(table.session_id)],
|
||||
)
|
||||
|
||||
export const TodoTable = sqliteTable(
|
||||
"todo",
|
||||
{
|
||||
session_id: text()
|
||||
.notNull()
|
||||
.references(() => SessionTable.id, { onDelete: "cascade" }),
|
||||
content: text().notNull(),
|
||||
status: text().notNull(),
|
||||
priority: text().notNull(),
|
||||
position: integer().notNull(),
|
||||
...Timestamps,
|
||||
},
|
||||
(table) => [
|
||||
primaryKey({ columns: [table.session_id, table.position] }),
|
||||
index("todo_session_idx").on(table.session_id),
|
||||
],
|
||||
)
|
||||
|
||||
export const PermissionTable = sqliteTable("permission", {
|
||||
project_id: text()
|
||||
.primaryKey()
|
||||
.references(() => ProjectTable.id, { onDelete: "cascade" }),
|
||||
...Timestamps,
|
||||
data: text({ mode: "json" }).notNull().$type<PermissionNext.Ruleset>(),
|
||||
})
|
||||
@@ -90,13 +90,12 @@ export namespace SessionSummary {
|
||||
|
||||
async function summarizeSession(input: { sessionID: string; messages: MessageV2.WithParts[] }) {
|
||||
const diffs = await computeDiff({ messages: input.messages })
|
||||
await Session.setSummary({
|
||||
sessionID: input.sessionID,
|
||||
summary: {
|
||||
await Session.update(input.sessionID, (draft) => {
|
||||
draft.summary = {
|
||||
additions: diffs.reduce((sum, x) => sum + x.additions, 0),
|
||||
deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
|
||||
files: diffs.length,
|
||||
},
|
||||
}
|
||||
})
|
||||
await Storage.write(["session_diff", input.sessionID], diffs)
|
||||
Bus.publish(Session.Event.Diff, {
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { Bus } from "@/bus"
|
||||
import z from "zod"
|
||||
import { Database, eq, asc } from "../storage/db"
|
||||
import { TodoTable } from "./session.sql"
|
||||
import { Storage } from "../storage/storage"
|
||||
|
||||
export namespace Todo {
|
||||
export const Info = z
|
||||
@@ -10,6 +9,7 @@ export namespace Todo {
|
||||
content: z.string().describe("Brief description of the task"),
|
||||
status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"),
|
||||
priority: z.string().describe("Priority level of the task: high, medium, low"),
|
||||
id: z.string().describe("Unique identifier for the todo item"),
|
||||
})
|
||||
.meta({ ref: "Todo" })
|
||||
export type Info = z.infer<typeof Info>
|
||||
@@ -24,33 +24,14 @@ export namespace Todo {
|
||||
),
|
||||
}
|
||||
|
||||
export function update(input: { sessionID: string; todos: Info[] }) {
|
||||
Database.transaction((db) => {
|
||||
db.delete(TodoTable).where(eq(TodoTable.session_id, input.sessionID)).run()
|
||||
if (input.todos.length === 0) return
|
||||
db.insert(TodoTable)
|
||||
.values(
|
||||
input.todos.map((todo, position) => ({
|
||||
session_id: input.sessionID,
|
||||
content: todo.content,
|
||||
status: todo.status,
|
||||
priority: todo.priority,
|
||||
position,
|
||||
})),
|
||||
)
|
||||
.run()
|
||||
})
|
||||
export async function update(input: { sessionID: string; todos: Info[] }) {
|
||||
await Storage.write(["todo", input.sessionID], input.todos)
|
||||
Bus.publish(Event.Updated, input)
|
||||
}
|
||||
|
||||
export function get(sessionID: string) {
|
||||
const rows = Database.use((db) =>
|
||||
db.select().from(TodoTable).where(eq(TodoTable.session_id, sessionID)).orderBy(asc(TodoTable.position)).all(),
|
||||
)
|
||||
return rows.map((row) => ({
|
||||
content: row.content,
|
||||
status: row.status,
|
||||
priority: row.priority,
|
||||
}))
|
||||
export async function get(sessionID: string) {
|
||||
return Storage.read<Info[]>(["todo", sessionID])
|
||||
.then((x) => x || [])
|
||||
.catch(() => [])
|
||||
}
|
||||
}
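A short usage sketch of the storage-backed todo API above; the session ID and todo values are made up for illustration.

// Todo.update overwrites the full list for a session and publishes Event.Updated.
await Todo.update({
  sessionID: "ses_example",
  todos: [{ id: "1", content: "Write tests", status: "pending", priority: "high" }],
})

// Todo.get falls back to an empty array when nothing has been written yet.
const todos = await Todo.get("ses_example")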
|
||||
|
||||
@@ -4,8 +4,7 @@ import { ulid } from "ulid"
|
||||
import { Provider } from "@/provider/provider"
|
||||
import { Session } from "@/session"
|
||||
import { MessageV2 } from "@/session/message-v2"
|
||||
import { Database, eq } from "@/storage/db"
|
||||
import { SessionShareTable } from "./share.sql"
|
||||
import { Storage } from "@/storage/storage"
|
||||
import { Log } from "@/util/log"
|
||||
import type * as SDK from "@opencode-ai/sdk/v2"
|
||||
|
||||
@@ -78,26 +77,17 @@ export namespace ShareNext {
|
||||
})
|
||||
.then((x) => x.json())
|
||||
.then((x) => x as { id: string; url: string; secret: string })
|
||||
Database.use((db) =>
|
||||
db
|
||||
.insert(SessionShareTable)
|
||||
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
|
||||
.onConflictDoUpdate({
|
||||
target: SessionShareTable.session_id,
|
||||
set: { id: result.id, secret: result.secret, url: result.url },
|
||||
})
|
||||
.run(),
|
||||
)
|
||||
await Storage.write(["session_share", sessionID], result)
|
||||
fullSync(sessionID)
|
||||
return result
|
||||
}
|
||||
|
||||
function get(sessionID: string) {
|
||||
const row = Database.use((db) =>
|
||||
db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
|
||||
)
|
||||
if (!row) return
|
||||
return { id: row.id, secret: row.secret, url: row.url }
|
||||
return Storage.read<{
|
||||
id: string
|
||||
secret: string
|
||||
url: string
|
||||
}>(["session_share", sessionID])
|
||||
}
|
||||
|
||||
type Data =
|
||||
@@ -142,7 +132,7 @@ export namespace ShareNext {
|
||||
const queued = queue.get(sessionID)
|
||||
if (!queued) return
|
||||
queue.delete(sessionID)
|
||||
const share = get(sessionID)
|
||||
const share = await get(sessionID).catch(() => undefined)
|
||||
if (!share) return
|
||||
|
||||
await fetch(`${await url()}/api/share/${share.id}/sync`, {
|
||||
@@ -162,7 +152,7 @@ export namespace ShareNext {
|
||||
export async function remove(sessionID: string) {
|
||||
if (disabled) return
|
||||
log.info("removing share", { sessionID })
|
||||
const share = get(sessionID)
|
||||
const share = await get(sessionID)
|
||||
if (!share) return
|
||||
await fetch(`${await url()}/api/share/${share.id}`, {
|
||||
method: "DELETE",
|
||||
@@ -173,7 +163,7 @@ export namespace ShareNext {
|
||||
secret: share.secret,
|
||||
}),
|
||||
})
|
||||
Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
|
||||
await Storage.remove(["session_share", sessionID])
|
||||
}
|
||||
|
||||
async function fullSync(sessionID: string) {
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
|
||||
import { SessionTable } from "../session/session.sql"
|
||||
import { Timestamps } from "@/storage/schema.sql"
|
||||
|
||||
export const SessionShareTable = sqliteTable("session_share", {
|
||||
session_id: text()
|
||||
.primaryKey()
|
||||
.references(() => SessionTable.id, { onDelete: "cascade" }),
|
||||
id: text().notNull(),
|
||||
secret: text().notNull(),
|
||||
url: text().notNull(),
|
||||
...Timestamps,
|
||||
})
|
||||
packages/opencode/src/share/share.ts (Normal file, 92 lines)
@@ -0,0 +1,92 @@
|
||||
import { Bus } from "../bus"
|
||||
import { Installation } from "../installation"
|
||||
import { Session } from "../session"
|
||||
import { MessageV2 } from "../session/message-v2"
|
||||
import { Log } from "../util/log"
|
||||
|
||||
export namespace Share {
|
||||
const log = Log.create({ service: "share" })
|
||||
|
||||
let queue: Promise<void> = Promise.resolve()
|
||||
const pending = new Map<string, any>()
|
||||
|
||||
export async function sync(key: string, content: any) {
|
||||
if (disabled) return
|
||||
const [root, ...splits] = key.split("/")
|
||||
if (root !== "session") return
|
||||
const [sub, sessionID] = splits
|
||||
if (sub === "share") return
|
||||
const share = await Session.getShare(sessionID).catch(() => {})
|
||||
if (!share) return
|
||||
const { secret } = share
|
||||
pending.set(key, content)
|
||||
queue = queue
|
||||
.then(async () => {
|
||||
const content = pending.get(key)
|
||||
if (content === undefined) return
|
||||
pending.delete(key)
|
||||
|
||||
return fetch(`${URL}/share_sync`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
sessionID: sessionID,
|
||||
secret,
|
||||
key: key,
|
||||
content,
|
||||
}),
|
||||
})
|
||||
})
|
||||
.then((x) => {
|
||||
if (x) {
|
||||
log.info("synced", {
|
||||
key: key,
|
||||
status: x.status,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export function init() {
|
||||
Bus.subscribe(Session.Event.Updated, async (evt) => {
|
||||
await sync("session/info/" + evt.properties.info.id, evt.properties.info)
|
||||
})
|
||||
Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
|
||||
await sync("session/message/" + evt.properties.info.sessionID + "/" + evt.properties.info.id, evt.properties.info)
|
||||
})
|
||||
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
|
||||
await sync(
|
||||
"session/part/" +
|
||||
evt.properties.part.sessionID +
|
||||
"/" +
|
||||
evt.properties.part.messageID +
|
||||
"/" +
|
||||
evt.properties.part.id,
|
||||
evt.properties.part,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
export const URL =
|
||||
process.env["OPENCODE_API"] ??
|
||||
(Installation.isPreview() || Installation.isLocal() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")
|
||||
|
||||
const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
|
||||
|
||||
export async function create(sessionID: string) {
|
||||
if (disabled) return { url: "", secret: "" }
|
||||
return fetch(`${URL}/share_create`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ sessionID: sessionID }),
|
||||
})
|
||||
.then((x) => x.json())
|
||||
.then((x) => x as { url: string; secret: string })
|
||||
}
|
||||
|
||||
export async function remove(sessionID: string, secret: string) {
|
||||
if (disabled) return {}
|
||||
return fetch(`${URL}/share_delete`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ sessionID, secret }),
|
||||
}).then((x) => x.json())
|
||||
}
|
||||
}
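Roughly how this legacy share module is expected to be wired up; the call sites below are assumed for illustration and do not appear in this diff.

// Assumed startup wiring: subscribe to session, message, and part events so
// later updates flow through the keyed sync queue.
Share.init()

// Creating a share returns the public URL plus the secret used for later syncs.
const { url, secret } = await Share.create("ses_example")

// Removing it requires the same secret.
await Share.remove("ses_example", secret)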
|
||||
packages/opencode/src/sql.d.ts (vendored, 4 lines)
@@ -1,4 +0,0 @@
|
||||
declare module "*.sql" {
|
||||
const content: string
|
||||
export default content
|
||||
}
|
||||
@@ -1,140 +0,0 @@
|
||||
import { Database as BunDatabase } from "bun:sqlite"
|
||||
import { drizzle, type SQLiteBunDatabase } from "drizzle-orm/bun-sqlite"
|
||||
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
|
||||
import { type SQLiteTransaction } from "drizzle-orm/sqlite-core"
|
||||
export * from "drizzle-orm"
|
||||
import { Context } from "../util/context"
|
||||
import { lazy } from "../util/lazy"
|
||||
import { Global } from "../global"
|
||||
import { Log } from "../util/log"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import z from "zod"
|
||||
import path from "path"
|
||||
import { readFileSync, readdirSync } from "fs"
|
||||
import fs from "fs/promises"
|
||||
import { Instance } from "@/project/instance"
|
||||
|
||||
declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined
|
||||
|
||||
export const NotFoundError = NamedError.create(
|
||||
"NotFoundError",
|
||||
z.object({
|
||||
message: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
const log = Log.create({ service: "db" })
|
||||
|
||||
export namespace Database {
|
||||
export type Transaction = SQLiteTransaction<"sync", void, Record<string, never>, Record<string, never>>
|
||||
|
||||
type Client = SQLiteBunDatabase
|
||||
|
||||
type Journal = { sql: string; timestamp: number }[]
|
||||
|
||||
function time(tag: string) {
|
||||
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(tag)
|
||||
if (!match) return 0
|
||||
return Date.UTC(
|
||||
Number(match[1]),
|
||||
Number(match[2]) - 1,
|
||||
Number(match[3]),
|
||||
Number(match[4]),
|
||||
Number(match[5]),
|
||||
Number(match[6]),
|
||||
)
|
||||
}
|
||||
|
||||
function migrations(dir: string): Journal {
|
||||
const dirs = readdirSync(dir, { withFileTypes: true })
|
||||
.filter((entry) => entry.isDirectory())
|
||||
.map((entry) => entry.name)
|
||||
|
||||
const sql = dirs
|
||||
.map((name) => {
|
||||
const file = path.join(dir, name, "migration.sql")
|
||||
if (!Bun.file(file).size) return
|
||||
return {
|
||||
sql: readFileSync(file, "utf-8"),
|
||||
timestamp: time(name),
|
||||
}
|
||||
})
|
||||
.filter(Boolean) as Journal
|
||||
|
||||
return sql.sort((a, b) => a.timestamp - b.timestamp)
|
||||
}
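As a concrete illustration of the folder-name parsing above, with an assumed migration directory name:

// A folder named "20240131120000_add_sessions" sorts by the UTC timestamp
// encoded in its 14-digit prefix:
// time("20240131120000_add_sessions") === Date.UTC(2024, 0, 31, 12, 0, 0)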
|
||||
|
||||
export const Client = lazy(() => {
|
||||
log.info("opening database", { path: path.join(Global.Path.data, "opencode.db") })
|
||||
|
||||
const sqlite = new BunDatabase(path.join(Global.Path.data, "opencode.db"), { create: true })
|
||||
|
||||
sqlite.run("PRAGMA journal_mode = WAL")
|
||||
sqlite.run("PRAGMA synchronous = NORMAL")
|
||||
sqlite.run("PRAGMA busy_timeout = 5000")
|
||||
sqlite.run("PRAGMA cache_size = -64000")
|
||||
sqlite.run("PRAGMA foreign_keys = ON")
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
|
||||
// Apply schema migrations
|
||||
const entries =
|
||||
typeof OPENCODE_MIGRATIONS !== "undefined"
|
||||
? OPENCODE_MIGRATIONS
|
||||
: migrations(path.join(import.meta.dirname, "../../migration"))
|
||||
if (entries.length > 0) {
|
||||
log.info("applying migrations", {
|
||||
count: entries.length,
|
||||
mode: typeof OPENCODE_MIGRATIONS !== "undefined" ? "bundled" : "dev",
|
||||
})
|
||||
migrate(db, entries)
|
||||
}
|
||||
|
||||
return db
|
||||
})
|
||||
|
||||
export type TxOrDb = Transaction | Client
|
||||
|
||||
const ctx = Context.create<{
|
||||
tx: TxOrDb
|
||||
effects: (() => void | Promise<void>)[]
|
||||
}>("database")
|
||||
|
||||
export function use<T>(callback: (trx: TxOrDb) => T): T {
|
||||
try {
|
||||
return callback(ctx.use().tx)
|
||||
} catch (err) {
|
||||
if (err instanceof Context.NotFound) {
|
||||
const effects: (() => void | Promise<void>)[] = []
|
||||
const result = ctx.provide({ effects, tx: Client() }, () => callback(Client()))
|
||||
for (const effect of effects) effect()
|
||||
return result
|
||||
}
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
export function effect(fn: () => any | Promise<any>) {
|
||||
try {
|
||||
ctx.use().effects.push(fn)
|
||||
} catch {
|
||||
fn()
|
||||
}
|
||||
}
|
||||
|
||||
export function transaction<T>(callback: (tx: TxOrDb) => T): T {
|
||||
try {
|
||||
return callback(ctx.use().tx)
|
||||
} catch (err) {
|
||||
if (err instanceof Context.NotFound) {
|
||||
const effects: (() => void | Promise<void>)[] = []
|
||||
const result = Client().transaction((tx) => {
|
||||
return ctx.provide({ tx, effects }, () => callback(tx))
|
||||
})
|
||||
for (const effect of effects) effect()
|
||||
return result
|
||||
}
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
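A minimal sketch of how callers combine these helpers so that bus publishes registered via Database.effect run only after the surrounding transaction commits; the session value is a placeholder, not real data.

Database.transaction((tx) => {
  // deletedSession is a placeholder for the session row loaded before deletion.
  tx.delete(SessionTable).where(eq(SessionTable.id, deletedSession.id)).run()
  // Deferred: queued while the transaction is open, run after it commits.
  Database.effect(() => Bus.publish(Event.Deleted, { info: deletedSession }))
})
// Effects queued via Database.effect have fired by this point.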
|
||||
@@ -1,437 +0,0 @@
|
||||
import { Database } from "bun:sqlite"
|
||||
import { drizzle } from "drizzle-orm/bun-sqlite"
|
||||
import { Global } from "../global"
|
||||
import { Log } from "../util/log"
|
||||
import { ProjectTable } from "../project/project.sql"
|
||||
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql"
|
||||
import { SessionShareTable } from "../share/share.sql"
|
||||
import path from "path"
|
||||
import { existsSync } from "fs"
|
||||
|
||||
export namespace JsonMigration {
|
||||
const log = Log.create({ service: "json-migration" })
|
||||
|
||||
export type Progress = {
|
||||
current: number
|
||||
total: number
|
||||
label: string
|
||||
}
|
||||
|
||||
type Options = {
|
||||
progress?: (event: Progress) => void
|
||||
}
|
||||
|
||||
export async function run(sqlite: Database, options?: Options) {
|
||||
const storageDir = path.join(Global.Path.data, "storage")
|
||||
|
||||
if (!existsSync(storageDir)) {
|
||||
log.info("storage directory does not exist, skipping migration")
|
||||
return {
|
||||
projects: 0,
|
||||
sessions: 0,
|
||||
messages: 0,
|
||||
parts: 0,
|
||||
todos: 0,
|
||||
permissions: 0,
|
||||
shares: 0,
|
||||
errors: [] as string[],
|
||||
}
|
||||
}
|
||||
|
||||
log.info("starting json to sqlite migration", { storageDir })
|
||||
const start = performance.now()
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
|
||||
// Optimize SQLite for bulk inserts
|
||||
sqlite.exec("PRAGMA journal_mode = WAL")
|
||||
sqlite.exec("PRAGMA synchronous = OFF")
|
||||
sqlite.exec("PRAGMA cache_size = 10000")
|
||||
sqlite.exec("PRAGMA temp_store = MEMORY")
|
||||
const stats = {
|
||||
projects: 0,
|
||||
sessions: 0,
|
||||
messages: 0,
|
||||
parts: 0,
|
||||
todos: 0,
|
||||
permissions: 0,
|
||||
shares: 0,
|
||||
errors: [] as string[],
|
||||
}
|
||||
const orphans = {
|
||||
sessions: 0,
|
||||
todos: 0,
|
||||
permissions: 0,
|
||||
shares: 0,
|
||||
}
|
||||
const errs = stats.errors
|
||||
|
||||
const batchSize = 1000
|
||||
const now = Date.now()
|
||||
|
||||
async function list(pattern: string) {
|
||||
const items: string[] = []
|
||||
const scan = new Bun.Glob(pattern)
|
||||
for await (const file of scan.scan({ cwd: storageDir, absolute: true })) {
|
||||
items.push(file)
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
async function read(files: string[], start: number, end: number) {
|
||||
const count = end - start
|
||||
const tasks = new Array(count)
|
||||
for (let i = 0; i < count; i++) {
|
||||
tasks[i] = Bun.file(files[start + i]).json()
|
||||
}
|
||||
const results = await Promise.allSettled(tasks)
|
||||
const items = new Array(count)
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
const result = results[i]
|
||||
if (result.status === "fulfilled") {
|
||||
items[i] = result.value
|
||||
continue
|
||||
}
|
||||
errs.push(`failed to read ${files[start + i]}: ${result.reason}`)
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
function insert(values: any[], table: any, label: string) {
|
||||
if (values.length === 0) return 0
|
||||
try {
|
||||
db.insert(table).values(values).onConflictDoNothing().run()
|
||||
return values.length
|
||||
} catch (e) {
|
||||
errs.push(`failed to migrate ${label} batch: ${e}`)
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
// Pre-scan all files upfront to avoid repeated glob operations
|
||||
log.info("scanning files...")
|
||||
const [projectFiles, sessionFiles, messageFiles, partFiles, todoFiles, permFiles, shareFiles] = await Promise.all([
|
||||
list("project/*.json"),
|
||||
list("session/*/*.json"),
|
||||
list("message/*/*.json"),
|
||||
list("part/*/*.json"),
|
||||
list("todo/*.json"),
|
||||
list("permission/*.json"),
|
||||
list("session_share/*.json"),
|
||||
])
|
||||
|
||||
log.info("file scan complete", {
|
||||
projects: projectFiles.length,
|
||||
sessions: sessionFiles.length,
|
||||
messages: messageFiles.length,
|
||||
parts: partFiles.length,
|
||||
todos: todoFiles.length,
|
||||
permissions: permFiles.length,
|
||||
shares: shareFiles.length,
|
||||
})
|
||||
|
||||
const total = Math.max(
|
||||
1,
|
||||
projectFiles.length +
|
||||
sessionFiles.length +
|
||||
messageFiles.length +
|
||||
partFiles.length +
|
||||
todoFiles.length +
|
||||
permFiles.length +
|
||||
shareFiles.length,
|
||||
)
|
||||
const progress = options?.progress
|
||||
let current = 0
|
||||
const step = (label: string, count: number) => {
|
||||
current = Math.min(total, current + count)
|
||||
progress?.({ current, total, label })
|
||||
}
|
||||
|
||||
progress?.({ current, total, label: "starting" })
|
||||
|
||||
sqlite.exec("BEGIN TRANSACTION")
|
||||
|
||||
// Migrate projects first (no FK deps)
|
||||
const projectIds = new Set<string>()
|
||||
const projectValues = [] as any[]
|
||||
for (let i = 0; i < projectFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, projectFiles.length)
|
||||
const batch = await read(projectFiles, i, end)
|
||||
projectValues.length = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
if (!data?.id) {
|
||||
errs.push(`project missing id: ${projectFiles[i + j]}`)
|
||||
continue
|
||||
}
|
||||
projectIds.add(data.id)
|
||||
projectValues.push({
|
||||
id: data.id,
|
||||
worktree: data.worktree ?? "/",
|
||||
vcs: data.vcs,
|
||||
name: data.name ?? undefined,
|
||||
icon_url: data.icon?.url,
|
||||
icon_color: data.icon?.color,
|
||||
time_created: data.time?.created ?? now,
|
||||
time_updated: data.time?.updated ?? now,
|
||||
time_initialized: data.time?.initialized,
|
||||
sandboxes: data.sandboxes ?? [],
|
||||
commands: data.commands,
|
||||
})
|
||||
}
|
||||
stats.projects += insert(projectValues, ProjectTable, "project")
|
||||
step("projects", end - i)
|
||||
}
|
||||
log.info("migrated projects", { count: stats.projects, duration: Math.round(performance.now() - start) })
|
||||
|
||||
// Migrate sessions (depends on projects)
|
||||
const sessionIds = new Set<string>()
|
||||
const sessionValues = [] as any[]
|
||||
for (let i = 0; i < sessionFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, sessionFiles.length)
|
||||
const batch = await read(sessionFiles, i, end)
|
||||
sessionValues.length = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
if (!data?.id || !data?.projectID) {
|
||||
errs.push(`session missing id or projectID: ${sessionFiles[i + j]}`)
|
||||
continue
|
||||
}
|
||||
if (!projectIds.has(data.projectID)) {
|
||||
orphans.sessions++
|
||||
continue
|
||||
}
|
||||
sessionIds.add(data.id)
|
||||
sessionValues.push({
|
||||
id: data.id,
|
||||
project_id: data.projectID,
|
||||
parent_id: data.parentID ?? null,
|
||||
slug: data.slug ?? "",
|
||||
directory: data.directory ?? "",
|
||||
title: data.title ?? "",
|
||||
version: data.version ?? "",
|
||||
share_url: data.share?.url ?? null,
|
||||
summary_additions: data.summary?.additions ?? null,
|
||||
summary_deletions: data.summary?.deletions ?? null,
|
||||
summary_files: data.summary?.files ?? null,
|
||||
summary_diffs: data.summary?.diffs ?? null,
|
||||
revert: data.revert ?? null,
|
||||
permission: data.permission ?? null,
|
||||
time_created: data.time?.created ?? now,
|
||||
time_updated: data.time?.updated ?? now,
|
||||
time_compacting: data.time?.compacting ?? null,
|
||||
time_archived: data.time?.archived ?? null,
|
||||
})
|
||||
}
|
||||
stats.sessions += insert(sessionValues, SessionTable, "session")
|
||||
step("sessions", end - i)
|
||||
}
|
||||
log.info("migrated sessions", { count: stats.sessions })
|
||||
if (orphans.sessions > 0) {
|
||||
log.warn("skipped orphaned sessions", { count: orphans.sessions })
|
||||
}
|
||||
|
||||
// Migrate messages using pre-scanned file map
|
||||
const allMessageFiles = [] as string[]
|
||||
const allMessageSessions = [] as string[]
|
||||
const messageSessions = new Map<string, string>()
|
||||
for (const file of messageFiles) {
|
||||
const sessionID = path.basename(path.dirname(file))
|
||||
if (!sessionIds.has(sessionID)) continue
|
||||
allMessageFiles.push(file)
|
||||
allMessageSessions.push(sessionID)
|
||||
}
|
||||
|
||||
for (let i = 0; i < allMessageFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, allMessageFiles.length)
|
||||
const batch = await read(allMessageFiles, i, end)
|
||||
const values = new Array(batch.length)
|
||||
let count = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const file = allMessageFiles[i + j]
|
||||
const id = data.id ?? path.basename(file, ".json")
|
||||
if (!id) {
|
||||
errs.push(`message missing id: ${file}`)
|
||||
continue
|
||||
}
|
||||
const sessionID = allMessageSessions[i + j]
|
||||
messageSessions.set(id, sessionID)
|
||||
const rest = data
|
||||
delete rest.id
|
||||
delete rest.sessionID
|
||||
values[count++] = {
|
||||
id,
|
||||
session_id: sessionID,
|
||||
time_created: data.time?.created ?? now,
|
||||
time_updated: data.time?.updated ?? now,
|
||||
data: rest,
|
||||
}
|
||||
}
|
||||
values.length = count
|
||||
stats.messages += insert(values, MessageTable, "message")
|
||||
step("messages", end - i)
|
||||
}
|
||||
log.info("migrated messages", { count: stats.messages })
|
||||
|
||||
// Migrate parts using pre-scanned file map
|
||||
for (let i = 0; i < partFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, partFiles.length)
|
||||
const batch = await read(partFiles, i, end)
|
||||
const values = new Array(batch.length)
|
||||
let count = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const file = partFiles[i + j]
|
||||
const id = data.id ?? path.basename(file, ".json")
|
||||
const messageID = data.messageID ?? path.basename(path.dirname(file))
|
||||
if (!id || !messageID) {
|
||||
errs.push(`part missing id/messageID/sessionID: ${file}`)
|
||||
continue
|
||||
}
|
||||
const sessionID = messageSessions.get(messageID)
|
||||
if (!sessionID) {
|
||||
errs.push(`part missing message session: ${file}`)
|
||||
continue
|
||||
}
|
||||
if (!sessionIds.has(sessionID)) continue
|
||||
const rest = data
|
||||
delete rest.id
|
||||
delete rest.messageID
|
||||
delete rest.sessionID
|
||||
values[count++] = {
|
||||
id,
|
||||
message_id: messageID,
|
||||
session_id: sessionID,
|
||||
time_created: data.time?.created ?? now,
|
||||
time_updated: data.time?.updated ?? now,
|
||||
data: rest,
|
||||
}
|
||||
}
|
||||
values.length = count
|
||||
stats.parts += insert(values, PartTable, "part")
|
||||
step("parts", end - i)
|
||||
}
|
||||
log.info("migrated parts", { count: stats.parts })
|
||||
|
||||
// Migrate todos
|
||||
const todoSessions = todoFiles.map((file) => path.basename(file, ".json"))
|
||||
for (let i = 0; i < todoFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, todoFiles.length)
|
||||
const batch = await read(todoFiles, i, end)
|
||||
const values = [] as any[]
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const sessionID = todoSessions[i + j]
|
||||
if (!sessionIds.has(sessionID)) {
|
||||
orphans.todos++
|
||||
continue
|
||||
}
|
||||
if (!Array.isArray(data)) {
|
||||
errs.push(`todo not an array: ${todoFiles[i + j]}`)
|
||||
continue
|
||||
}
|
||||
for (let position = 0; position < data.length; position++) {
|
||||
const todo = data[position]
|
||||
if (!todo?.content || !todo?.status || !todo?.priority) continue
|
||||
values.push({
|
||||
session_id: sessionID,
|
||||
content: todo.content,
|
||||
status: todo.status,
|
||||
priority: todo.priority,
|
||||
position,
|
||||
time_created: now,
|
||||
time_updated: now,
|
||||
})
|
||||
}
|
||||
}
|
||||
stats.todos += insert(values, TodoTable, "todo")
|
||||
step("todos", end - i)
|
||||
}
|
||||
log.info("migrated todos", { count: stats.todos })
|
||||
if (orphans.todos > 0) {
|
||||
log.warn("skipped orphaned todos", { count: orphans.todos })
|
||||
}
|
||||
|
||||
// Migrate permissions
|
||||
const permProjects = permFiles.map((file) => path.basename(file, ".json"))
|
||||
const permValues = [] as any[]
|
||||
for (let i = 0; i < permFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, permFiles.length)
|
||||
const batch = await read(permFiles, i, end)
|
||||
permValues.length = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const projectID = permProjects[i + j]
|
||||
if (!projectIds.has(projectID)) {
|
||||
orphans.permissions++
|
||||
continue
|
||||
}
|
||||
permValues.push({ project_id: projectID, data })
|
||||
}
|
||||
stats.permissions += insert(permValues, PermissionTable, "permission")
|
||||
step("permissions", end - i)
|
||||
}
|
||||
log.info("migrated permissions", { count: stats.permissions })
|
||||
if (orphans.permissions > 0) {
|
||||
log.warn("skipped orphaned permissions", { count: orphans.permissions })
|
||||
}
|
||||
|
||||
// Migrate session shares
|
||||
const shareSessions = shareFiles.map((file) => path.basename(file, ".json"))
|
||||
const shareValues = [] as any[]
|
||||
for (let i = 0; i < shareFiles.length; i += batchSize) {
|
||||
const end = Math.min(i + batchSize, shareFiles.length)
|
||||
const batch = await read(shareFiles, i, end)
|
||||
shareValues.length = 0
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const data = batch[j]
|
||||
if (!data) continue
|
||||
const sessionID = shareSessions[i + j]
|
||||
if (!sessionIds.has(sessionID)) {
|
||||
orphans.shares++
|
||||
continue
|
||||
}
|
||||
if (!data?.id || !data?.secret || !data?.url) {
|
||||
errs.push(`session_share missing id/secret/url: ${shareFiles[i + j]}`)
|
||||
continue
|
||||
}
|
||||
shareValues.push({ session_id: sessionID, id: data.id, secret: data.secret, url: data.url })
|
||||
}
|
||||
stats.shares += insert(shareValues, SessionShareTable, "session_share")
|
||||
step("shares", end - i)
|
||||
}
|
||||
log.info("migrated session shares", { count: stats.shares })
|
||||
if (orphans.shares > 0) {
|
||||
log.warn("skipped orphaned session shares", { count: orphans.shares })
|
||||
}
|
||||
|
||||
sqlite.exec("COMMIT")
|
||||
|
||||
log.info("json migration complete", {
|
||||
projects: stats.projects,
|
||||
sessions: stats.sessions,
|
||||
messages: stats.messages,
|
||||
parts: stats.parts,
|
||||
todos: stats.todos,
|
||||
permissions: stats.permissions,
|
||||
shares: stats.shares,
|
||||
errorCount: stats.errors.length,
|
||||
duration: Math.round(performance.now() - start),
|
||||
})
|
||||
|
||||
if (stats.errors.length > 0) {
|
||||
log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
|
||||
}
|
||||
|
||||
progress?.({ current: total, total, label: "complete" })
|
||||
|
||||
return stats
|
||||
}
|
||||
}
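A sketch of how this one-shot migration might be driven, including the optional progress callback; opening the database file this way is assumed from the surrounding code and does not appear in this diff.

const sqlite = new Database(path.join(Global.Path.data, "opencode.db"), { create: true })
const stats = await JsonMigration.run(sqlite, {
  progress: ({ current, total, label }) => log.info("migrating", { label, current, total }),
})
log.info("done", { sessions: stats.sessions, errors: stats.errors.length })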
|
||||
@@ -1,10 +0,0 @@
|
||||
import { integer } from "drizzle-orm/sqlite-core"
|
||||
|
||||
export const Timestamps = {
|
||||
time_created: integer()
|
||||
.notNull()
|
||||
.$default(() => Date.now()),
|
||||
time_updated: integer()
|
||||
.notNull()
|
||||
.$onUpdate(() => Date.now()),
|
||||
}
|
||||
@@ -2,9 +2,9 @@ Performs exact string replacements in files.
|
||||
|
||||
Usage:
|
||||
- You must use your `Read` tool at least once in the conversation before editing. This tool will error if you attempt an edit without reading the file.
|
||||
- When editing text from Read tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: spaces + line number + tab. Everything after that tab is the actual file content to match. Never include any part of the line number prefix in the oldString or newString.
|
||||
- When editing text from Read tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: line number + colon + space (e.g., `1: `). Everything after that space is the actual file content to match. Never include any part of the line number prefix in the oldString or newString.
|
||||
- ALWAYS prefer editing existing files in the codebase. NEVER write new files unless explicitly required.
|
||||
- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
|
||||
- The edit will FAIL if `oldString` is not found in the file with an error "oldString not found in content".
|
||||
- The edit will FAIL if `oldString` is found multiple times in the file with an error "oldString found multiple times and requires more code context to uniquely identify the intended match". Either provide a larger string with more surrounding context to make it unique or use `replaceAll` to change every instance of `oldString`.
|
||||
- The edit will FAIL if `oldString` is found multiple times in the file with an error "Found multiple matches for oldString. Provide more surrounding lines in oldString to identify the correct match." Either provide a larger string with more surrounding context to make it unique or use `replaceAll` to change every instance of `oldString`.
|
||||
- Use `replaceAll` for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
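To make the matching rules above concrete, a hypothetical set of edit parameters; the file path and contents are invented.

// Assumes /home/user/app/server.ts contains the line `const port = 3000` exactly once.
const editParams = {
  filePath: "/home/user/app/server.ts",
  oldString: "const port = 3000",
  newString: "const port = 8080",
  // replaceAll: true, // opt in when every occurrence should change instead
}
// If the line appeared more than once, this call would fail; widen oldString
// with surrounding lines or set replaceAll to change every match.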
|
||||
|
||||
@@ -17,19 +17,26 @@ const MAX_BYTES = 50 * 1024
|
||||
export const ReadTool = Tool.define("read", {
|
||||
description: DESCRIPTION,
|
||||
parameters: z.object({
|
||||
filePath: z.string().describe("The path to the file to read"),
|
||||
offset: z.coerce.number().describe("The line number to start reading from (0-based)").optional(),
|
||||
limit: z.coerce.number().describe("The number of lines to read (defaults to 2000)").optional(),
|
||||
filePath: z.string().describe("The absolute path to the file or directory to read"),
|
||||
offset: z.coerce.number().describe("The line number to start reading from (1-indexed)").optional(),
|
||||
limit: z.coerce.number().describe("The maximum number of lines to read (defaults to 2000)").optional(),
|
||||
}),
|
||||
async execute(params, ctx) {
|
||||
if (params.offset !== undefined && params.offset < 1) {
|
||||
throw new Error("offset must be greater than or equal to 1")
|
||||
}
|
||||
let filepath = params.filePath
|
||||
if (!path.isAbsolute(filepath)) {
|
||||
filepath = path.resolve(Instance.directory, filepath)
|
||||
}
|
||||
const title = path.relative(Instance.worktree, filepath)
|
||||
|
||||
const file = Bun.file(filepath)
|
||||
const stat = await file.stat().catch(() => undefined)
|
||||
|
||||
await assertExternalDirectory(ctx, filepath, {
|
||||
bypass: Boolean(ctx.extra?.["bypassCwdCheck"]),
|
||||
kind: stat?.isDirectory() ? "directory" : "file",
|
||||
})
|
||||
|
||||
await ctx.ask({
|
||||
@@ -39,8 +46,7 @@ export const ReadTool = Tool.define("read", {
|
||||
metadata: {},
|
||||
})
|
||||
|
||||
const file = Bun.file(filepath)
|
||||
if (!(await file.exists())) {
|
||||
if (!stat) {
|
||||
const dir = path.dirname(filepath)
|
||||
const base = path.basename(filepath)
|
||||
|
||||
@@ -60,6 +66,48 @@ export const ReadTool = Tool.define("read", {
|
||||
throw new Error(`File not found: ${filepath}`)
|
||||
}
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
const dirents = await fs.promises.readdir(filepath, { withFileTypes: true })
|
||||
const entries = await Promise.all(
|
||||
dirents.map(async (dirent) => {
|
||||
if (dirent.isDirectory()) return dirent.name + "/"
|
||||
if (dirent.isSymbolicLink()) {
|
||||
const target = await fs.promises.stat(path.join(filepath, dirent.name)).catch(() => undefined)
|
||||
if (target?.isDirectory()) return dirent.name + "/"
|
||||
}
|
||||
return dirent.name
|
||||
}),
|
||||
)
|
||||
entries.sort((a, b) => a.localeCompare(b))
|
||||
|
||||
const limit = params.limit ?? DEFAULT_READ_LIMIT
|
||||
const offset = params.offset ?? 1
|
||||
const start = offset - 1
|
||||
const sliced = entries.slice(start, start + limit)
|
||||
const truncated = start + sliced.length < entries.length
|
||||
|
||||
const output = [
|
||||
`<path>${filepath}</path>`,
|
||||
`<type>directory</type>`,
|
||||
`<entries>`,
|
||||
sliced.join("\n"),
|
||||
truncated
|
||||
? `\n(Showing ${sliced.length} of ${entries.length} entries. Use 'offset' parameter to read beyond entry ${offset + sliced.length})`
|
||||
: `\n(${entries.length} entries)`,
|
||||
`</entries>`,
|
||||
].join("\n")
|
||||
|
||||
return {
|
||||
title,
|
||||
output,
|
||||
metadata: {
|
||||
preview: sliced.slice(0, 20).join("\n"),
|
||||
truncated,
|
||||
loaded: [] as string[],
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const instructions = await InstructionPrompt.resolve(ctx.messages, filepath, ctx.messageID)
|
||||
|
||||
// Exclude SVG (XML-based) and vnd.fastbidsheet (.fbs extension, commonly FlatBuffers schema files)
|
||||
@@ -75,7 +123,7 @@ export const ReadTool = Tool.define("read", {
|
||||
metadata: {
|
||||
preview: msg,
|
||||
truncated: false,
|
||||
...(instructions.length > 0 && { loaded: instructions.map((i) => i.filepath) }),
|
||||
loaded: instructions.map((i) => i.filepath),
|
||||
},
|
||||
attachments: [
|
||||
{
|
||||
@@ -94,13 +142,15 @@ export const ReadTool = Tool.define("read", {
|
||||
if (isBinary) throw new Error(`Cannot read binary file: ${filepath}`)
|
||||
|
||||
const limit = params.limit ?? DEFAULT_READ_LIMIT
|
||||
const offset = params.offset || 0
|
||||
const offset = params.offset ?? 1
|
||||
const start = offset - 1
|
||||
const lines = await file.text().then((text) => text.split("\n"))
|
||||
if (start >= lines.length) throw new Error(`Offset ${offset} is out of range for this file (${lines.length} lines)`)
|
||||
|
||||
const raw: string[] = []
|
||||
let bytes = 0
|
||||
let truncatedByBytes = false
|
||||
for (let i = offset; i < Math.min(lines.length, offset + limit); i++) {
|
||||
for (let i = start; i < Math.min(lines.length, start + limit); i++) {
|
||||
const line = lines[i].length > MAX_LINE_LENGTH ? lines[i].substring(0, MAX_LINE_LENGTH) + "..." : lines[i]
|
||||
const size = Buffer.byteLength(line, "utf-8") + (raw.length > 0 ? 1 : 0)
|
||||
if (bytes + size > MAX_BYTES) {
|
||||
@@ -112,15 +162,15 @@ export const ReadTool = Tool.define("read", {
|
||||
}
|
||||
|
||||
const content = raw.map((line, index) => {
|
||||
return `${(index + offset + 1).toString().padStart(5, "0")}| ${line}`
|
||||
return `${index + offset}: ${line}`
|
||||
})
|
||||
const preview = raw.slice(0, 20).join("\n")
|
||||
|
||||
let output = "<file>\n"
|
||||
let output = [`<path>${filepath}</path>`, `<type>file</type>`, "<content>"].join("\n")
|
||||
output += content.join("\n")
|
||||
|
||||
const totalLines = lines.length
|
||||
const lastReadLine = offset + raw.length
|
||||
const lastReadLine = offset + raw.length - 1
|
||||
const hasMoreLines = totalLines > lastReadLine
|
||||
const truncated = hasMoreLines || truncatedByBytes
|
||||
|
||||
@@ -131,7 +181,7 @@ export const ReadTool = Tool.define("read", {
|
||||
} else {
|
||||
output += `\n\n(End of file - total ${totalLines} lines)`
|
||||
}
|
||||
output += "\n</file>"
|
||||
output += "\n</content>"
|
||||
|
||||
// just warms the lsp client
|
||||
LSP.touchFile(filepath, false)
|
||||
@@ -147,7 +197,7 @@ export const ReadTool = Tool.define("read", {
|
||||
metadata: {
|
||||
preview,
|
||||
truncated,
|
||||
...(instructions.length > 0 && { loaded: instructions.map((i) => i.filepath) }),
|
||||
loaded: instructions.map((i) => i.filepath),
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
Reads a file from the local filesystem. You can access any file directly by using this tool.
|
||||
Assume this tool is able to read all files on the machine. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned.
|
||||
Read a file or directory from the local filesystem. If the path does not exist, an error is returned.
|
||||
|
||||
Usage:
|
||||
- The filePath parameter must be an absolute path, not a relative path
|
||||
- By default, it reads up to 2000 lines starting from the beginning of the file
|
||||
- You can optionally specify a line offset and limit (especially handy for long files), but it's recommended to read the whole file by not providing these parameters
|
||||
- Any lines longer than 2000 characters will be truncated
|
||||
- Results are returned using cat -n format, with line numbers starting at 1
|
||||
- You have the capability to call multiple tools in a single response. It is always better to speculatively read multiple files as a batch that are potentially useful.
|
||||
- If you read a file that exists but has empty contents you will receive a system reminder warning in place of file contents.
|
||||
- You can read image files using this tool.
|
||||
- The filePath parameter should be an absolute path.
|
||||
- By default, this tool returns up to 2000 lines from the start of the file.
|
||||
- The offset parameter is the line number to start from (1-indexed).
|
||||
- To read later sections, call this tool again with a larger offset.
|
||||
- Use the grep tool to find specific content in large files or files with long lines.
|
||||
- If you are unsure of the correct file path, use the glob tool to look up filenames by glob pattern.
|
||||
- Contents are returned with each line prefixed by its line number as `<line>: <content>`. For example, if a file has contents "foo\n", you will receive "1: foo\n". For directories, entries are returned one per line (without line numbers) with a trailing `/` for subdirectories.
|
||||
- Any line longer than 2000 characters is truncated.
|
||||
- Call this tool in parallel when you know there are multiple files you want to read.
|
||||
- Avoid tiny repeated slices (30 line chunks). If you need more context, read a larger window.
|
||||
- This tool can read image files and PDFs and return them as file attachments.
|
||||
|
||||
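(To make the description above concrete, a hypothetical result for a two-line file, following the `<line>: <content>` convention it documents; the path and contents are invented, not captured from a real run.)

```ts
// Hypothetical output for a file containing "foo\nbar", per the format described above.
const fileOutput = [
  "<path>/tmp/example.txt</path>",
  "<type>file</type>",
  "<content>",
  "1: foo",
  "2: bar",
  "",
  "(End of file - total 2 lines)",
  "</content>",
].join("\n")

// For directories, the description says entries come back one per line,
// e.g. "README.md" and "src/" (trailing slash marking a subdirectory).
```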
@@ -4,14 +4,9 @@ export function lazy<T>(fn: () => T) {
|
||||
|
||||
const result = (): T => {
|
||||
if (loaded) return value as T
|
||||
try {
|
||||
value = fn()
|
||||
loaded = true
|
||||
return value as T
|
||||
} catch (e) {
|
||||
// Don't mark as loaded if initialization failed
|
||||
throw e
|
||||
}
|
||||
loaded = true
|
||||
value = fn()
|
||||
return value as T
|
||||
}
|
||||
|
||||
result.reset = () => {
|
||||
|
||||
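(For context on the `lazy` helper being simplified above, a hedged usage sketch; the initializer body is made up for illustration.)

```ts
// Illustrative usage of the lazy() helper from the hunk above.
const settings = lazy(() => {
  console.log("initializer runs once")
  return { theme: "opencode" }
})

settings()       // first call runs the initializer and caches the value
settings()       // later calls return the cached value without re-running
settings.reset() // clears the cache so the next call re-initializes
```

As far as the hunk shows, the simplified version marks the value as loaded before running the initializer, so a throwing initializer is no longer retried on the next call, whereas the removed try/catch variant left it unloaded and retried.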
@@ -7,8 +7,7 @@ import { Global } from "../global"
|
||||
import { Instance } from "../project/instance"
|
||||
import { InstanceBootstrap } from "../project/bootstrap"
|
||||
import { Project } from "../project/project"
|
||||
import { Database, eq } from "../storage/db"
|
||||
import { ProjectTable } from "../project/project.sql"
|
||||
import { Storage } from "../storage/storage"
|
||||
import { fn } from "../util/fn"
|
||||
import { Log } from "../util/log"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
@@ -308,8 +307,7 @@ export namespace Worktree {
|
||||
}
|
||||
|
||||
async function runStartScripts(directory: string, input: { projectID: string; extra?: string }) {
|
||||
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get())
|
||||
const project = row ? Project.fromRow(row) : undefined
|
||||
const project = await Storage.read<Project.Info>(["project", input.projectID]).catch(() => undefined)
|
||||
const startup = project?.commands?.start?.trim() ?? ""
|
||||
const ok = await runStartScript(directory, startup, "project")
|
||||
if (!ok) return false
|
||||
|
||||
@@ -122,20 +122,12 @@ function createFakeAgent() {
|
||||
messages: async () => {
|
||||
return { data: [] }
|
||||
},
|
||||
message: async (params?: any) => {
|
||||
// Return a message with parts that can be looked up by partID
|
||||
message: async () => {
|
||||
return {
|
||||
data: {
|
||||
info: {
|
||||
role: "assistant",
|
||||
},
|
||||
parts: [
|
||||
{
|
||||
id: params?.messageID ? `${params.messageID}_part` : "part_1",
|
||||
type: "text",
|
||||
text: "",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
},
|
||||
@@ -201,7 +193,7 @@ function createFakeAgent() {
|
||||
}
|
||||
|
||||
describe("acp.agent event subscription", () => {
|
||||
test("routes message.part.delta by the event sessionID (no cross-session pollution)", async () => {
|
||||
test("routes message.part.updated by the event sessionID (no cross-session pollution)", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
@@ -215,12 +207,14 @@ describe("acp.agent event subscription", () => {
|
||||
controller.push({
|
||||
directory: cwd,
|
||||
payload: {
|
||||
type: "message.part.delta",
|
||||
type: "message.part.updated",
|
||||
properties: {
|
||||
sessionID: sessionB,
|
||||
messageID: "msg_1",
|
||||
partID: "msg_1_part",
|
||||
field: "text",
|
||||
part: {
|
||||
sessionID: sessionB,
|
||||
messageID: "msg_1",
|
||||
type: "text",
|
||||
synthetic: false,
|
||||
},
|
||||
delta: "hello",
|
||||
},
|
||||
},
|
||||
@@ -236,7 +230,7 @@ describe("acp.agent event subscription", () => {
|
||||
})
|
||||
})
|
||||
|
||||
test("keeps concurrent sessions isolated when message.part.delta events are interleaved", async () => {
|
||||
test("keeps concurrent sessions isolated when message.part.updated events are interleaved", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
@@ -254,12 +248,14 @@ describe("acp.agent event subscription", () => {
|
||||
controller.push({
|
||||
directory: cwd,
|
||||
payload: {
|
||||
type: "message.part.delta",
|
||||
type: "message.part.updated",
|
||||
properties: {
|
||||
sessionID: sessionId,
|
||||
messageID,
|
||||
partID: `${messageID}_part`,
|
||||
field: "text",
|
||||
part: {
|
||||
sessionID: sessionId,
|
||||
messageID,
|
||||
type: "text",
|
||||
synthetic: false,
|
||||
},
|
||||
delta,
|
||||
},
|
||||
},
|
||||
@@ -406,12 +402,14 @@ describe("acp.agent event subscription", () => {
|
||||
controller.push({
|
||||
directory: cwd,
|
||||
payload: {
|
||||
type: "message.part.delta",
|
||||
type: "message.part.updated",
|
||||
properties: {
|
||||
sessionID: sessionB,
|
||||
messageID: "msg_b",
|
||||
partID: "msg_b_part",
|
||||
field: "text",
|
||||
part: {
|
||||
sessionID: sessionB,
|
||||
messageID: "msg_b",
|
||||
type: "text",
|
||||
synthetic: false,
|
||||
},
|
||||
delta: "session_b_message",
|
||||
},
|
||||
},
|
||||
|
||||
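(A rough sketch of the routing these tests assert, assuming the subscriber receives `message.part.updated` payloads shaped like the pushes above; the handler and session map are illustrative, not the actual ACP agent code.)

```ts
// Illustrative only: route part updates to the session they belong to,
// so interleaved events from other sessions cannot leak into a stream.
type PartUpdated = {
  type: "message.part.updated"
  properties: { part: { sessionID: string; messageID: string; type: string; text?: string } }
}

function route(event: PartUpdated, streams: Map<string, string[]>) {
  const { part } = event.properties
  const stream = streams.get(part.sessionID) // keyed by the event's own sessionID
  if (!stream) return // ignore sessions this agent is not tracking
  if (part.type === "text" && part.text) stream.push(part.text)
}
```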
@@ -2,6 +2,7 @@ import { test, expect } from "bun:test"
|
||||
import os from "os"
|
||||
import { PermissionNext } from "../../src/permission/next"
|
||||
import { Instance } from "../../src/project/instance"
|
||||
import { Storage } from "../../src/storage/storage"
|
||||
import { tmpdir } from "../fixture/fixture"
|
||||
|
||||
// fromConfig tests
|
||||
|
||||
@@ -1,70 +1,63 @@
|
||||
// IMPORTANT: Set env vars BEFORE any imports from src/ directory
|
||||
// xdg-basedir reads env vars at import time, so we must set these first
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
import fsSync from "fs";
|
||||
import { afterAll } from "bun:test";
|
||||
import os from "os"
|
||||
import path from "path"
|
||||
import fs from "fs/promises"
|
||||
import fsSync from "fs"
|
||||
import { afterAll } from "bun:test"
|
||||
|
||||
// Set XDG env vars FIRST, before any src/ imports
|
||||
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid);
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid)
|
||||
await fs.mkdir(dir, { recursive: true })
|
||||
afterAll(() => {
|
||||
fsSync.rmSync(dir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
process.env["XDG_DATA_HOME"] = path.join(dir, "share");
|
||||
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache");
|
||||
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config");
|
||||
process.env["XDG_STATE_HOME"] = path.join(dir, "state");
|
||||
process.env["OPENCODE_MODELS_PATH"] = path.join(
|
||||
import.meta.dir,
|
||||
"tool",
|
||||
"fixtures",
|
||||
"models-api.json",
|
||||
);
|
||||
|
||||
fsSync.rmSync(dir, { recursive: true, force: true })
|
||||
})
|
||||
// Set test home directory to isolate tests from user's actual home directory
|
||||
// This prevents tests from picking up real user configs/skills from ~/.claude/skills
|
||||
const testHome = path.join(dir, "home");
|
||||
await fs.mkdir(testHome, { recursive: true });
|
||||
process.env["OPENCODE_TEST_HOME"] = testHome;
|
||||
const testHome = path.join(dir, "home")
|
||||
await fs.mkdir(testHome, { recursive: true })
|
||||
process.env["OPENCODE_TEST_HOME"] = testHome
|
||||
|
||||
// Set test managed config directory to isolate tests from system managed settings
|
||||
const testManagedConfigDir = path.join(dir, "managed");
|
||||
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir;
|
||||
const testManagedConfigDir = path.join(dir, "managed")
|
||||
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir
|
||||
|
||||
process.env["XDG_DATA_HOME"] = path.join(dir, "share")
|
||||
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache")
|
||||
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config")
|
||||
process.env["XDG_STATE_HOME"] = path.join(dir, "state")
|
||||
process.env["OPENCODE_MODELS_PATH"] = path.join(import.meta.dir, "tool", "fixtures", "models-api.json")
|
||||
|
||||
// Write the cache version file to prevent global/index.ts from clearing the cache
|
||||
const cacheDir = path.join(dir, "cache", "opencode");
|
||||
await fs.mkdir(cacheDir, { recursive: true });
|
||||
await fs.writeFile(path.join(cacheDir, "version"), "14");
|
||||
const cacheDir = path.join(dir, "cache", "opencode")
|
||||
await fs.mkdir(cacheDir, { recursive: true })
|
||||
await fs.writeFile(path.join(cacheDir, "version"), "14")
|
||||
|
||||
// Clear provider env vars to ensure clean test state
|
||||
delete process.env["ANTHROPIC_API_KEY"];
|
||||
delete process.env["OPENAI_API_KEY"];
|
||||
delete process.env["GOOGLE_API_KEY"];
|
||||
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"];
|
||||
delete process.env["AZURE_OPENAI_API_KEY"];
|
||||
delete process.env["AWS_ACCESS_KEY_ID"];
|
||||
delete process.env["AWS_PROFILE"];
|
||||
delete process.env["AWS_REGION"];
|
||||
delete process.env["AWS_BEARER_TOKEN_BEDROCK"];
|
||||
delete process.env["OPENROUTER_API_KEY"];
|
||||
delete process.env["GROQ_API_KEY"];
|
||||
delete process.env["MISTRAL_API_KEY"];
|
||||
delete process.env["PERPLEXITY_API_KEY"];
|
||||
delete process.env["TOGETHER_API_KEY"];
|
||||
delete process.env["XAI_API_KEY"];
|
||||
delete process.env["DEEPSEEK_API_KEY"];
|
||||
delete process.env["FIREWORKS_API_KEY"];
|
||||
delete process.env["CEREBRAS_API_KEY"];
|
||||
delete process.env["SAMBANOVA_API_KEY"];
|
||||
delete process.env["ANTHROPIC_API_KEY"]
|
||||
delete process.env["OPENAI_API_KEY"]
|
||||
delete process.env["GOOGLE_API_KEY"]
|
||||
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"]
|
||||
delete process.env["AZURE_OPENAI_API_KEY"]
|
||||
delete process.env["AWS_ACCESS_KEY_ID"]
|
||||
delete process.env["AWS_PROFILE"]
|
||||
delete process.env["AWS_REGION"]
|
||||
delete process.env["AWS_BEARER_TOKEN_BEDROCK"]
|
||||
delete process.env["OPENROUTER_API_KEY"]
|
||||
delete process.env["GROQ_API_KEY"]
|
||||
delete process.env["MISTRAL_API_KEY"]
|
||||
delete process.env["PERPLEXITY_API_KEY"]
|
||||
delete process.env["TOGETHER_API_KEY"]
|
||||
delete process.env["XAI_API_KEY"]
|
||||
delete process.env["DEEPSEEK_API_KEY"]
|
||||
delete process.env["FIREWORKS_API_KEY"]
|
||||
delete process.env["CEREBRAS_API_KEY"]
|
||||
delete process.env["SAMBANOVA_API_KEY"]
|
||||
|
||||
// Now safe to import from src/
|
||||
const { Log } = await import("../src/util/log");
|
||||
const { Log } = await import("../src/util/log")
|
||||
|
||||
Log.init({
|
||||
print: false,
|
||||
dev: true,
|
||||
level: "DEBUG",
|
||||
});
|
||||
})
|
||||
|
||||
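(The ordering constraint noted at the top of this preload file, sketched briefly: a static `import` would be hoisted and evaluated before the env assignments, so the preload sets env first and then uses dynamic `import()`.)

```ts
// Illustrative: a module that reads process.env at import time captures
// whatever is set when its top-level code first runs.
process.env["XDG_DATA_HOME"] = "/tmp/opencode-test-share" // must be set before the import below

// Dynamic import() defers module evaluation until this line executes.
const { Log } = await import("../src/util/log") // same path the preload above uses
Log.init({ print: false, dev: true, level: "DEBUG" })
```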
@@ -1,6 +1,7 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import { Project } from "../../src/project/project"
|
||||
import { Log } from "../../src/util/log"
|
||||
import { Storage } from "../../src/storage/storage"
|
||||
import { $ } from "bun"
|
||||
import path from "path"
|
||||
import { tmpdir } from "../fixture/fixture"
|
||||
@@ -54,50 +55,37 @@ describe("Project.fromDirectory with worktrees", () => {
|
||||
test("should set worktree to root when called from a worktree", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
|
||||
const worktreePath = path.join(tmp.path, "..", path.basename(tmp.path) + "-worktree")
|
||||
try {
|
||||
await $`git worktree add ${worktreePath} -b test-branch-${Date.now()}`.cwd(tmp.path).quiet()
|
||||
const worktreePath = path.join(tmp.path, "..", "worktree-test")
|
||||
await $`git worktree add ${worktreePath} -b test-branch`.cwd(tmp.path).quiet()
|
||||
|
||||
const { project, sandbox } = await Project.fromDirectory(worktreePath)
|
||||
const { project, sandbox } = await Project.fromDirectory(worktreePath)
|
||||
|
||||
expect(project.worktree).toBe(tmp.path)
|
||||
expect(sandbox).toBe(worktreePath)
|
||||
expect(project.sandboxes).toContain(worktreePath)
|
||||
expect(project.sandboxes).not.toContain(tmp.path)
|
||||
} finally {
|
||||
await $`git worktree remove ${worktreePath}`
|
||||
.cwd(tmp.path)
|
||||
.quiet()
|
||||
.catch(() => {})
|
||||
}
|
||||
expect(project.worktree).toBe(tmp.path)
|
||||
expect(sandbox).toBe(worktreePath)
|
||||
expect(project.sandboxes).toContain(worktreePath)
|
||||
expect(project.sandboxes).not.toContain(tmp.path)
|
||||
|
||||
await $`git worktree remove ${worktreePath}`.cwd(tmp.path).quiet()
|
||||
})
|
||||
|
||||
test("should accumulate multiple worktrees in sandboxes", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
|
||||
const worktree1 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt1")
|
||||
const worktree2 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt2")
|
||||
try {
|
||||
await $`git worktree add ${worktree1} -b branch-${Date.now()}`.cwd(tmp.path).quiet()
|
||||
await $`git worktree add ${worktree2} -b branch-${Date.now() + 1}`.cwd(tmp.path).quiet()
|
||||
const worktree1 = path.join(tmp.path, "..", "worktree-1")
|
||||
const worktree2 = path.join(tmp.path, "..", "worktree-2")
|
||||
await $`git worktree add ${worktree1} -b branch-1`.cwd(tmp.path).quiet()
|
||||
await $`git worktree add ${worktree2} -b branch-2`.cwd(tmp.path).quiet()
|
||||
|
||||
await Project.fromDirectory(worktree1)
|
||||
const { project } = await Project.fromDirectory(worktree2)
|
||||
await Project.fromDirectory(worktree1)
|
||||
const { project } = await Project.fromDirectory(worktree2)
|
||||
|
||||
expect(project.worktree).toBe(tmp.path)
|
||||
expect(project.sandboxes).toContain(worktree1)
|
||||
expect(project.sandboxes).toContain(worktree2)
|
||||
expect(project.sandboxes).not.toContain(tmp.path)
|
||||
} finally {
|
||||
await $`git worktree remove ${worktree1}`
|
||||
.cwd(tmp.path)
|
||||
.quiet()
|
||||
.catch(() => {})
|
||||
await $`git worktree remove ${worktree2}`
|
||||
.cwd(tmp.path)
|
||||
.quiet()
|
||||
.catch(() => {})
|
||||
}
|
||||
expect(project.worktree).toBe(tmp.path)
|
||||
expect(project.sandboxes).toContain(worktree1)
|
||||
expect(project.sandboxes).toContain(worktree2)
|
||||
expect(project.sandboxes).not.toContain(tmp.path)
|
||||
|
||||
await $`git worktree remove ${worktree1}`.cwd(tmp.path).quiet()
|
||||
await $`git worktree remove ${worktree2}`.cwd(tmp.path).quiet()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -111,12 +99,11 @@ describe("Project.discover", () => {
|
||||
|
||||
await Project.discover(project)
|
||||
|
||||
const updated = Project.get(project.id)
|
||||
expect(updated).toBeDefined()
|
||||
expect(updated!.icon).toBeDefined()
|
||||
expect(updated!.icon?.url).toStartWith("data:")
|
||||
expect(updated!.icon?.url).toContain("base64")
|
||||
expect(updated!.icon?.color).toBeUndefined()
|
||||
const updated = await Storage.read<Project.Info>(["project", project.id])
|
||||
expect(updated.icon).toBeDefined()
|
||||
expect(updated.icon?.url).toStartWith("data:")
|
||||
expect(updated.icon?.url).toContain("base64")
|
||||
expect(updated.icon?.color).toBeUndefined()
|
||||
})
|
||||
|
||||
test("should not discover non-image files", async () => {
|
||||
@@ -127,8 +114,7 @@ describe("Project.discover", () => {
|
||||
|
||||
await Project.discover(project)
|
||||
|
||||
const updated = Project.get(project.id)
|
||||
expect(updated).toBeDefined()
|
||||
expect(updated!.icon).toBeUndefined()
|
||||
const updated = await Storage.read<Project.Info>(["project", project.id])
|
||||
expect(updated.icon).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,687 +0,0 @@
|
||||
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
|
||||
import { Database } from "bun:sqlite"
|
||||
import { drizzle } from "drizzle-orm/bun-sqlite"
|
||||
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
|
||||
import path from "path"
|
||||
import fs from "fs/promises"
|
||||
import { readFileSync, readdirSync } from "fs"
|
||||
import { JsonMigration } from "../../src/storage/json-migration"
|
||||
import { Global } from "../../src/global"
|
||||
import { ProjectTable } from "../../src/project/project.sql"
|
||||
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../../src/session/session.sql"
|
||||
import { SessionShareTable } from "../../src/share/share.sql"
|
||||
|
||||
// Test fixtures
|
||||
const fixtures = {
|
||||
project: {
|
||||
id: "proj_test123abc",
|
||||
name: "Test Project",
|
||||
worktree: "/test/path",
|
||||
vcs: "git" as const,
|
||||
sandboxes: [],
|
||||
},
|
||||
session: {
|
||||
id: "ses_test456def",
|
||||
projectID: "proj_test123abc",
|
||||
slug: "test-session",
|
||||
directory: "/test/path",
|
||||
title: "Test Session",
|
||||
version: "1.0.0",
|
||||
time: { created: 1700000000000, updated: 1700000001000 },
|
||||
},
|
||||
message: {
|
||||
id: "msg_test789ghi",
|
||||
sessionID: "ses_test456def",
|
||||
role: "user" as const,
|
||||
agent: "default",
|
||||
model: { providerID: "openai", modelID: "gpt-4" },
|
||||
time: { created: 1700000000000 },
|
||||
},
|
||||
part: {
|
||||
id: "prt_testabc123",
|
||||
messageID: "msg_test789ghi",
|
||||
sessionID: "ses_test456def",
|
||||
type: "text" as const,
|
||||
text: "Hello, world!",
|
||||
},
|
||||
}
|
||||
|
||||
// Helper to create test storage directory structure
|
||||
async function setupStorageDir() {
|
||||
const storageDir = path.join(Global.Path.data, "storage")
|
||||
await fs.rm(storageDir, { recursive: true, force: true })
|
||||
await fs.mkdir(path.join(storageDir, "project"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "session", "proj_test123abc"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "message", "ses_test456def"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "part", "msg_test789ghi"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "session_diff"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "todo"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "permission"), { recursive: true })
|
||||
await fs.mkdir(path.join(storageDir, "session_share"), { recursive: true })
|
||||
// Create legacy marker to indicate JSON storage exists
|
||||
await Bun.write(path.join(storageDir, "migration"), "1")
|
||||
return storageDir
|
||||
}
|
||||
|
||||
async function writeProject(storageDir: string, project: Record<string, unknown>) {
|
||||
await Bun.write(path.join(storageDir, "project", `${project.id}.json`), JSON.stringify(project))
|
||||
}
|
||||
|
||||
async function writeSession(storageDir: string, projectID: string, session: Record<string, unknown>) {
|
||||
await Bun.write(path.join(storageDir, "session", projectID, `${session.id}.json`), JSON.stringify(session))
|
||||
}
|
||||
|
||||
// Helper to create in-memory test database with schema
|
||||
function createTestDb() {
|
||||
const sqlite = new Database(":memory:")
|
||||
sqlite.exec("PRAGMA foreign_keys = ON")
|
||||
|
||||
// Apply schema migrations using drizzle migrate
|
||||
const dir = path.join(import.meta.dirname, "../../migration")
|
||||
const entries = readdirSync(dir, { withFileTypes: true })
|
||||
const migrations = entries
|
||||
.filter((entry) => entry.isDirectory())
|
||||
.map((entry) => ({
|
||||
sql: readFileSync(path.join(dir, entry.name, "migration.sql"), "utf-8"),
|
||||
timestamp: Number(entry.name.split("_")[0]),
|
||||
}))
|
||||
.sort((a, b) => a.timestamp - b.timestamp)
|
||||
migrate(drizzle({ client: sqlite }), migrations)
|
||||
|
||||
return sqlite
|
||||
}
|
||||
|
||||
describe("JSON to SQLite migration", () => {
|
||||
let storageDir: string
|
||||
let sqlite: Database
|
||||
|
||||
beforeEach(async () => {
|
||||
storageDir = await setupStorageDir()
|
||||
sqlite = createTestDb()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
sqlite.close()
|
||||
await fs.rm(storageDir, { recursive: true, force: true })
|
||||
})
|
||||
|
||||
test("migrates project", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/test/path",
|
||||
vcs: "git",
|
||||
name: "Test Project",
|
||||
time: { created: 1700000000000, updated: 1700000001000 },
|
||||
sandboxes: ["/test/sandbox"],
|
||||
})
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats?.projects).toBe(1)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const projects = db.select().from(ProjectTable).all()
|
||||
expect(projects.length).toBe(1)
|
||||
expect(projects[0].id).toBe("proj_test123abc")
|
||||
expect(projects[0].worktree).toBe("/test/path")
|
||||
expect(projects[0].name).toBe("Test Project")
|
||||
expect(projects[0].sandboxes).toEqual(["/test/sandbox"])
|
||||
})
|
||||
|
||||
test("migrates project with commands", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_with_commands",
|
||||
worktree: "/test/path",
|
||||
vcs: "git",
|
||||
name: "Project With Commands",
|
||||
time: { created: 1700000000000, updated: 1700000001000 },
|
||||
sandboxes: ["/test/sandbox"],
|
||||
commands: { start: "npm run dev" },
|
||||
})
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats?.projects).toBe(1)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const projects = db.select().from(ProjectTable).all()
|
||||
expect(projects.length).toBe(1)
|
||||
expect(projects[0].id).toBe("proj_with_commands")
|
||||
expect(projects[0].commands).toEqual({ start: "npm run dev" })
|
||||
})
|
||||
|
||||
test("migrates project without commands field", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_no_commands",
|
||||
worktree: "/test/path",
|
||||
vcs: "git",
|
||||
name: "Project Without Commands",
|
||||
time: { created: 1700000000000, updated: 1700000001000 },
|
||||
sandboxes: [],
|
||||
})
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats?.projects).toBe(1)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const projects = db.select().from(ProjectTable).all()
|
||||
expect(projects.length).toBe(1)
|
||||
expect(projects[0].id).toBe("proj_no_commands")
|
||||
expect(projects[0].commands).toBeNull()
|
||||
})
|
||||
|
||||
test("migrates session with individual columns", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/test/path",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
|
||||
await writeSession(storageDir, "proj_test123abc", {
|
||||
id: "ses_test456def",
|
||||
projectID: "proj_test123abc",
|
||||
slug: "test-session",
|
||||
directory: "/test/dir",
|
||||
title: "Test Session Title",
|
||||
version: "1.0.0",
|
||||
time: { created: 1700000000000, updated: 1700000001000 },
|
||||
summary: { additions: 10, deletions: 5, files: 3 },
|
||||
share: { url: "https://example.com/share" },
|
||||
})
|
||||
|
||||
await JsonMigration.run(sqlite)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const sessions = db.select().from(SessionTable).all()
|
||||
expect(sessions.length).toBe(1)
|
||||
expect(sessions[0].id).toBe("ses_test456def")
|
||||
expect(sessions[0].project_id).toBe("proj_test123abc")
|
||||
expect(sessions[0].slug).toBe("test-session")
|
||||
expect(sessions[0].title).toBe("Test Session Title")
|
||||
expect(sessions[0].summary_additions).toBe(10)
|
||||
expect(sessions[0].summary_deletions).toBe(5)
|
||||
expect(sessions[0].share_url).toBe("https://example.com/share")
|
||||
})
|
||||
|
||||
test("migrates messages and parts", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
|
||||
JSON.stringify({ ...fixtures.message }),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
|
||||
JSON.stringify({ ...fixtures.part }),
|
||||
)
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats?.messages).toBe(1)
|
||||
expect(stats?.parts).toBe(1)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const messages = db.select().from(MessageTable).all()
|
||||
expect(messages.length).toBe(1)
|
||||
expect(messages[0].id).toBe("msg_test789ghi")
|
||||
|
||||
const parts = db.select().from(PartTable).all()
|
||||
expect(parts.length).toBe(1)
|
||||
expect(parts[0].id).toBe("prt_testabc123")
|
||||
})
|
||||
|
||||
test("migrates legacy parts without ids in body", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
|
||||
JSON.stringify({
|
||||
role: "user",
|
||||
agent: "default",
|
||||
model: { providerID: "openai", modelID: "gpt-4" },
|
||||
time: { created: 1700000000000 },
|
||||
}),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
|
||||
JSON.stringify({
|
||||
type: "text",
|
||||
text: "Hello, world!",
|
||||
}),
|
||||
)
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats?.messages).toBe(1)
|
||||
expect(stats?.parts).toBe(1)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const messages = db.select().from(MessageTable).all()
|
||||
expect(messages.length).toBe(1)
|
||||
expect(messages[0].id).toBe("msg_test789ghi")
|
||||
expect(messages[0].session_id).toBe("ses_test456def")
|
||||
expect(messages[0].data).not.toHaveProperty("id")
|
||||
expect(messages[0].data).not.toHaveProperty("sessionID")
|
||||
|
||||
const parts = db.select().from(PartTable).all()
|
||||
expect(parts.length).toBe(1)
|
||||
expect(parts[0].id).toBe("prt_testabc123")
|
||||
expect(parts[0].message_id).toBe("msg_test789ghi")
|
||||
expect(parts[0].session_id).toBe("ses_test456def")
|
||||
expect(parts[0].data).not.toHaveProperty("id")
|
||||
expect(parts[0].data).not.toHaveProperty("messageID")
|
||||
expect(parts[0].data).not.toHaveProperty("sessionID")
|
||||
})
|
||||
|
||||
test("skips orphaned sessions (no parent project)", async () => {
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
|
||||
JSON.stringify({
|
||||
id: "ses_orphan",
|
||||
projectID: "proj_nonexistent",
|
||||
slug: "orphan",
|
||||
directory: "/",
|
||||
title: "Orphan",
|
||||
version: "1.0.0",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
}),
|
||||
)
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats?.sessions).toBe(0)
|
||||
})
|
||||
|
||||
test("is idempotent (running twice doesn't duplicate)", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
|
||||
await JsonMigration.run(sqlite)
|
||||
await JsonMigration.run(sqlite)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const projects = db.select().from(ProjectTable).all()
|
||||
expect(projects.length).toBe(1) // Still only 1 due to onConflictDoNothing
|
||||
})
|
||||
|
||||
test("migrates todos", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
|
||||
|
||||
// Create todo file (named by sessionID, contains array of todos)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "todo", "ses_test456def.json"),
|
||||
JSON.stringify([
|
||||
{
|
||||
id: "todo_1",
|
||||
content: "First todo",
|
||||
status: "pending",
|
||||
priority: "high",
|
||||
},
|
||||
{
|
||||
id: "todo_2",
|
||||
content: "Second todo",
|
||||
status: "completed",
|
||||
priority: "medium",
|
||||
},
|
||||
]),
|
||||
)
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats?.todos).toBe(2)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
|
||||
expect(todos.length).toBe(2)
|
||||
expect(todos[0].content).toBe("First todo")
|
||||
expect(todos[0].status).toBe("pending")
|
||||
expect(todos[0].priority).toBe("high")
|
||||
expect(todos[0].position).toBe(0)
|
||||
expect(todos[1].content).toBe("Second todo")
|
||||
expect(todos[1].position).toBe(1)
|
||||
})
|
||||
|
||||
test("todos are ordered by position", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "todo", "ses_test456def.json"),
|
||||
JSON.stringify([
|
||||
{ content: "Third", status: "pending", priority: "low" },
|
||||
{ content: "First", status: "pending", priority: "high" },
|
||||
{ content: "Second", status: "in_progress", priority: "medium" },
|
||||
]),
|
||||
)
|
||||
|
||||
await JsonMigration.run(sqlite)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
|
||||
|
||||
expect(todos.length).toBe(3)
|
||||
expect(todos[0].content).toBe("Third")
|
||||
expect(todos[0].position).toBe(0)
|
||||
expect(todos[1].content).toBe("First")
|
||||
expect(todos[1].position).toBe(1)
|
||||
expect(todos[2].content).toBe("Second")
|
||||
expect(todos[2].position).toBe(2)
|
||||
})
|
||||
|
||||
test("migrates permissions", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
|
||||
// Create permission file (named by projectID, contains array of rules)
|
||||
const permissionData = [
|
||||
{ permission: "file.read", pattern: "/test/file1.ts", action: "allow" as const },
|
||||
{ permission: "file.write", pattern: "/test/file2.ts", action: "ask" as const },
|
||||
{ permission: "command.run", pattern: "npm install", action: "deny" as const },
|
||||
]
|
||||
await Bun.write(path.join(storageDir, "permission", "proj_test123abc.json"), JSON.stringify(permissionData))
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats?.permissions).toBe(1)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const permissions = db.select().from(PermissionTable).all()
|
||||
expect(permissions.length).toBe(1)
|
||||
expect(permissions[0].project_id).toBe("proj_test123abc")
|
||||
expect(permissions[0].data).toEqual(permissionData)
|
||||
})
|
||||
|
||||
test("migrates session shares", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
|
||||
|
||||
// Create session share file (named by sessionID)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session_share", "ses_test456def.json"),
|
||||
JSON.stringify({
|
||||
id: "share_123",
|
||||
secret: "supersecretkey",
|
||||
url: "https://share.example.com/ses_test456def",
|
||||
}),
|
||||
)
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats?.shares).toBe(1)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const shares = db.select().from(SessionShareTable).all()
|
||||
expect(shares.length).toBe(1)
|
||||
expect(shares[0].session_id).toBe("ses_test456def")
|
||||
expect(shares[0].id).toBe("share_123")
|
||||
expect(shares[0].secret).toBe("supersecretkey")
|
||||
expect(shares[0].url).toBe("https://share.example.com/ses_test456def")
|
||||
})
|
||||
|
||||
test("returns empty stats when storage directory does not exist", async () => {
|
||||
await fs.rm(storageDir, { recursive: true, force: true })
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats.projects).toBe(0)
|
||||
expect(stats.sessions).toBe(0)
|
||||
expect(stats.messages).toBe(0)
|
||||
expect(stats.parts).toBe(0)
|
||||
expect(stats.todos).toBe(0)
|
||||
expect(stats.permissions).toBe(0)
|
||||
expect(stats.shares).toBe(0)
|
||||
expect(stats.errors).toEqual([])
|
||||
})
|
||||
|
||||
test("continues when a JSON file is unreadable and records an error", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
await Bun.write(path.join(storageDir, "project", "broken.json"), "{ invalid json")
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats.projects).toBe(1)
|
||||
expect(stats.errors.some((x) => x.includes("failed to read") && x.includes("broken.json"))).toBe(true)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const projects = db.select().from(ProjectTable).all()
|
||||
expect(projects.length).toBe(1)
|
||||
expect(projects[0].id).toBe("proj_test123abc")
|
||||
})
|
||||
|
||||
test("skips invalid todo entries while preserving source positions", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "todo", "ses_test456def.json"),
|
||||
JSON.stringify([
|
||||
{ content: "keep-0", status: "pending", priority: "high" },
|
||||
{ content: "drop-1", priority: "low" },
|
||||
{ content: "keep-2", status: "completed", priority: "medium" },
|
||||
]),
|
||||
)
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
expect(stats.todos).toBe(2)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
|
||||
expect(todos.length).toBe(2)
|
||||
expect(todos[0].content).toBe("keep-0")
|
||||
expect(todos[0].position).toBe(0)
|
||||
expect(todos[1].content).toBe("keep-2")
|
||||
expect(todos[1].position).toBe(2)
|
||||
})
|
||||
|
||||
test("skips orphaned todos, permissions, and shares", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/",
|
||||
time: { created: Date.now(), updated: Date.now() },
|
||||
sandboxes: [],
|
||||
})
|
||||
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "todo", "ses_test456def.json"),
|
||||
JSON.stringify([{ content: "valid", status: "pending", priority: "high" }]),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "todo", "ses_missing.json"),
|
||||
JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
|
||||
)
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "permission", "proj_test123abc.json"),
|
||||
JSON.stringify([{ permission: "file.read" }]),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "permission", "proj_missing.json"),
|
||||
JSON.stringify([{ permission: "file.write" }]),
|
||||
)
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session_share", "ses_test456def.json"),
|
||||
JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session_share", "ses_missing.json"),
|
||||
JSON.stringify({ id: "share_missing", secret: "secret", url: "https://missing.example.com" }),
|
||||
)
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats.todos).toBe(1)
|
||||
expect(stats.permissions).toBe(1)
|
||||
expect(stats.shares).toBe(1)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
expect(db.select().from(TodoTable).all().length).toBe(1)
|
||||
expect(db.select().from(PermissionTable).all().length).toBe(1)
|
||||
expect(db.select().from(SessionShareTable).all().length).toBe(1)
|
||||
})
|
||||
|
||||
test("handles mixed corruption and partial validity in one migration run", async () => {
|
||||
await writeProject(storageDir, {
|
||||
id: "proj_test123abc",
|
||||
worktree: "/ok",
|
||||
time: { created: 1700000000000, updated: 1700000001000 },
|
||||
sandboxes: [],
|
||||
})
|
||||
await Bun.write(
|
||||
path.join(storageDir, "project", "proj_missing_id.json"),
|
||||
JSON.stringify({ worktree: "/bad", sandboxes: [] }),
|
||||
)
|
||||
await Bun.write(path.join(storageDir, "project", "proj_broken.json"), "{ nope")
|
||||
|
||||
await writeSession(storageDir, "proj_test123abc", {
|
||||
id: "ses_test456def",
|
||||
projectID: "proj_test123abc",
|
||||
slug: "ok",
|
||||
directory: "/ok",
|
||||
title: "Ok",
|
||||
version: "1",
|
||||
time: { created: 1700000000000, updated: 1700000001000 },
|
||||
})
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", "proj_test123abc", "ses_missing_project.json"),
|
||||
JSON.stringify({
|
||||
id: "ses_missing_project",
|
||||
slug: "bad",
|
||||
directory: "/bad",
|
||||
title: "Bad",
|
||||
version: "1",
|
||||
}),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
|
||||
JSON.stringify({
|
||||
id: "ses_orphan",
|
||||
projectID: "proj_missing",
|
||||
slug: "orphan",
|
||||
directory: "/bad",
|
||||
title: "Orphan",
|
||||
version: "1",
|
||||
}),
|
||||
)
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", "ses_test456def", "msg_ok.json"),
|
||||
JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
|
||||
)
|
||||
await Bun.write(path.join(storageDir, "message", "ses_test456def", "msg_broken.json"), "{ nope")
|
||||
await Bun.write(
|
||||
path.join(storageDir, "message", "ses_missing", "msg_orphan.json"),
|
||||
JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
|
||||
)
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "part", "msg_ok", "part_ok.json"),
|
||||
JSON.stringify({ type: "text", text: "ok" }),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "part", "msg_missing", "part_missing_message.json"),
|
||||
JSON.stringify({ type: "text", text: "bad" }),
|
||||
)
|
||||
await Bun.write(path.join(storageDir, "part", "msg_ok", "part_broken.json"), "{ nope")
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "todo", "ses_test456def.json"),
|
||||
JSON.stringify([
|
||||
{ content: "ok", status: "pending", priority: "high" },
|
||||
{ content: "skip", status: "pending" },
|
||||
]),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "todo", "ses_missing.json"),
|
||||
JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
|
||||
)
|
||||
await Bun.write(path.join(storageDir, "todo", "ses_broken.json"), "{ nope")
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "permission", "proj_test123abc.json"),
|
||||
JSON.stringify([{ permission: "file.read" }]),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "permission", "proj_missing.json"),
|
||||
JSON.stringify([{ permission: "file.write" }]),
|
||||
)
|
||||
await Bun.write(path.join(storageDir, "permission", "proj_broken.json"), "{ nope")
|
||||
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session_share", "ses_test456def.json"),
|
||||
JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
|
||||
)
|
||||
await Bun.write(
|
||||
path.join(storageDir, "session_share", "ses_missing.json"),
|
||||
JSON.stringify({ id: "share_orphan", secret: "secret", url: "https://missing.example.com" }),
|
||||
)
|
||||
await Bun.write(path.join(storageDir, "session_share", "ses_broken.json"), "{ nope")
|
||||
|
||||
const stats = await JsonMigration.run(sqlite)
|
||||
|
||||
expect(stats.projects).toBe(1)
|
||||
expect(stats.sessions).toBe(1)
|
||||
expect(stats.messages).toBe(1)
|
||||
expect(stats.parts).toBe(1)
|
||||
expect(stats.todos).toBe(1)
|
||||
expect(stats.permissions).toBe(1)
|
||||
expect(stats.shares).toBe(1)
|
||||
expect(stats.errors.length).toBeGreaterThanOrEqual(6)
|
||||
|
||||
const db = drizzle({ client: sqlite })
|
||||
expect(db.select().from(ProjectTable).all().length).toBe(1)
|
||||
expect(db.select().from(SessionTable).all().length).toBe(1)
|
||||
expect(db.select().from(MessageTable).all().length).toBe(1)
|
||||
expect(db.select().from(PartTable).all().length).toBe(1)
|
||||
expect(db.select().from(TodoTable).all().length).toBe(1)
|
||||
expect(db.select().from(PermissionTable).all().length).toBe(1)
|
||||
expect(db.select().from(SessionShareTable).all().length).toBe(1)
|
||||
})
|
||||
})
|
||||
@@ -78,6 +78,32 @@ describe("tool.read external_directory permission", () => {
|
||||
})
|
||||
})
|
||||
|
||||
test("asks for directory-scoped external_directory permission when reading external directory", async () => {
|
||||
await using outerTmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(path.join(dir, "external", "a.txt"), "a")
|
||||
},
|
||||
})
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const read = await ReadTool.init()
|
||||
const requests: Array<Omit<PermissionNext.Request, "id" | "sessionID" | "tool">> = []
|
||||
const testCtx = {
|
||||
...ctx,
|
||||
ask: async (req: Omit<PermissionNext.Request, "id" | "sessionID" | "tool">) => {
|
||||
requests.push(req)
|
||||
},
|
||||
}
|
||||
await read.execute({ filePath: path.join(outerTmp.path, "external") }, testCtx)
|
||||
const extDirReq = requests.find((r) => r.permission === "external_directory")
|
||||
expect(extDirReq).toBeDefined()
|
||||
expect(extDirReq!.patterns).toContain(path.join(outerTmp.path, "external", "*"))
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("asks for external_directory permission when reading relative path outside project", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
await Instance.provide({
|
||||
@@ -232,7 +258,7 @@ describe("tool.read truncation", () => {
|
||||
test("respects offset parameter", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
const lines = Array.from({ length: 20 }, (_, i) => `line${i}`).join("\n")
|
||||
const lines = Array.from({ length: 20 }, (_, i) => `line${i + 1}`).join("\n")
|
||||
await Bun.write(path.join(dir, "offset.txt"), lines)
|
||||
},
|
||||
})
|
||||
@@ -249,6 +275,43 @@ describe("tool.read truncation", () => {
|
||||
})
|
||||
})
|
||||
|
||||
test("throws when offset is beyond end of file", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
const lines = Array.from({ length: 3 }, (_, i) => `line${i + 1}`).join("\n")
|
||||
await Bun.write(path.join(dir, "short.txt"), lines)
|
||||
},
|
||||
})
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const read = await ReadTool.init()
|
||||
await expect(
|
||||
read.execute({ filePath: path.join(tmp.path, "short.txt"), offset: 4, limit: 5 }, ctx),
|
||||
).rejects.toThrow("Offset 4 is out of range for this file (3 lines)")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("does not mark final directory page as truncated", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Promise.all(
|
||||
Array.from({ length: 10 }, (_, i) => Bun.write(path.join(dir, "dir", `file-${i + 1}.txt`), `line${i}`)),
|
||||
)
|
||||
},
|
||||
})
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const read = await ReadTool.init()
|
||||
const result = await read.execute({ filePath: path.join(tmp.path, "dir"), offset: 6, limit: 5 }, ctx)
|
||||
expect(result.metadata.truncated).toBe(false)
|
||||
expect(result.output).not.toContain("Showing 5 of 10 entries")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("truncates long lines", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
|
||||
@@ -498,17 +498,7 @@ export type EventMessagePartUpdated = {
|
||||
type: "message.part.updated"
|
||||
properties: {
|
||||
part: Part
|
||||
}
|
||||
}
|
||||
|
||||
export type EventMessagePartDelta = {
|
||||
type: "message.part.delta"
|
||||
properties: {
|
||||
sessionID: string
|
||||
messageID: string
|
||||
partID: string
|
||||
field: string
|
||||
delta: string
|
||||
delta?: string
|
||||
}
|
||||
}
|
||||
|
||||
@@ -678,6 +668,10 @@ export type Todo = {
|
||||
* Priority level of the task: high, medium, low
|
||||
*/
|
||||
priority: string
|
||||
/**
|
||||
* Unique identifier for the todo item
|
||||
*/
|
||||
id: string
|
||||
}
|
||||
|
||||
export type EventTodoUpdated = {
|
||||
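(With `id` added above, a todo item is presumably shaped as follows; the values are invented for illustration and mirror the fixtures used elsewhere in this diff.)

```ts
// Illustrative Todo object including the now-required id field.
const todo: Todo = {
  id: "todo_1",
  content: "First todo",
  status: "pending",
  priority: "high",
}
```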
@@ -926,7 +920,6 @@ export type Event =
|
||||
| EventMessageUpdated
|
||||
| EventMessageRemoved
|
||||
| EventMessagePartUpdated
|
||||
| EventMessagePartDelta
|
||||
| EventMessagePartRemoved
|
||||
| EventPermissionAsked
|
||||
| EventPermissionReplied
|
||||
|
||||
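(A hedged sketch of what the union change above means for consumers: with `EventMessagePartDelta` dropped, streaming text arrives via `message.part.updated` only; the handler below is illustrative, not SDK code.)

```ts
// Illustrative consumer: incremental text is read off the full part
// carried by message.part.updated, with no separate delta event.
function handle(event: Event) {
  switch (event.type) {
    case "message.part.updated": {
      const part = event.properties.part
      if (part.type === "text") render(part) // re-render from the full part
      return
    }
    default:
      return
  }
}

declare function render(part: Part): void // placeholder for illustration
```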
@@ -7255,40 +7255,12 @@
|
||||
"properties": {
|
||||
"part": {
|
||||
"$ref": "#/components/schemas/Part"
|
||||
}
|
||||
},
|
||||
"required": ["part"]
|
||||
}
|
||||
},
|
||||
"required": ["type", "properties"]
|
||||
},
|
||||
"Event.message.part.delta": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"const": "message.part.delta"
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"sessionID": {
|
||||
"type": "string"
|
||||
},
|
||||
"messageID": {
|
||||
"type": "string"
|
||||
},
|
||||
"partID": {
|
||||
"type": "string"
|
||||
},
|
||||
"field": {
|
||||
"type": "string"
|
||||
},
|
||||
"delta": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["sessionID", "messageID", "partID", "field", "delta"]
|
||||
"required": ["part"]
|
||||
}
|
||||
},
|
||||
"required": ["type", "properties"]
|
||||
@@ -7702,9 +7674,13 @@
|
||||
"priority": {
|
||||
"description": "Priority level of the task: high, medium, low",
|
||||
"type": "string"
|
||||
},
|
||||
"id": {
|
||||
"description": "Unique identifier for the todo item",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["content", "status", "priority"]
|
||||
"required": ["content", "status", "priority", "id"]
|
||||
},
|
||||
"Event.todo.updated": {
|
||||
"type": "object",
|
||||
@@ -8372,9 +8348,6 @@
|
||||
{
|
||||
"$ref": "#/components/schemas/Event.message.part.updated"
|
||||
},
|
||||
{
|
||||
"$ref": "#/components/schemas/Event.message.part.delta"
|
||||
},
|
||||
{
|
||||
"$ref": "#/components/schemas/Event.message.part.removed"
|
||||
},
|
||||
|
||||
@@ -45,7 +45,7 @@ description: هيّئ الوكلاء المتخصصين واستخدمهم.
|
||||
|
||||
---
|
||||
|
||||
### استخدام build
|
||||
### استخدام Build
|
||||
|
||||
_الوضع_: `primary`
|
||||
|
||||
@@ -53,7 +53,7 @@ Build هو الوكيل الأساسي **الافتراضي** مع تفعيل ج
|
||||
|
||||
---
|
||||
|
||||
### استخدام plan
|
||||
### استخدام Plan
|
||||
|
||||
_الوضع_: `primary`
|
||||
|
||||
@@ -67,7 +67,7 @@ _الوضع_: `primary`
|
||||
|
||||
---
|
||||
|
||||
### استخدام general
|
||||
### استخدام General
|
||||
|
||||
_الوضع_: `subagent`
|
||||
|
||||
@@ -75,7 +75,7 @@ _الوضع_: `subagent`
|
||||
|
||||
---
|
||||
|
||||
### استخدام explore
|
||||
### استخدام Explore
|
||||
|
||||
_الوضع_: `subagent`
|
||||
|
||||
@@ -83,7 +83,7 @@ _الوضع_: `subagent`
|
||||
|
||||
---
|
||||
|
||||
### استخدام compaction
|
||||
### استخدام Compaction
|
||||
|
||||
_الوضع_: `primary`
|
||||
|
||||
@@ -91,7 +91,7 @@ _الوضع_: `primary`
|
||||
|
||||
---
|
||||
|
||||
### استخدام title
|
||||
### استخدام Title
|
||||
|
||||
_الوضع_: `primary`
|
||||
|
||||
@@ -99,7 +99,7 @@ _الوضع_: `primary`
|
||||
|
||||
---
|
||||
|
||||
### استخدام summary
|
||||
### استخدام Summary
|
||||
|
||||
_الوضع_: `primary`
|
||||
|
||||
|
||||
@@ -69,10 +69,10 @@ opencode attach [url]
|
||||
يتيح ذلك استخدام واجهة TUI مع واجهة خلفية لـ OpenCode تعمل عن بعد. على سبيل المثال:
|
||||
|
||||
```bash
|
||||
# Start the backend server for web/mobile access
|
||||
# ابدأ خادم الواجهة الخلفية للوصول عبر الويب/الجوال
|
||||
opencode web --port 4096 --hostname 0.0.0.0
|
||||
|
||||
# In another terminal, attach the TUI to the running backend
|
||||
# في محطة طرفية (terminal) أخرى، اربط TUI بالواجهة الخلفية قيد التشغيل
|
||||
opencode attach http://10.20.30.40:4096
|
||||
```
|
||||
|
||||
@@ -326,10 +326,10 @@ opencode run Explain the use of context in Go
|
||||
يمكنك أيضا الإرفاق بمثيل `opencode serve` قيد التشغيل لتجنّب زمن الإقلاع البارد لخوادم MCP في كل تشغيل:
|
||||
|
||||
```bash
|
||||
# Start a headless server in one terminal
|
||||
# ابدأ خادمًا بلا واجهة في محطة طرفية واحدة
|
||||
opencode serve
|
||||
|
||||
# In another terminal, run commands that attach to it
|
||||
# في محطة طرفية أخرى، شغّل الأوامر التي ترتبط به
|
||||
opencode run --attach http://localhost:4096 "Explain async/await in JavaScript"
|
||||
```
|
||||
|
||||
|
||||
@@ -54,11 +54,11 @@ Focus on the failing tests and suggest fixes.
|
||||
{
|
||||
"$schema": "https://opencode.ai/config.json",
|
||||
"command": {
|
||||
// This becomes the name of the command
|
||||
// يصبح هذا اسم الأمر
|
||||
"test": {
|
||||
// This is the prompt that will be sent to the LLM
|
||||
// هذه هي المطالبة التي ستُرسل إلى LLM
|
||||
"template": "Run the full test suite with coverage report and show any failures.\nFocus on the failing tests and suggest fixes.",
|
||||
// This is shown as the description in the TUI
|
||||
// يظهر هذا كوصف في TUI
|
||||
"description": "Run tests with coverage",
|
||||
"agent": "build",
|
||||
"model": "anthropic/claude-3-5-sonnet-20241022"
|
||||
|
||||
@@ -14,7 +14,7 @@ description: استخدام ملف إعدادات OpenCode بصيغة JSON.
|
||||
```jsonc title="opencode.jsonc"
|
||||
{
|
||||
"$schema": "https://opencode.ai/config.json",
|
||||
// Theme configuration
|
||||
// إعدادات السمة
|
||||
"theme": "opencode",
|
||||
"model": "anthropic/claude-sonnet-4-5",
|
||||
"autoupdate": true,
|
||||
@@ -326,7 +326,7 @@ opencode run "Hello world"
|
||||
"model": "anthropic/claude-sonnet-4-5",
|
||||
"prompt": "You are a code reviewer. Focus on security, performance, and maintainability.",
|
||||
"tools": {
|
||||
// Disable file modification tools for review-only agent
|
||||
// تعطيل أدوات تعديل الملفات لوكيل المراجعة فقط
|
||||
"write": false,
|
||||
"edit": false,
|
||||
},
|
||||
|
||||
@@ -221,7 +221,7 @@ How is authentication handled in @packages/functions/src/api/index.ts
|
||||
|
||||
انتقل إليه باستخدام مفتاح **Tab**. سترى مؤشرا لذلك في الزاوية السفلية اليمنى.
|
||||
|
||||
```bash frame="none" title="Switch to Plan mode"
|
||||
```bash frame="none" title="التبديل إلى وضع Plan"
|
||||
<TAB>
|
||||
```
|
||||
|
||||
|
||||
@@ -278,10 +278,10 @@ opencode mcp logout my-oauth-server
|
||||
إذا فشل خادم MCP بعيد في المصادقة، يمكنك تشخيص المشكلة باستخدام:
|
||||
|
||||
```bash
|
||||
# View auth status for all OAuth-capable servers
|
||||
# عرض حالة المصادقة لجميع الخوادم القادرة على OAuth
|
||||
opencode mcp auth list
|
||||
|
||||
# Debug connection and OAuth flow for a specific server
|
||||
# تصحيح الاتصال وتدفق OAuth لخادم محدد
|
||||
opencode mcp debug my-oauth-server
|
||||
```
|
||||
|
||||
|
||||
@@ -12,13 +12,13 @@ description: إعداد الوكلاء والشهادات المخصصة.
|
||||
يتبع OpenCode متغيرات بيئة الوكيل القياسية.
|
||||
|
||||
```bash
|
||||
# HTTPS proxy (recommended)
|
||||
# وكيل HTTPS (موصى به)
|
||||
export HTTPS_PROXY=https://proxy.example.com:8080
|
||||
|
||||
# HTTP proxy (if HTTPS not available)
|
||||
# وكيل HTTP (إذا لم يتوفر HTTPS)
|
||||
export HTTP_PROXY=http://proxy.example.com:8080
|
||||
|
||||
# Bypass proxy for local server (required)
|
||||
# تجاوز الوكيل للخادم المحلي (مطلوب)
|
||||
export NO_PROXY=localhost,127.0.0.1
|
||||
```
|
||||
|
||||
|
||||
@@ -89,7 +89,7 @@ OpenCode Zen هي قائمة نماذج يوفّرها فريق OpenCode وقد
|
||||
فلا تتردد في فتح PR.
|
||||
|
||||
:::note
|
||||
ألا ترى مزوّدا هنا؟ أرسل PR.
|
||||
لم تجد المزوّد الذي تبحث عنه؟ أرسل PR.
|
||||
:::
|
||||
|
||||
---
|
||||
@@ -139,13 +139,13 @@ OpenCode Zen هي قائمة نماذج يوفّرها فريق OpenCode وقد
|
||||
عيّن أحد متغيرات البيئة التالية أثناء تشغيل opencode:
|
||||
|
||||
```bash
|
||||
# Option 1: Using AWS access keys
|
||||
# الخيار 1: استخدام مفاتيح وصول AWS
|
||||
AWS_ACCESS_KEY_ID=XXX AWS_SECRET_ACCESS_KEY=YYY opencode
|
||||
|
||||
# Option 2: Using named AWS profile
|
||||
# الخيار 2: استخدام ملف تعريف AWS مسمّى
|
||||
AWS_PROFILE=my-profile opencode
|
||||
|
||||
# Option 3: Using Bedrock bearer token
|
||||
# الخيار 3: استخدام رمز Bedrock المميز (bearer token)
|
||||
AWS_BEARER_TOKEN_BEDROCK=XXX opencode
|
||||
```
|
||||
|
||||
|
||||
@@ -366,7 +366,7 @@ OPENCODE_ENABLE_EXA=1 opencode
|
||||
|
||||
---
|
||||
|
||||
### Ignore patterns
|
||||
### أنماط التجاهل
|
||||
|
||||
لتضمين ملفات يتم تجاهلها عادة، أنشئ ملف `.ignore` في جذر المشروع. يمكن لهذا الملف السماح صراحة بمسارات محددة.
|
||||
|
||||
|
||||
@@ -290,12 +290,12 @@ How is auth handled in @packages/functions/src/api/index.ts?
|
||||
<Tabs>
|
||||
<TabItem label="Linux/macOS">
|
||||
```bash
|
||||
# Example for nano or vim
|
||||
# مثال لـ nano أو vim
|
||||
export EDITOR=nano
|
||||
export EDITOR=vim
|
||||
|
||||
# For GUI editors, VS Code, Cursor, VSCodium, Windsurf, Zed, etc.
|
||||
# include --wait
|
||||
# للمحررات الرسومية، VS Code، Cursor، VSCodium، Windsurf، Zed، إلخ.
|
||||
# قم بتضمين --wait
|
||||
export EDITOR="code --wait"
|
||||
```
|
||||
|
||||
@@ -308,8 +308,8 @@ How is auth handled in @packages/functions/src/api/index.ts?
|
||||
```bash
|
||||
set EDITOR=notepad
|
||||
|
||||
# For GUI editors, VS Code, Cursor, VSCodium, Windsurf, Zed, etc.
|
||||
# include --wait
|
||||
# للمحررات الرسومية، VS Code، Cursor، VSCodium، Windsurf، Zed، إلخ.
|
||||
# قم بتضمين --wait
|
||||
set EDITOR=code --wait
|
||||
```
|
||||
|
||||
@@ -321,8 +321,8 @@ How is auth handled in @packages/functions/src/api/index.ts?
|
||||
```powershell
|
||||
$env:EDITOR = "notepad"
|
||||
|
||||
# For GUI editors, VS Code, Cursor, VSCodium, Windsurf, Zed, etc.
|
||||
# include --wait
|
||||
# للمحررات الرسومية، VS Code، Cursor، VSCodium، Windsurf، Zed، إلخ.
|
||||
# قم بتضمين --wait
|
||||
$env:EDITOR = "code --wait"
|
||||
```
|
||||
|
||||
|
||||
@@ -113,10 +113,10 @@ OPENCODE_SERVER_PASSWORD=secret opencode web
|
||||
يمكنك إرفاق واجهة terminal (TUI) بخادم ويب قيد التشغيل:
|
||||
|
||||
```bash
|
||||
# Start the web server
|
||||
# ابدأ خادم الويب
|
||||
opencode web --port 4096
|
||||
|
||||
# In another terminal, attach the TUI
|
||||
# في محطة طرفية أخرى، اربط TUI
|
||||
opencode attach http://localhost:4096
|
||||
```
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ ACP je otvoreni protokol koji standardizira komunikaciju između uređivača kod
|
||||
|
||||
---
|
||||
|
||||
## Konfiguriši
|
||||
## Konfiguracija
|
||||
|
||||
Da biste koristili OpenCode putem ACP-a, konfigurirajte svoj uređivač da pokrene naredbu `opencode acp`.
|
||||
|
||||
@@ -140,7 +140,7 @@ Ova konfiguracija postavlja CodeCompanion da koristi OpenCode kao ACP chat agent
|
||||
|
||||
Ako trebate proslijediti varijable okruženja (kao što je `OPENCODE_API_KEY`), pogledajte [Configuring Adapters: Environment Variables](https://codecompanion.olimorris.dev/getting-started#setting-an-api-key) u dokumentaciji CodeCompanion.nvim.
|
||||
|
||||
## Podrška
|
||||
## Podržane funkcije
|
||||
|
||||
OpenCode radi isto kroz ACP kao i u terminalu. Podržane su sve funkcije:
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ Možete se prebacivati između agenata tokom sesije ili ih pozvati spominj
|
||||
|
||||
## Vrste
|
||||
|
||||
Postoje dvije vrste agenata u OpenCode; primarni agenti i podagenti.
|
||||
Postoje dvije vrste agenata u OpenCode: primarni agenti i podagenti.
|
||||
|
||||
---
|
||||
|
||||
@@ -45,17 +45,17 @@ OpenCode dolazi sa dva ugrađena primarna agenta i dva ugrađena subagenta.
|
||||
|
||||
---
|
||||
|
||||
### Koristi build
|
||||
### Build agent
|
||||
|
||||
_Mode_: `primary`
|
||||
_Režim_: `primary`
|
||||
|
||||
Build je **podrazumevani** primarni agent sa svim omogućenim alatima. Ovo je standardni agent za razvojni rad gdje vam je potreban pun pristup operacijama datoteka i sistemskim komandama.
|
||||
|
||||
---
|
||||
|
||||
### Koristi plan
|
||||
### Plan agent
|
||||
|
||||
_Mode_: `primary`
|
||||
_Režim_: `primary`
|
||||
|
||||
Ograničeni agent dizajniran za planiranje i analizu. Koristimo sistem dozvola kako bismo vam pružili veću kontrolu i spriječili neželjene promjene.
|
||||
Prema zadanim postavkama, sve sljedeće je postavljeno na `ask`:
|
||||
@@ -67,47 +67,47 @@ Ovaj agent je koristan kada želite da LLM analizira kod, predloži promjene ili
|
||||
|
||||
---
|
||||
|
||||
### Koristi general
|
||||
### General agent
|
||||
|
||||
_Mode_: `subagent`
|
||||
_Režim_: `subagent`
|
||||
|
||||
Agent opće namjene za istraživanje složenih pitanja i izvršavanje zadataka u više koraka. Ima potpuni pristup alatima (osim todo), tako da može mijenjati fajlove kada je to potrebno. Koristite ovo za paralelno pokretanje više jedinica rada.
|
||||
|
||||
---
|
||||
|
||||
### Koristi explore
|
||||
### Explore agent
|
||||
|
||||
_Mode_: `subagent`
|
||||
_Režim_: `subagent`
|
||||
|
||||
Brzi agent samo za čitanje za istraživanje kodnih baza. Nije moguće mijenjati fajlove. Koristite ovo kada trebate brzo pronaći datoteke po uzorku, pretražiti kod za ključne riječi ili odgovoriti na pitanja o bazi kodova.
|
||||
|
||||
---
|
||||
|
||||
### Koristi compaction
|
||||
### Compaction agent
|
||||
|
||||
_Mode_: `primary`
|
||||
_Režim_: `primary`
|
||||
|
||||
Skriveni sistemski agent koji sažima dugi kontekst u manji sažetak. Pokreće se automatski kada je potrebno i ne može se odabrati u korisničkom interfejsu.
|
||||
|
||||
---
|
||||
|
||||
### Koristi title
|
||||
### Title agent
|
||||
|
||||
_Mode_: `primary`
|
||||
_Režim_: `primary`
|
||||
|
||||
Skriveni sistemski agent koji generiše kratke naslove sesija. Pokreće se automatski i ne može se odabrati u korisničkom interfejsu.
|
||||
|
||||
---
|
||||
|
||||
### Koristi summary
|
||||
### Summary agent
|
||||
|
||||
_Mode_: `primary`
|
||||
_Režim_: `primary`
|
||||
|
||||
Skriveni sistemski agent koji kreira sažetke sesije. Pokreće se automatski i ne može se odabrati u korisničkom interfejsu.
|
||||
|
||||
---
|
||||
|
||||
## Upotreba
|
||||
## Korištenje
|
||||
|
||||
1. Za primarne agente, koristite taster **Tab** za kretanje kroz njih tokom sesije. Također možete koristiti svoju konfiguriranu vezu tipke `switch_agent`.
|
||||
|
||||
@@ -215,7 +215,7 @@ Pogledajmo ove opcije konfiguracije detaljno.
|
||||
|
||||
---
|
||||
|
||||
### Description
|
||||
### Opis
|
||||
|
||||
Koristite opciju `description` da pružite kratak opis onoga što agent radi i kada ga koristiti.
|
||||
|
||||
@@ -233,7 +233,7 @@ Ovo je **obavezna** opcija konfiguracije.
|
||||
|
||||
---
|
||||
|
||||
### Temperature
|
||||
### Temperatura
|
||||
|
||||
Kontrolišite slučajnost i kreativnost odgovora LLM-a pomoću `temperature` konfiguracije.
|
||||
|
||||
@@ -280,7 +280,7 @@ Ako temperatura nije navedena, OpenCode koristi standardne postavke specifične
|
||||
|
||||
---
|
||||
|
||||
### Max steps
|
||||
### Maksimalan broj koraka
|
||||
|
||||
Kontrolirajte maksimalni broj iteracija agenta koje agent može izvesti prije nego što bude prisiljen da odgovori samo tekstom. Ovo omogućava korisnicima koji žele kontrolirati troškove da postave ograničenje na akcije agenta.
|
||||
|
||||
@@ -306,7 +306,7 @@ Naslijeđeno polje `maxSteps` je zastarjelo. Umjesto toga koristite `steps`.
|
||||
|
||||
---
|
||||
|
||||
### Disable
|
||||
### Onemogućavanje
|
||||
|
||||
Postavite na `true` da onemogućite agenta.
|
||||
|
||||
@@ -322,7 +322,7 @@ Postavite na `true` da onemogućite agenta.
|
||||
|
||||
---
|
||||
|
||||
### Prompt
|
||||
### Upit
|
||||
|
||||
Navedite prilagođenu sistemsku prompt datoteku za ovog agenta sa `prompt` konfiguracijom. Datoteka s promptom treba da sadrži upute specifične za svrhu agenta.
|
||||
|
||||
@@ -362,7 +362,7 @@ ID modela u vašoj OpenCode konfiguraciji koristi format `provider/model-id`. Na
|
||||
|
||||
---
|
||||
|
||||
### Tools
|
||||
### Alati
|
||||
|
||||
Kontrolirajte koji su alati dostupni u ovom agentu koristeći konfiguraciju `tools`. Možete omogućiti ili onemogućiti određene alate tako što ćete ih postaviti na `true` ili `false`.
|
||||
|
||||
@@ -409,7 +409,7 @@ Također možete koristiti zamjenske znakove za kontrolu više alata odjednom. N
|
||||
|
||||
---
|
||||
|
||||
### Permissions
|
||||
### Dozvole
|
||||
|
||||
Možete konfigurirati dozvole za upravljanje radnjama koje agent može poduzeti. Trenutno se dozvole za alate `edit`, `bash` i `webfetch` mogu konfigurirati na:
|
||||
|
||||
@@ -521,7 +521,7 @@ Budući da posljednje podudarno pravilo ima prednost, prvo postavite zamjenski z
|
||||
|
||||
---
|
||||
|
||||
### Mode
|
||||
### Način rada
|
||||
|
||||
Kontrolirajte način rada agenta koristeći konfiguraciju `mode`. Opcija `mode` se koristi da specificira kako se agent može koristiti.
|
||||
|
||||
@@ -539,7 +539,7 @@ Opcija `mode` se može postaviti na `primary`, `subagent` ili `all`. Ako `mode`
|
||||
|
||||
---
|
||||
|
||||
### Hidden
|
||||
### Skriveno
|
||||
|
||||
Sakrij podagenta iz `@` menija za automatsko dovršavanje sa `hidden: true`. Korisno za interne podagente koje bi drugi agenti trebali programski pozvati samo preko Task alata.
|
||||
|
||||
@@ -562,7 +562,7 @@ Odnosi se samo na `mode: subagent` agente.
|
||||
|
||||
---
|
||||
|
||||
### Task permissions
|
||||
### Dozvole zadataka
|
||||
|
||||
Kontrolirajte koje podagente agent može pozvati preko Task alata sa `permission.task`. Koristi glob uzorke za fleksibilno uparivanje.
|
||||
|
||||
@@ -595,7 +595,7 @@ Korisnici uvijek mogu pozvati bilo kojeg subagenta direktno preko `@` menija za
|
||||
|
||||
---
|
||||
|
||||
### Color
|
||||
### Boja
|
||||
|
||||
Prilagodite vizualni izgled agenta u korisničkom sučelju s opcijom `color`. Ovo utiče na to kako se agent pojavljuje u interfejsu.
|
||||
|
||||
@@ -634,7 +634,7 @@ Vrijednosti se kreću od 0.0 do 1.0. Niže vrijednosti su više fokusirane, viš
|
||||
|
||||
---
|
||||
|
||||
### Additional
|
||||
### Dodatno
|
||||
|
||||
Sve druge opcije koje navedete u konfiguraciji agenta će biti **direktno proslijeđene** dobavljaču kao opcije modela. Ovo vam omogućava da koristite karakteristike i parametre specifične za provajdera.
|
||||
|
||||
@@ -661,7 +661,7 @@ Pokrenite `opencode models` da vidite listu dostupnih modela.
|
||||
|
||||
---
|
||||
|
||||
## Kreirajte agente
|
||||
## Kreiranje agenata
|
||||
|
||||
Možete kreirati nove agente koristeći sljedeću naredbu:
|
||||
|
||||
@@ -679,7 +679,7 @@ Ova interaktivna komanda će:
|
||||
|
||||
---
|
||||
|
||||
## Slučajevi upotrebe
|
||||
## Primjeri upotrebe
|
||||
|
||||
Evo nekoliko uobičajenih slučajeva upotrebe različitih agenata.
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ OpenCode CLI po defaultu pokreće [TUI](/docs/tui) kada se pokrene bez ikakvih a
|
||||
opencode
|
||||
```
|
||||
|
||||
Ali takođe prihvata komande kao što je dokumentovano na ovoj stranici. Ovo vam omogućava programsku interakciju sa OpenCode.
|
||||
Ali takođe prihvata naredbe kao što je dokumentovano na ovoj stranici. Ovo vam omogućava programsku interakciju sa OpenCode.
|
||||
|
||||
```bash
|
||||
opencode run "Explain how closures work in JavaScript"
|
||||
@@ -21,36 +21,36 @@ opencode run "Explain how closures work in JavaScript"
|
||||
|
||||
### tui
|
||||
|
||||
Pokrenite korisnički interfejs OpenCode terminala.
|
||||
Pokrenite OpenCode terminalski korisnički interfejs.
|
||||
|
||||
```bash
|
||||
opencode [project]
|
||||
```
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Kratko | Opis |
|
||||
| Opcija | Kratko | Opis |
|
||||
| ------------ | ------ | ------------------------------------------------------------------------ |
|
||||
| `--continue` | `-c` | Nastavite posljednju sesiju |
|
||||
| `--session` | `-s` | ID sesije za nastavak |
|
||||
| `--fork` | | Forkujte sesiju pri nastavku (koristiti sa `--continue` ili `--session`) |
|
||||
| `--prompt` | | Uputstvo za upotrebu |
|
||||
| `--model` | `-m` | Model za korištenje u obliku dobavljača/modela |
|
||||
| `--prompt` | | Prompt za upotrebu |
|
||||
| `--model` | `-m` | Model za korištenje u obliku provider/model |
|
||||
| `--agent` | | Agent za korištenje |
|
||||
| `--port` | | Port za slušanje na |
|
||||
| `--hostname` | | Slušajte ime hosta |
|
||||
| `--port` | | Port na kojem treba slušati |
|
||||
| `--hostname` | | Hostname na kojem treba slušati |
|
||||
|
||||
---
|
||||
|
||||
## Commands
|
||||
## Naredbe
|
||||
|
||||
OpenCode CLI takođe ima sledeće komande.
|
||||
OpenCode CLI takođe ima sljedeće naredbe.
|
||||
|
||||
---
|
||||
|
||||
### agent
|
||||
|
||||
Upravljajte agentima za OpenCode.
|
||||
Upravljajte OpenCode agentima.
|
||||
|
||||
```bash
|
||||
opencode agent [command]
|
||||
@@ -60,13 +60,13 @@ opencode agent [command]
|
||||
|
||||
### attach
|
||||
|
||||
Priključite terminal na već pokrenut OpenCode backend server pokrenut putem `serve` ili `web` komandi.
|
||||
Priključite terminal na već pokrenut OpenCode backend server pokrenut putem `serve` ili `web` naredbi.
|
||||
|
||||
```bash
|
||||
opencode attach [url]
|
||||
```
|
||||
|
||||
Ovo omogućava korištenje TUI-ja sa udaljenim OpenCode backend-om. na primjer:
|
||||
Ovo omogućava korištenje TUI-ja sa udaljenim OpenCode backend-om. Na primjer:
|
||||
|
||||
```bash
|
||||
# Start the backend server for web/mobile access
|
||||
@@ -76,11 +76,11 @@ opencode web --port 4096 --hostname 0.0.0.0
|
||||
opencode attach http://10.20.30.40:4096
|
||||
```
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Kratko | Opis |
|
||||
| Opcija | Kratko | Opis |
|
||||
| ----------- | ------ | ------------------------------------ |
|
||||
| `--dir` | | Radni direktorij za pokretanje TUI u |
|
||||
| `--dir` | | Radni direktorij za pokretanje TUI-a |
|
||||
| `--session` | `-s` | ID sesije za nastavak |
|
||||
|
||||
---
|
||||
@@ -93,7 +93,7 @@ Kreirajte novog agenta s prilagođenom konfiguracijom.
|
||||
opencode agent create
|
||||
```
|
||||
|
||||
Ova komanda će vas voditi kroz kreiranje novog agenta sa prilagođenim sistemskim promptom i konfiguracijom alata.
|
||||
Ova naredba će vas voditi kroz kreiranje novog agenta sa prilagođenim sistemskim promptom i konfiguracijom alata.
|
||||
|
||||
---
|
||||
|
||||
@@ -131,7 +131,7 @@ Kada se OpenCode pokrene, učitava dobavljače iz datoteke vjerodajnica. I ako p
|
||||
|
||||
#### list
|
||||
|
||||
Navodi sve autentifikovane dobavljače pohranjene u datoteci akreditiva.
|
||||
Navodi sve autentifikovane dobavljače pohranjene u datoteci vjerodajnica.
|
||||
|
||||
```bash
|
||||
opencode auth list
|
||||
@@ -157,7 +157,7 @@ opencode auth logout
|
||||
|
||||
### github
|
||||
|
||||
Upravljajte GitHub agentom za automatizaciju spremišta.
|
||||
Upravljajte GitHub agentom za automatizaciju repozitorija.
|
||||
|
||||
```bash
|
||||
opencode github [command]
|
||||
@@ -167,7 +167,7 @@ opencode github [command]
|
||||
|
||||
#### install
|
||||
|
||||
Instalirajte GitHub agenta u svoje spremište.
|
||||
Instalirajte GitHub agenta u svoj repozitorij.
|
||||
|
||||
```bash
|
||||
opencode github install
|
||||
@@ -179,24 +179,24 @@ Ovo postavlja neophodni tok rada GitHub Actions i vodi vas kroz proces konfigura
|
||||
|
||||
#### run
|
||||
|
||||
Pokrenite GitHub agent. Ovo se obično koristi u GitHub akcijama.
|
||||
Pokrenite GitHub agent. Ovo se obično koristi u GitHub Actions.
|
||||
|
||||
```bash
|
||||
opencode github run
|
||||
```
|
||||
|
||||
##### Zastave
|
||||
##### Opcije
|
||||
|
||||
| Zastava | Opis |
|
||||
| --------- | -------------------------------------------- |
|
||||
| `--event` | GitHub lažni događaj za pokretanje agenta za |
|
||||
| `--token` | GitHub token ličnog pristupa |
|
||||
| Opcija | Opis |
|
||||
| --------- | -------------------------------------- |
|
||||
| `--event` | GitHub mock event za pokretanje agenta |
|
||||
| `--token` | GitHub Personal Access Token |
|
||||
|
||||
---
|
||||
|
||||
### mcp
|
||||
|
||||
Upravljajte serverima protokola konteksta modela.
|
||||
Upravljajte Model Context Protocol (MCP) serverima.
|
||||
|
||||
```bash
|
||||
opencode mcp [command]
|
||||
@@ -212,7 +212,7 @@ Dodajte MCP server svojoj konfiguraciji.
|
||||
opencode mcp add
|
||||
```
|
||||
|
||||
Ova komanda će vas voditi kroz dodavanje lokalnog ili udaljenog MCP servera.
|
||||
Ova naredba će vas voditi kroz dodavanje lokalnog ili udaljenog MCP servera.
|
||||
|
||||
---
|
||||
|
||||
@@ -241,7 +241,7 @@ opencode mcp auth [name]
|
||||
```
|
||||
|
||||
Ako ne navedete ime servera, od vas će biti zatraženo da izaberete neki od dostupnih servera koji podržavaju OAuth.
|
||||
Također možete navesti servere koji podržavaju OAuth i njihov status provjere autentičnosti.
|
||||
Također možete navesti servere koji podržavaju OAuth i njihov status autentifikacije.
|
||||
|
||||
```bash
|
||||
opencode mcp auth list
|
||||
@@ -267,7 +267,7 @@ opencode mcp logout [name]
|
||||
|
||||
#### debug
|
||||
|
||||
Otklanjanje grešaka OAuth veze sa MCP serverom.
|
||||
Otklanjanje grešaka (debug) OAuth veze sa MCP serverom.
|
||||
|
||||
```bash
|
||||
opencode mcp debug <name>
|
||||
@@ -291,11 +291,11 @@ Opciono možete proslijediti ID provajdera za filtriranje modela po tom dobavlja
|
||||
opencode models anthropic
|
||||
```
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Opis |
|
||||
| Opcija | Opis |
|
||||
| ----------- | ------------------------------------------------------------------------ |
|
||||
| `--refresh` | Osvježite predmemoriju modela sa models.dev |
|
||||
| `--refresh` | Osvježite keš modela sa models.dev |
|
||||
| `--verbose` | Koristite detaljniji izlaz modela (uključuje metapodatke poput troškova) |
|
||||
|
||||
Koristite `--refresh` zastavicu da ažurirate keširanu listu modela. Ovo je korisno kada su novi modeli dodani provajderu i želite da ih vidite u OpenCode.
|
||||
@@ -308,13 +308,13 @@ opencode models --refresh
|
||||
|
||||
### run
|
||||
|
||||
Pokrenite opencode u neinteraktivnom modu tako što ćete direktno proslijediti prompt.
|
||||
Pokrenite OpenCode u neinteraktivnom modu tako što ćete direktno proslijediti prompt.
|
||||
|
||||
```bash
|
||||
opencode run [message..]
|
||||
```
|
||||
|
||||
Ovo je korisno za skriptiranje, automatizaciju ili kada želite brz odgovor bez pokretanja punog TUI-ja. Na primjer.
|
||||
Ovo je korisno za skriptiranje, automatizaciju ili kada želite brz odgovor bez pokretanja punog TUI-ja. Na primjer:
|
||||
|
||||
```bash "opencode run"
|
||||
opencode run Explain the use of context in Go
|
||||
@@ -330,11 +330,11 @@ opencode serve
|
||||
opencode run --attach http://localhost:4096 "Explain async/await in JavaScript"
|
||||
```
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Kratko | Opis |
|
||||
| Opcija | Kratko | Opis |
|
||||
| ------------ | ------ | ------------------------------------------------------------------------ |
|
||||
| `--command` | | Naredba za pokretanje, koristite poruku za args |
|
||||
| `--command` | | Naredba za pokretanje, koristite poruku za argumente |
|
||||
| `--continue` | `-c` | Nastavite posljednju sesiju |
|
||||
| `--session` | `-s` | ID sesije za nastavak |
|
||||
| `--fork` | | Forkujte sesiju pri nastavku (koristiti sa `--continue` ili `--session`) |
|
||||
@@ -344,29 +344,29 @@ opencode run --attach http://localhost:4096 "Explain async/await in JavaScript"
|
||||
| `--file` | `-f` | Fajlovi koje treba priložiti poruci |
|
||||
| `--format` | | Format: default (formatiran) ili json (sirovi JSON događaji) |
|
||||
| `--title` | | Naslov sesije (koristi skraćeni prompt ako nije navedena vrijednost) |
|
||||
| `--attach` | | Priključite na pokrenuti opencode server (npr. http://localhost:4096) |
|
||||
| `--attach` | | Priključite na pokrenuti OpenCode server (npr. http://localhost:4096) |
|
||||
| `--port` | | Port za lokalni server (zadano na nasumični port) |
|
||||
|
||||
---
|
||||
|
||||
### serve
|
||||
|
||||
Pokrenite OpenCode server bez glave za pristup API-ju. Pogledajte [server docs](/docs/server) za kompletan HTTP interfejs.
|
||||
Pokrenite OpenCode headless server za API pristup. Pogledajte [server docs](/docs/server) za kompletan HTTP interfejs.
|
||||
|
||||
```bash
|
||||
opencode serve
|
||||
```
|
||||
|
||||
Ovo pokreće HTTP server koji pruža API pristup funkcionalnosti otvorenog koda bez TUI interfejsa. Postavite `OPENCODE_SERVER_PASSWORD` da omogućite HTTP osnovnu auth (korisničko ime je zadano na `opencode`).
|
||||
Ovo pokreće HTTP server koji pruža API pristup funkcionalnosti OpenCode-a bez TUI interfejsa. Postavite `OPENCODE_SERVER_PASSWORD` da omogućite HTTP osnovnu auth (korisničko ime je zadano na `opencode`).
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Opis |
|
||||
| ------------ | ---------------------------------------------------- |
|
||||
| `--port` | Port za slušanje na |
|
||||
| `--hostname` | Ime hosta za slušanje |
|
||||
| `--mdns` | Omogući mDNS otkrivanje |
|
||||
| `--cors` | Dodatni izvor(a) pretraživača koji dozvoljavaju CORS |
|
||||
| Opcija | Opis |
|
||||
| ------------ | ----------------------------------------------------- |
|
||||
| `--port` | Port na kojem treba slušati |
|
||||
| `--hostname` | Hostname na kojem treba slušati |
|
||||
| `--mdns` | Omogući mDNS otkrivanje |
|
||||
| `--cors` | Dodatni origin(i) pretraživača koji dozvoljavaju CORS |
|
||||
|
||||
---
|
||||
|
||||
@@ -388,12 +388,12 @@ Navedite sve OpenCode sesije.
|
||||
opencode session list
|
||||
```
|
||||
|
||||
##### Zastave
|
||||
##### Opcije
|
||||
|
||||
| Zastava | Kratko | Opis |
|
||||
| ------------- | ------ | ----------------------------------------- |
|
||||
| `--max-count` | `-n` | Ograničenje na N najnovijih sesija |
|
||||
| `--format` | | Izlazni format: tablica ili json (tabela) |
|
||||
| Opcija | Kratko | Opis |
|
||||
| ------------- | ------ | -------------------------------------- |
|
||||
| `--max-count` | `-n` | Ograničenje na N najnovijih sesija |
|
||||
| `--format` | | Izlazni format: table ili json (table) |
|
||||
|
||||
---
|
||||
|
||||
@@ -405,14 +405,14 @@ Prikaži statistiku upotrebe tokena i troškova za vaše OpenCode sesije.
|
||||
opencode stats
|
||||
```
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Opis |
|
||||
| ----------- | ----------------------------------------------------------------------------------------------------------- |
|
||||
| `--days` | Prikaži statistiku za zadnjih N dana (sva vremena) |
|
||||
| `--tools` | Broj alata za prikaz (svi) |
|
||||
| `--models` | Prikaži raščlambu korištenja modela (skriveno prema zadanim postavkama). Proslijedite broj za prikaz vrha N |
|
||||
| `--project` | Filtriraj po projektu (svi projekti, prazan niz: trenutni projekt) |
|
||||
| Opcija | Opis |
|
||||
| ----------- | ---------------------------------------------------------------------------------------------------------- |
|
||||
| `--days` | Prikaži statistiku za zadnjih N dana (sva vremena) |
|
||||
| `--tools` | Broj alata za prikaz (svi) |
|
||||
| `--models` | Prikaži raščlambu korištenja modela (skriveno prema zadanim postavkama). Proslijedite broj za prikaz top N |
|
||||
| `--project` | Filtriraj po projektu (svi projekti, prazan niz: trenutni projekt) |
|
||||
|
||||
---
|
||||
|
||||
@@ -447,22 +447,22 @@ opencode import https://opncd.ai/s/abc123
|
||||
|
||||
### web
|
||||
|
||||
Pokrenite OpenCode server bez glave sa web interfejsom.
|
||||
Pokrenite OpenCode headless server sa web interfejsom.
|
||||
|
||||
```bash
|
||||
opencode web
|
||||
```
|
||||
|
||||
Ovo pokreće HTTP server i otvara web pretraživač za pristup OpenCode preko web interfejsa. Postavite `OPENCODE_SERVER_PASSWORD` da omogućite HTTP osnovnu auth (korisničko ime je zadano na `opencode`).
|
||||
Ovo pokreće HTTP server i otvara web pretraživač za pristup OpenCode-u preko web interfejsa. Postavite `OPENCODE_SERVER_PASSWORD` da omogućite HTTP osnovnu auth (korisničko ime je zadano na `opencode`).
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Opis |
|
||||
| ------------ | ---------------------------------------------------- |
|
||||
| `--port` | Port za slušanje na |
|
||||
| `--hostname` | Ime hosta za slušanje |
|
||||
| `--mdns` | Omogući mDNS otkrivanje |
|
||||
| `--cors` | Dodatni izvor(a) pretraživača koji dozvoljavaju CORS |
|
||||
| Opcija | Opis |
|
||||
| ------------ | ----------------------------------------------------- |
|
||||
| `--port` | Port na kojem treba slušati |
|
||||
| `--hostname` | Hostname na kojem treba slušati |
|
||||
| `--mdns` | Omogući mDNS otkrivanje |
|
||||
| `--cors` | Dodatni origin(i) pretraživača koji dozvoljavaju CORS |
|
||||
|
||||
---
|
||||
|
||||
@@ -474,15 +474,15 @@ Pokrenite ACP (Agent Client Protocol) server.
|
||||
opencode acp
|
||||
```
|
||||
|
||||
Ova komanda pokreće ACP server koji komunicira preko stdin/stdout koristeći nd-JSON.
|
||||
Ova naredba pokreće ACP server koji komunicira preko stdin/stdout koristeći nd-JSON.
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Opis |
|
||||
| ------------ | ------------------- |
|
||||
| `--cwd` | Radni imenik |
|
||||
| `--port` | Port za slušanje na |
|
||||
| `--hostname` | Slušajte ime hosta |
|
||||
| Opcija | Opis |
|
||||
| ------------ | --------------------------- |
|
||||
| `--cwd` | Radni direktorij |
|
||||
| `--port` | Port na kojem treba slušati |
|
||||
| `--hostname` | Hostname na kojem slušati |
|
||||
|
||||
---
|
||||
|
||||
@@ -494,12 +494,12 @@ Deinstalirajte OpenCode i uklonite sve povezane datoteke.
|
||||
opencode uninstall
|
||||
```
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Kratko | Opis |
|
||||
| Opcija | Kratko | Opis |
|
||||
| --------------- | ------ | --------------------------------------------- |
|
||||
| `--keep-config` | `-c` | Čuvajte konfiguracijske datoteke |
|
||||
| `--keep-data` | `-d` | Čuvajte podatke i snimke sesije |
|
||||
| `--keep-config` | `-c` | Sačuvajte konfiguracijske datoteke |
|
||||
| `--keep-data` | `-d` | Sačuvajte podatke i snimke sesije |
|
||||
| `--dry-run` | | Pokažite šta bi bilo uklonjeno bez uklanjanja |
|
||||
| `--force` | `-f` | Preskoči upite za potvrdu |
|
||||
|
||||
@@ -507,7 +507,7 @@ opencode uninstall
|
||||
|
||||
### upgrade
|
||||
|
||||
Ažurira opencode na najnoviju verziju ili određenu verziju.
|
||||
Ažurira OpenCode na najnoviju verziju ili određenu verziju.
|
||||
|
||||
```bash
|
||||
opencode upgrade [target]
|
||||
@@ -525,73 +525,76 @@ Za nadogradnju na određenu verziju.
|
||||
opencode upgrade v0.1.48
|
||||
```
|
||||
|
||||
#### Zastave
|
||||
#### Opcije
|
||||
|
||||
| Zastava | Kratko | Opis |
|
||||
| Opcija | Kratko | Opis |
|
||||
| ---------- | ------ | ------------------------------------------------------- |
|
||||
| `--method` | `-m` | Korišteni način instalacije; curl, npm, pnpm, bun, brew |
---
## Globalne zastave
## Globalne opcije

CLI otvorenog koda uzima sljedeće globalne zastavice.
| Zastava | Kratko | Opis
|-------------- | ----- | ------------------------------------ |
| `--help` | `-h` | Prikaži pomoć |
| `--version` | `-v` | Odštampaj broj verzije |
| `--print-logs` | | Ispis zapisnika u stderr |
| `--log-level` | | Nivo dnevnika (DEBUG, INFO, WARN, ERROR) |
OpenCode CLI prihvata sljedeće globalne zastavice.

| Opcija | Kratko | Opis |
| -------------- | ------ | ----------------------------------------- |
| `--help` | `-h` | Prikaži pomoć |
| `--version` | `-v` | Ispiši broj verzije |
| `--print-logs` | | Ispis logova u stderr |
| `--log-level` | | Nivo logovanja (DEBUG, INFO, WARN, ERROR) |

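For instance, these global flags can be combined with any command; a minimal, illustrative invocation (the log level shown is just an example) might look like:

```bash
# Print logs to stderr and raise the log level for a single run (illustrative values)
opencode --print-logs --log-level DEBUG
```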
---
## Varijable okruženja

OpenCode se može konfigurirati pomoću varijabli okruženja.
| Varijabilna | Vrsta | Opis
|------------------------------------- | ------- | ------------------------------------------------- |
| `OPENCODE_AUTO_SHARE` | boolean | Automatski dijeli sesije |
| `OPENCODE_GIT_BASH_PATH` | string | Putanja do Git Bash izvršne datoteke na Windows |
| `OPENCODE_CONFIG` | string | Put do konfiguracionog fajla |
| `OPENCODE_CONFIG_DIR` | string | Put do konfiguracijskog direktorija |
| `OPENCODE_CONFIG_CONTENT` | string | Inline json konfiguracijski sadržaj |
| `OPENCODE_DISABLE_AUTOUPDATE` | boolean | Onemogući automatske provjere ažuriranja |
| `OPENCODE_DISABLE_PRUNE` | boolean | Onemogući orezivanje starih podataka |
| `OPENCODE_DISABLE_TERMINAL_TITLE` | boolean | Onemogući automatsko ažuriranje naslova terminala |
| `OPENCODE_PERMISSION` | string | Umetnuta json konfiguracija dozvola |
| `OPENCODE_DISABLE_DEFAULT_PLUGINS` | boolean | Onemogući podrazumevane dodatke |
| `OPENCODE_DISABLE_LSP_DOWNLOAD` | boolean | Onemogući automatsko preuzimanje LSP servera |
| `OPENCODE_ENABLE_EXPERIMENTAL_MODELS` | boolean | Omogući eksperimentalne modele |
| `OPENCODE_DISABLE_AUTOCOMPACT` | boolean | Onemogući automatsko sažimanje konteksta |
| `OPENCODE_DISABLE_CLAUDE_CODE` | boolean | Onemogući čitanje sa `.claude` (prompt + vještine) |
| `OPENCODE_DISABLE_CLAUDE_CODE_PROMPT` | boolean | Onemogući čitanje `~/.claude/CLAUDE.md` |
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Onemogući učitavanje `.claude/skills` |
| `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Onemogući dohvaćanje modela iz udaljenih izvora |
| `OPENCODE_FAKE_VCS` | string | Lažni VCS provajder za potrebe testiranja |
| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Onemogući provjeru vremena datoteke radi optimizacije |
| `OPENCODE_CLIENT` | string | Identifikator klijenta (zadano na `cli`) |
| `OPENCODE_ENABLE_EXA` | boolean | Omogući Exa alate za web pretraživanje |
| `OPENCODE_SERVER_PASSWORD` | string | Omogući osnovnu autorizaciju za `serve`/`web` |
| `OPENCODE_SERVER_USERNAME` | string | Poništi osnovno korisničko ime autentifikacije (zadano `opencode`) |
| `OPENCODE_MODELS_URL` | string | Prilagođeni URL za dohvaćanje konfiguracije modela |

| Varijabla | Tip | Opis |
| ------------------------------------- | ------- | ------------------------------------------------------------------ |
| `OPENCODE_AUTO_SHARE` | boolean | Automatski dijeli sesije |
| `OPENCODE_GIT_BASH_PATH` | string | Putanja do Git Bash izvršne datoteke na Windows-u |
| `OPENCODE_CONFIG` | string | Putanja do konfiguracijskog fajla |
| `OPENCODE_CONFIG_DIR` | string | Putanja do konfiguracijskog direktorija |
| `OPENCODE_CONFIG_CONTENT` | string | Inline json konfiguracijski sadržaj |
| `OPENCODE_DISABLE_AUTOUPDATE` | boolean | Onemogući automatske provjere ažuriranja |
| `OPENCODE_DISABLE_PRUNE` | boolean | Onemogući brisanje (pruning) starih podataka |
| `OPENCODE_DISABLE_TERMINAL_TITLE` | boolean | Onemogući automatsko ažuriranje naslova terminala |
| `OPENCODE_PERMISSION` | string | Inline json konfiguracija dozvola |
| `OPENCODE_DISABLE_DEFAULT_PLUGINS` | boolean | Onemogući podrazumijevane dodatke (plugins) |
| `OPENCODE_DISABLE_LSP_DOWNLOAD` | boolean | Onemogući automatsko preuzimanje LSP servera |
| `OPENCODE_ENABLE_EXPERIMENTAL_MODELS` | boolean | Omogući eksperimentalne modele |
| `OPENCODE_DISABLE_AUTOCOMPACT` | boolean | Onemogući automatsko sažimanje konteksta |
| `OPENCODE_DISABLE_CLAUDE_CODE` | boolean | Onemogući čitanje iz `.claude` (prompt + vještine) |
| `OPENCODE_DISABLE_CLAUDE_CODE_PROMPT` | boolean | Onemogući čitanje `~/.claude/CLAUDE.md` |
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Onemogući učitavanje `.claude/skills` |
| `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Onemogući dohvaćanje modela iz udaljenih izvora |
| `OPENCODE_FAKE_VCS` | string | Lažni VCS provajder za potrebe testiranja |
| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Onemogući provjeru vremena datoteke radi optimizacije |
| `OPENCODE_CLIENT` | string | Identifikator klijenta (zadano na `cli`) |
| `OPENCODE_ENABLE_EXA` | boolean | Omogući Exa alate za web pretraživanje |
| `OPENCODE_SERVER_PASSWORD` | string | Omogući osnovnu autentifikaciju za `serve`/`web` |
| `OPENCODE_SERVER_USERNAME` | string | Poništi osnovno korisničko ime autentifikacije (zadano `opencode`) |
| `OPENCODE_MODELS_URL` | string | Prilagođeni URL za dohvaćanje konfiguracije modela |

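As a rough sketch of how these variables are typically combined on the command line (the values and path below are placeholders, not defaults):

```bash
# Disable auto-update checks and point OpenCode at an alternate config file (placeholder path)
OPENCODE_DISABLE_AUTOUPDATE=1 OPENCODE_CONFIG=/path/to/opencode.json opencode
```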
---
### Eksperimentalno

Ove varijable okruženja omogućavaju eksperimentalne karakteristike koje se mogu promijeniti ili ukloniti.
| Varijabilna | Vrsta | Opis
|----------------------------------------------- | ------- | --------------------------------------- |
| `OPENCODE_EXPERIMENTAL` | boolean | Omogući sve eksperimentalne funkcije |
| `OPENCODE_EXPERIMENTAL_ICON_DISCOVERY` | boolean | Omogući otkrivanje ikona |
| `OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT` | boolean | Onemogući kopiranje pri odabiru u TUI |
| `OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS` | broj | Zadano vremensko ograničenje za bash komande u ms |
| `OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX` | broj | Maksimalni izlazni tokeni za LLM odgovore |
| `OPENCODE_EXPERIMENTAL_FILEWATCHER` | boolean | Omogući praćenje datoteka za cijeli dir |
| `OPENCODE_EXPERIMENTAL_OXFMT` | boolean | Omogući oxfmt formatter |
| `OPENCODE_EXPERIMENTAL_LSP_TOOL` | boolean | Omogući eksperimentalni LSP alat |
| `OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER` | boolean | Onemogući praćenje datoteka |
| `OPENCODE_EXPERIMENTAL_EXA` | boolean | Omogući eksperimentalne Exa funkcije |
| `OPENCODE_EXPERIMENTAL_LSP_TY` | boolean | Omogući eksperimentalnu provjeru tipa LSP |
| `OPENCODE_EXPERIMENTAL_MARKDOWN` | boolean | Omogući eksperimentalne Markdown funkcije |
| `OPENCODE_EXPERIMENTAL_PLAN_MODE` | boolean | Omogući režim plana |

| Varijabla | Tip | Opis |
| ----------------------------------------------- | ------- | ------------------------------------------------- |
| `OPENCODE_EXPERIMENTAL` | boolean | Omogući sve eksperimentalne funkcije |
| `OPENCODE_EXPERIMENTAL_ICON_DISCOVERY` | boolean | Omogući otkrivanje ikona |
| `OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT` | boolean | Onemogući kopiranje pri odabiru u TUI |
| `OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS` | number | Zadano vremensko ograničenje za bash naredbe u ms |
| `OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX` | number | Maksimalni izlazni tokeni za LLM odgovore |
| `OPENCODE_EXPERIMENTAL_FILEWATCHER` | boolean | Omogući praćenje datoteka za cijeli direktorij |
| `OPENCODE_EXPERIMENTAL_OXFMT` | boolean | Omogući oxfmt formatter |
| `OPENCODE_EXPERIMENTAL_LSP_TOOL` | boolean | Omogući eksperimentalni LSP alat |
| `OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER` | boolean | Onemogući praćenje datoteka |
| `OPENCODE_EXPERIMENTAL_EXA` | boolean | Omogući eksperimentalne Exa funkcije |
| `OPENCODE_EXPERIMENTAL_LSP_TY` | boolean | Omogući eksperimentalnu provjeru tipa LSP |
| `OPENCODE_EXPERIMENTAL_MARKDOWN` | boolean | Omogući eksperimentalne Markdown funkcije |
| `OPENCODE_EXPERIMENTAL_PLAN_MODE` | boolean | Omogući Plan mod |

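A minimal sketch of opting into an experimental flag for a single run, assuming a truthy value such as `1` is accepted (as in the `OPENCODE_ENABLE_EXA=1 opencode` example above):

```bash
# Enable all experimental features for this invocation only (illustrative)
OPENCODE_EXPERIMENTAL=1 opencode
```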
@@ -13,7 +13,7 @@ Prilagođene komande su dodatak ugrađenim komandama kao što su `/init`, `/undo
|
||||
|
||||
---
|
||||
|
||||
## Kreirajte komandne fajlove
|
||||
## Kreiranje datoteka naredbi
|
||||
|
||||
Kreirajte markdown fajlove u direktorijumu `commands/` da definišete prilagođene komande.
|
||||
Kreiraj `.opencode/commands/test.md`:
|
||||
@@ -38,7 +38,7 @@ Koristite komandu tako što ćete upisati `/` nakon čega slijedi naziv komande.
|
||||
|
||||
---
|
||||
|
||||
## Konfiguriši
|
||||
## Konfiguracija
|
||||
|
||||
Možete dodati prilagođene komande kroz OpenCode konfiguraciju ili kreiranjem markdown datoteka u direktoriju `commands/`.
|
||||
|
||||
@@ -99,7 +99,7 @@ Ime markdown datoteke postaje ime naredbe. Na primjer, `test.md` vam omogućava
|
||||
|
||||
---
|
||||
|
||||
## Prompt config
|
||||
## Konfiguracija upita
|
||||
|
||||
Promptovi za prilagođene komande podržavaju nekoliko posebnih čuvara mjesta i sintakse.
|
||||
|
||||
@@ -190,7 +190,7 @@ Naredbe se pokreću u korijenskom direktoriju vašeg projekta i njihov izlaz pos
|
||||
|
||||
---
|
||||
|
||||
### Reference fajlova
|
||||
### Reference datoteka
|
||||
|
||||
Uključite datoteke u svoju naredbu koristeći `@` nakon čega slijedi naziv datoteke.
|
||||
|
||||
@@ -213,7 +213,7 @@ Pogledajmo detaljno opcije konfiguracije.
|
||||
|
||||
---
|
||||
|
||||
### Template
|
||||
### Šablon
|
||||
|
||||
Opcija `template` definira prompt koji će biti poslan LLM-u kada se naredba izvrši.
|
||||
|
||||
@@ -269,7 +269,7 @@ Ovo je **opciona** opcija konfiguracije. Ako nije navedeno, podrazumevano je va
|
||||
|
||||
---
|
||||
|
||||
### Subtask
|
||||
### Podzadatak
|
||||
|
||||
Koristite `subtask` boolean da prisilite naredbu da pokrene [subagent](/docs/agents/#subagents) pozivanje.
|
||||
Ovo je korisno ako želite da naredba ne zagađuje vaš primarni kontekst i da će **primorati** agenta da djeluje kao subagent,
|
||||
@@ -307,9 +307,9 @@ Ovo je **opciona** opcija konfiguracije.
|
||||
|
||||
---
|
||||
|
||||
## Ugrađene
|
||||
## Ugrađene naredbe
|
||||
|
||||
opencode uključuje nekoliko ugrađenih naredbi kao što su `/init`, `/undo`, `/redo`, `/share`, `/help`; [saznaj više](/docs/tui#commands).
|
||||
OpenCode uključuje nekoliko ugrađenih naredbi kao što su `/init`, `/undo`, `/redo`, `/share`, `/help`; [saznaj više](/docs/tui#commands).
|
||||
:::note
|
||||
Prilagođene komande mogu nadjačati ugrađene komande.
|
||||
:::
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
title: Config
|
||||
title: Konfiguracija
|
||||
description: Korištenje OpenCode JSON konfiguracije.
|
||||
---
|
||||
|
||||
@@ -25,10 +25,10 @@ OpenCode podržava i **JSON** i **JSONC** (JSON sa komentarima) formate.
|
||||
|
||||
## Lokacije
|
||||
|
||||
Možete postaviti svoju konfiguraciju na nekoliko različitih lokacija i one imaju a
|
||||
drugačiji redosled prioriteta.
|
||||
Možete postaviti svoju konfiguraciju na nekoliko različitih lokacija i one imaju drugačiji redoslijed prioriteta.
|
||||
|
||||
:::note
|
||||
Konfiguracijski fajlovi su **spojeni zajedno**, a ne zamijenjeni.
|
||||
Konfiguracijski fajlovi se **spajaju**, ne zamjenjuju.
|
||||
:::
|
||||
Konfiguracijski fajlovi se spajaju, ne zamjenjuju. Kombiniraju se postavke sa sljedećih konfiguracijskih lokacija. Kasnije konfiguracije poništavaju prethodne samo za konfliktne ključeve. Nekonfliktne postavke iz svih konfiguracija su sačuvane.
|
||||
Na primjer, ako vaša globalna konfiguracija postavlja `theme: "opencode"` i `autoupdate: true`, a vaša projektna konfiguracija postavlja `model: "anthropic/claude-sonnet-4-5"`, konačna konfiguracija će uključivati sve tri postavke.
|
||||
@@ -39,12 +39,13 @@ Na primjer, ako vaša globalna konfiguracija postavlja `theme: "opencode"` i `au
|
||||
|
||||
Izvori konfiguracije se učitavaju ovim redoslijedom (kasniji izvori poništavaju ranije):
|
||||
|
||||
1. **Udaljena konfiguracija** (od `.well-known/opencode`) - organizacione postavke
|
||||
1. **Udaljena konfiguracija** (od `.well-known/opencode`) - organizacijske postavke
|
||||
2. **Globalna konfiguracija** (`~/.config/opencode/opencode.json`) - korisničke postavke
|
||||
3. **Prilagođena konfiguracija** (`OPENCODE_CONFIG` env var) - prilagođena zaobilaženja
|
||||
3. **Prilagođena konfiguracija** (`OPENCODE_CONFIG` env var) - prilagođena preinačenja
|
||||
4. **Konfiguracija projekta** (`opencode.json` u projektu) - postavke specifične za projekat
|
||||
5. **`.opencode` direktoriji** - agenti, komande, dodaci
|
||||
6. **Inline config** (`OPENCODE_CONFIG_CONTENT` env var) - runtime nadjačava
|
||||
6. **Inline konfiguracija** (`OPENCODE_CONFIG_CONTENT` env var) - runtime preinačenja
|
||||
|
||||
To znači da konfiguracije projekta mogu nadjačati globalne zadane postavke, a globalne konfiguracije mogu nadjačati postavke udaljene organizacije.
|
||||
:::note
|
||||
Direktoriji `.opencode` i `~/.config/opencode` koriste **imena u množini** za poddirektorije: `agents/`, `commands/`, `modes/`, `plugins/`, `skills/`, `tools/` i `themes/`. Pojedinačna imena (npr. `agent/`) su također podržana za kompatibilnost unatrag.
|
||||
@@ -52,9 +53,9 @@ Izvori konfiguracije se učitavaju ovim redoslijedom (kasniji izvori poništavaj
|
||||
|
||||
---
|
||||
|
||||
### Daljinski
|
||||
### Udaljeno (Remote)
|
||||
|
||||
Organizacije mogu pružiti zadanu konfiguraciju preko `.well-known/opencode` krajnje točke. Ovo se automatski preuzima kada se autentifikujete kod provajdera koji to podržava.
|
||||
Organizacije mogu pružiti zadanu konfiguraciju preko `.well-known/opencode` krajnje tačke. Ovo se automatski preuzima kada se autentifikujete kod provajdera koji to podržava.
|
||||
Prvo se učitava udaljena konfiguracija koja služi kao osnovni sloj. Svi ostali izvori konfiguracije (globalni, projektni) mogu nadjačati ove zadane postavke.
|
||||
Na primjer, ako vaša organizacija nudi MCP servere koji su po defaultu onemogućeni:
|
||||
|
||||
@@ -86,14 +87,14 @@ Možete omogućiti određene servere u vašoj lokalnoj konfiguraciji:
|
||||
|
||||
---
|
||||
|
||||
### Global
|
||||
### Globalno
|
||||
|
||||
Postavite svoju globalnu OpenCode konfiguraciju u `~/.config/opencode/opencode.json`. Koristite globalnu konfiguraciju za korisničke preferencije kao što su teme, provajderi ili veze tipki.
|
||||
Globalna konfiguracija poništava zadane postavke udaljene organizacije.
|
||||
|
||||
---
|
||||
|
||||
### Project
|
||||
### Projekt
|
||||
|
||||
Dodajte `opencode.json` u korijen projekta. Konfiguracija projekta ima najveći prioritet među standardnim konfiguracijskim datotekama - ona nadjačava globalne i udaljene konfiguracije.
|
||||
:::tip
|
||||
@@ -104,7 +105,7 @@ Ovo je također sigurno provjeriti u Git i koristi istu shemu kao globalna.
|
||||
|
||||
---
|
||||
|
||||
### Custom config
|
||||
### Prilagođena konfiguracija
|
||||
|
||||
Navedite prilagođenu putanju konfiguracijske datoteke koristeći varijablu okruženja `OPENCODE_CONFIG`.
|
||||
|
||||
@@ -117,12 +118,9 @@ Prilagođena konfiguracija se učitava između globalne i projektne konfiguracij
|
||||
|
||||
---
|
||||
|
||||
### Custom directory
|
||||
### Prilagođeni direktorij
|
||||
|
||||
Navedite prilagođeni konfiguracijski direktorij koristeći `OPENCODE_CONFIG_DIR`
|
||||
varijabla okruženja. U ovom direktoriju će se tražiti agenti, komande,
|
||||
modove i dodatke baš kao standardni `.opencode` direktorij, i trebali bi
|
||||
prate istu strukturu.
|
||||
Navedite prilagođeni konfiguracijski direktorij koristeći `OPENCODE_CONFIG_DIR` varijablu okruženja. U ovom direktoriju će se tražiti agenti, komande, modovi i dodaci baš kao standardni `.opencode` direktorij, i trebali bi pratiti istu strukturu.
|
||||
|
||||
```bash
|
||||
export OPENCODE_CONFIG_DIR=/path/to/my/config-directory
|
||||
@@ -133,10 +131,10 @@ Prilagođeni direktorij se učitava nakon direktorija globalne konfiguracije i `
|
||||
|
||||
---
|
||||
|
||||
## Shema
|
||||
## Šema
|
||||
|
||||
Konfiguracijski fajl ima šemu koja je definirana u [**`opencode.ai/config.json`**](https://opencode.ai/config.json).
|
||||
Vaš uređivač bi trebao biti u mogućnosti da potvrdi i autodovršava na osnovu šeme.
|
||||
Vaš editor bi trebao biti u mogućnosti da validira i autodovršava na osnovu šeme.
|
||||
|
||||
---
|
||||
|
||||
@@ -162,13 +160,13 @@ Dostupne opcije:
|
||||
- `scroll_acceleration.enabled` - Omogući ubrzanje skrolovanja u macOS stilu. **Ima prednost nad `scroll_speed`.**
|
||||
- `scroll_speed` - Prilagođeni množitelj brzine pomicanja (podrazumevano: `3`, minimalno: `1`). Zanemareno ako je `scroll_acceleration.enabled` `true`.
|
||||
- `diff_style` - Kontrola prikaza razlike. `"auto"` se prilagođava širini terminala, `"stacked"` uvijek prikazuje jednu kolonu.
|
||||
[Ovdje saznajte više o korištenju TUI](/docs/tui).
|
||||
[Saznajte više o korištenju TUI](/docs/tui) ovdje.
|
||||
|
||||
---
|
||||
|
||||
### Server
|
||||
|
||||
Možete konfigurirati postavke servera za komande `opencode serve` i `opencode web` putem opcije `server`.
|
||||
Možete konfigurirati postavke servera za naredbe `opencode serve` i `opencode web` putem opcije `server`.
|
||||
|
||||
```json title="opencode.json"
|
||||
{
|
||||
@@ -186,15 +184,15 @@ Možete konfigurirati postavke servera za komande `opencode serve` i `opencode w
|
||||
Dostupne opcije:
|
||||
|
||||
- `port` - Port za slušanje.
|
||||
- `hostname` - Ime hosta za slušanje. Kada je `mdns` omogućen i nije postavljeno ime hosta, podrazumevano je `0.0.0.0`.
|
||||
- `mdns` - Omogući otkrivanje mDNS usluge. Ovo omogućava drugim uređajima na mreži da otkriju vaš OpenCode server.
|
||||
- `mdnsDomain` - Prilagođeno ime domene za mDNS uslugu. Zadano je `opencode.local`. Korisno za pokretanje više instanci na istoj mreži.
|
||||
- `cors` - Dodatni izvori koji omogućavaju CORS kada koristite HTTP server iz klijenta baziranog na pretraživaču. Vrijednosti moraju biti punog porijekla (šema + host + opcijski port), npr. `https://app.example.com`.
|
||||
- `hostname` - Hostname za slušanje. Kada je `mdns` omogućen i nije postavljeno ime hosta, podrazumevano je `0.0.0.0`.
|
||||
- `mdns` - Omogući mDNS otkrivanje servisa. Ovo omogućava drugim uređajima na mreži da otkriju vaš OpenCode server.
|
||||
- `mdnsDomain` - Prilagođeno ime domene za mDNS servis. Zadano je `opencode.local`. Korisno za pokretanje više instanci na istoj mreži.
|
||||
- `cors` - Dodatni origini koji omogućavaju CORS kada koristite HTTP server iz klijenta baziranog na pretraživaču. Vrijednosti moraju biti puni origin (shema + host + opcijski port), npr. `https://app.example.com`.
|
||||
[Saznajte više o serveru](/docs/server) ovdje.
|
||||
|
||||
---
|
||||
|
||||
### Tools
|
||||
### Alati
|
||||
|
||||
Možete upravljati alatima koje LLM može koristiti putem opcije `tools`.
|
||||
|
||||
@@ -244,7 +242,7 @@ Opcije provajdera mogu uključivati `timeout` i `setCacheKey`:
|
||||
|
||||
- `timeout` - Vrijeme čekanja zahtjeva u milisekundama (podrazumevano: 300000). Postavite na `false` da onemogućite.
|
||||
- `setCacheKey` - Osigurajte da je ključ keš memorije uvijek postavljen za određenog provajdera.
|
||||
Također možete konfigurirati [lokalni modeli](/docs/models#local). [Saznajte više](/docs/models).
|
||||
Također možete konfigurirati [lokalne modele](/docs/models#local). [Saznajte više](/docs/models).
|
||||
|
||||
---
|
||||
|
||||
@@ -272,8 +270,8 @@ Amazon Bedrock podržava konfiguraciju specifičnu za AWS:
|
||||
```
|
||||
|
||||
- `region` - AWS regija za Bedrock (zadano na `AWS_REGION` env var ili `us-east-1`)
|
||||
- `profile` - AWS imenovan profil od `~/.aws/credentials` (podrazumevano na `AWS_PROFILE` env var)
|
||||
- `endpoint` - URL prilagođene krajnje tačke za VPC krajnje tačke. Ovo je pseudonim za generičku opciju `baseURL` koristeći terminologiju specifičnu za AWS. Ako su oba navedena, `endpoint` ima prednost.
|
||||
- `profile` - AWS imenovani profil iz `~/.aws/credentials` (zadano na `AWS_PROFILE` env var)
|
||||
- `endpoint` - URL prilagođene krajnje tačke za VPC krajnje tačke. Ovo je alias za generičku opciju `baseURL` koristeći terminologiju specifičnu za AWS. Ako su oba navedena, `endpoint` ima prednost.
|
||||
:::note
|
||||
Tokeni nosioca (`AWS_BEARER_TOKEN_BEDROCK` ili `/connect`) imaju prednost nad autentifikacijom zasnovanom na profilu. Pogledajte [prednost autentifikacije](/docs/providers#authentication-precedence) za detalje.
|
||||
:::
|
||||
@@ -281,7 +279,7 @@ Amazon Bedrock podržava konfiguraciju specifičnu za AWS:
|
||||
|
||||
---
|
||||
|
||||
### Theme
|
||||
### Tema
|
||||
|
||||
Možete konfigurirati temu koju želite koristiti u svojoj OpenCode konfiguraciji putem opcije `theme`.
|
||||
|
||||
@@ -333,7 +331,7 @@ Možete postaviti zadanog agenta koristeći opciju `default_agent`. Ovo određuj
|
||||
}
|
||||
```
|
||||
|
||||
Zadani agent mora biti primarni agent (ne podagent). Ovo može biti ugrađeni agent kao što je `"build"` ili `"plan"`, ili [prilagođeni agent](/docs/agents) koji ste definirali. Ako navedeni agent ne postoji ili je subagent, OpenCode će se vratiti na `"build"` s upozorenjem.
|
||||
Zadani agent mora biti primarni agent (ne podagent). Ovo može biti ugrađeni agent kao što je `"build"` ili `"plan"`, ili [prilagođeni agent](/docs/agents) koji ste definirali. Ako navedeni agent ne postoji ili je podagent, OpenCode će se vratiti na `"build"` s upozorenjem.
|
||||
Ova postavka se primjenjuje na sva sučelja: TUI, CLI (`opencode run`), desktop aplikaciju i GitHub Action.
|
||||
|
||||
---
|
||||
@@ -349,18 +347,18 @@ Možete konfigurirati funkciju [share](/docs/share) putem opcije `share`.
|
||||
}
|
||||
```
|
||||
|
||||
Ovo traje:
|
||||
Ovo prihvata:
|
||||
|
||||
- `"manual"` - Dozvoli ručno dijeljenje putem komandi (podrazumevano)
|
||||
- `"manual"` - Dozvoli ručno dijeljenje putem naredbi (podrazumevano)
|
||||
- `"auto"` - Automatski dijelite nove razgovore
|
||||
- `"disabled"` - Onemogući dijeljenje u potpunosti
|
||||
Podrazumevano, dijeljenje je postavljeno na ručni način rada gdje trebate eksplicitno dijeliti razgovore pomoću naredbe `/share`.
|
||||
|
||||
---
|
||||
|
||||
### Command
|
||||
### Naredbe
|
||||
|
||||
Možete konfigurirati prilagođene komande za ponavljanje zadataka putem opcije `command`.
|
||||
Možete konfigurirati prilagođene naredbe za ponavljanje zadataka putem opcije `command`.
|
||||
|
||||
```jsonc title="opencode.jsonc"
|
||||
{
|
||||
@@ -380,13 +378,13 @@ Možete konfigurirati prilagođene komande za ponavljanje zadataka putem opcije
|
||||
}
|
||||
```
|
||||
|
||||
Također možete definirati komande koristeći markdown fajlove u `~/.config/opencode/commands/` ili `.opencode/commands/`. [Saznajte više ovdje](/docs/commands).
|
||||
Također možete definirati naredbe koristeći markdown fajlove u `~/.config/opencode/commands/` ili `.opencode/commands/`. [Saznajte više ovdje](/docs/commands).
|
||||
|
||||
---
|
||||
|
||||
### Keybinds
|
||||
### Prečice tipki
|
||||
|
||||
Možete prilagoditi svoje veze ključeva putem opcije `keybinds`.
|
||||
Možete prilagoditi svoje veze tipki putem opcije `keybinds`.
|
||||
|
||||
```json title="opencode.json"
|
||||
{
|
||||
@@ -415,7 +413,7 @@ Imajte na umu da ovo funkcionira samo ako nije instalirano pomoću upravitelja p
|
||||
|
||||
---
|
||||
|
||||
### Formatters
|
||||
### Formateri
|
||||
|
||||
Možete konfigurirati formatere koda putem opcije `formatter`.
|
||||
|
||||
@@ -437,13 +435,13 @@ Možete konfigurirati formatere koda putem opcije `formatter`.
|
||||
}
|
||||
```
|
||||
|
||||
[Saznajte više o formatterima](/docs/formatters) ovdje.
|
||||
[Saznajte više o formaterima](/docs/formatters) ovdje.
|
||||
|
||||
---
|
||||
|
||||
### Dozvole
|
||||
|
||||
Prema zadanim postavkama, opencode **dopušta sve operacije** bez potrebe za eksplicitnim dopuštenjem. Ovo možete promijeniti koristeći opciju `permission`.
|
||||
Prema zadanim postavkama, OpenCode **dopušta sve operacije** bez potrebe za eksplicitnim dopuštenjem. Ovo možete promijeniti koristeći opciju `permission`.
|
||||
Na primjer, da osigurate da alati `edit` i `bash` zahtijevaju odobrenje korisnika:
|
||||
|
||||
```json title="opencode.json"
|
||||
@@ -456,11 +454,11 @@ Na primjer, da osigurate da alati `edit` i `bash` zahtijevaju odobrenje korisnik
|
||||
}
|
||||
```
|
||||
|
||||
[Ovdje saznajte više o ](/docs/permissions) dozvolama.
|
||||
[Saznajte više o dozvolama](/docs/permissions) ovdje.
|
||||
|
||||
---
|
||||
|
||||
### Compaction
|
||||
### Sažimanje
|
||||
|
||||
Možete kontrolirati ponašanje sažimanja konteksta putem opcije `compaction`.
|
||||
|
||||
@@ -479,7 +477,7 @@ Možete kontrolirati ponašanje sažimanja konteksta putem opcije `compaction`.
|
||||
|
||||
---
|
||||
|
||||
### Watcher
|
||||
### Promatrač (Watcher)
|
||||
|
||||
Možete konfigurirati obrasce ignoriranja promatrača datoteka putem opcije `watcher`.
|
||||
|
||||
@@ -511,7 +509,7 @@ Možete konfigurirati MCP servere koje želite koristiti putem opcije `mcp`.
|
||||
|
||||
---
|
||||
|
||||
### Extras
|
||||
### Dodaci
|
||||
|
||||
[Plugins](/docs/plugins) proširuju OpenCode sa prilagođenim alatima, kukicama i integracijama.
|
||||
Postavite datoteke dodataka u `.opencode/plugins/` ili `~/.config/opencode/plugins/`. Također možete učitati dodatke iz npm-a preko opcije `plugin`.
|
||||
@@ -538,8 +536,7 @@ Možete konfigurirati upute za model koji koristite putem opcije `instructions`.
|
||||
}
|
||||
```
|
||||
|
||||
Ovo uzima niz putanja i uzoraka globusa do datoteka instrukcija. [Saznajte više
|
||||
o pravilima ovdje](/docs/rules).
|
||||
Ovo uzima niz putanja i glob uzoraka do datoteka instrukcija. [Saznajte više o pravilima ovdje](/docs/rules).
|
||||
|
||||
---
|
||||
|
||||
@@ -560,7 +557,7 @@ Možete onemogućiti dobavljače koji se automatski učitavaju preko opcije `dis
|
||||
Opcija `disabled_providers` prihvata niz ID-ova provajdera. Kada je provajder onemogućen:
|
||||
|
||||
- Neće se učitati čak i ako su varijable okruženja postavljene.
|
||||
- Neće se učitati čak i ako su API ključevi konfigurirani putem `/connect` komande.
|
||||
- Neće se učitati čak i ako su API ključevi konfigurirani putem `/connect` naredbe.
|
||||
- Modeli dobavljača se neće pojaviti na listi za odabir modela.
|
||||
|
||||
---
|
||||
@@ -576,7 +573,7 @@ Možete odrediti listu dozvoljenih dobavljača putem opcije `enabled_providers`.
|
||||
}
|
||||
```
|
||||
|
||||
Ovo je korisno kada želite da ograničite OpenCode da koristi samo određene provajdere umesto da ih onemogućavate jednog po jednog.
|
||||
Ovo je korisno kada želite da ograničite OpenCode da koristi samo određene provajdere umjesto da ih onemogućavate jednog po jednog.
|
||||
:::note
|
||||
`disabled_providers` ima prioritet nad `enabled_providers`.
|
||||
:::
|
||||
@@ -607,7 +604,7 @@ Možete koristiti zamjenu varijabli u vašim konfiguracijskim datotekama da bist
|
||||
|
||||
---
|
||||
|
||||
### Env vars
|
||||
### Varijable okruženja
|
||||
|
||||
Koristite `{env:VARIABLE_NAME}` za zamjenu varijabli okruženja:
|
||||
|
||||
@@ -630,7 +627,7 @@ Ako varijabla okruženja nije postavljena, bit će zamijenjena praznim nizom.
|
||||
|
||||
---
|
||||
|
||||
### Fajlovi
|
||||
### Datoteke
|
||||
|
||||
Koristite `{file:path/to/file}` da zamijenite sadržaj fajla:
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
title: Custom Tools
|
||||
title: Prilagođeni alati
|
||||
description: Kreirajte alate koje LLM može pozvati u otvorenom kodu.
|
||||
---
|
||||
|
||||
@@ -133,7 +133,7 @@ Koristite `context.worktree` za korijen git radnog stabla.
|
||||
|
||||
## Primjeri
|
||||
|
||||
### Napišite alat u Python-u
|
||||
### Pisanje alata u Python-u
|
||||
|
||||
Možete pisati svoje alate na bilo kom jeziku koji želite. Evo primjera koji zbraja dva broja koristeći Python.
|
||||
Prvo kreirajte alat kao Python skriptu:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
title: Enterprise
|
||||
title: Za preduzeća
|
||||
description: Sigurno korištenje OpenCode u vašoj organizaciji.
|
||||
---
|
||||
|
||||
@@ -19,7 +19,7 @@ Da započnete s OpenCode Enterprise:
|
||||
|
||||
---
|
||||
|
||||
## Trial
|
||||
## Proba
|
||||
|
||||
OpenCode je otvorenog koda i ne pohranjuje vaš kod niti kontekstualne podatke, tako da vaši developeri mogu jednostavno [započeti](/docs/) i provesti probu.
|
||||
|
||||
@@ -60,19 +60,19 @@ Preporučujemo da ovo onemogućite tokom probe.
|
||||
|
||||
---
|
||||
|
||||
## Pricing
|
||||
## Cijene
|
||||
|
||||
Koristimo model naplate po sjedištu za OpenCode Enterprise. Ako imate vlastiti LLM gateway, ne naplaćujemo korištene tokene. Za više detalja o cijenama i opcijama implementacije, **<a href={email}>kontaktirajte nas</a>**.
|
||||
|
||||
---
|
||||
|
||||
## Deployment
|
||||
## Postavljanje
|
||||
|
||||
Nakon što završite probni period i spremni ste koristiti OpenCode u svojoj organizaciji, možete **<a href={email}>kontaktirati nas</a>** da razgovaramo o cijenama i opcijama implementacije.
|
||||
|
||||
---
|
||||
|
||||
### Central Config
|
||||
### Centralna konfiguracija
|
||||
|
||||
Možemo postaviti OpenCode da koristi jednu centralnu konfiguraciju za cijelu organizaciju.
|
||||
|
||||
@@ -96,7 +96,7 @@ Također možete onemogućiti sve druge AI provajdere, čime osiguravate da svi
|
||||
|
||||
---
|
||||
|
||||
### Self-hosting
|
||||
### Samostalno hostovanje
|
||||
|
||||
Iako preporučujemo onemogućavanje share stranica kako biste osigurali da podaci nikada ne napuštaju vašu organizaciju, možemo vam pomoći i da ih samostalno hostujete na vlastitoj infrastrukturi.
|
||||
|
||||
@@ -104,17 +104,17 @@ Ovo je trenutno na našoj mapi puta. Ako ste zainteresovani, **<a href={email}>j
|
||||
|
||||
---
|
||||
|
||||
## FAQ
|
||||
## Često postavljana pitanja
|
||||
|
||||
<details>
|
||||
<summary>What is OpenCode Enterprise?</summary>
|
||||
<summary>Šta je OpenCode Enterprise?</summary>
|
||||
|
||||
OpenCode Enterprise je za organizacije koje žele osigurati da njihov kod i podaci nikada ne napuštaju njihovu infrastrukturu. To omogućava centralizovana konfiguracija koja se integriše s vašim SSO-om i internim AI gateway-om.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>How do I get started with OpenCode Enterprise?</summary>
|
||||
<summary>Kako započeti s OpenCode Enterprise?</summary>
|
||||
|
||||
Jednostavno započnite internu probu sa svojim timom. OpenCode po defaultu ne pohranjuje vaš kod ni kontekstualne podatke, što olakšava početak.
|
||||
|
||||
@@ -123,21 +123,21 @@ Zatim **<a href={email}>kontaktirajte nas</a>** da razgovaramo o cijenama i opci
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>How does enterprise pricing work?</summary>
|
||||
<summary>Kako funkcionišu enterprise cijene?</summary>
|
||||
|
||||
Nudimo enterprise cijene po sjedištu. Ako imate vlastiti LLM gateway, ne naplaćujemo korištene tokene. Za više detalja, **<a href={email}>kontaktirajte nas</a>** za prilagođenu ponudu prema potrebama vaše organizacije.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Is my data secure with OpenCode Enterprise?</summary>
|
||||
<summary>Jesu li moji podaci sigurni uz OpenCode Enterprise?</summary>
|
||||
|
||||
Da. OpenCode ne pohranjuje vaš kod niti kontekstualne podatke. Sva obrada se odvija lokalno ili putem direktnih API poziva vašem AI provajderu. Uz centralnu konfiguraciju i SSO integraciju, vaši podaci ostaju sigurni unutar infrastrukture vaše organizacije.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Can we use our own private NPM registry?</summary>
|
||||
<summary>Možemo li koristiti vlastiti privatni NPM registar?</summary>
|
||||
|
||||
OpenCode podržava privatne npm registre kroz Bunovu izvornu podršku za `.npmrc` datoteku. Ako vaša organizacija koristi privatni registar, kao što je JFrog Artifactory, Nexus ili slično, osigurajte da su developeri autentifikovani prije pokretanja OpenCode.
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
---
title: Formatters
title: Formateri
description: OpenCode koristi formatere specifične za jezik.
---
@@ -7,7 +7,7 @@ OpenCode automatski formatira datoteke nakon što su napisane ili uređene pomo
---
## Ugrađeno
## Ugrađeni
OpenCode dolazi sa nekoliko ugrađenih formatera za popularne jezike i okvire. Ispod je lista formatera, podržanih ekstenzija datoteka i naredbi ili opcija konfiguracije koje su mu potrebne.
| Formatter | Ekstenzije | Zahtjevi
@@ -49,7 +49,7 @@ Kada OpenCode piše ili uređuje datoteku, on:
---
## Konfiguriši
## Konfiguracija
Možete prilagoditi formatere kroz `formatter` odjeljak u vašoj OpenCode konfiguraciji.
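As an editor's aside to this hunk, a minimal sketch of what that `formatter` section might look like in `opencode.json`, here used to switch a built-in formatter off. The `formatter` key comes from the doc text above; the formatter name and the `disabled` field are assumptions for illustration, not something this changeset confirms.

```json title="opencode.json"
{
  "formatter": {
    "prettier": {
      "disabled": true
    }
  }
}
```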
@@ -7,7 +7,7 @@ OpenCode se integriše sa vašim GitHub tokovom rada. Spomenite `/opencode` ili
---
## Karakteristike
## Funkcije
- **Problemi trijaže**: Zamolite OpenCode da ispita problem i objasni vam ga.
- **Popravi i implementiraj**: Zamolite OpenCode da popravi problem ili implementira funkciju. I radit će u novoj poslovnici i dostavljati PR sa svim promjenama.
@@ -244,7 +244,7 @@ Za `issues` događaje, `prompt` unos je **potreban** jer nema komentara za izvla
---
## Prilagođene upite
## Prilagođeni upiti
Zaobiđite zadani prompt da biste prilagodili ponašanje OpenCode za vaš tok posla.
@@ -15,7 +15,7 @@ Ovdje koristimo CI/CD komponentu kreiranu u zajednici za OpenCode — [nagyv/git
---
### Karakteristike
### Funkcije
- **Koristite prilagođenu konfiguraciju po poslu**: Konfigurirajte OpenCode s prilagođenim konfiguracijskim direktorijem, na primjer `./config/#custom-directory` da omogućite ili onemogućite funkcionalnost po OpenCode pozivanju.
- **Minimalno podešavanje**: CI komponenta postavlja OpenCode u pozadini, samo trebate kreirati OpenCode konfiguraciju i početnu prompt.
@@ -49,7 +49,7 @@ Spomenite `@opencode` u komentaru i OpenCode će izvršiti zadatke unutar vašeg
---
### Karakteristike
### Funkcije
- **Problemi trijaže**: Zamolite OpenCode da ispita problem i objasni vam ga.
- **Popravi i implementiraj**: Zamolite OpenCode da popravi problem ili implementira funkciju.
@@ -74,7 +74,7 @@ Pogledajte [**GitLab dokumente**](https://docs.gitlab.com/user/duo_agent_platfor
<details>
<summary>Flow configuration</summary>
<summary>Konfiguracija toka</summary>
```yaml
image: node:22-slim
@@ -7,7 +7,7 @@ OpenCode se integriše sa VS kodom, Cursor-om ili bilo kojim IDE-om koji podrža
---
## Upotreba
## Korištenje
- **Brzo pokretanje**: Koristite `Cmd+Esc` (Mac) ili `Ctrl+Esc` (Windows/Linux) da otvorite OpenCode u prikazu podijeljenog terminala ili fokusirajte postojeću terminalsku sesiju ako je već pokrenuta.
- **Nova sesija**: Koristite `Cmd+Shift+Esc` (Mac) ili `Ctrl+Shift+Esc` (Windows/Linux) da započnete novu OpenCode terminalsku sesiju, čak i ako je ona već otvorena. Takođe možete kliknuti na dugme OpenCode u korisničkom sučelju.
@@ -1,13 +1,13 @@
---
title: Intro
description: Započnite s OpenCode.
title: Uvod
description: Započnite sa OpenCode.
---
import { Tabs, TabItem } from "@astrojs/starlight/components"
import config from "../../../../config.mjs"
export const console = config.console
[**OpenCode**](/) je agent za AI kodiranje otvorenog koda. Dostupan je kao interfejs baziran na terminalu, desktop aplikacija ili IDE ekstenzija.
[**OpenCode**](/) je AI agent za kodiranje otvorenog koda. Dostupan je kao interfejs baziran na terminalu, desktop aplikacija ili IDE ekstenzija.

Hajde da počnemo.
@@ -18,17 +18,17 @@ Hajde da počnemo.
Da biste koristili OpenCode u svom terminalu, trebat će vam:
1. Moderan emulator terminala kao što su:
- [WezTerm](https://wezterm.org), multi-platforma
- [Alacritty](https://alacritty.org), više platforma
- [WezTerm](https://wezterm.org), više platformi
- [Alacritty](https://alacritty.org), više platformi
- [Ghostty](https://ghostty.org), Linux i macOS
- [Kitty](https://sw.kovidgoyal.net/kitty/), Linux i macOS
2. API ključevi za LLM provajdere koje želite koristiti.
---
## Instaliraj
## Instalacija
Najlakši način za instaliranje OpenCode je putem instalacione skripte.
Najlakši način za instaliranje OpenCode je putem instalacijske skripte.
```bash
curl -fsSL https://opencode.ai/install | bash
@@ -36,7 +36,7 @@ curl -fsSL https://opencode.ai/install | bash
Također ga možete instalirati pomoću sljedećih naredbi:
- **Korišćenje Node.js**
- **Korištenje Node.js**
<Tabs>
@@ -70,7 +70,7 @@ Također ga možete instalirati pomoću sljedećih naredbi:
</Tabs>
- **Korišćenje Homebrew-a na macOS-u i Linux-u**
- **Korištenje Homebrew-a na macOS-u i Linux-u**
```bash
brew install anomalyco/tap/opencode
@@ -78,7 +78,7 @@ Također ga možete instalirati pomoću sljedećih naredbi:
> Preporučujemo korištenje OpenCode tap za najnovija izdanja. Službenu formulu `brew install opencode` održava Homebrew tim i ažurira se rjeđe.
- **Korišćenje Parua na Arch Linuxu**
- **Korištenje Parua na Arch Linuxu**
```bash
paru -S opencode-bin
@@ -87,16 +87,16 @@ Također ga možete instalirati pomoću sljedećih naredbi:
#### Windows
:::tip[Preporučeno: Koristite WSL]
Za najbolje iskustvo na Windows-u preporučujemo korištenje [Windows Subsystem for Linux (WSL)](/docs/windows-wsl). Pruža bolje performanse i potpunu kompatibilnost sa OpenCode karakteristikama.
Za najbolje iskustvo na Windows-u preporučujemo korištenje [Windows Subsystem for Linux (WSL)](/docs/windows-wsl). Pruža bolje performanse i potpunu kompatibilnost sa OpenCode funkcijama.
:::
- **Upotreba Chocolatey-a**
- **Korištenje Chocolatey-a**
```bash
choco install opencode
```
- **Upotreba Scoop-a**
- **Korištenje Scoop-a**
```bash
scoop install opencode
@@ -108,13 +108,13 @@ Za najbolje iskustvo na Windows-u preporučujemo korištenje [Windows Subsystem
npm install -g opencode-ai
```
- **Korišćenje Mise**
- **Korištenje Mise**
```bash
mise use -g github:anomalyco/opencode
```
- **Korišćenje Dockera**
- **Korištenje Dockera**
```bash
docker run -it --rm ghcr.io/anomalyco/opencode
@@ -125,12 +125,11 @@ Također možete preuzeti binarnu datoteku iz [Releases](https://github.com/anom
---
## Konfiguriši
## Konfiguracija
Uz OpenCode možete koristiti bilo kojeg LLM provajdera tako što ćete konfigurirati njihove API ključeve.
Ako ste tek počeli koristiti LLM provajdere, preporučujemo korištenje [OpenCode Zen](/docs/zen).
To je kurirana lista modela koji su testirani i verifikovani od strane OpenCode
tim.
To je kurirana lista modela koji su testirani i verifikovani od strane OpenCode tima.
1. Pokrenite naredbu `/connect` u TUI-u, odaberite opencode i idite na [opencode.ai/auth](https://opencode.ai/auth).
@@ -152,10 +151,9 @@ Alternativno, možete odabrati jednog od drugih provajdera. [Saznajte više](/do
---
## Inicijaliziraj
## Inicijalizacija
Sada kada ste konfigurisali provajdera, možete se kretati do projekta koji
na čemu želite da radite.
Sada kada ste konfigurisali provajdera, možete se kretati do projekta na kojem želite raditi.
```bash
cd /path/to/project
@@ -173,26 +171,22 @@ Zatim inicijalizirajte OpenCode za projekat pokretanjem sljedeće naredbe.
/init
```
Ovo će omogućiti OpenCode da analizira vaš projekat i kreira `AGENTS.md` fajl u njemu
korijen projekta.
Ovo će omogućiti OpenCode da analizira vaš projekat i kreira `AGENTS.md` fajl u korijenu projekta.
:::tip
Trebali biste urezati datoteku `AGENTS.md` vašeg projekta u Git.
Trebali biste komitovati datoteku `AGENTS.md` vašeg projekta u Git.
:::
Ovo pomaže OpenCode da razumije strukturu projekta i obrasce kodiranja
korišteno.
Ovo pomaže OpenCode da razumije strukturu projekta i obrasce kodiranja koji se koriste.
---
## Upotreba
## Korištenje
Sada ste spremni da koristite OpenCode za rad na svom projektu. Slobodno pitajte
bilo šta!
Ako ste novi u korištenju agenta za AI kodiranje, evo nekoliko primjera koji bi mogli
pomoć.
Sada ste spremni da koristite OpenCode za rad na svom projektu. Slobodno pitajte bilo šta!
Ako ste novi u korištenju agenta za AI kodiranje, evo nekoliko primjera koji bi mogli pomoći.
---
### Postavljajte pitanja
### Postavljanje pitanja
Možete zamoliti OpenCode da vam objasni kodnu bazu.
:::tip
@@ -207,13 +201,12 @@ Ovo je korisno ako postoji dio kodne baze na kojem niste radili.
---
### Dodajte karakteristike
### Dodavanje funkcija
Možete zamoliti OpenCode da vašem projektu doda nove funkcije. Iako preporučujemo da ga prvo zamolite da napravi plan.
1. **Kreirajte plan**
OpenCode ima _Plan mod_ koji onemogućuje njegovu sposobnost da pravi promjene i
umjesto toga predložite _kako_ će implementirati ovu funkciju.
OpenCode ima _Plan mod_ koji onemogućuje njegovu sposobnost da pravi promjene i umjesto toga predlaže _kako_ će implementirati ovu funkciju.
Prebacite se na njega pomoću tipke **Tab**. Vidjet ćete indikator za ovo u donjem desnom uglu.
```bash frame="none" title="Switch to Plan mode"
@@ -223,35 +216,31 @@ Možete zamoliti OpenCode da vašem projektu doda nove funkcije. Iako preporuču
Hajde sada da opišemo šta želimo da uradi.
```txt frame="none"
When a user deletes a note, we'd like to flag it as deleted in the database.
Then create a screen that shows all the recently deleted notes.
From this screen, the user can undelete a note or permanently delete it.
When a user deletes a note, we'd like to flag it as deleted in the database.
Then create a screen that shows all the recently deleted notes.
From this screen, the user can undelete a note or permanently delete it.
```
Želite da date OpenCode dovoljno detalja da razumete šta želite. Pomaže
da razgovarate s njim kao da razgovarate sa mlađim programerom u svom timu.
Želite da date OpenCode dovoljno detalja da razumije šta želite. Pomaže da razgovarate s njim kao da razgovarate sa mlađim programerom u svom timu.
:::tip
Dajte OpenCode dosta konteksta i primjera koji će mu pomoći da razumije šta vi
želim.
Dajte OpenCode dosta konteksta i primjera koji će mu pomoći da razumije šta vi želite.
:::
2. **Ponovite plan**
Kada vam da plan, možete mu dati povratne informacije ili dodati više detalja.
```txt frame="none"
We'd like to design this new screen using a design I've used before.
[Image #1] Take a look at this image and use it as a reference.
We'd like to design this new screen using a design I've used before.
[Image #1] Take a look at this image and use it as a reference.
```
:::tip
Prevucite i ispustite slike u terminal da biste ih dodali u prompt.
:::
OpenCode može skenirati sve slike koje mu date i dodati ih u prompt. Možeš
učinite to povlačenjem i ispuštanjem slike u terminal.
OpenCode može skenirati sve slike koje mu date i dodati ih u prompt. Možete to učiniti povlačenjem i ispuštanjem slike u terminal.
3. **Izgradite funkciju**
Kada se osjećate ugodno s planom, vratite se na _Build mode_ do
ponovnim pritiskom na taster **Tab**.
Kada se osjećate ugodno s planom, vratite se na _Build mode_ ponovnim pritiskom na taster **Tab**.
```bash frame="none"
<TAB>
@@ -265,10 +254,9 @@ I tražeći od njega da napravi promjene.
---
### Napravite promjene
### Pravljenje izmjena
Za jednostavnije promjene, možete zamoliti OpenCode da ga direktno izgradi
bez potrebe da prvo pregledate plan.
Za jednostavnije promjene, možete zamoliti OpenCode da ga direktno izgradi bez potrebe da prvo pregledate plan.
```txt frame="none" "@packages/functions/src/settings.ts" "@packages/functions/src/notes.ts"
We need to add authentication to the /settings route. Take a look at how this is
@@ -276,12 +264,11 @@ handled in the /notes route in @packages/functions/src/notes.ts and implement
the same logic in @packages/functions/src/settings.ts
```
Želite da budete sigurni da ste pružili dobru količinu detalja kako bi OpenCode bio ispravan
promjene.
Želite da budete sigurni da ste pružili dobru količinu detalja kako bi OpenCode napravio ispravne promjene.
---
### Poništi promjene
### Poništavanje izmjena
Recimo da tražite od OpenCode da izvrši neke promjene.
@@ -289,15 +276,13 @@ Recimo da tražite od OpenCode da izvrši neke promjene.
Can you refactor the function in @packages/functions/src/api/index.ts?
```
Ali shvatate da to nije ono što ste želeli. Možete **poništiti** promjene
koristeći naredbu `/undo`.
Ali shvatate da to nije ono što ste željeli. Možete **poništiti** promjene koristeći naredbu `/undo`.
```bash frame="none"
/undo
```
OpenCode će sada poništiti promjene koje ste napravili i prikazati vašu originalnu poruku
opet.
OpenCode će sada poništiti promjene koje ste napravili i ponovo prikazati vašu originalnu poruku.
```txt frame="none" "@packages/functions/src/api/index.ts"
Can you refactor the function in @packages/functions/src/api/index.ts?
@@ -315,10 +300,9 @@ Ili **možete ponoviti** promjene koristeći naredbu `/redo`.
---
## Dijeli
## Dijeljenje
Razgovore koje imate sa OpenCode možete [dijeliti sa vašim
tim](/docs/share).
Razgovore koje imate sa OpenCode možete [dijeliti sa vašim timom](/docs/share).
```bash frame="none"
/share
@@ -332,7 +316,7 @@ Evo [primjer razgovora](https://opencode.ai/s/4XP1fce5) sa OpenCode.
---
## Prilagodi
## Prilagođavanje
I to je to! Sada ste profesionalac u korištenju OpenCode.
Da biste to učinili svojim, preporučujemo [odabir teme](/docs/themes), [prilagođavanje povezivanja tipki](/docs/keybinds), [konfiguriranje formatera koda](/docs/formatters), [kreiranje prilagođenih komandi](/docs/commands), ili igranje sa [OpenCode config](/docs/config).
@@ -1,9 +1,9 @@
---
title: Keybinds
description: Prilagodite svoje veze dugmadi.
title: Prečice tipki
description: Prilagodite svoje veze tipki.
---
OpenCode ima listu veza ključeva koje možete prilagoditi preko OpenCode konfiguracije.
OpenCode ima listu veza tipki koje možete prilagoditi preko OpenCode konfiguracije.
```json title="opencode.json"
{
@@ -105,15 +105,15 @@ OpenCode ima listu veza ključeva koje možete prilagoditi preko OpenCode konfig
---
## Leader key
## Leader tipka
OpenCode koristi `leader` ključ za većinu povezivanja tipki. Ovo izbjegava sukobe u vašem terminalu.
Prema zadanim postavkama, `ctrl+x` je vodeći taster i većina radnji zahtijeva da prvo pritisnete vodeći taster, a zatim i prečicu. Na primjer, da biste započeli novu sesiju, prvo pritisnite `ctrl+x`, a zatim pritisnite `n`.
Ne morate koristiti vodeći ključ za svoje veze tipki, ali preporučujemo da to učinite.
OpenCode koristi `leader` (vodeću) tipku za većinu povezivanja tipki. Ovo izbjegava sukobe u vašem terminalu.
Prema zadanim postavkama, `ctrl+x` je vodeća tipka i većina radnji zahtijeva da prvo pritisnete vodeću tipku, a zatim i prečicu. Na primjer, da biste započeli novu sesiju, prvo pritisnite `ctrl+x`, a zatim pritisnite `n`.
Ne morate koristiti vodeću tipku za svoje veze tipki, ali preporučujemo da to učinite.
---
## Onemogući povezivanje tastera
## Onemogućavanje prečica tipki
Možete onemogućiti spajanje tipki dodavanjem ključa u svoju konfiguraciju s vrijednošću "none".
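For illustration only (not part of this diff): one binding mapped to the "none" value described above. The `keybinds` key and the `session_share` action name are assumptions; substitute whichever binding you want to turn off.

```json title="opencode.json"
{
  "keybinds": {
    "session_share": "none"
  }
}
```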
@@ -130,21 +130,22 @@ Možete onemogućiti spajanje tipki dodavanjem ključa u svoju konfiguraciju s v
## Prečice za radnu površinu
Unos prompta aplikacije OpenCode za desktop podržava uobičajene prečice u stilu Readline/Emacs za uređivanje teksta. Oni su ugrađeni i trenutno se ne mogu konfigurirati putem `opencode.json`.
| Prečica | Akcija
|-------- | ---------------------------------------- |
| `ctrl+a` | Prelazak na početak tekućeg reda |
| `ctrl+e` | Prelazak na kraj trenutnog reda |
| `ctrl+b` | Pomeri kursor za jedan znak unazad |
| `ctrl+f` | Pomicanje kursora naprijed za jedan znak |
| `alt+b` | Pomeri kursor za jednu reč unazad |
| `alt+f` | Pomeri kursor za jednu reč unapred |
| `ctrl+d` | Izbriši znak ispod kursora |
| `ctrl+k` | Kill do kraja reda |
| `ctrl+u` | Kill do početka reda |
| `ctrl+w` | Kill prethodnu riječ |
| `alt+d` | Kill sljedeću riječ |
| `ctrl+t` | Transponirajte znakove |
Unos prompta aplikacije OpenCode za desktop podržava uobičajene prečice u stilu Readline/Emacs za uređivanje teksta. One su ugrađene i trenutno se ne mogu konfigurirati putem `opencode.json`.
| Prečica | Akcija |
| -------- | ------------------------------------------------ |
| `ctrl+a` | Prelazak na početak trenutnog reda |
| `ctrl+e` | Prelazak na kraj trenutnog reda |
| `ctrl+b` | Pomjeri kursor za jedan znak unazad |
| `ctrl+f` | Pomicanje kursora naprijed za jedan znak |
| `alt+b` | Pomjeri kursor za jednu riječ unazad |
| `alt+f` | Pomjeri kursor za jednu riječ unaprijed |
| `ctrl+d` | Izbriši znak ispod kursora |
| `ctrl+k` | Kill do kraja reda |
| `ctrl+u` | Kill do početka reda |
| `ctrl+w` | Kill prethodnu riječ |
| `alt+d` | Kill sljedeću riječ |
| `ctrl+t` | Transponirajte znakove |
| `ctrl+g` | Otkaži iskakanje / poništi odgovor na pokretanje |
---
@@ -153,7 +154,7 @@ Unos prompta aplikacije OpenCode za desktop podržava uobičajene prečice u sti
Neki terminali ne šalju modifikatorske tipke sa Enter prema zadanim postavkama. Možda ćete trebati konfigurirati svoj terminal da pošalje `Shift+Enter` kao escape sekvencu.
### Windows terminal
### Windows Terminal
Otvorite svoj `settings.json` na:
@@ -5,7 +5,7 @@ description: OpenCode se integriše sa vašim LSP serverima.
OpenCode se integriše sa vašim Language Server Protocol (LSP) serverima kako bi pomogao LLM-u u interakciji s kodnom bazom. Koristi dijagnostiku za slanje povratnih informacija modelu.
## Ugrađeno
## Ugrađeni
OpenCode dolazi sa nekoliko ugrađenih LSP servera za popularne jezike:
| LSP server | Ekstenzije | Zahtjevi
@@ -58,7 +58,7 @@ Kada opencode otvori fajl, on:
---
## Konfiguriši
## Konfiguracija
Možete prilagoditi LSP servere kroz `lsp` odjeljak u vašoj opencode konfiguraciji.
@@ -8,7 +8,7 @@ Jednom dodani, MCP alati su automatski dostupni LLM-u zajedno sa ugrađenim alat
---
#### Upozorenja
#### Upozorenje
Kada koristite MCP server, on dodaje u kontekst. Ovo se može brzo zbrojiti ako imate puno alata. Stoga preporučujemo da pazite koje MCP servere koristite.
:::tip
@@ -17,7 +17,7 @@ MCP serveri dodaju vaš kontekst, tako da želite da budete pažljivi s tim koje
Određeni MCP serveri, poput GitHub MCP servera, mogu dodati mnogo tokena i lako premašiti limit konteksta.
## Omogući
## Omogućavanje
Možete definirati MCP servere u vašoj [OpenCode Config](https://opencode.ai/docs/config/) pod `mcp`. Dodajte svaki MCP sa jedinstvenim imenom. Možete se pozvati na taj MCP po imenu kada tražite LLM.
@@ -38,7 +38,7 @@ Možete definirati MCP servere u vašoj [OpenCode Config](https://opencode.ai/do
Također možete onemogućiti server postavljanjem `enabled` na `false`. To je korisno kada ga želite privremeno isključiti bez uklanjanja iz konfiguracije.
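An illustrative sketch (outside the diff) of the `enabled: false` toggle mentioned above, placed under the `mcp` section named earlier in this file. The server name is a placeholder.

```json title="opencode.json"
{
  "mcp": {
    "example-server": {
      "enabled": false
    }
  }
}
```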
### Poništavanje daljinskih zadanih postavki
### Poništavanje udaljenih zadanih postavki
Organizacije mogu obezbijediti zadane MCP servere preko svoje krajnje tačke `.well-known/opencode`. Ovi serveri mogu biti onemogućeni prema zadanim postavkama, omogućavajući korisnicima da se odluče za one koji su im potrebni.
Da omogućite određeni server iz udaljene konfiguracije vaše organizacije, dodajte ga u svoju lokalnu konfiguraciju sa `enabled: true`:
@@ -115,7 +115,7 @@ Ovdje su sve opcije za konfiguriranje lokalnog MCP servera.
---
## Daljinski
## Udaljeno
Dodajte udaljene MCP servere postavljanjem `type` na `"remote"`.
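Again as an editor's aside: a minimal remote-server entry with `type` set to `"remote"`, per the line above. The server name and the `url` field are assumptions here, since this hunk does not show the full option list.

```json title="opencode.json"
{
  "mcp": {
    "example-remote": {
      "type": "remote",
      "url": "https://example.com/mcp"
    }
  }
}
```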
@@ -178,7 +178,7 @@ Za većinu MCP servera sa omogućenim OAuthom nije potrebna posebna konfiguracij
Ako server zahtijeva autentifikaciju, OpenCode će vas tražiti prijavu pri prvom korištenju. Ako se to ne desi, možete [ručno pokrenuti tok](#authenticating) naredbom `opencode mcp auth <server-name>`.
### Prethodno registrovano
### Prethodno registrirano
Ako imate klijentske vjerodajnice od dobavljača MCP servera, možete ih konfigurirati:
@@ -269,11 +269,11 @@ opencode mcp debug my-oauth-server
Komanda `mcp debug` prikazuje trenutni auth status, testira HTTP povezanost i pokušava OAuth discovery flow.
## Manage
## Upravljanje
Vaši MCP serveri su dostupni kao alati u OpenCode, zajedno s ugrađenim alatima. Možete njima upravljati kroz OpenCode konfiguraciju kao i bilo kojim drugim alatom.
### Global
### Globalno
To znači da ih možete omogućiti ili onemogućiti globalno.
@@ -12,7 +12,7 @@ Saznajte više o [providers](/docs/providers).
---
## Odaberite model
## Odabir modela
Nakon što konfigurirate svog provajdera, možete odabrati model koji želite upisivanjem:
@@ -41,7 +41,7 @@ Evo nekoliko modela koji dobro rade sa OpenCode, bez posebnog redosleda. (Ovo ni
---
## Postavite zadano
## Postavljanje zadanog
Da postavite jedan od ovih kao zadani model, možete postaviti ključ `model` u svom
OpenCode config.
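For illustration (not part of the diff): setting the `model` key mentioned above in `opencode.json`. The `provider/model-id` value is a placeholder for whichever provider and model you configured.

```json title="opencode.json"
{
  "model": "provider/model-id"
}
```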
@@ -58,7 +58,7 @@ Ako ste konfigurirali [prilagođenog provajdera](/docs/providers#custom), `provi
---
## Konfigurišite modele
## Konfiguracija modela
Možete globalno konfigurirati opcije modela kroz config.
@@ -180,7 +180,7 @@ Možete nadjačati postojeće varijante ili dodati svoje:
}
```
### Cycle variants
### Kruženje kroz varijante
Koristite keybind `variant_cycle` za brzo prebacivanje između varijanti. [Saznajte više](/docs/keybinds).
Some files were not shown because too many files have changed in this diff