Mirror of https://github.com/anomalyco/opencode.git (synced 2026-02-19 23:34:32 +00:00)

Compare commits: config-spl ... chore/dupl (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 1c7bab77cb | |
| | 2a75aadf02 | |
| | 7560913b7d | |
| | ba02738d34 | |
.github/workflows/duplicate-issues.yml (vendored) — 172 lines changed
@@ -17,8 +17,6 @@ jobs:
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash

@@ -28,94 +26,44 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh issue*": "allow"
},
"webfetch": "deny"
"bash": "deny",
"webfetch": "deny",
"edit": "deny",
"write": "deny"
}
ISSUE_NUMBER: ${{ github.event.issue.number }}
REPO: ${{ github.repository }}
run: |
opencode run -m opencode/claude-sonnet-4-6 "A new issue has been created:
ISSUE_TITLE=$(gh issue view "$ISSUE_NUMBER" --repo "$REPO" --json title --jq .title)
ISSUE_BODY=$(gh issue view "$ISSUE_NUMBER" --repo "$REPO" --json body --jq .body)
Issue number: ${{ github.event.issue.number }}
PROMPT=$(cat <<EOF
Check this new issue for compliance and duplicates:
Lookup this issue with gh issue view ${{ github.event.issue.number }}.
CURRENT_ISSUE_NUMBER: $ISSUE_NUMBER
You have TWO tasks. Perform both, then post a SINGLE comment (if needed).
Title: $ISSUE_TITLE
---
Description:
$ISSUE_BODY
EOF
)
TASK 1: CONTRIBUTING GUIDELINES COMPLIANCE CHECK
COMMENT=$(opencode run --agent duplicate-issue "$PROMPT")
Check whether the issue follows our contributing guidelines and issue templates.
if [ "$COMMENT" = "No action required" ]; then
exit 0
fi
This project has three issue templates that every issue MUST use one of:
BODY="_The following comment was made by an LLM, it may be inaccurate:_
1. Bug Report - requires a Description field with real content
2. Feature Request - requires a verification checkbox and description, title should start with [FEATURE]:
3. Question - requires the Question field with real content
$COMMENT"
Additionally check:
- No AI-generated walls of text (long, AI-generated descriptions are not acceptable)
- The issue has real content, not just template placeholder text left unchanged
- Bug reports should include some context about how to reproduce
- Feature requests should explain the problem or need
- We want to push for having the user provide system description & information
gh issue comment "$ISSUE_NUMBER" --repo "$REPO" --body "$BODY"
Do NOT be nitpicky about optional fields. Only flag real problems like: no template used, required fields empty or placeholder text only, obviously AI-generated walls of text, or completely empty/nonsensical content.
---
TASK 2: DUPLICATE CHECK
Search through existing issues (excluding #${{ github.event.issue.number }}) to find potential duplicates.
Consider:
1. Similar titles or descriptions
2. Same error messages or symptoms
3. Related functionality or components
4. Similar feature requests
Additionally, if the issue mentions keybinds, keyboard shortcuts, or key bindings, note the pinned keybinds issue #4997.
---
POSTING YOUR COMMENT:
Based on your findings, post a SINGLE comment on issue #${{ github.event.issue.number }}. Build the comment as follows:
If the issue is NOT compliant, start the comment with:
<!-- issue-compliance -->
Then explain what needs to be fixed and that they have 2 hours to edit the issue before it is automatically closed. Also add the label needs:compliance to the issue using: gh issue edit ${{ github.event.issue.number }} --add-label needs:compliance
If duplicates were found, include a section about potential duplicates with links.
If the issue mentions keybinds/keyboard shortcuts, include a note about #4997.
If the issue IS compliant AND no duplicates were found AND no keybind reference, do NOT comment at all.
Use this format for the comment:
[If not compliant:]
<!-- issue-compliance -->
This issue doesn't fully meet our [contributing guidelines](../blob/dev/CONTRIBUTING.md).
**What needs to be fixed:**
- [specific reasons]
Please edit this issue to address the above within **2 hours**, or it will be automatically closed.
[If duplicates found, add:]
---
This issue might be a duplicate of existing issues. Please check:
- #[issue_number]: [brief description of similarity]
[If keybind-related, add:]
For keybind-related issues, please also check our pinned keybinds documentation: #4997
[End with if not compliant:]
If you believe this was flagged incorrectly, please let a maintainer know.
Remember: post at most ONE comment combining all findings. If everything is fine, post nothing."
if [[ "$COMMENT" == *"<!-- issue-compliance -->"* ]]; then
gh issue edit "$ISSUE_NUMBER" --repo "$REPO" --add-label needs:compliance
fi

recheck-compliance:
if: github.event.action == 'edited' && contains(github.event.issue.labels.*.name, 'needs:compliance')

@@ -129,8 +77,6 @@ jobs:
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash

@@ -140,38 +86,54 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh issue*": "allow"
},
"webfetch": "deny"
"bash": "deny",
"webfetch": "deny",
"edit": "deny",
"write": "deny"
}
ISSUE_NUMBER: ${{ github.event.issue.number }}
REPO: ${{ github.repository }}
run: |
opencode run -m opencode/claude-sonnet-4-6 "Issue #${{ github.event.issue.number }} was previously flagged as non-compliant and has been edited.
ISSUE_TITLE=$(gh issue view "$ISSUE_NUMBER" --repo "$REPO" --json title --jq .title)
ISSUE_BODY=$(gh issue view "$ISSUE_NUMBER" --repo "$REPO" --json body --jq .body)
Lookup this issue with gh issue view ${{ github.event.issue.number }}.
PROMPT=$(cat <<EOF
Recheck this edited issue for compliance:
Re-check whether the issue now follows our contributing guidelines and issue templates.
MODE: recheck-compliance
CURRENT_ISSUE_NUMBER: $ISSUE_NUMBER
This project has three issue templates that every issue MUST use one of:
Title: $ISSUE_TITLE
1. Bug Report - requires a Description field with real content
2. Feature Request - requires a verification checkbox and description, title should start with [FEATURE]:
3. Question - requires the Question field with real content
Description:
$ISSUE_BODY
EOF
)
Additionally check:
- No AI-generated walls of text (long, AI-generated descriptions are not acceptable)
- The issue has real content, not just template placeholder text left unchanged
- Bug reports should include some context about how to reproduce
- Feature requests should explain the problem or need
- We want to push for having the user provide system description & information
COMMENT=$(opencode run --agent duplicate-issue "$PROMPT")
Do NOT be nitpicky about optional fields. Only flag real problems like: no template used, required fields empty or placeholder text only, obviously AI-generated walls of text, or completely empty/nonsensical content.
if [ "$COMMENT" = "No action required" ]; then
gh issue edit "$ISSUE_NUMBER" --repo "$REPO" --remove-label needs:compliance || true
If the issue is NOW compliant:
1. Remove the needs:compliance label: gh issue edit ${{ github.event.issue.number }} --remove-label needs:compliance
2. Find and delete the previous compliance comment (the one containing <!-- issue-compliance -->) using: gh api repos/${{ github.repository }}/issues/${{ github.event.issue.number }}/comments --jq '.[] | select(.body | contains(\"<!-- issue-compliance -->\")) | .id' then delete it with: gh api -X DELETE repos/${{ github.repository }}/issues/${{ github.event.issue.number }}/comments/{id}
3. Post a short comment thanking them for updating the issue.
IDS=$(gh api "repos/$REPO/issues/$ISSUE_NUMBER/comments" --jq '.[] | select(.body | contains("<!-- issue-compliance -->")) | .id')
for id in $IDS; do
gh api -X DELETE "repos/$REPO/issues/comments/$id"
done
If the issue is STILL not compliant:
Post a comment explaining what still needs to be fixed. Keep the needs:compliance label."
gh issue comment "$ISSUE_NUMBER" --repo "$REPO" --body "Thanks for updating your issue. It now meets our contributing guidelines. :+1:"
exit 0
fi
gh issue edit "$ISSUE_NUMBER" --repo "$REPO" --add-label needs:compliance
BODY="_The following comment was made by an LLM, it may be inaccurate:_
$COMMENT"
EXISTING=$(gh api "repos/$REPO/issues/$ISSUE_NUMBER/comments" --jq '[.[] | select(.body | contains("<!-- issue-compliance -->")) | .id] | last // empty')
if [ -n "$EXISTING" ]; then
gh api -X PATCH "repos/$REPO/issues/comments/$EXISTING" -f body="$BODY"
exit 0
fi
gh issue comment "$ISSUE_NUMBER" --repo "$REPO" --body "$BODY"
.github/workflows/pr-standards.yml (vendored) — 12 lines changed
@@ -6,6 +6,18 @@ on:
jobs:
check-standards:
if: |
github.event.pull_request.user.login != 'actions-user' &&
github.event.pull_request.user.login != 'opencode' &&
github.event.pull_request.user.login != 'rekram1-node' &&
github.event.pull_request.user.login != 'thdxr' &&
github.event.pull_request.user.login != 'kommander' &&
github.event.pull_request.user.login != 'jayair' &&
github.event.pull_request.user.login != 'fwang' &&
github.event.pull_request.user.login != 'adamdotdevin' &&
github.event.pull_request.user.login != 'iamdavidhill' &&
github.event.pull_request.user.login != 'R44VC0RP' &&
github.event.pull_request.user.login != 'opencode-agent[bot]'
runs-on: ubuntu-latest
permissions:
contents: read
.opencode/agent/duplicate-issue.md (new file, 98 lines)
@@ -0,0 +1,98 @@
---
mode: primary
hidden: true
model: opencode/claude-haiku-4-5
color: "#E67E22"
tools:
  "*": false
  "github-issue-search": true
---

You are a duplicate issue detection agent. When an issue is opened, your job is to search for potentially duplicate or related open issues.

You have two jobs:

1. Check if the issue follows our issue templates/contributing requirements.
2. Check for potential duplicate issues.

Use the github-issue-search tool to find potentially related issues.

IMPORTANT: The input will contain a line `CURRENT_ISSUE_NUMBER: NNNN`. Never mark that issue as a duplicate of itself.

The input may also contain `MODE: recheck-compliance`.

- When MODE is `recheck-compliance`, ONLY run compliance checks. Do not run duplicate checks. Do not include duplicate or keybind sections.
- When MODE is missing, do the full opened-issue behavior (compliance + duplicates + keybind note).

## Compliance checks

This project has three issue templates:

1. Bug Report - needs a Description field with real content.
2. Feature Request - title should start with `[FEATURE]:` and include verification checkbox + meaningful description.
3. Question - needs a Question field with real content.

Also check:

- no AI-generated walls of text
- required sections are not placeholder-only / unchanged template text
- bug reports include some repro context
- feature requests explain the problem/need
- encourage system information where relevant

Do not be nitpicky about optional fields. Only flag real issues (missing template/required content, placeholder-only content, obviously AI-generated wall of text, empty/nonsensical issue).

## Duplicate checks

Search for duplicates by trying multiple keyword combinations from the issue title/body. Prioritize:

- similar title/description
- same error/symptoms
- same component/feature area

If the issue mentions keybinds, keyboard shortcuts, or key bindings, include a note to check pinned issue #4997.

## Output rules

If MODE is `recheck-compliance` and the issue is compliant, output exactly:

No action required

If MODE is missing and the issue is compliant AND no duplicates are found AND no keybind note is needed, output exactly:

No action required

Otherwise output exactly one markdown comment body with this structure:

- In `recheck-compliance` mode: include only the non-compliant section and ending note.
- In default mode: include sections as needed (non-compliant, duplicates, keybind).

- If non-compliant, start with:

<!-- issue-compliance -->

This issue doesn't fully meet our [contributing guidelines](../blob/dev/CONTRIBUTING.md).

**What needs to be fixed:**

- [specific reason]

Please edit this issue to address the above within **2 hours**, or it will be automatically closed.

- If duplicates were found, add:

---

This issue might be a duplicate of existing issues. Please check:

- #1234: [brief reason]

- If keybind-related, add:

For keybind-related issues, please also check our pinned keybinds documentation: #4997

- If non-compliant, end with:

If you believe this was flagged incorrectly, please let a maintainer know.

Keep output concise. Do not wrap output in code fences.
.opencode/skill/bun-file-io/SKILL.md (new file, 42 lines)
@@ -0,0 +1,42 @@
---
name: bun-file-io
description: Use this when you are working on file operations like reading, writing, scanning, or deleting files. It summarizes the preferred file APIs and patterns used in this repo. It also notes when to use filesystem helpers for directories.
---

## Use this when

- Editing file I/O or scans in `packages/opencode`
- Handling directory operations or external tools

## Bun file APIs (from Bun docs)

- `Bun.file(path)` is lazy; call `text`, `json`, `stream`, `arrayBuffer`, `bytes`, `exists` to read.
- Metadata: `file.size`, `file.type`, `file.name`.
- `Bun.write(dest, input)` writes strings, buffers, Blobs, Responses, or files.
- `Bun.file(...).delete()` deletes a file.
- `file.writer()` returns a FileSink for incremental writes.
- `Bun.Glob` + `Array.fromAsync(glob.scan({ cwd, absolute, onlyFiles, dot }))` for scans.
- Use `Bun.which` to find a binary, then `Bun.spawn` to run it.
- `Bun.readableStreamToText/Bytes/JSON` for stream output.

## When to use node:fs

- Use `node:fs/promises` for directories (`mkdir`, `readdir`, recursive operations).

## Repo patterns

- Prefer Bun APIs over Node `fs` for file access.
- Check `Bun.file(...).exists()` before reading.
- For binary/large files use `arrayBuffer()` and MIME checks via `file.type`.
- Use `Bun.Glob` + `Array.fromAsync` for scans.
- Decode tool stderr with `Bun.readableStreamToText`.
- For large writes, use `Bun.write(Bun.file(path), text)`.

NOTE: Bun.file(...).exists() will return `false` if the value is a directory.
Use Filesystem.exists(...) instead if path can be file or directory

## Quick checklist

- Use Bun APIs first.
- Use `path.join`/`path.resolve` for paths.
- Prefer promise `.catch(...)` over `try/catch` when possible.
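A minimal TypeScript sketch of the patterns the skill above describes (assumes the Bun runtime; the config path, output path, and the `rg` lookup are illustrative, not part of the diff):

```ts
import path from "node:path"

// Bun.file is lazy: nothing is read until text()/json()/etc. is called.
const configPath = path.join(process.cwd(), "opencode.json")
const file = Bun.file(configPath)
if (await file.exists()) {
  const config = await file.json()
  console.log("loaded config", file.size, file.type, config)
}

// Bun.write accepts strings, buffers, Blobs, Responses, or other Bun files.
await Bun.write(path.join(process.cwd(), "out.txt"), "hello\n")

// Scan for TypeScript sources with Bun.Glob + Array.fromAsync.
const glob = new Bun.Glob("**/*.ts")
const sources = await Array.fromAsync(glob.scan({ cwd: process.cwd(), onlyFiles: true, dot: false }))
console.log(`${sources.length} TypeScript files`)

// Find an external binary, run it, and decode its output as text.
const rg = Bun.which("rg")
if (rg) {
  const proc = Bun.spawn([rg, "--version"])
  console.log(await Bun.readableStreamToText(proc.stdout))
}
```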
.opencode/tool/github-issue-search.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
/// <reference path="../env.d.ts" />
import { tool } from "@opencode-ai/plugin"
import DESCRIPTION from "./github-issue-search.txt"

async function githubFetch(endpoint: string, options: RequestInit = {}) {
  const response = await fetch(`https://api.github.com${endpoint}`, {
    ...options,
    headers: {
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
      Accept: "application/vnd.github+json",
      "Content-Type": "application/json",
      ...options.headers,
    },
  })
  if (!response.ok) {
    throw new Error(`GitHub API error: ${response.status} ${response.statusText}`)
  }
  return response.json()
}

interface Issue {
  title: string
  html_url: string
}

export default tool({
  description: DESCRIPTION,
  args: {
    query: tool.schema.string().describe("Search query for issue titles and descriptions"),
    limit: tool.schema.number().describe("Maximum number of results to return").default(10),
    offset: tool.schema.number().describe("Number of results to skip for pagination").default(0),
  },
  async execute(args) {
    const owner = "anomalyco"
    const repo = "opencode"

    const page = Math.floor(args.offset / args.limit) + 1
    const searchQuery = encodeURIComponent(`${args.query} repo:${owner}/${repo} type:issue state:open`)
    const result = await githubFetch(
      `/search/issues?q=${searchQuery}&per_page=${args.limit}&page=${page}&sort=updated&order=desc`,
    )

    if (result.total_count === 0) {
      return `No issues found matching "${args.query}"`
    }

    const issues = result.items as Issue[]

    if (issues.length === 0) {
      return `No other issues found matching "${args.query}"`
    }

    const formatted = issues.map((issue) => `${issue.title}\n${issue.html_url}`).join("\n\n")

    return `Found ${result.total_count} issues (showing ${issues.length}):\n\n${formatted}`
  },
})
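For illustration, a small sketch of how the tool above turns its arguments into a GitHub search request (the argument values here are hypothetical):

```ts
// Hypothetical arguments, using the tool's defaults for limit and a non-zero offset.
const args = { query: "keybind not working", limit: 10, offset: 20 }

// offset 20 with limit 10 -> page 3 of the GitHub search API.
const page = Math.floor(args.offset / args.limit) + 1
const q = encodeURIComponent(`${args.query} repo:anomalyco/opencode type:issue state:open`)

// The request path the tool issues, relative to https://api.github.com.
console.log(`/search/issues?q=${q}&per_page=${args.limit}&page=${page}&sort=updated&order=desc`)
```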
.opencode/tool/github-issue-search.txt (new file, 10 lines)
@@ -0,0 +1,10 @@
Use this tool to search GitHub issues by title and description.

This tool searches issues in the sst/opencode repository and returns LLM-friendly results including:
- Issue number and title
- Author
- State (open/closed/merged)
- Labels
- Description snippet

Use the query parameter to search for keywords that might appear in issue titles or descriptions.
bun.lock — 254 lines changed
@@ -288,7 +288,6 @@
"@ai-sdk/togetherai": "1.0.34",
"@ai-sdk/vercel": "1.0.33",
"@ai-sdk/xai": "2.0.51",
"@aws-sdk/credential-providers": "3.993.0",
"@clack/prompts": "1.0.0-alpha.1",
"@gitlab/gitlab-ai-provider": "3.6.0",
"@gitlab/opencode-gitlab-auth": "1.3.3",

@@ -321,7 +320,6 @@
"diff": "catalog:",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"fuzzysort": "3.1.0",
"google-auth-library": "10.5.0",
"gray-matter": "4.0.3",
"hono": "catalog:",
"hono-openapi": "catalog:",
@@ -672,35 +670,27 @@
|
||||
|
||||
"@aws-crypto/util": ["@aws-crypto/util@5.2.0", "", { "dependencies": { "@aws-sdk/types": "^3.222.0", "@smithy/util-utf8": "^2.0.0", "tslib": "^2.6.2" } }, "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity": ["@aws-sdk/client-cognito-identity@3.993.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.11", "@aws-sdk/credential-provider-node": "^3.972.10", "@aws-sdk/middleware-host-header": "^3.972.3", "@aws-sdk/middleware-logger": "^3.972.3", "@aws-sdk/middleware-recursion-detection": "^3.972.3", "@aws-sdk/middleware-user-agent": "^3.972.11", "@aws-sdk/region-config-resolver": "^3.972.3", "@aws-sdk/types": "^3.973.1", "@aws-sdk/util-endpoints": "3.993.0", "@aws-sdk/util-user-agent-browser": "^3.972.3", "@aws-sdk/util-user-agent-node": "^3.972.9", "@smithy/config-resolver": "^4.4.6", "@smithy/core": "^3.23.2", "@smithy/fetch-http-handler": "^5.3.9", "@smithy/hash-node": "^4.2.8", "@smithy/invalid-dependency": "^4.2.8", "@smithy/middleware-content-length": "^4.2.8", "@smithy/middleware-endpoint": "^4.4.16", "@smithy/middleware-retry": "^4.4.33", "@smithy/middleware-serde": "^4.2.9", "@smithy/middleware-stack": "^4.2.8", "@smithy/node-config-provider": "^4.3.8", "@smithy/node-http-handler": "^4.4.10", "@smithy/protocol-http": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.32", "@smithy/util-defaults-mode-node": "^4.2.35", "@smithy/util-endpoints": "^3.2.8", "@smithy/util-middleware": "^4.2.8", "@smithy/util-retry": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-7Ne3Yk/bgQPVebAkv7W+RfhiwTRSbfER9BtbhOa2w/+dIr902LrJf6vrZlxiqaJbGj2ALx8M+ZK1YIHVxSwu9A=="],
|
||||
|
||||
"@aws-sdk/client-s3": ["@aws-sdk/client-s3@3.933.0", "", { "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.932.0", "@aws-sdk/credential-provider-node": "3.933.0", "@aws-sdk/middleware-bucket-endpoint": "3.930.0", "@aws-sdk/middleware-expect-continue": "3.930.0", "@aws-sdk/middleware-flexible-checksums": "3.932.0", "@aws-sdk/middleware-host-header": "3.930.0", "@aws-sdk/middleware-location-constraint": "3.930.0", "@aws-sdk/middleware-logger": "3.930.0", "@aws-sdk/middleware-recursion-detection": "3.933.0", "@aws-sdk/middleware-sdk-s3": "3.932.0", "@aws-sdk/middleware-ssec": "3.930.0", "@aws-sdk/middleware-user-agent": "3.932.0", "@aws-sdk/region-config-resolver": "3.930.0", "@aws-sdk/signature-v4-multi-region": "3.932.0", "@aws-sdk/types": "3.930.0", "@aws-sdk/util-endpoints": "3.930.0", "@aws-sdk/util-user-agent-browser": "3.930.0", "@aws-sdk/util-user-agent-node": "3.932.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.2", "@smithy/eventstream-serde-browser": "^4.2.5", "@smithy/eventstream-serde-config-resolver": "^4.3.5", "@smithy/eventstream-serde-node": "^4.2.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-blob-browser": "^4.2.6", "@smithy/hash-node": "^4.2.5", "@smithy/hash-stream-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/md5-js": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.9", "@smithy/middleware-retry": "^4.4.9", "@smithy/middleware-serde": "^4.2.5", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.8", "@smithy/util-defaults-mode-node": "^4.2.11", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-stream": "^4.5.6", "@smithy/util-utf8": "^4.2.0", "@smithy/util-waiter": "^4.2.5", "tslib": "^2.6.2" } }, "sha512-KxwZvdxdCeWK6o8mpnb+kk7Kgb8V+8AjTwSXUWH1UAD85B0tjdo1cSfE5zoR5fWGol4Ml5RLez12a6LPhsoTqA=="],
|
||||
|
||||
"@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.993.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.11", "@aws-sdk/middleware-host-header": "^3.972.3", "@aws-sdk/middleware-logger": "^3.972.3", "@aws-sdk/middleware-recursion-detection": "^3.972.3", "@aws-sdk/middleware-user-agent": "^3.972.11", "@aws-sdk/region-config-resolver": "^3.972.3", "@aws-sdk/types": "^3.973.1", "@aws-sdk/util-endpoints": "3.993.0", "@aws-sdk/util-user-agent-browser": "^3.972.3", "@aws-sdk/util-user-agent-node": "^3.972.9", "@smithy/config-resolver": "^4.4.6", "@smithy/core": "^3.23.2", "@smithy/fetch-http-handler": "^5.3.9", "@smithy/hash-node": "^4.2.8", "@smithy/invalid-dependency": "^4.2.8", "@smithy/middleware-content-length": "^4.2.8", "@smithy/middleware-endpoint": "^4.4.16", "@smithy/middleware-retry": "^4.4.33", "@smithy/middleware-serde": "^4.2.9", "@smithy/middleware-stack": "^4.2.8", "@smithy/node-config-provider": "^4.3.8", "@smithy/node-http-handler": "^4.4.10", "@smithy/protocol-http": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.32", "@smithy/util-defaults-mode-node": "^4.2.35", "@smithy/util-endpoints": "^3.2.8", "@smithy/util-middleware": "^4.2.8", "@smithy/util-retry": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-VLUN+wIeNX24fg12SCbzTUBnBENlL014yMKZvRhPkcn4wHR6LKgNrjsG3fZ03Xs0XoKaGtNFi1VVrq666sGBoQ=="],
|
||||
"@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.933.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.932.0", "@aws-sdk/middleware-host-header": "3.930.0", "@aws-sdk/middleware-logger": "3.930.0", "@aws-sdk/middleware-recursion-detection": "3.933.0", "@aws-sdk/middleware-user-agent": "3.932.0", "@aws-sdk/region-config-resolver": "3.930.0", "@aws-sdk/types": "3.930.0", "@aws-sdk/util-endpoints": "3.930.0", "@aws-sdk/util-user-agent-browser": "3.930.0", "@aws-sdk/util-user-agent-node": "3.932.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.2", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.9", "@smithy/middleware-retry": "^4.4.9", "@smithy/middleware-serde": "^4.2.5", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.8", "@smithy/util-defaults-mode-node": "^4.2.11", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-zwGLSiK48z3PzKpQiDMKP85+fpIrPMF1qQOQW9OW7BGj5AuBZIisT2O4VzIgYJeh+t47MLU7VgBQL7muc+MJDg=="],
|
||||
|
||||
"@aws-sdk/client-sts": ["@aws-sdk/client-sts@3.782.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.775.0", "@aws-sdk/credential-provider-node": "3.782.0", "@aws-sdk/middleware-host-header": "3.775.0", "@aws-sdk/middleware-logger": "3.775.0", "@aws-sdk/middleware-recursion-detection": "3.775.0", "@aws-sdk/middleware-user-agent": "3.782.0", "@aws-sdk/region-config-resolver": "3.775.0", "@aws-sdk/types": "3.775.0", "@aws-sdk/util-endpoints": "3.782.0", "@aws-sdk/util-user-agent-browser": "3.775.0", "@aws-sdk/util-user-agent-node": "3.782.0", "@smithy/config-resolver": "^4.1.0", "@smithy/core": "^3.2.0", "@smithy/fetch-http-handler": "^5.0.2", "@smithy/hash-node": "^4.0.2", "@smithy/invalid-dependency": "^4.0.2", "@smithy/middleware-content-length": "^4.0.2", "@smithy/middleware-endpoint": "^4.1.0", "@smithy/middleware-retry": "^4.1.0", "@smithy/middleware-serde": "^4.0.3", "@smithy/middleware-stack": "^4.0.2", "@smithy/node-config-provider": "^4.0.2", "@smithy/node-http-handler": "^4.0.4", "@smithy/protocol-http": "^5.1.0", "@smithy/smithy-client": "^4.2.0", "@smithy/types": "^4.2.0", "@smithy/url-parser": "^4.0.2", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.8", "@smithy/util-defaults-mode-node": "^4.0.8", "@smithy/util-endpoints": "^3.0.2", "@smithy/util-middleware": "^4.0.2", "@smithy/util-retry": "^4.0.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-Q1QLY3xE2z1trgriusP/6w40mI/yJjM524bN4gs+g6YX4sZGufpa7+Dj+JjL4fz8f9BCJ3ZlI+p4WxFxH7qvdQ=="],
|
||||
|
||||
"@aws-sdk/core": ["@aws-sdk/core@3.932.0", "", { "dependencies": { "@aws-sdk/types": "3.930.0", "@aws-sdk/xml-builder": "3.930.0", "@smithy/core": "^3.18.2", "@smithy/node-config-provider": "^4.3.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-AS8gypYQCbNojwgjvZGkJocC2CoEICDx9ZJ15ILsv+MlcCVLtUJSRSx3VzJOUY2EEIaGLRrPNlIqyn/9/fySvA=="],
|
||||
|
||||
"@aws-sdk/credential-provider-cognito-identity": ["@aws-sdk/credential-provider-cognito-identity@3.972.3", "", { "dependencies": { "@aws-sdk/client-cognito-identity": "3.980.0", "@aws-sdk/types": "^3.973.1", "@smithy/property-provider": "^4.2.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-dW/DqTk90XW7hIngqntAVtJJyrkS51wcLhGz39lOMe0TlSmZl+5R/UGnAZqNbXmWuJHLzxe+MLgagxH41aTsAQ=="],
|
||||
"@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.932.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-ozge/c7NdHUDyHqro6+P5oHt8wfKSUBN+olttiVfBe9Mw3wBMpPa3gQ0pZnG+gwBkKskBuip2bMR16tqYvUSEA=="],
|
||||
|
||||
"@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.972.9", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/types": "^3.973.1", "@smithy/property-provider": "^4.2.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-ZptrOwQynfupubvcngLkbdIq/aXvl/czdpEG8XJ8mN8Nb19BR0jaK0bR+tfuMU36Ez9q4xv7GGkHFqEEP2hUUQ=="],
|
||||
"@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.932.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/types": "3.930.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-b6N9Nnlg8JInQwzBkUq5spNaXssM3h3zLxGzpPrnw0nHSIWPJPTbZzA5Ca285fcDUFuKP+qf3qkuqlAjGOdWhg=="],
|
||||
|
||||
"@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.972.11", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/types": "^3.973.1", "@smithy/fetch-http-handler": "^5.3.9", "@smithy/node-http-handler": "^4.4.10", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-stream": "^4.5.12", "tslib": "^2.6.2" } }, "sha512-hECWoOoH386bGr89NQc9vA/abkGf5TJrMREt+lhNcnSNmoBS04fK7vc3LrJBSQAUGGVj0Tz3f4dHB3w5veovig=="],
|
||||
|
||||
"@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.972.9", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/credential-provider-env": "^3.972.9", "@aws-sdk/credential-provider-http": "^3.972.11", "@aws-sdk/credential-provider-login": "^3.972.9", "@aws-sdk/credential-provider-process": "^3.972.9", "@aws-sdk/credential-provider-sso": "^3.972.9", "@aws-sdk/credential-provider-web-identity": "^3.972.9", "@aws-sdk/nested-clients": "3.993.0", "@aws-sdk/types": "^3.973.1", "@smithy/credential-provider-imds": "^4.2.8", "@smithy/property-provider": "^4.2.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-zr1csEu9n4eDiHMTYJabX1mDGuGLgjgUnNckIivvk43DocJC9/f6DefFrnUPZXE+GHtbW50YuXb+JIxKykU74A=="],
|
||||
|
||||
"@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.972.9", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/nested-clients": "3.993.0", "@aws-sdk/types": "^3.973.1", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-m4RIpVgZChv0vWS/HKChg1xLgZPpx8Z+ly9Fv7FwA8SOfuC6I3htcSaBz2Ch4bneRIiBUhwP4ziUo0UZgtJStQ=="],
|
||||
"@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.933.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/credential-provider-env": "3.932.0", "@aws-sdk/credential-provider-http": "3.932.0", "@aws-sdk/credential-provider-process": "3.932.0", "@aws-sdk/credential-provider-sso": "3.933.0", "@aws-sdk/credential-provider-web-identity": "3.933.0", "@aws-sdk/nested-clients": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-HygGyKuMG5AaGXsmM0d81miWDon55xwalRHB3UmDg3QBhtunbNIoIaWUbNTKuBZXcIN6emeeEZw/YgSMqLc0YA=="],
|
||||
|
||||
"@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.933.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.932.0", "@aws-sdk/credential-provider-http": "3.932.0", "@aws-sdk/credential-provider-ini": "3.933.0", "@aws-sdk/credential-provider-process": "3.932.0", "@aws-sdk/credential-provider-sso": "3.933.0", "@aws-sdk/credential-provider-web-identity": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-L2dE0Y7iMLammQewPKNeEh1z/fdJyYEU+/QsLBD9VEh+SXcN/FIyTi21Isw8wPZN6lMB9PDVtISzBnF8HuSFrw=="],
|
||||
|
||||
"@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.972.9", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/types": "^3.973.1", "@smithy/property-provider": "^4.2.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-gOWl0Fe2gETj5Bk151+LYKpeGi2lBDLNu+NMNpHRlIrKHdBmVun8/AalwMK8ci4uRfG5a3/+zvZBMpuen1SZ0A=="],
|
||||
"@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.932.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-BodZYKvT4p/Dkm28Ql/FhDdS1+p51bcZeMMu2TRtU8PoMDHnVDhHz27zASEKSZwmhvquxHrZHB0IGuVqjZUtSQ=="],
|
||||
|
||||
"@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.972.9", "", { "dependencies": { "@aws-sdk/client-sso": "3.993.0", "@aws-sdk/core": "^3.973.11", "@aws-sdk/token-providers": "3.993.0", "@aws-sdk/types": "^3.973.1", "@smithy/property-provider": "^4.2.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-ey7S686foGTArvFhi3ifQXmgptKYvLSGE2250BAQceMSXZddz7sUSNERGJT2S7u5KIe/kgugxrt01hntXVln6w=="],
|
||||
"@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.933.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.933.0", "@aws-sdk/core": "3.932.0", "@aws-sdk/token-providers": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/R1DBR7xNcuZIhS2RirU+P2o8E8/fOk+iLAhbqeSTq+g09fP/F6W7ouFpS5eVE2NIfWG7YBFoVddOhvuqpn51g=="],
|
||||
|
||||
"@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.972.9", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/nested-clients": "3.993.0", "@aws-sdk/types": "^3.973.1", "@smithy/property-provider": "^4.2.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-8LnfS76nHXoEc9aRRiMMpxZxJeDG0yusdyo3NvPhCgESmBUgpMa4luhGbClW5NoX/qRcGxxM6Z/esqANSNMTow=="],
|
||||
|
||||
"@aws-sdk/credential-providers": ["@aws-sdk/credential-providers@3.993.0", "", { "dependencies": { "@aws-sdk/client-cognito-identity": "3.993.0", "@aws-sdk/core": "^3.973.11", "@aws-sdk/credential-provider-cognito-identity": "^3.972.3", "@aws-sdk/credential-provider-env": "^3.972.9", "@aws-sdk/credential-provider-http": "^3.972.11", "@aws-sdk/credential-provider-ini": "^3.972.9", "@aws-sdk/credential-provider-login": "^3.972.9", "@aws-sdk/credential-provider-node": "^3.972.10", "@aws-sdk/credential-provider-process": "^3.972.9", "@aws-sdk/credential-provider-sso": "^3.972.9", "@aws-sdk/credential-provider-web-identity": "^3.972.9", "@aws-sdk/nested-clients": "3.993.0", "@aws-sdk/types": "^3.973.1", "@smithy/config-resolver": "^4.4.6", "@smithy/core": "^3.23.2", "@smithy/credential-provider-imds": "^4.2.8", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-1M/nukgPSLqe9krzOKHnE8OylUaKAiokAV3xRLdeExVHcRE7WG5uzCTKWTj1imKvPjDqXq/FWhlbbdWIn7xIwA=="],
|
||||
"@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.933.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/nested-clients": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-c7Eccw2lhFx2/+qJn3g+uIDWRuWi2A6Sz3PVvckFUEzPsP0dPUo19hlvtarwP5GzrsXn0yEPRVhpewsIaSCGaQ=="],
|
||||
|
||||
"@aws-sdk/middleware-bucket-endpoint": ["@aws-sdk/middleware-bucket-endpoint@3.930.0", "", { "dependencies": { "@aws-sdk/types": "3.930.0", "@aws-sdk/util-arn-parser": "3.893.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-cnCLWeKPYgvV4yRYPFH6pWMdUByvu2cy2BAlfsPpvnm4RaVioztyvxmQj5PmVN5fvWs5w/2d6U7le8X9iye2sA=="],
|
||||
|
||||
@@ -722,13 +712,13 @@
|
||||
|
||||
"@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.932.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/types": "3.930.0", "@aws-sdk/util-endpoints": "3.930.0", "@smithy/core": "^3.18.2", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-9BGTbJyA/4PTdwQWE9hAFIJGpsYkyEW20WON3i15aDqo5oRZwZmqaVageOD57YYqG8JDJjvcwKyDdR4cc38dvg=="],
|
||||
|
||||
"@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.993.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.11", "@aws-sdk/middleware-host-header": "^3.972.3", "@aws-sdk/middleware-logger": "^3.972.3", "@aws-sdk/middleware-recursion-detection": "^3.972.3", "@aws-sdk/middleware-user-agent": "^3.972.11", "@aws-sdk/region-config-resolver": "^3.972.3", "@aws-sdk/types": "^3.973.1", "@aws-sdk/util-endpoints": "3.993.0", "@aws-sdk/util-user-agent-browser": "^3.972.3", "@aws-sdk/util-user-agent-node": "^3.972.9", "@smithy/config-resolver": "^4.4.6", "@smithy/core": "^3.23.2", "@smithy/fetch-http-handler": "^5.3.9", "@smithy/hash-node": "^4.2.8", "@smithy/invalid-dependency": "^4.2.8", "@smithy/middleware-content-length": "^4.2.8", "@smithy/middleware-endpoint": "^4.4.16", "@smithy/middleware-retry": "^4.4.33", "@smithy/middleware-serde": "^4.2.9", "@smithy/middleware-stack": "^4.2.8", "@smithy/node-config-provider": "^4.3.8", "@smithy/node-http-handler": "^4.4.10", "@smithy/protocol-http": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.32", "@smithy/util-defaults-mode-node": "^4.2.35", "@smithy/util-endpoints": "^3.2.8", "@smithy/util-middleware": "^4.2.8", "@smithy/util-retry": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-iOq86f2H67924kQUIPOAvlmMaOAvOLoDOIb66I2YqSUpMYB6ufiuJW3RlREgskxv86S5qKzMnfy/X6CqMjK6XQ=="],
|
||||
"@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.933.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.932.0", "@aws-sdk/middleware-host-header": "3.930.0", "@aws-sdk/middleware-logger": "3.930.0", "@aws-sdk/middleware-recursion-detection": "3.933.0", "@aws-sdk/middleware-user-agent": "3.932.0", "@aws-sdk/region-config-resolver": "3.930.0", "@aws-sdk/types": "3.930.0", "@aws-sdk/util-endpoints": "3.930.0", "@aws-sdk/util-user-agent-browser": "3.930.0", "@aws-sdk/util-user-agent-node": "3.932.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.2", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.9", "@smithy/middleware-retry": "^4.4.9", "@smithy/middleware-serde": "^4.2.5", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.8", "@smithy/util-defaults-mode-node": "^4.2.11", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-o1GX0+IPlFi/D8ei9y/jj3yucJWNfPnbB5appVBWevAyUdZA5KzQ2nK/hDxiu9olTZlFEFpf1m1Rn3FaGxHqsw=="],
|
||||
|
||||
"@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.930.0", "", { "dependencies": { "@aws-sdk/types": "3.930.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-KL2JZqH6aYeQssu1g1KuWsReupdfOoxD6f1as2VC+rdwYFUu4LfzMsFfXnBvvQWWqQ7rZHWOw1T+o5gJmg7Dzw=="],
|
||||
|
||||
"@aws-sdk/signature-v4-multi-region": ["@aws-sdk/signature-v4-multi-region@3.932.0", "", { "dependencies": { "@aws-sdk/middleware-sdk-s3": "3.932.0", "@aws-sdk/types": "3.930.0", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-NCIRJvoRc9246RZHIusY1+n/neeG2yGhBGdKhghmrNdM+mLLN6Ii7CKFZjx3DhxtpHMpl1HWLTMhdVrGwP2upw=="],
|
||||
|
||||
"@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.993.0", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/nested-clients": "3.993.0", "@aws-sdk/types": "^3.973.1", "@smithy/property-provider": "^4.2.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-+35g4c+8r7sB9Sjp1KPdM8qxGn6B/shBjJtEUN4e+Edw9UEQlZKIzioOGu3UAbyE0a/s450LdLZr4wbJChtmww=="],
|
||||
"@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.933.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/nested-clients": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-Qzq7zj9yXUgAAJEbbmqRhm0jmUndl8nHG0AbxFEfCfQRVZWL96Qzx0mf8lYwT9hIMrXncLwy31HOthmbXwFRwQ=="],
|
||||
|
||||
"@aws-sdk/types": ["@aws-sdk/types@3.930.0", "", { "dependencies": { "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-we/vaAgwlEFW7IeftmCLlLMw+6hFs3DzZPJw7lVHbj/5HJ0bz9gndxEsS2lQoeJ1zhiiLqAqvXxmM43s0MBg0A=="],
|
||||
|
||||
@@ -4262,48 +4252,6 @@
|
||||
|
||||
"@aws-crypto/util/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.972.10", "", { "dependencies": { "@aws-sdk/credential-provider-env": "^3.972.9", "@aws-sdk/credential-provider-http": "^3.972.11", "@aws-sdk/credential-provider-ini": "^3.972.9", "@aws-sdk/credential-provider-process": "^3.972.9", "@aws-sdk/credential-provider-sso": "^3.972.9", "@aws-sdk/credential-provider-web-identity": "^3.972.9", "@aws-sdk/types": "^3.973.1", "@smithy/credential-provider-imds": "^4.2.8", "@smithy/property-provider": "^4.2.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-70nCESlvnzjo4LjJ8By8MYIiBogkYPSXl3WmMZfH9RZcB/Nt9qVWbFpYj6Fk1vLa4Vk8qagFVeXgxdieMxG1QA=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-aknPTb2M+G3s+0qLCx4Li/qGZH8IIYjugHMv15JTYMe6mgZO8VBpYgeGYsNMGCqCZOcWzuf900jFBG5bopfzmA=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-Ftg09xNNRqaz9QNzlfdQWfpqMCJbsQdnZVJP55jfhbKi1+FTWxGuvfPoBhDHIovqWKjqbuiew3HuhxbJ0+OjgA=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws/lambda-invoke-store": "^0.2.2", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-PY57QhzNuXHnwbJgbWYTrqIDHYSeOlhfYERTAuc16LKZpTZRJUjzBFokp9hF7u1fuGeE3D70ERXzdbMBOqQz7Q=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.972.11", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/types": "^3.973.1", "@aws-sdk/util-endpoints": "3.993.0", "@smithy/core": "^3.23.2", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-R8CvPsPHXwzIHCAza+bllY6PrctEk4lYq/SkHJz9NLoBHCcKQrbOcsfXxO6xmipSbUNIbNIUhH0lBsJGgsRdiw=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/config-resolver": "^4.4.6", "@smithy/node-config-provider": "^4.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-v4J8qYAWfOMcZ4MJUyatntOicTzEMaU7j3OpkRCGGFSL2NgXQ5VbxauIyORA+pxdKZ0qQG2tCQjQjZDlXEC3Ow=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.993.0", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-endpoints": "^3.2.8", "tslib": "^2.6.2" } }, "sha512-j6vioBeRZ4eHX4SWGvGPpwGg/xSOcK7f1GL0VM+rdf3ZFTIsUEhCFmD78B+5r2PgztcECSzEfvHQX01k8dPQPw=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-JurOwkRUcXD/5MTDBcqdyQ9eVedtAsZgw5rBwktsPTN7QtPiS2Ld1jkJepNgYoCufz1Wcut9iup7GJDoIHp8Fw=="],
|
||||
|
||||
"@aws-sdk/client-cognito-identity/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.972.9", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "^3.972.11", "@aws-sdk/types": "^3.973.1", "@smithy/node-config-provider": "^4.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-JNswdsLdQemxqaSIBL2HRhsHPUBBziAgoi5RQv6/9avmE5g5RSdt1hWr3mHJ7OxqRYf+KeB11ExWbiqfrnoeaA=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-aknPTb2M+G3s+0qLCx4Li/qGZH8IIYjugHMv15JTYMe6mgZO8VBpYgeGYsNMGCqCZOcWzuf900jFBG5bopfzmA=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-Ftg09xNNRqaz9QNzlfdQWfpqMCJbsQdnZVJP55jfhbKi1+FTWxGuvfPoBhDHIovqWKjqbuiew3HuhxbJ0+OjgA=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws/lambda-invoke-store": "^0.2.2", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-PY57QhzNuXHnwbJgbWYTrqIDHYSeOlhfYERTAuc16LKZpTZRJUjzBFokp9hF7u1fuGeE3D70ERXzdbMBOqQz7Q=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.972.11", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/types": "^3.973.1", "@aws-sdk/util-endpoints": "3.993.0", "@smithy/core": "^3.23.2", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-R8CvPsPHXwzIHCAza+bllY6PrctEk4lYq/SkHJz9NLoBHCcKQrbOcsfXxO6xmipSbUNIbNIUhH0lBsJGgsRdiw=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/config-resolver": "^4.4.6", "@smithy/node-config-provider": "^4.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-v4J8qYAWfOMcZ4MJUyatntOicTzEMaU7j3OpkRCGGFSL2NgXQ5VbxauIyORA+pxdKZ0qQG2tCQjQjZDlXEC3Ow=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.993.0", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-endpoints": "^3.2.8", "tslib": "^2.6.2" } }, "sha512-j6vioBeRZ4eHX4SWGvGPpwGg/xSOcK7f1GL0VM+rdf3ZFTIsUEhCFmD78B+5r2PgztcECSzEfvHQX01k8dPQPw=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-JurOwkRUcXD/5MTDBcqdyQ9eVedtAsZgw5rBwktsPTN7QtPiS2Ld1jkJepNgYoCufz1Wcut9iup7GJDoIHp8Fw=="],
|
||||
|
||||
"@aws-sdk/client-sso/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.972.9", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "^3.972.11", "@aws-sdk/types": "^3.973.1", "@smithy/node-config-provider": "^4.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-JNswdsLdQemxqaSIBL2HRhsHPUBBziAgoi5RQv6/9avmE5g5RSdt1hWr3mHJ7OxqRYf+KeB11ExWbiqfrnoeaA=="],
|
||||
|
||||
"@aws-sdk/client-sts/@aws-sdk/core": ["@aws-sdk/core@3.775.0", "", { "dependencies": { "@aws-sdk/types": "3.775.0", "@smithy/core": "^3.2.0", "@smithy/node-config-provider": "^4.0.2", "@smithy/property-provider": "^4.0.2", "@smithy/protocol-http": "^5.1.0", "@smithy/signature-v4": "^5.0.2", "@smithy/smithy-client": "^4.2.0", "@smithy/types": "^4.2.0", "@smithy/util-middleware": "^4.0.2", "fast-xml-parser": "4.4.1", "tslib": "^2.6.2" } }, "sha512-8vpW4WihVfz0DX+7WnnLGm3GuQER++b0IwQG35JlQMlgqnc44M//KbJPsIHA0aJUJVwJAEShgfr5dUbY8WUzaA=="],
|
||||
|
||||
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.782.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.775.0", "@aws-sdk/credential-provider-http": "3.775.0", "@aws-sdk/credential-provider-ini": "3.782.0", "@aws-sdk/credential-provider-process": "3.775.0", "@aws-sdk/credential-provider-sso": "3.782.0", "@aws-sdk/credential-provider-web-identity": "3.782.0", "@aws-sdk/types": "3.775.0", "@smithy/credential-provider-imds": "^4.0.2", "@smithy/property-provider": "^4.0.2", "@smithy/shared-ini-file-loader": "^4.0.2", "@smithy/types": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-HZiAF+TCEyKjju9dgysjiPIWgt/+VerGaeEp18mvKLNfgKz1d+/82A2USEpNKTze7v3cMFASx3CvL8yYyF7mJw=="],
|
||||
@@ -4326,80 +4274,6 @@
|
||||
|
||||
"@aws-sdk/client-sts/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.782.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.782.0", "@aws-sdk/types": "3.775.0", "@smithy/node-config-provider": "^4.0.2", "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dMFkUBgh2Bxuw8fYZQoH/u3H4afQ12VSkzEi//qFiDTwbKYq+u+RYjc8GLDM6JSK1BShMu5AVR7HD4ap1TYUnA=="],
|
||||
|
||||
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity": ["@aws-sdk/client-cognito-identity@3.980.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.5", "@aws-sdk/credential-provider-node": "^3.972.4", "@aws-sdk/middleware-host-header": "^3.972.3", "@aws-sdk/middleware-logger": "^3.972.3", "@aws-sdk/middleware-recursion-detection": "^3.972.3", "@aws-sdk/middleware-user-agent": "^3.972.5", "@aws-sdk/region-config-resolver": "^3.972.3", "@aws-sdk/types": "^3.973.1", "@aws-sdk/util-endpoints": "3.980.0", "@aws-sdk/util-user-agent-browser": "^3.972.3", "@aws-sdk/util-user-agent-node": "^3.972.3", "@smithy/config-resolver": "^4.4.6", "@smithy/core": "^3.22.0", "@smithy/fetch-http-handler": "^5.3.9", "@smithy/hash-node": "^4.2.8", "@smithy/invalid-dependency": "^4.2.8", "@smithy/middleware-content-length": "^4.2.8", "@smithy/middleware-endpoint": "^4.4.12", "@smithy/middleware-retry": "^4.4.29", "@smithy/middleware-serde": "^4.2.9", "@smithy/middleware-stack": "^4.2.8", "@smithy/node-config-provider": "^4.3.8", "@smithy/node-http-handler": "^4.4.8", "@smithy/protocol-http": "^5.3.8", "@smithy/smithy-client": "^4.11.1", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.28", "@smithy/util-defaults-mode-node": "^4.2.31", "@smithy/util-endpoints": "^3.2.8", "@smithy/util-middleware": "^4.2.8", "@smithy/util-retry": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-nLgMW2drTzv+dTo3ORCcotQPcrUaTQ+xoaDTdSaUXdZO7zbbVyk7ysE5GDTnJdZWcUjHOSB8xfNQhOTTNVPhFw=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/credential-provider-env/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/credential-provider-env/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/credential-provider-http/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/credential-provider-http/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/credential-provider-ini/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/credential-provider-ini/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/credential-provider-login/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/credential-provider-login/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.932.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-ozge/c7NdHUDyHqro6+P5oHt8wfKSUBN+olttiVfBe9Mw3wBMpPa3gQ0pZnG+gwBkKskBuip2bMR16tqYvUSEA=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.932.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/types": "3.930.0", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/node-http-handler": "^4.4.5", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/util-stream": "^4.5.6", "tslib": "^2.6.2" } }, "sha512-b6N9Nnlg8JInQwzBkUq5spNaXssM3h3zLxGzpPrnw0nHSIWPJPTbZzA5Ca285fcDUFuKP+qf3qkuqlAjGOdWhg=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.933.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/credential-provider-env": "3.932.0", "@aws-sdk/credential-provider-http": "3.932.0", "@aws-sdk/credential-provider-process": "3.932.0", "@aws-sdk/credential-provider-sso": "3.933.0", "@aws-sdk/credential-provider-web-identity": "3.933.0", "@aws-sdk/nested-clients": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-HygGyKuMG5AaGXsmM0d81miWDon55xwalRHB3UmDg3QBhtunbNIoIaWUbNTKuBZXcIN6emeeEZw/YgSMqLc0YA=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.932.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-BodZYKvT4p/Dkm28Ql/FhDdS1+p51bcZeMMu2TRtU8PoMDHnVDhHz27zASEKSZwmhvquxHrZHB0IGuVqjZUtSQ=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.933.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.933.0", "@aws-sdk/core": "3.932.0", "@aws-sdk/token-providers": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-/R1DBR7xNcuZIhS2RirU+P2o8E8/fOk+iLAhbqeSTq+g09fP/F6W7ouFpS5eVE2NIfWG7YBFoVddOhvuqpn51g=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.933.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/nested-clients": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-c7Eccw2lhFx2/+qJn3g+uIDWRuWi2A6Sz3PVvckFUEzPsP0dPUo19hlvtarwP5GzrsXn0yEPRVhpewsIaSCGaQ=="],
"@aws-sdk/credential-provider-process/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/credential-provider-process/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/credential-provider-sso/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/credential-provider-sso/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/credential-provider-web-identity/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/credential-provider-web-identity/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/credential-providers/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/credential-providers/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.972.10", "", { "dependencies": { "@aws-sdk/credential-provider-env": "^3.972.9", "@aws-sdk/credential-provider-http": "^3.972.11", "@aws-sdk/credential-provider-ini": "^3.972.9", "@aws-sdk/credential-provider-process": "^3.972.9", "@aws-sdk/credential-provider-sso": "^3.972.9", "@aws-sdk/credential-provider-web-identity": "^3.972.9", "@aws-sdk/types": "^3.973.1", "@smithy/credential-provider-imds": "^4.2.8", "@smithy/property-provider": "^4.2.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-70nCESlvnzjo4LjJ8By8MYIiBogkYPSXl3WmMZfH9RZcB/Nt9qVWbFpYj6Fk1vLa4Vk8qagFVeXgxdieMxG1QA=="],
"@aws-sdk/credential-providers/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/nested-clients/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/nested-clients/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-aknPTb2M+G3s+0qLCx4Li/qGZH8IIYjugHMv15JTYMe6mgZO8VBpYgeGYsNMGCqCZOcWzuf900jFBG5bopfzmA=="],
"@aws-sdk/nested-clients/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-Ftg09xNNRqaz9QNzlfdQWfpqMCJbsQdnZVJP55jfhbKi1+FTWxGuvfPoBhDHIovqWKjqbuiew3HuhxbJ0+OjgA=="],
"@aws-sdk/nested-clients/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws/lambda-invoke-store": "^0.2.2", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-PY57QhzNuXHnwbJgbWYTrqIDHYSeOlhfYERTAuc16LKZpTZRJUjzBFokp9hF7u1fuGeE3D70ERXzdbMBOqQz7Q=="],
"@aws-sdk/nested-clients/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.972.11", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/types": "^3.973.1", "@aws-sdk/util-endpoints": "3.993.0", "@smithy/core": "^3.23.2", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-R8CvPsPHXwzIHCAza+bllY6PrctEk4lYq/SkHJz9NLoBHCcKQrbOcsfXxO6xmipSbUNIbNIUhH0lBsJGgsRdiw=="],
"@aws-sdk/nested-clients/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/config-resolver": "^4.4.6", "@smithy/node-config-provider": "^4.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-v4J8qYAWfOMcZ4MJUyatntOicTzEMaU7j3OpkRCGGFSL2NgXQ5VbxauIyORA+pxdKZ0qQG2tCQjQjZDlXEC3Ow=="],
"@aws-sdk/nested-clients/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/nested-clients/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.993.0", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-endpoints": "^3.2.8", "tslib": "^2.6.2" } }, "sha512-j6vioBeRZ4eHX4SWGvGPpwGg/xSOcK7f1GL0VM+rdf3ZFTIsUEhCFmD78B+5r2PgztcECSzEfvHQX01k8dPQPw=="],
"@aws-sdk/nested-clients/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-JurOwkRUcXD/5MTDBcqdyQ9eVedtAsZgw5rBwktsPTN7QtPiS2Ld1jkJepNgYoCufz1Wcut9iup7GJDoIHp8Fw=="],
"@aws-sdk/nested-clients/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.972.9", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "^3.972.11", "@aws-sdk/types": "^3.973.1", "@smithy/node-config-provider": "^4.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-JNswdsLdQemxqaSIBL2HRhsHPUBBziAgoi5RQv6/9avmE5g5RSdt1hWr3mHJ7OxqRYf+KeB11ExWbiqfrnoeaA=="],
"@aws-sdk/token-providers/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/token-providers/@aws-sdk/types": ["@aws-sdk/types@3.973.1", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-DwHBiMNOB468JiX6+i34c+THsKHErYUdNQ3HexeXZvVn4zouLjgaS4FejiGSi2HyBuzuyHg7SuOPmjSvoU9NRg=="],
"@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="],
"@azure/core-http/@azure/abort-controller": ["@azure/abort-controller@1.1.0", "", { "dependencies": { "tslib": "^2.2.0" } }, "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw=="],
@@ -4930,10 +4804,6 @@
"@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="],
"@aws-sdk/client-cognito-identity/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/client-sso/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.775.0", "", { "dependencies": { "@aws-sdk/core": "3.775.0", "@aws-sdk/types": "3.775.0", "@smithy/property-provider": "^4.0.2", "@smithy/types": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-6ESVxwCbGm7WZ17kY1fjmxQud43vzJFoLd4bmlR+idQSWdqlzGDYdcfzpjDKTcivdtNrVYmFvcH1JBUwCRAZhw=="],
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.775.0", "", { "dependencies": { "@aws-sdk/core": "3.775.0", "@aws-sdk/types": "3.775.0", "@smithy/fetch-http-handler": "^5.0.2", "@smithy/node-http-handler": "^4.0.4", "@smithy/property-provider": "^4.0.2", "@smithy/protocol-http": "^5.1.0", "@smithy/smithy-client": "^4.2.0", "@smithy/types": "^4.2.0", "@smithy/util-stream": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-PjDQeDH/J1S0yWV32wCj2k5liRo0ssXMseCBEkCsD3SqsU8o5cU82b0hMX4sAib/RkglCSZqGO0xMiN0/7ndww=="],
@@ -4946,54 +4816,6 @@
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.782.0", "", { "dependencies": { "@aws-sdk/core": "3.775.0", "@aws-sdk/nested-clients": "3.782.0", "@aws-sdk/types": "3.775.0", "@smithy/property-provider": "^4.0.2", "@smithy/types": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-xCna0opVPaueEbJoclj5C6OpDNi0Gynj+4d7tnuXGgQhTHPyAz8ZyClkVqpi5qvHTgxROdUEDxWqEO5jqRHZHQ=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/core": ["@aws-sdk/core@3.973.11", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws-sdk/xml-builder": "^3.972.5", "@smithy/core": "^3.23.2", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/signature-v4": "^5.3.8", "@smithy/smithy-client": "^4.11.5", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-wdQ8vrvHkKIV7yNUKXyjPWKCdYEUrZTHJ8Ojd5uJxXp9vqPCkUR1dpi1NtOLcrDgueJH7MUH5lQZxshjFPSbDA=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.972.10", "", { "dependencies": { "@aws-sdk/credential-provider-env": "^3.972.9", "@aws-sdk/credential-provider-http": "^3.972.11", "@aws-sdk/credential-provider-ini": "^3.972.9", "@aws-sdk/credential-provider-process": "^3.972.9", "@aws-sdk/credential-provider-sso": "^3.972.9", "@aws-sdk/credential-provider-web-identity": "^3.972.9", "@aws-sdk/types": "^3.973.1", "@smithy/credential-provider-imds": "^4.2.8", "@smithy/property-provider": "^4.2.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-70nCESlvnzjo4LjJ8By8MYIiBogkYPSXl3WmMZfH9RZcB/Nt9qVWbFpYj6Fk1vLa4Vk8qagFVeXgxdieMxG1QA=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-aknPTb2M+G3s+0qLCx4Li/qGZH8IIYjugHMv15JTYMe6mgZO8VBpYgeGYsNMGCqCZOcWzuf900jFBG5bopfzmA=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-Ftg09xNNRqaz9QNzlfdQWfpqMCJbsQdnZVJP55jfhbKi1+FTWxGuvfPoBhDHIovqWKjqbuiew3HuhxbJ0+OjgA=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@aws/lambda-invoke-store": "^0.2.2", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-PY57QhzNuXHnwbJgbWYTrqIDHYSeOlhfYERTAuc16LKZpTZRJUjzBFokp9hF7u1fuGeE3D70ERXzdbMBOqQz7Q=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.972.11", "", { "dependencies": { "@aws-sdk/core": "^3.973.11", "@aws-sdk/types": "^3.973.1", "@aws-sdk/util-endpoints": "3.993.0", "@smithy/core": "^3.23.2", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-R8CvPsPHXwzIHCAza+bllY6PrctEk4lYq/SkHJz9NLoBHCcKQrbOcsfXxO6xmipSbUNIbNIUhH0lBsJGgsRdiw=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/config-resolver": "^4.4.6", "@smithy/node-config-provider": "^4.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-v4J8qYAWfOMcZ4MJUyatntOicTzEMaU7j3OpkRCGGFSL2NgXQ5VbxauIyORA+pxdKZ0qQG2tCQjQjZDlXEC3Ow=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.980.0", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-endpoints": "^3.2.8", "tslib": "^2.6.2" } }, "sha512-AjKBNEc+rjOZQE1HwcD9aCELqg1GmUj1rtICKuY8cgwB73xJ4U/kNyqKKpN2k9emGqlfDY2D8itIp/vDc6OKpw=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.972.3", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-JurOwkRUcXD/5MTDBcqdyQ9eVedtAsZgw5rBwktsPTN7QtPiS2Ld1jkJepNgYoCufz1Wcut9iup7GJDoIHp8Fw=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.972.9", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "^3.972.11", "@aws-sdk/types": "^3.973.1", "@smithy/node-config-provider": "^4.3.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-JNswdsLdQemxqaSIBL2HRhsHPUBBziAgoi5RQv6/9avmE5g5RSdt1hWr3mHJ7OxqRYf+KeB11ExWbiqfrnoeaA=="],
"@aws-sdk/credential-provider-env/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/credential-provider-http/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/credential-provider-ini/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/credential-provider-login/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.933.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.932.0", "@aws-sdk/middleware-host-header": "3.930.0", "@aws-sdk/middleware-logger": "3.930.0", "@aws-sdk/middleware-recursion-detection": "3.933.0", "@aws-sdk/middleware-user-agent": "3.932.0", "@aws-sdk/region-config-resolver": "3.930.0", "@aws-sdk/types": "3.930.0", "@aws-sdk/util-endpoints": "3.930.0", "@aws-sdk/util-user-agent-browser": "3.930.0", "@aws-sdk/util-user-agent-node": "3.932.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.2", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.9", "@smithy/middleware-retry": "^4.4.9", "@smithy/middleware-serde": "^4.2.5", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.8", "@smithy/util-defaults-mode-node": "^4.2.11", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-o1GX0+IPlFi/D8ei9y/jj3yucJWNfPnbB5appVBWevAyUdZA5KzQ2nK/hDxiu9olTZlFEFpf1m1Rn3FaGxHqsw=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.933.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.932.0", "@aws-sdk/middleware-host-header": "3.930.0", "@aws-sdk/middleware-logger": "3.930.0", "@aws-sdk/middleware-recursion-detection": "3.933.0", "@aws-sdk/middleware-user-agent": "3.932.0", "@aws-sdk/region-config-resolver": "3.930.0", "@aws-sdk/types": "3.930.0", "@aws-sdk/util-endpoints": "3.930.0", "@aws-sdk/util-user-agent-browser": "3.930.0", "@aws-sdk/util-user-agent-node": "3.932.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.2", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.9", "@smithy/middleware-retry": "^4.4.9", "@smithy/middleware-serde": "^4.2.5", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.8", "@smithy/util-defaults-mode-node": "^4.2.11", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-zwGLSiK48z3PzKpQiDMKP85+fpIrPMF1qQOQW9OW7BGj5AuBZIisT2O4VzIgYJeh+t47MLU7VgBQL7muc+MJDg=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.933.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/nested-clients": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-Qzq7zj9yXUgAAJEbbmqRhm0jmUndl8nHG0AbxFEfCfQRVZWL96Qzx0mf8lYwT9hIMrXncLwy31HOthmbXwFRwQ=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.933.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.932.0", "@aws-sdk/middleware-host-header": "3.930.0", "@aws-sdk/middleware-logger": "3.930.0", "@aws-sdk/middleware-recursion-detection": "3.933.0", "@aws-sdk/middleware-user-agent": "3.932.0", "@aws-sdk/region-config-resolver": "3.930.0", "@aws-sdk/types": "3.930.0", "@aws-sdk/util-endpoints": "3.930.0", "@aws-sdk/util-user-agent-browser": "3.930.0", "@aws-sdk/util-user-agent-node": "3.932.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.2", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.9", "@smithy/middleware-retry": "^4.4.9", "@smithy/middleware-serde": "^4.2.5", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.8", "@smithy/util-defaults-mode-node": "^4.2.11", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-o1GX0+IPlFi/D8ei9y/jj3yucJWNfPnbB5appVBWevAyUdZA5KzQ2nK/hDxiu9olTZlFEFpf1m1Rn3FaGxHqsw=="],
"@aws-sdk/credential-provider-process/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/credential-provider-sso/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/credential-provider-web-identity/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/credential-providers/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/token-providers/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@azure/core-xml/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
@@ -5432,10 +5254,6 @@
"@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="],
"@aws-sdk/client-cognito-identity/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/client-sso/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.782.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.775.0", "@aws-sdk/middleware-host-header": "3.775.0", "@aws-sdk/middleware-logger": "3.775.0", "@aws-sdk/middleware-recursion-detection": "3.775.0", "@aws-sdk/middleware-user-agent": "3.782.0", "@aws-sdk/region-config-resolver": "3.775.0", "@aws-sdk/types": "3.775.0", "@aws-sdk/util-endpoints": "3.782.0", "@aws-sdk/util-user-agent-browser": "3.775.0", "@aws-sdk/util-user-agent-node": "3.782.0", "@smithy/config-resolver": "^4.1.0", "@smithy/core": "^3.2.0", "@smithy/fetch-http-handler": "^5.0.2", "@smithy/hash-node": "^4.0.2", "@smithy/invalid-dependency": "^4.0.2", "@smithy/middleware-content-length": "^4.0.2", "@smithy/middleware-endpoint": "^4.1.0", "@smithy/middleware-retry": "^4.1.0", "@smithy/middleware-serde": "^4.0.3", "@smithy/middleware-stack": "^4.0.2", "@smithy/node-config-provider": "^4.0.2", "@smithy/node-http-handler": "^4.0.4", "@smithy/protocol-http": "^5.1.0", "@smithy/smithy-client": "^4.2.0", "@smithy/types": "^4.2.0", "@smithy/url-parser": "^4.0.2", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.8", "@smithy/util-defaults-mode-node": "^4.0.8", "@smithy/util-endpoints": "^3.0.2", "@smithy/util-middleware": "^4.0.2", "@smithy/util-retry": "^4.0.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-QOYC8q7luzHFXrP0xYAqBctoPkynjfV0r9dqntFu4/IWMTyC1vlo1UTxFAjIPyclYw92XJyEkVCVg9v/nQnsUA=="],
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.782.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.775.0", "@aws-sdk/middleware-host-header": "3.775.0", "@aws-sdk/middleware-logger": "3.775.0", "@aws-sdk/middleware-recursion-detection": "3.775.0", "@aws-sdk/middleware-user-agent": "3.782.0", "@aws-sdk/region-config-resolver": "3.775.0", "@aws-sdk/types": "3.775.0", "@aws-sdk/util-endpoints": "3.782.0", "@aws-sdk/util-user-agent-browser": "3.775.0", "@aws-sdk/util-user-agent-node": "3.782.0", "@smithy/config-resolver": "^4.1.0", "@smithy/core": "^3.2.0", "@smithy/fetch-http-handler": "^5.0.2", "@smithy/hash-node": "^4.0.2", "@smithy/invalid-dependency": "^4.0.2", "@smithy/middleware-content-length": "^4.0.2", "@smithy/middleware-endpoint": "^4.1.0", "@smithy/middleware-retry": "^4.1.0", "@smithy/middleware-serde": "^4.0.3", "@smithy/middleware-stack": "^4.0.2", "@smithy/node-config-provider": "^4.0.2", "@smithy/node-http-handler": "^4.0.4", "@smithy/protocol-http": "^5.1.0", "@smithy/smithy-client": "^4.2.0", "@smithy/types": "^4.2.0", "@smithy/url-parser": "^4.0.2", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.8", "@smithy/util-defaults-mode-node": "^4.0.8", "@smithy/util-endpoints": "^3.0.2", "@smithy/util-middleware": "^4.0.2", "@smithy/util-retry": "^4.0.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-5GlJBejo8wqMpSSEKb45WE82YxI2k73YuebjLH/eWDNQeE6VI5Bh9lA1YQ7xNkLLH8hIsb0pSfKVuwh0VEzVrg=="],
@@ -5444,32 +5262,6 @@
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.782.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.775.0", "@aws-sdk/middleware-host-header": "3.775.0", "@aws-sdk/middleware-logger": "3.775.0", "@aws-sdk/middleware-recursion-detection": "3.775.0", "@aws-sdk/middleware-user-agent": "3.782.0", "@aws-sdk/region-config-resolver": "3.775.0", "@aws-sdk/types": "3.775.0", "@aws-sdk/util-endpoints": "3.782.0", "@aws-sdk/util-user-agent-browser": "3.775.0", "@aws-sdk/util-user-agent-node": "3.782.0", "@smithy/config-resolver": "^4.1.0", "@smithy/core": "^3.2.0", "@smithy/fetch-http-handler": "^5.0.2", "@smithy/hash-node": "^4.0.2", "@smithy/invalid-dependency": "^4.0.2", "@smithy/middleware-content-length": "^4.0.2", "@smithy/middleware-endpoint": "^4.1.0", "@smithy/middleware-retry": "^4.1.0", "@smithy/middleware-serde": "^4.0.3", "@smithy/middleware-stack": "^4.0.2", "@smithy/node-config-provider": "^4.0.2", "@smithy/node-http-handler": "^4.0.4", "@smithy/protocol-http": "^5.1.0", "@smithy/smithy-client": "^4.2.0", "@smithy/types": "^4.2.0", "@smithy/url-parser": "^4.0.2", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.8", "@smithy/util-defaults-mode-node": "^4.0.8", "@smithy/util-endpoints": "^3.0.2", "@smithy/util-middleware": "^4.0.2", "@smithy/util-retry": "^4.0.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-QOYC8q7luzHFXrP0xYAqBctoPkynjfV0r9dqntFu4/IWMTyC1vlo1UTxFAjIPyclYw92XJyEkVCVg9v/nQnsUA=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.5", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.3.6", "tslib": "^2.6.2" } }, "sha512-mCae5Ys6Qm1LDu0qdGwx2UQ63ONUe+FHw908fJzLDqFKTDBK4LDZUqKWm4OkTCNFq19bftjsBSESIGLD/s3/rA=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/middleware-user-agent/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.993.0", "", { "dependencies": { "@aws-sdk/types": "^3.973.1", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-endpoints": "^3.2.8", "tslib": "^2.6.2" } }, "sha512-j6vioBeRZ4eHX4SWGvGPpwGg/xSOcK7f1GL0VM+rdf3ZFTIsUEhCFmD78B+5r2PgztcECSzEfvHQX01k8dPQPw=="],
"@aws-sdk/credential-provider-env/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/credential-provider-http/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/credential-provider-ini/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/credential-provider-login/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.933.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.932.0", "@aws-sdk/middleware-host-header": "3.930.0", "@aws-sdk/middleware-logger": "3.930.0", "@aws-sdk/middleware-recursion-detection": "3.933.0", "@aws-sdk/middleware-user-agent": "3.932.0", "@aws-sdk/region-config-resolver": "3.930.0", "@aws-sdk/types": "3.930.0", "@aws-sdk/util-endpoints": "3.930.0", "@aws-sdk/util-user-agent-browser": "3.930.0", "@aws-sdk/util-user-agent-node": "3.932.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.2", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.9", "@smithy/middleware-retry": "^4.4.9", "@smithy/middleware-serde": "^4.2.5", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.5", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.8", "@smithy/util-defaults-mode-node": "^4.2.11", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-o1GX0+IPlFi/D8ei9y/jj3yucJWNfPnbB5appVBWevAyUdZA5KzQ2nK/hDxiu9olTZlFEFpf1m1Rn3FaGxHqsw=="],
"@aws-sdk/credential-provider-process/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/credential-provider-sso/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/credential-provider-web-identity/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/credential-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/token-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@jsx-email/cli/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"@jsx-email/cli/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],
@@ -5620,34 +5412,8 @@
"@astrojs/check/yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
"@aws-sdk/client-cognito-identity/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/client-sso/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.782.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.775.0", "@aws-sdk/middleware-host-header": "3.775.0", "@aws-sdk/middleware-logger": "3.775.0", "@aws-sdk/middleware-recursion-detection": "3.775.0", "@aws-sdk/middleware-user-agent": "3.782.0", "@aws-sdk/region-config-resolver": "3.775.0", "@aws-sdk/types": "3.775.0", "@aws-sdk/util-endpoints": "3.782.0", "@aws-sdk/util-user-agent-browser": "3.775.0", "@aws-sdk/util-user-agent-node": "3.782.0", "@smithy/config-resolver": "^4.1.0", "@smithy/core": "^3.2.0", "@smithy/fetch-http-handler": "^5.0.2", "@smithy/hash-node": "^4.0.2", "@smithy/invalid-dependency": "^4.0.2", "@smithy/middleware-content-length": "^4.0.2", "@smithy/middleware-endpoint": "^4.1.0", "@smithy/middleware-retry": "^4.1.0", "@smithy/middleware-serde": "^4.0.3", "@smithy/middleware-stack": "^4.0.2", "@smithy/node-config-provider": "^4.0.2", "@smithy/node-http-handler": "^4.0.4", "@smithy/protocol-http": "^5.1.0", "@smithy/smithy-client": "^4.2.0", "@smithy/types": "^4.2.0", "@smithy/url-parser": "^4.0.2", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.8", "@smithy/util-defaults-mode-node": "^4.0.8", "@smithy/util-endpoints": "^3.0.2", "@smithy/util-middleware": "^4.0.2", "@smithy/util-retry": "^4.0.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-QOYC8q7luzHFXrP0xYAqBctoPkynjfV0r9dqntFu4/IWMTyC1vlo1UTxFAjIPyclYw92XJyEkVCVg9v/nQnsUA=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="],
"@aws-sdk/credential-provider-env/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/credential-provider-http/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/credential-provider-ini/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/credential-provider-login/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/credential-provider-process/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/credential-provider-sso/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/credential-provider-web-identity/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/credential-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@aws-sdk/token-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@jsx-email/cli/tailwindcss/chokidar/readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"@solidjs/start/shiki/@shikijs/engine-javascript/oniguruma-to-es/regex": ["regex@5.1.1", "", { "dependencies": { "regex-utilities": "^2.3.0" } }, "sha512-dN5I359AVGPnwzJm2jN1k0W9LPZ+ePvoOeVMMfqIMFz53sSwXkxaJoxr50ptnsC771lK95BnTrVSZxq0b9yCGw=="],
@@ -5682,8 +5448,6 @@
"tw-to-css/tailwindcss/chokidar/readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"archiver-utils/glob/jackspeak/@isaacs/cliui/string-width/emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="],
"archiver-utils/glob/jackspeak/@isaacs/cliui/wrap-ansi/ansi-styles": ["ansi-styles@6.2.3", "", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="],
@@ -1,8 +1,8 @@
 {
   "nodeModules": {
-    "x86_64-linux": "sha256-zs3o4OrLGqECnOxzbawP1UC+a7U3pZKr9QE+36qW+iA=",
-    "aarch64-linux": "sha256-bg0xtNJBbaZpDleCw+S6aay9Ntcil/h4HW7a1jGfc8Q=",
-    "aarch64-darwin": "sha256-alEZaFnNgd/7evGv+HLUieeRr8+YVN/FxhH2sNQBMcQ=",
-    "x86_64-darwin": "sha256-NMBZX6Y7JCUqK6ntCoaf7/a6tFArzDSV/TnBCTtwGMw="
+    "x86_64-linux": "sha256-7y6gQyIxyrdp2DaG/0oOEpuL+1n9oa8arUn1CuDiDhA=",
+    "aarch64-linux": "sha256-7dnHO2WqQZ9A8cG3EC8p7408YR9n2F5C6DG5rNWHqNY=",
+    "aarch64-darwin": "sha256-jxjhnVfE61RVOHaWvDO4mGLk6guQ8jHeXv/pbu5nbaE=",
+    "x86_64-darwin": "sha256-22yM4FEtVxGWRug6H0rKog86Q/cYE3QsADrRbLeJKVQ="
   }
 }
@@ -320,6 +320,8 @@ export const Terminal = (props: TerminalProps) => {
const mod = loaded.mod
const g = loaded.ghostty

const once = { value: false }

const restore = typeof local.pty.buffer === "string" ? local.pty.buffer : ""
const restoreSize =
restore &&
@@ -414,28 +416,20 @@ export const Terminal = (props: TerminalProps) => {
cleanups.push(() => window.removeEventListener("resize", handleResize))
}

const write = (data: string) =>
new Promise<void>((resolve) => {
if (!output) {
resolve()
return
}
output.push(data)
output.flush(resolve)
})

if (restore && restoreSize) {
await write(restore)
fit.fit()
scheduleSize(t.cols, t.rows)
if (typeof local.pty.scrollY === "number") t.scrollToLine(local.pty.scrollY)
startResize()
t.write(restore, () => {
fit.fit()
scheduleSize(t.cols, t.rows)
if (typeof local.pty.scrollY === "number") t.scrollToLine(local.pty.scrollY)
startResize()
})
} else {
fit.fit()
scheduleSize(t.cols, t.rows)
if (restore) {
await write(restore)
if (typeof local.pty.scrollY === "number") t.scrollToLine(local.pty.scrollY)
t.write(restore, () => {
if (typeof local.pty.scrollY === "number") t.scrollToLine(local.pty.scrollY)
})
}
startResize()
}
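
Note on the hunk above: the restore path drops the hand-rolled write/flush Promise and instead relies on the terminal's own write callback, which fires once the data has been parsed. A minimal sketch of that pattern in TypeScript, assuming an xterm.js-style write(data, cb) signature; the helper name writeAsync is illustrative and not part of the diff:

function writeAsync(term: { write(data: string, cb?: () => void): void }, data: string): Promise<void> {
  // Resolve once the terminal reports the chunk as processed.
  return new Promise((resolve) => term.write(data, resolve))
}

With the callback form used directly, the refit and scroll restoration simply move inside the callback instead of following an await.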
@@ -444,32 +438,38 @@ export const Terminal = (props: TerminalProps) => {
// console.log("Scroll position:", ydisp)
// })

const once = { value: false }
let closing = false

const url = new URL(sdk.url + `/pty/${local.pty.id}/connect`)
url.searchParams.set("directory", sdk.directory)
url.searchParams.set("cursor", String(start !== undefined ? start : local.pty.buffer ? -1 : 0))
url.protocol = url.protocol === "https:" ? "wss:" : "ws:"
url.username = server.current?.http.username ?? ""
url.password = server.current?.http.password ?? ""

const socket = new WebSocket(url)
socket.binaryType = "arraybuffer"
ws = socket
cleanups.push(() => {
if (socket.readyState !== WebSocket.CLOSED && socket.readyState !== WebSocket.CLOSING) socket.close()
})
if (disposed) {
cleanup()
return
}

const handleOpen = () => {
local.onConnect?.()
scheduleSize(t.cols, t.rows)
}
socket.addEventListener("open", handleOpen)
cleanups.push(() => socket.removeEventListener("open", handleOpen))

if (socket.readyState === WebSocket.OPEN) handleOpen()

const decoder = new TextDecoder()

const handleMessage = (event: MessageEvent) => {
if (disposed) return
if (closing) return
if (event.data instanceof ArrayBuffer) {
// WebSocket control frame: 0x00 + UTF-8 JSON (currently { cursor }).
const bytes = new Uint8Array(event.data)
if (bytes[0] !== 0) return
const json = decoder.decode(bytes.subarray(1))
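
The comment in the hunk above describes the framing used on the PTY socket: a binary message whose first byte is 0x00 followed by UTF-8 JSON. A hedged sketch of a standalone decoder for that framing; the { cursor } payload shape is taken from the comment, everything else is illustrative:

function decodeControlFrame(data: ArrayBuffer): { cursor?: number } | null {
  const bytes = new Uint8Array(data)
  // Data frames (raw terminal output) do not start with 0x00; only control frames do.
  if (bytes.length === 0 || bytes[0] !== 0) return null
  const json = new TextDecoder().decode(bytes.subarray(1))
  return JSON.parse(json) as { cursor?: number }
}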
@@ -491,20 +491,20 @@ export const Terminal = (props: TerminalProps) => {
cursor += data.length
}
socket.addEventListener("message", handleMessage)
cleanups.push(() => socket.removeEventListener("message", handleMessage))

const handleError = (error: Event) => {
if (disposed) return
if (closing) return
if (once.value) return
once.value = true
console.error("WebSocket error:", error)
local.onConnectError?.(error)
}
socket.addEventListener("error", handleError)
cleanups.push(() => socket.removeEventListener("error", handleError))

const handleClose = (event: CloseEvent) => {
if (disposed) return
if (closing) return
// Normal closure (code 1000) means PTY process exited - server event handles cleanup
// For other codes (network issues, server restart), trigger error handler
if (event.code !== 1000) {
@@ -514,15 +514,7 @@ export const Terminal = (props: TerminalProps) => {
|
||||
}
|
||||
}
|
||||
socket.addEventListener("close", handleClose)
|
||||
|
||||
cleanups.push(() => {
|
||||
closing = true
|
||||
socket.removeEventListener("open", handleOpen)
|
||||
socket.removeEventListener("message", handleMessage)
|
||||
socket.removeEventListener("error", handleError)
|
||||
socket.removeEventListener("close", handleClose)
|
||||
if (socket.readyState !== WebSocket.CLOSED && socket.readyState !== WebSocket.CLOSING) socket.close(1000)
|
||||
})
|
||||
cleanups.push(() => socket.removeEventListener("close", handleClose))
|
||||
}
|
||||
|
||||
void run().catch((err) => {
|
||||
|
||||
@@ -1,5 +1,5 @@
import { createSimpleContext } from "@opencode-ai/ui/context"
import type { AsyncStorage, SyncStorage } from "@solid-primitives/storage"
import { AsyncStorage, SyncStorage } from "@solid-primitives/storage"
import type { Accessor } from "solid-js"

type PickerPaths = string | string[] | null
@@ -58,7 +58,7 @@ export type Platform = {
fetch?: typeof fetch

/** Get the configured default server URL (platform-specific) */
getDefaultServerUrl?(): Promise<string | null>
getDefaultServerUrl?(): Promise<string | null> | string | null

/** Set the default server URL to use on app startup (platform-specific) */
setDefaultServerUrl?(url: string | null): Promise<void> | void

@@ -106,7 +106,7 @@ const platform: Platform = {
forward,
restart,
notify,
getDefaultServerUrl: async () => readDefaultServerUrl(),
getDefaultServerUrl: readDefaultServerUrl,
setDefaultServerUrl: writeDefaultServerUrl,
}


@@ -366,10 +366,7 @@ export function MessageTimeline(props: {
}}
onClick={props.onAutoScrollInteraction}
class="relative min-w-0 w-full h-full overflow-y-auto session-scroller"
style={{
"--session-title-height": showHeader() ? "40px" : "0px",
"--sticky-accordion-top": showHeader() ? "64px" : "0px",
}}
style={{ "--session-title-height": showHeader() ? "40px" : "0px" }}
>
<Show when={showHeader()}>
<div

@@ -38,34 +38,9 @@ export function TerminalPanel() {

const [store, setStore] = createStore({
autoCreated: false,
everOpened: false,
activeDraggable: undefined as string | undefined,
})

const rendered = createMemo(() => isDesktop() && (opened() || store.everOpened))

createEffect(
on(open, (isOpen, prev) => {
if (isOpen) {
if (!store.everOpened) setStore("everOpened", true)
const activeId = terminal.active()
if (!activeId) return
if (document.activeElement instanceof HTMLElement) {
document.activeElement.blur()
}
setTimeout(() => focusTerminalById(activeId), 0)
return
}

if (!prev) return
const panel = document.getElementById("terminal-panel")
const activeElement = document.activeElement
if (!panel || !(activeElement instanceof HTMLElement)) return
if (!panel.contains(activeElement)) return
activeElement.blur()
}),
)

createEffect(() => {
if (!opened()) {
setStore("autoCreated", false)
@@ -92,7 +67,7 @@ export function TerminalPanel() {
on(
() => terminal.active(),
(activeId) => {
if (!activeId || !open()) return
if (!activeId || !opened()) return
if (document.activeElement instanceof HTMLElement) {
document.activeElement.blur()
}
@@ -158,32 +133,23 @@ export function TerminalPanel() {
}

return (
<Show when={rendered()}>
<Show when={open()}>
<div
id="terminal-panel"
role="region"
aria-label={language.t("terminal.title")}
classList={{
"relative w-full flex flex-col shrink-0 overflow-hidden": true,
"border-t border-border-weak-base": open(),
"pointer-events-none": !open(),
}}
style={{
height: `${height()}px`,
display: open() ? "flex" : "none",
}}
class="relative w-full flex flex-col shrink-0 border-t border-border-weak-base"
style={{ height: `${height()}px` }}
>
<Show when={open()}>
<ResizeHandle
direction="vertical"
size={height()}
min={100}
max={typeof window === "undefined" ? 1000 : window.innerHeight * 0.6}
collapseThreshold={50}
onResize={layout.terminal.resize}
onCollapse={close}
/>
</Show>
<ResizeHandle
direction="vertical"
size={height()}
min={100}
max={typeof window === "undefined" ? 1000 : window.innerHeight * 0.6}
collapseThreshold={50}
onResize={layout.terminal.resize}
onCollapse={close}
/>
<Show
when={terminal.ready()}
fallback={

@@ -7,7 +7,7 @@
"typecheck": "tsgo --noEmit",
"dev": "vite dev --host 0.0.0.0",
"dev:remote": "VITE_AUTH_URL=https://auth.dev.opencode.ai VITE_STRIPE_PUBLISHABLE_KEY=pk_test_51RtuLNE7fOCwHSD4mewwzFejyytjdGoSDK7CAvhbffwaZnPbNb2rwJICw6LTOXCmWO320fSNXvb5NzI08RZVkAxd00syfqrW7t bun sst shell --stage=dev bun dev",
"build": "./script/generate-sitemap.ts && vite build && ../../opencode/script/schema.ts ./.output/public/config.json ./.output/public/tui.json",
"build": "./script/generate-sitemap.ts && vite build && ../../opencode/script/schema.ts ./.output/public/config.json",
"start": "vite start"
},
"dependencies": {

@@ -426,12 +426,6 @@ void listenForDeepLinks()
render(() => {
const platform = createPlatform()

const [defaultServer] = createResource(() =>
platform.getDefaultServerUrl?.().then((url) => {
if (url) return ServerConnection.key({ type: "http", http: { url } })
}),
)

function handleClick(e: MouseEvent) {
const link = (e.target as HTMLElement).closest("a.external-link") as HTMLAnchorElement | null
if (link?.href) {
@@ -472,13 +466,9 @@ render(() => {
}

return (
<Show when={defaultServer.loading ? false : defaultServer.latest}>
{(defaultServer) => (
<AppInterface defaultServer={defaultServer() ?? ServerConnection.key(server)} servers={[server]}>
<Inner />
</AppInterface>
)}
</Show>
<AppInterface defaultServer={ServerConnection.key(server)} servers={[server]}>
<Inner />
</AppInterface>
)
}}
</ServerGate>

@@ -74,7 +74,6 @@
"@ai-sdk/togetherai": "1.0.34",
"@ai-sdk/vercel": "1.0.33",
"@ai-sdk/xai": "2.0.51",
"@aws-sdk/credential-providers": "3.993.0",
"@clack/prompts": "1.0.0-alpha.1",
"@gitlab/gitlab-ai-provider": "3.6.0",
"@gitlab/opencode-gitlab-auth": "1.3.3",
@@ -107,7 +106,6 @@
"diff": "catalog:",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"fuzzysort": "3.1.0",
"google-auth-library": "10.5.0",
"gray-matter": "4.0.3",
"hono": "catalog:",
"hono-openapi": "catalog:",

@@ -2,62 +2,46 @@
|
||||
|
||||
import { z } from "zod"
|
||||
import { Config } from "../src/config/config"
|
||||
import { TuiConfig } from "../src/config/tui"
|
||||
|
||||
function generate(schema: z.ZodType) {
|
||||
const result = z.toJSONSchema(schema, {
|
||||
io: "input", // Generate input shape (treats optional().default() as not required)
|
||||
/**
|
||||
* We'll use the `default` values of the field as the only value in `examples`.
|
||||
* This will ensure no docs are needed to be read, as the configuration is
|
||||
* self-documenting.
|
||||
*
|
||||
* See https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.9.5
|
||||
*/
|
||||
override(ctx) {
|
||||
const schema = ctx.jsonSchema
|
||||
const file = process.argv[2]
|
||||
console.log(file)
|
||||
|
||||
// Preserve strictness: set additionalProperties: false for objects
|
||||
if (
|
||||
schema &&
|
||||
typeof schema === "object" &&
|
||||
schema.type === "object" &&
|
||||
schema.additionalProperties === undefined
|
||||
) {
|
||||
schema.additionalProperties = false
|
||||
const result = z.toJSONSchema(Config.Info, {
|
||||
io: "input", // Generate input shape (treats optional().default() as not required)
|
||||
/**
|
||||
* We'll use the `default` values of the field as the only value in `examples`.
|
||||
* This will ensure no docs are needed to be read, as the configuration is
|
||||
* self-documenting.
|
||||
*
|
||||
* See https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.9.5
|
||||
*/
|
||||
override(ctx) {
|
||||
const schema = ctx.jsonSchema
|
||||
|
||||
// Preserve strictness: set additionalProperties: false for objects
|
||||
if (schema && typeof schema === "object" && schema.type === "object" && schema.additionalProperties === undefined) {
|
||||
schema.additionalProperties = false
|
||||
}
|
||||
|
||||
// Add examples and default descriptions for string fields with defaults
|
||||
if (schema && typeof schema === "object" && "type" in schema && schema.type === "string" && schema?.default) {
|
||||
if (!schema.examples) {
|
||||
schema.examples = [schema.default]
|
||||
}
|
||||
|
||||
// Add examples and default descriptions for string fields with defaults
|
||||
if (schema && typeof schema === "object" && "type" in schema && schema.type === "string" && schema?.default) {
|
||||
if (!schema.examples) {
|
||||
schema.examples = [schema.default]
|
||||
}
|
||||
|
||||
schema.description = [schema.description || "", `default: \`${schema.default}\``]
|
||||
.filter(Boolean)
|
||||
.join("\n\n")
|
||||
.trim()
|
||||
}
|
||||
},
|
||||
}) as Record<string, unknown> & {
|
||||
allowComments?: boolean
|
||||
allowTrailingCommas?: boolean
|
||||
}
|
||||
|
||||
// used for json lsps since config supports jsonc
|
||||
result.allowComments = true
|
||||
result.allowTrailingCommas = true
|
||||
|
||||
return result
|
||||
schema.description = [schema.description || "", `default: \`${schema.default}\``]
|
||||
.filter(Boolean)
|
||||
.join("\n\n")
|
||||
.trim()
|
||||
}
|
||||
},
|
||||
}) as Record<string, unknown> & {
|
||||
allowComments?: boolean
|
||||
allowTrailingCommas?: boolean
|
||||
}
|
||||
|
||||
const configFile = process.argv[2]
|
||||
const tuiFile = process.argv[3]
|
||||
// used for json lsps since config supports jsonc
|
||||
result.allowComments = true
|
||||
result.allowTrailingCommas = true
|
||||
|
||||
console.log(configFile)
|
||||
await Bun.write(configFile, JSON.stringify(generate(Config.Info), null, 2))
|
||||
|
||||
if (tuiFile) {
|
||||
console.log(tuiFile)
|
||||
await Bun.write(tuiFile, JSON.stringify(generate(TuiConfig.Info), null, 2))
|
||||
}
|
||||
await Bun.write(file, JSON.stringify(result, null, 2))
|
||||
|
||||
@@ -38,8 +38,6 @@ import { ArgsProvider, useArgs, type Args } from "./context/args"
|
||||
import open from "open"
|
||||
import { writeHeapSnapshot } from "v8"
|
||||
import { PromptRefProvider, usePromptRef } from "./context/prompt"
|
||||
import { TuiConfigProvider } from "./context/tui-config"
|
||||
import { TuiConfig } from "@/config/tui"
|
||||
|
||||
async function getTerminalBackgroundColor(): Promise<"dark" | "light"> {
|
||||
// can't set raw mode if not a TTY
|
||||
@@ -106,7 +104,6 @@ import type { EventSource } from "./context/sdk"
|
||||
export function tui(input: {
|
||||
url: string
|
||||
args: Args
|
||||
config: TuiConfig.Info
|
||||
directory?: string
|
||||
fetch?: typeof fetch
|
||||
headers?: RequestInit["headers"]
|
||||
@@ -141,37 +138,35 @@ export function tui(input: {
|
||||
<KVProvider>
|
||||
<ToastProvider>
|
||||
<RouteProvider>
|
||||
<TuiConfigProvider config={input.config}>
|
||||
<SDKProvider
|
||||
url={input.url}
|
||||
directory={input.directory}
|
||||
fetch={input.fetch}
|
||||
headers={input.headers}
|
||||
events={input.events}
|
||||
>
|
||||
<SyncProvider>
|
||||
<ThemeProvider mode={mode}>
|
||||
<LocalProvider>
|
||||
<KeybindProvider>
|
||||
<PromptStashProvider>
|
||||
<DialogProvider>
|
||||
<CommandProvider>
|
||||
<FrecencyProvider>
|
||||
<PromptHistoryProvider>
|
||||
<PromptRefProvider>
|
||||
<App />
|
||||
</PromptRefProvider>
|
||||
</PromptHistoryProvider>
|
||||
</FrecencyProvider>
|
||||
</CommandProvider>
|
||||
</DialogProvider>
|
||||
</PromptStashProvider>
|
||||
</KeybindProvider>
|
||||
</LocalProvider>
|
||||
</ThemeProvider>
|
||||
</SyncProvider>
|
||||
</SDKProvider>
|
||||
</TuiConfigProvider>
|
||||
<SDKProvider
|
||||
url={input.url}
|
||||
directory={input.directory}
|
||||
fetch={input.fetch}
|
||||
headers={input.headers}
|
||||
events={input.events}
|
||||
>
|
||||
<SyncProvider>
|
||||
<ThemeProvider mode={mode}>
|
||||
<LocalProvider>
|
||||
<KeybindProvider>
|
||||
<PromptStashProvider>
|
||||
<DialogProvider>
|
||||
<CommandProvider>
|
||||
<FrecencyProvider>
|
||||
<PromptHistoryProvider>
|
||||
<PromptRefProvider>
|
||||
<App />
|
||||
</PromptRefProvider>
|
||||
</PromptHistoryProvider>
|
||||
</FrecencyProvider>
|
||||
</CommandProvider>
|
||||
</DialogProvider>
|
||||
</PromptStashProvider>
|
||||
</KeybindProvider>
|
||||
</LocalProvider>
|
||||
</ThemeProvider>
|
||||
</SyncProvider>
|
||||
</SDKProvider>
|
||||
</RouteProvider>
|
||||
</ToastProvider>
|
||||
</KVProvider>
|
||||
|
||||
@@ -2,9 +2,6 @@ import { cmd } from "../cmd"
|
||||
import { UI } from "@/cli/ui"
|
||||
import { tui } from "./app"
|
||||
import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
|
||||
import { TuiConfig } from "@/config/tui"
|
||||
import { Instance } from "@/project/instance"
|
||||
import { existsSync } from "fs"
|
||||
|
||||
export const AttachCommand = cmd({
|
||||
command: "attach <url>",
|
||||
@@ -66,13 +63,8 @@ export const AttachCommand = cmd({
|
||||
const auth = `Basic ${Buffer.from(`opencode:${password}`).toString("base64")}`
|
||||
return { Authorization: auth }
|
||||
})()
|
||||
const config = await Instance.provide({
|
||||
directory: directory && existsSync(directory) ? directory : process.cwd(),
|
||||
fn: () => TuiConfig.get(),
|
||||
})
|
||||
await tui({
|
||||
url: args.url,
|
||||
config,
|
||||
args: {
|
||||
continue: args.continue,
|
||||
sessionID: args.session,
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import path from "path"
|
||||
import { Global } from "@/global"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { onMount } from "solid-js"
|
||||
import { createStore } from "solid-js/store"
|
||||
import { createSimpleContext } from "../../context/helper"
|
||||
import { appendFile, writeFile } from "fs/promises"
|
||||
import { appendFile } from "fs/promises"
|
||||
|
||||
function calculateFrecency(entry?: { frequency: number; lastOpen: number }): number {
|
||||
if (!entry) return 0
|
||||
@@ -18,9 +17,9 @@ const MAX_FRECENCY_ENTRIES = 1000
|
||||
export const { use: useFrecency, provider: FrecencyProvider } = createSimpleContext({
|
||||
name: "Frecency",
|
||||
init: () => {
|
||||
const frecencyPath = path.join(Global.Path.state, "frecency.jsonl")
|
||||
const frecencyFile = Bun.file(path.join(Global.Path.state, "frecency.jsonl"))
|
||||
onMount(async () => {
|
||||
const text = await Filesystem.readText(frecencyPath).catch(() => "")
|
||||
const text = await frecencyFile.text().catch(() => "")
|
||||
const lines = text
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
@@ -54,7 +53,7 @@ export const { use: useFrecency, provider: FrecencyProvider } = createSimpleCont
|
||||
|
||||
if (sorted.length > 0) {
|
||||
const content = sorted.map((entry) => JSON.stringify(entry)).join("\n") + "\n"
|
||||
writeFile(frecencyPath, content).catch(() => {})
|
||||
Bun.write(frecencyFile, content).catch(() => {})
|
||||
}
|
||||
})
|
||||
|
||||
@@ -69,7 +68,7 @@ export const { use: useFrecency, provider: FrecencyProvider } = createSimpleCont
|
||||
lastOpen: Date.now(),
|
||||
}
|
||||
setStore("data", absolutePath, newEntry)
|
||||
appendFile(frecencyPath, JSON.stringify({ path: absolutePath, ...newEntry }) + "\n").catch(() => {})
|
||||
appendFile(frecencyFile.name!, JSON.stringify({ path: absolutePath, ...newEntry }) + "\n").catch(() => {})
|
||||
|
||||
if (Object.keys(store.data).length > MAX_FRECENCY_ENTRIES) {
|
||||
const sorted = Object.entries(store.data)
|
||||
@@ -77,7 +76,7 @@ export const { use: useFrecency, provider: FrecencyProvider } = createSimpleCont
|
||||
.slice(0, MAX_FRECENCY_ENTRIES)
|
||||
setStore("data", Object.fromEntries(sorted))
|
||||
const content = sorted.map(([path, entry]) => JSON.stringify({ path, ...entry })).join("\n") + "\n"
|
||||
writeFile(frecencyPath, content).catch(() => {})
|
||||
Bun.write(frecencyFile, content).catch(() => {})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import path from "path"
|
||||
import { Global } from "@/global"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { onMount } from "solid-js"
|
||||
import { createStore, produce } from "solid-js/store"
|
||||
import { clone } from "remeda"
|
||||
@@ -31,9 +30,9 @@ const MAX_HISTORY_ENTRIES = 50
|
||||
export const { use: usePromptHistory, provider: PromptHistoryProvider } = createSimpleContext({
|
||||
name: "PromptHistory",
|
||||
init: () => {
|
||||
const historyPath = path.join(Global.Path.state, "prompt-history.jsonl")
|
||||
const historyFile = Bun.file(path.join(Global.Path.state, "prompt-history.jsonl"))
|
||||
onMount(async () => {
|
||||
const text = await Filesystem.readText(historyPath).catch(() => "")
|
||||
const text = await historyFile.text().catch(() => "")
|
||||
const lines = text
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
@@ -52,7 +51,7 @@ export const { use: usePromptHistory, provider: PromptHistoryProvider } = create
|
||||
// Rewrite file with only valid entries to self-heal corruption
|
||||
if (lines.length > 0) {
|
||||
const content = lines.map((line) => JSON.stringify(line)).join("\n") + "\n"
|
||||
writeFile(historyPath, content).catch(() => {})
|
||||
writeFile(historyFile.name!, content).catch(() => {})
|
||||
}
|
||||
})
|
||||
|
||||
@@ -98,11 +97,11 @@ export const { use: usePromptHistory, provider: PromptHistoryProvider } = create
|
||||
|
||||
if (trimmed) {
|
||||
const content = store.history.map((line) => JSON.stringify(line)).join("\n") + "\n"
|
||||
writeFile(historyPath, content).catch(() => {})
|
||||
writeFile(historyFile.name!, content).catch(() => {})
|
||||
return
|
||||
}
|
||||
|
||||
appendFile(historyPath, JSON.stringify(entry) + "\n").catch(() => {})
|
||||
appendFile(historyFile.name!, JSON.stringify(entry) + "\n").catch(() => {})
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import { BoxRenderable, TextareaRenderable, MouseEvent, PasteEvent, t, dim, fg } from "@opentui/core"
|
||||
import { createEffect, createMemo, type JSX, onMount, createSignal, onCleanup, on, Show, Switch, Match } from "solid-js"
|
||||
import "opentui-spinner/solid"
|
||||
import path from "path"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { useLocal } from "@tui/context/local"
|
||||
import { useTheme } from "@tui/context/theme"
|
||||
import { EmptyBorder } from "@tui/component/border"
|
||||
@@ -933,26 +931,26 @@ export function Prompt(props: PromptProps) {
|
||||
const isUrl = /^(https?):\/\//.test(filepath)
|
||||
if (!isUrl) {
|
||||
try {
|
||||
const mime = Filesystem.mimeType(filepath)
|
||||
const filename = path.basename(filepath)
|
||||
const file = Bun.file(filepath)
|
||||
// Handle SVG as raw text content, not as base64 image
|
||||
if (mime === "image/svg+xml") {
|
||||
if (file.type === "image/svg+xml") {
|
||||
event.preventDefault()
|
||||
const content = await Filesystem.readText(filepath).catch(() => {})
|
||||
const content = await file.text().catch(() => {})
|
||||
if (content) {
|
||||
pasteText(content, `[SVG: ${filename ?? "image"}]`)
|
||||
pasteText(content, `[SVG: ${file.name ?? "image"}]`)
|
||||
return
|
||||
}
|
||||
}
|
||||
if (mime.startsWith("image/")) {
|
||||
if (file.type.startsWith("image/")) {
|
||||
event.preventDefault()
|
||||
const content = await Filesystem.readArrayBuffer(filepath)
|
||||
const content = await file
|
||||
.arrayBuffer()
|
||||
.then((buffer) => Buffer.from(buffer).toString("base64"))
|
||||
.catch(() => {})
|
||||
if (content) {
|
||||
await pasteImage({
|
||||
filename,
|
||||
mime,
|
||||
filename: file.name,
|
||||
mime: file.type,
|
||||
content,
|
||||
})
|
||||
return
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import path from "path"
|
||||
import { Global } from "@/global"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { onMount } from "solid-js"
|
||||
import { createStore, produce } from "solid-js/store"
|
||||
import { clone } from "remeda"
|
||||
@@ -19,9 +18,9 @@ const MAX_STASH_ENTRIES = 50
|
||||
export const { use: usePromptStash, provider: PromptStashProvider } = createSimpleContext({
|
||||
name: "PromptStash",
|
||||
init: () => {
|
||||
const stashPath = path.join(Global.Path.state, "prompt-stash.jsonl")
|
||||
const stashFile = Bun.file(path.join(Global.Path.state, "prompt-stash.jsonl"))
|
||||
onMount(async () => {
|
||||
const text = await Filesystem.readText(stashPath).catch(() => "")
|
||||
const text = await stashFile.text().catch(() => "")
|
||||
const lines = text
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
@@ -40,7 +39,7 @@ export const { use: usePromptStash, provider: PromptStashProvider } = createSimp
|
||||
// Rewrite file with only valid entries to self-heal corruption
|
||||
if (lines.length > 0) {
|
||||
const content = lines.map((line) => JSON.stringify(line)).join("\n") + "\n"
|
||||
writeFile(stashPath, content).catch(() => {})
|
||||
writeFile(stashFile.name!, content).catch(() => {})
|
||||
}
|
||||
})
|
||||
|
||||
@@ -67,11 +66,11 @@ export const { use: usePromptStash, provider: PromptStashProvider } = createSimp
|
||||
|
||||
if (trimmed) {
|
||||
const content = store.entries.map((line) => JSON.stringify(line)).join("\n") + "\n"
|
||||
writeFile(stashPath, content).catch(() => {})
|
||||
writeFile(stashFile.name!, content).catch(() => {})
|
||||
return
|
||||
}
|
||||
|
||||
appendFile(stashPath, JSON.stringify(stash) + "\n").catch(() => {})
|
||||
appendFile(stashFile.name!, JSON.stringify(stash) + "\n").catch(() => {})
|
||||
},
|
||||
pop() {
|
||||
if (store.entries.length === 0) return undefined
|
||||
@@ -83,7 +82,7 @@ export const { use: usePromptStash, provider: PromptStashProvider } = createSimp
|
||||
)
|
||||
const content =
|
||||
store.entries.length > 0 ? store.entries.map((line) => JSON.stringify(line)).join("\n") + "\n" : ""
|
||||
writeFile(stashPath, content).catch(() => {})
|
||||
writeFile(stashFile.name!, content).catch(() => {})
|
||||
return entry
|
||||
},
|
||||
remove(index: number) {
|
||||
@@ -95,7 +94,7 @@ export const { use: usePromptStash, provider: PromptStashProvider } = createSimp
|
||||
)
|
||||
const content =
|
||||
store.entries.length > 0 ? store.entries.map((line) => JSON.stringify(line)).join("\n") + "\n" : ""
|
||||
writeFile(stashPath, content).catch(() => {})
|
||||
writeFile(stashFile.name!, content).catch(() => {})
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -80,11 +80,11 @@ const TIPS = [
"Switch to {highlight}Plan{/highlight} agent to get suggestions without making actual changes",
"Use {highlight}@agent-name{/highlight} in prompts to invoke specialized subagents",
"Press {highlight}Ctrl+X Right/Left{/highlight} to cycle through parent and child sessions",
"Create {highlight}opencode.json{/highlight} for server settings and {highlight}tui.json{/highlight} for TUI settings",
"Place TUI settings in {highlight}~/.config/opencode/tui.json{/highlight} for global config",
"Create {highlight}opencode.json{/highlight} in project root for project-specific settings",
"Place settings in {highlight}~/.config/opencode/opencode.json{/highlight} for global config",
"Add {highlight}$schema{/highlight} to your config for autocomplete in your editor",
"Configure {highlight}model{/highlight} in config to set your default model",
"Override any keybind in {highlight}tui.json{/highlight} via the {highlight}keybinds{/highlight} section",
"Override any keybind in config via the {highlight}keybinds{/highlight} section",
"Set any keybind to {highlight}none{/highlight} to disable it completely",
"Configure local or remote MCP servers in the {highlight}mcp{/highlight} config section",
"OpenCode auto-handles OAuth for remote MCP servers requiring auth",
@@ -140,7 +140,7 @@ const TIPS = [
"Press {highlight}Ctrl+X G{/highlight} or {highlight}/timeline{/highlight} to jump to specific messages",
"Press {highlight}Ctrl+X H{/highlight} to toggle code block visibility in messages",
"Press {highlight}Ctrl+X S{/highlight} or {highlight}/status{/highlight} to see system status info",
"Enable {highlight}scroll_acceleration{/highlight} in {highlight}tui.json{/highlight} for smooth macOS-style scrolling",
"Enable {highlight}tui.scroll_acceleration{/highlight} for smooth macOS-style scrolling",
"Toggle username display in chat via command palette ({highlight}Ctrl+P{/highlight})",
"Run {highlight}docker run -it --rm ghcr.io/anomalyco/opencode{/highlight} for containerized use",
"Use {highlight}/connect{/highlight} with OpenCode Zen for curated, tested models",

@@ -1,4 +1,5 @@
|
||||
import { createMemo } from "solid-js"
|
||||
import { useSync } from "@tui/context/sync"
|
||||
import { Keybind } from "@/util/keybind"
|
||||
import { pipe, mapValues } from "remeda"
|
||||
import type { KeybindsConfig } from "@opencode-ai/sdk/v2"
|
||||
@@ -6,15 +7,14 @@ import type { ParsedKey, Renderable } from "@opentui/core"
|
||||
import { createStore } from "solid-js/store"
|
||||
import { useKeyboard, useRenderer } from "@opentui/solid"
|
||||
import { createSimpleContext } from "./helper"
|
||||
import { useTuiConfig } from "./tui-config"
|
||||
|
||||
export const { use: useKeybind, provider: KeybindProvider } = createSimpleContext({
|
||||
name: "Keybind",
|
||||
init: () => {
|
||||
const config = useTuiConfig()
|
||||
const keybinds = createMemo<Record<string, Keybind.Info[]>>(() => {
|
||||
const sync = useSync()
|
||||
const keybinds = createMemo(() => {
|
||||
return pipe(
|
||||
(config.keybinds ?? {}) as Record<string, string>,
|
||||
sync.data.config.keybinds ?? {},
|
||||
mapValues((value) => Keybind.parse(value)),
|
||||
)
|
||||
})
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Global } from "@/global"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { createSignal, type Setter } from "solid-js"
|
||||
import { createStore } from "solid-js/store"
|
||||
import { createSimpleContext } from "./helper"
|
||||
@@ -10,9 +9,10 @@ export const { use: useKV, provider: KVProvider } = createSimpleContext({
|
||||
init: () => {
|
||||
const [ready, setReady] = createSignal(false)
|
||||
const [store, setStore] = createStore<Record<string, any>>()
|
||||
const filePath = path.join(Global.Path.state, "kv.json")
|
||||
const file = Bun.file(path.join(Global.Path.state, "kv.json"))
|
||||
|
||||
Filesystem.readJson(filePath)
|
||||
file
|
||||
.json()
|
||||
.then((x) => {
|
||||
setStore(x)
|
||||
})
|
||||
@@ -44,7 +44,7 @@ export const { use: useKV, provider: KVProvider } = createSimpleContext({
|
||||
},
|
||||
set(key: string, value: any) {
|
||||
setStore(key, value)
|
||||
Filesystem.writeJson(filePath, store)
|
||||
Bun.write(file, JSON.stringify(store, null, 2))
|
||||
},
|
||||
}
|
||||
return result
|
||||
|
||||
@@ -12,7 +12,6 @@ import { Provider } from "@/provider/provider"
|
||||
import { useArgs } from "./args"
|
||||
import { useSDK } from "./sdk"
|
||||
import { RGBA } from "@opentui/core"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
|
||||
export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
|
||||
name: "Local",
|
||||
@@ -120,7 +119,7 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
|
||||
variant: {},
|
||||
})
|
||||
|
||||
const filePath = path.join(Global.Path.state, "model.json")
|
||||
const file = Bun.file(path.join(Global.Path.state, "model.json"))
|
||||
const state = {
|
||||
pending: false,
|
||||
}
|
||||
@@ -131,15 +130,19 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
|
||||
return
|
||||
}
|
||||
state.pending = false
|
||||
Filesystem.writeJson(filePath, {
|
||||
recent: modelStore.recent,
|
||||
favorite: modelStore.favorite,
|
||||
variant: modelStore.variant,
|
||||
})
|
||||
Bun.write(
|
||||
file,
|
||||
JSON.stringify({
|
||||
recent: modelStore.recent,
|
||||
favorite: modelStore.favorite,
|
||||
variant: modelStore.variant,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
Filesystem.readJson(filePath)
|
||||
.then((x: any) => {
|
||||
file
|
||||
.json()
|
||||
.then((x) => {
|
||||
if (Array.isArray(x.recent)) setModelStore("recent", x.recent)
|
||||
if (Array.isArray(x.favorite)) setModelStore("favorite", x.favorite)
|
||||
if (typeof x.variant === "object" && x.variant !== null) setModelStore("variant", x.variant)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { SyntaxStyle, RGBA, type TerminalColors } from "@opentui/core"
|
||||
import path from "path"
|
||||
import { createEffect, createMemo, onMount } from "solid-js"
|
||||
import { useSync } from "@tui/context/sync"
|
||||
import { createSimpleContext } from "./helper"
|
||||
import aura from "./theme/aura.json" with { type: "json" }
|
||||
import ayu from "./theme/ayu.json" with { type: "json" }
|
||||
@@ -40,7 +41,6 @@ import { useRenderer } from "@opentui/solid"
|
||||
import { createStore, produce } from "solid-js/store"
|
||||
import { Global } from "@/global"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { useTuiConfig } from "./tui-config"
|
||||
|
||||
type ThemeColors = {
|
||||
primary: RGBA
|
||||
@@ -279,17 +279,17 @@ function ansiToRgba(code: number): RGBA {
|
||||
export const { use: useTheme, provider: ThemeProvider } = createSimpleContext({
|
||||
name: "Theme",
|
||||
init: (props: { mode: "dark" | "light" }) => {
|
||||
const config = useTuiConfig()
|
||||
const sync = useSync()
|
||||
const kv = useKV()
|
||||
const [store, setStore] = createStore({
|
||||
themes: DEFAULT_THEMES,
|
||||
mode: kv.get("theme_mode", props.mode),
|
||||
active: (config.theme ?? kv.get("theme", "opencode")) as string,
|
||||
active: (sync.data.config.theme ?? kv.get("theme", "opencode")) as string,
|
||||
ready: false,
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
const theme = config.theme
|
||||
const theme = sync.data.config.theme
|
||||
if (theme) setStore("active", theme)
|
||||
})
|
||||
|
||||
@@ -412,7 +412,7 @@ async function getCustomThemes() {
|
||||
cwd: dir,
|
||||
})) {
|
||||
const name = path.basename(item, ".json")
|
||||
result[name] = await Filesystem.readJson(item)
|
||||
result[name] = await Bun.file(item).json()
|
||||
}
|
||||
}
|
||||
return result
|
||||
|
||||
@@ -1,9 +0,0 @@
import { TuiConfig } from "@/config/tui"
import { createSimpleContext } from "./helper"

export const { use: useTuiConfig, provider: TuiConfigProvider } = createSimpleContext({
name: "TuiConfig",
init: (props: { config: TuiConfig.Info }) => {
return props.config
},
})
@@ -78,7 +78,6 @@ import { QuestionPrompt } from "./question"
|
||||
import { DialogExportOptions } from "../../ui/dialog-export-options"
|
||||
import { formatTranscript } from "../../util/transcript"
|
||||
import { UI } from "@/cli/ui.ts"
|
||||
import { useTuiConfig } from "../../context/tui-config"
|
||||
|
||||
addDefaultParsers(parsers.parsers)
|
||||
|
||||
@@ -101,7 +100,6 @@ const context = createContext<{
|
||||
showDetails: () => boolean
|
||||
diffWrapMode: () => "word" | "none"
|
||||
sync: ReturnType<typeof useSync>
|
||||
tui: ReturnType<typeof useTuiConfig>
|
||||
}>()
|
||||
|
||||
function use() {
|
||||
@@ -114,7 +112,6 @@ export function Session() {
|
||||
const route = useRouteData("session")
|
||||
const { navigate } = useRoute()
|
||||
const sync = useSync()
|
||||
const tuiConfig = useTuiConfig()
|
||||
const kv = useKV()
|
||||
const { theme } = useTheme()
|
||||
const promptRef = usePromptRef()
|
||||
@@ -167,7 +164,7 @@ export function Session() {
|
||||
const contentWidth = createMemo(() => dimensions().width - (sidebarVisible() ? 42 : 0) - 4)
|
||||
|
||||
const scrollAcceleration = createMemo(() => {
|
||||
const tui = tuiConfig
|
||||
const tui = sync.data.config.tui
|
||||
if (tui?.scroll_acceleration?.enabled) {
|
||||
return new MacOSScrollAccel()
|
||||
}
|
||||
@@ -979,7 +976,6 @@ export function Session() {
|
||||
showDetails,
|
||||
diffWrapMode,
|
||||
sync,
|
||||
tui: tuiConfig,
|
||||
}}
|
||||
>
|
||||
<box flexDirection="row">
|
||||
@@ -1924,7 +1920,7 @@ function Edit(props: ToolProps<typeof EditTool>) {
|
||||
const { theme, syntax } = useTheme()
|
||||
|
||||
const view = createMemo(() => {
|
||||
const diffStyle = ctx.tui.diff_style
|
||||
const diffStyle = ctx.sync.data.config.tui?.diff_style
|
||||
if (diffStyle === "stacked") return "unified"
|
||||
// Default to "auto" behavior
|
||||
return ctx.width > 120 ? "split" : "unified"
|
||||
@@ -1995,7 +1991,7 @@ function ApplyPatch(props: ToolProps<typeof ApplyPatchTool>) {
|
||||
const files = createMemo(() => props.metadata.files ?? [])
|
||||
|
||||
const view = createMemo(() => {
|
||||
const diffStyle = ctx.tui.diff_style
|
||||
const diffStyle = ctx.sync.data.config.tui?.diff_style
|
||||
if (diffStyle === "stacked") return "unified"
|
||||
return ctx.width > 120 ? "split" : "unified"
|
||||
})
|
||||
|
||||
@@ -15,7 +15,6 @@ import { Keybind } from "@/util/keybind"
|
||||
import { Locale } from "@/util/locale"
|
||||
import { Global } from "@/global"
|
||||
import { useDialog } from "../../ui/dialog"
|
||||
import { useTuiConfig } from "../../context/tui-config"
|
||||
|
||||
type PermissionStage = "permission" | "always" | "reject"
|
||||
|
||||
@@ -49,14 +48,14 @@ function EditBody(props: { request: PermissionRequest }) {
|
||||
const themeState = useTheme()
|
||||
const theme = themeState.theme
|
||||
const syntax = themeState.syntax
|
||||
const config = useTuiConfig()
|
||||
const sync = useSync()
|
||||
const dimensions = useTerminalDimensions()
|
||||
|
||||
const filepath = createMemo(() => (props.request.metadata?.filepath as string) ?? "")
|
||||
const diff = createMemo(() => (props.request.metadata?.diff as string) ?? "")
|
||||
|
||||
const view = createMemo(() => {
|
||||
const diffStyle = config.diff_style
|
||||
const diffStyle = sync.data.config.tui?.diff_style
|
||||
if (diffStyle === "stacked") return "unified"
|
||||
return dimensions().width > 120 ? "split" : "unified"
|
||||
})
|
||||
|
||||
@@ -3,17 +3,13 @@ import { tui } from "./app"
|
||||
import { Rpc } from "@/util/rpc"
|
||||
import { type rpc } from "./worker"
|
||||
import path from "path"
|
||||
import { fileURLToPath } from "url"
|
||||
import { UI } from "@/cli/ui"
|
||||
import { iife } from "@/util/iife"
|
||||
import { Log } from "@/util/log"
|
||||
import { withNetworkOptions, resolveNetworkOptions } from "@/cli/network"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import type { Event } from "@opencode-ai/sdk/v2"
|
||||
import type { EventSource } from "./context/sdk"
|
||||
import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
|
||||
import { TuiConfig } from "@/config/tui"
|
||||
import { Instance } from "@/project/instance"
|
||||
|
||||
declare global {
|
||||
const OPENCODE_WORKER_PATH: string
|
||||
@@ -103,7 +99,7 @@ export const TuiThreadCommand = cmd({
|
||||
const distWorker = new URL("./cli/cmd/tui/worker.js", import.meta.url)
|
||||
const workerPath = await iife(async () => {
|
||||
if (typeof OPENCODE_WORKER_PATH !== "undefined") return OPENCODE_WORKER_PATH
|
||||
if (await Filesystem.exists(fileURLToPath(distWorker))) return distWorker
|
||||
if (await Bun.file(distWorker).exists()) return distWorker
|
||||
return localWorker
|
||||
})
|
||||
try {
|
||||
@@ -137,10 +133,6 @@ export const TuiThreadCommand = cmd({
|
||||
if (!args.prompt) return piped
|
||||
return piped ? piped + "\n" + args.prompt : args.prompt
|
||||
})
|
||||
const config = await Instance.provide({
|
||||
directory: cwd,
|
||||
fn: () => TuiConfig.get(),
|
||||
})
|
||||
|
||||
// Check if server should be started (port or hostname explicitly set in CLI or config)
|
||||
const networkOpts = await resolveNetworkOptions(args)
|
||||
@@ -169,8 +161,6 @@ export const TuiThreadCommand = cmd({
|
||||
|
||||
const tuiPromise = tui({
|
||||
url,
|
||||
config,
|
||||
directory: cwd,
|
||||
fetch: customFetch,
|
||||
events,
|
||||
args: {
|
||||
|
||||
@@ -3,6 +3,7 @@ import path from "path"
|
||||
import { pathToFileURL } from "url"
|
||||
import os from "os"
|
||||
import z from "zod"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import { ModelsDev } from "../provider/models"
|
||||
import { mergeDeep, pipe, unique } from "remeda"
|
||||
import { Global } from "../global"
|
||||
@@ -31,8 +32,6 @@ import { PackageRegistry } from "@/bun/registry"
|
||||
import { proxied } from "@/util/proxied"
|
||||
import { iife } from "@/util/iife"
|
||||
import { Control } from "@/control"
|
||||
import { ConfigPaths } from "./paths"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
|
||||
export namespace Config {
|
||||
const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" })
|
||||
@@ -41,7 +40,7 @@ export namespace Config {
|
||||
|
||||
// Managed settings directory for enterprise deployments (highest priority, admin-controlled)
|
||||
// These settings override all user and project settings
|
||||
function systemManagedConfigDir(): string {
|
||||
function getManagedConfigDir(): string {
|
||||
switch (process.platform) {
|
||||
case "darwin":
|
||||
return "/Library/Application Support/opencode"
|
||||
@@ -52,14 +51,10 @@ export namespace Config {
|
||||
}
|
||||
}
|
||||
|
||||
export function managedConfigDir() {
|
||||
return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir()
|
||||
}
|
||||
|
||||
const managedDir = managedConfigDir()
|
||||
const managedConfigDir = process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || getManagedConfigDir()
|
||||
|
||||
// Custom merge function that concatenates array fields instead of replacing them
|
||||
function mergeConfigConcatArrays(target: Info, source: Info): Info {
|
||||
function merge(target: Info, source: Info): Info {
|
||||
const merged = mergeDeep(target, source)
|
||||
if (target.plugin && source.plugin) {
|
||||
merged.plugin = Array.from(new Set([...target.plugin, ...source.plugin]))
|
||||
@@ -94,13 +89,7 @@ export namespace Config {
|
||||
const remoteConfig = wellknown.config ?? {}
|
||||
// Add $schema to prevent load() from trying to write back to a non-existent file
|
||||
if (!remoteConfig.$schema) remoteConfig.$schema = "https://opencode.ai/config.json"
|
||||
result = mergeConfigConcatArrays(
|
||||
result,
|
||||
await load(JSON.stringify(remoteConfig), {
|
||||
dir: path.dirname(`${key}/.well-known/opencode`),
|
||||
source: `${key}/.well-known/opencode`,
|
||||
}),
|
||||
)
|
||||
result = merge(result, await load(JSON.stringify(remoteConfig), `${key}/.well-known/opencode`))
|
||||
log.debug("loaded remote config from well-known", { url: key })
|
||||
}
|
||||
}
|
||||
@@ -110,18 +99,21 @@ export namespace Config {
|
||||
}
|
||||
|
||||
// Global user config overrides remote config.
|
||||
result = mergeConfigConcatArrays(result, await global())
|
||||
result = merge(result, await global())
|
||||
|
||||
// Custom config path overrides global config.
|
||||
if (Flag.OPENCODE_CONFIG) {
|
||||
result = mergeConfigConcatArrays(result, await loadFile(Flag.OPENCODE_CONFIG))
|
||||
result = merge(result, await loadFile(Flag.OPENCODE_CONFIG))
|
||||
log.debug("loaded custom config", { path: Flag.OPENCODE_CONFIG })
|
||||
}
|
||||
|
||||
// Project config overrides global and remote config.
|
||||
if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) {
|
||||
for (const file of await ConfigPaths.projectFiles("opencode", Instance.directory, Instance.worktree)) {
|
||||
result = mergeConfigConcatArrays(result, await loadFile(file))
|
||||
for (const file of ["opencode.jsonc", "opencode.json"]) {
|
||||
const found = await Filesystem.findUp(file, Instance.directory, Instance.worktree)
|
||||
for (const resolved of found.toReversed()) {
|
||||
result = merge(result, await loadFile(resolved))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -129,10 +121,31 @@ export namespace Config {
|
||||
result.mode = result.mode || {}
|
||||
result.plugin = result.plugin || []
|
||||
|
||||
const directories = await ConfigPaths.directories(Instance.directory, Instance.worktree)
|
||||
const directories = [
|
||||
Global.Path.config,
|
||||
// Only scan project .opencode/ directories when project discovery is enabled
|
||||
...(!Flag.OPENCODE_DISABLE_PROJECT_CONFIG
|
||||
? await Array.fromAsync(
|
||||
Filesystem.up({
|
||||
targets: [".opencode"],
|
||||
start: Instance.directory,
|
||||
stop: Instance.worktree,
|
||||
}),
|
||||
)
|
||||
: []),
|
||||
// Always scan ~/.opencode/ (user home directory)
|
||||
...(await Array.fromAsync(
|
||||
Filesystem.up({
|
||||
targets: [".opencode"],
|
||||
start: Global.Path.home,
|
||||
stop: Global.Path.home,
|
||||
}),
|
||||
)),
|
||||
]
|
||||
|
||||
// .opencode directory config overrides (project and global) config sources.
|
||||
if (Flag.OPENCODE_CONFIG_DIR) {
|
||||
directories.push(Flag.OPENCODE_CONFIG_DIR)
|
||||
log.debug("loading config from OPENCODE_CONFIG_DIR", { path: Flag.OPENCODE_CONFIG_DIR })
|
||||
}
|
||||
|
||||
@@ -142,7 +155,7 @@ export namespace Config {
|
||||
if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) {
|
||||
for (const file of ["opencode.jsonc", "opencode.json"]) {
|
||||
log.debug(`loading config from ${path.join(dir, file)}`)
|
||||
result = mergeConfigConcatArrays(result, await loadFile(path.join(dir, file)))
|
||||
result = merge(result, await loadFile(path.join(dir, file)))
|
||||
// to satisfy the type checker
|
||||
result.agent ??= {}
|
||||
result.mode ??= {}
|
||||
@@ -164,14 +177,8 @@ export namespace Config {
|
||||
}
|
||||
|
||||
// Inline config content overrides all non-managed config sources.
|
||||
if (process.env.OPENCODE_CONFIG_CONTENT) {
|
||||
result = mergeConfigConcatArrays(
|
||||
result,
|
||||
await load(process.env.OPENCODE_CONFIG_CONTENT, {
|
||||
dir: Instance.directory,
|
||||
source: "OPENCODE_CONFIG_CONTENT",
|
||||
}),
|
||||
)
|
||||
if (Flag.OPENCODE_CONFIG_CONTENT) {
|
||||
result = merge(result, JSON.parse(Flag.OPENCODE_CONFIG_CONTENT))
|
||||
log.debug("loaded custom config from OPENCODE_CONFIG_CONTENT")
|
||||
}
|
||||
|
||||
@@ -179,9 +186,9 @@ export namespace Config {
|
||||
// Kept separate from directories array to avoid write operations when installing plugins
|
||||
// which would fail on system directories requiring elevated permissions
|
||||
// This way it only loads config file and not skills/plugins/commands
|
||||
if (existsSync(managedDir)) {
|
||||
if (existsSync(managedConfigDir)) {
|
||||
for (const file of ["opencode.jsonc", "opencode.json"]) {
|
||||
result = mergeConfigConcatArrays(result, await loadFile(path.join(managedDir, file)))
|
||||
result = merge(result, await loadFile(path.join(managedConfigDir, file)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -220,6 +227,8 @@ export namespace Config {
|
||||
result.share = "auto"
|
||||
}
|
||||
|
||||
if (!result.keybinds) result.keybinds = Info.shape.keybinds.parse({})
|
||||
|
||||
// Apply flag overrides for compaction settings
|
||||
if (Flag.OPENCODE_DISABLE_AUTOCOMPACT) {
|
||||
result.compaction = { ...result.compaction, auto: false }
|
||||
@@ -282,7 +291,7 @@ export namespace Config {
|
||||
}
|
||||
}
|
||||
|
||||
export async function needsInstall(dir: string) {
|
||||
async function needsInstall(dir: string) {
|
||||
// Some config dirs may be read-only.
|
||||
// Installing deps there will fail; skip installation in that case.
|
||||
const writable = await isWritable(dir)
|
||||
@@ -909,6 +918,20 @@ export namespace Config {
|
||||
ref: "KeybindsConfig",
|
||||
})
|
||||
|
||||
export const TUI = z.object({
|
||||
scroll_speed: z.number().min(0.001).optional().describe("TUI scroll speed"),
|
||||
scroll_acceleration: z
|
||||
.object({
|
||||
enabled: z.boolean().describe("Enable scroll acceleration"),
|
||||
})
|
||||
.optional()
|
||||
.describe("Scroll acceleration settings"),
|
||||
diff_style: z
|
||||
.enum(["auto", "stacked"])
|
||||
.optional()
|
||||
.describe("Control diff rendering style: 'auto' adapts to terminal width, 'stacked' always shows single column"),
|
||||
})
|
||||
|
||||
export const Server = z
|
||||
.object({
|
||||
port: z.number().int().positive().optional().describe("Port to listen on"),
|
||||
@@ -983,7 +1006,10 @@ export namespace Config {
|
||||
export const Info = z
|
||||
.object({
|
||||
$schema: z.string().optional().describe("JSON schema reference for configuration validation"),
|
||||
theme: z.string().optional().describe("Theme name to use for the interface"),
|
||||
keybinds: Keybinds.optional().describe("Custom keybind configurations"),
|
||||
logLevel: Log.Level.optional().describe("Log level"),
|
||||
tui: TUI.optional().describe("TUI specific settings"),
|
||||
server: Server.optional().describe("Server configuration for opencode serve and web commands"),
|
||||
command: z
|
||||
.record(z.string(), Command)
|
||||
@@ -1203,49 +1229,95 @@ export namespace Config {
|
||||
return result
|
||||
})
|
||||
|
||||
export const { readFile } = ConfigPaths
|
||||
|
||||
async function loadFile(filepath: string): Promise<Info> {
|
||||
log.info("loading", { path: filepath })
|
||||
const text = await readFile(filepath)
|
||||
let text = await Filesystem.readText(filepath).catch((err: any) => {
|
||||
if (err.code === "ENOENT") return
|
||||
throw new JsonError({ path: filepath }, { cause: err })
|
||||
})
|
||||
if (!text) return {}
|
||||
return load(text, { path: filepath })
|
||||
return load(text, filepath)
|
||||
}
|
||||
|
||||
async function load(text: string, options: { path: string } | { dir: string; source: string }) {
|
||||
async function load(text: string, configFilepath: string) {
|
||||
const original = text
|
||||
const source = "path" in options ? options.path : options.source
|
||||
const isFile = "path" in options
|
||||
const data = await ConfigPaths.parseText(
|
||||
text,
|
||||
"path" in options ? options.path : { source: options.source, dir: options.dir },
|
||||
)
|
||||
text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
|
||||
return process.env[varName] || ""
|
||||
})
|
||||
|
||||
const normalized = (() => {
|
||||
if (!data || typeof data !== "object" || Array.isArray(data)) return data
|
||||
const copy = { ...(data as Record<string, unknown>) }
|
||||
const hadLegacy = "theme" in copy || "keybinds" in copy || "tui" in copy
|
||||
if (!hadLegacy) return copy
|
||||
delete copy.theme
|
||||
delete copy.keybinds
|
||||
delete copy.tui
|
||||
log.warn("tui keys in opencode config are deprecated; move them to tui.json", { path: source })
|
||||
return copy
|
||||
})()
|
||||
const fileMatches = text.match(/\{file:[^}]+\}/g)
|
||||
if (fileMatches) {
|
||||
const configDir = path.dirname(configFilepath)
|
||||
const lines = text.split("\n")
|
||||
|
||||
const parsed = Info.safeParse(normalized)
|
||||
for (const match of fileMatches) {
|
||||
const lineIndex = lines.findIndex((line) => line.includes(match))
|
||||
if (lineIndex !== -1 && lines[lineIndex].trim().startsWith("//")) {
|
||||
continue // Skip if line is commented
|
||||
}
|
||||
let filePath = match.replace(/^\{file:/, "").replace(/\}$/, "")
|
||||
if (filePath.startsWith("~/")) {
|
||||
filePath = path.join(os.homedir(), filePath.slice(2))
|
||||
}
|
||||
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
|
||||
const fileContent = (
|
||||
await Filesystem.readText(resolvedPath).catch((error: any) => {
|
||||
const errMsg = `bad file reference: "${match}"`
|
||||
if (error.code === "ENOENT") {
|
||||
throw new InvalidError(
|
||||
{
|
||||
path: configFilepath,
|
||||
message: errMsg + ` ${resolvedPath} does not exist`,
|
||||
},
|
||||
{ cause: error },
|
||||
)
|
||||
}
|
||||
throw new InvalidError({ path: configFilepath, message: errMsg }, { cause: error })
|
||||
})
|
||||
).trim()
|
||||
// escape newlines/quotes, strip outer quotes
|
||||
text = text.replace(match, () => JSON.stringify(fileContent).slice(1, -1))
|
||||
}
|
||||
}
|
||||
|
||||
const errors: JsoncParseError[] = []
|
||||
const data = parseJsonc(text, errors, { allowTrailingComma: true })
|
||||
if (errors.length) {
|
||||
const lines = text.split("\n")
|
||||
const errorDetails = errors
|
||||
.map((e) => {
|
||||
const beforeOffset = text.substring(0, e.offset).split("\n")
|
||||
const line = beforeOffset.length
|
||||
const column = beforeOffset[beforeOffset.length - 1].length + 1
|
||||
const problemLine = lines[line - 1]
|
||||
|
||||
const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}`
|
||||
if (!problemLine) return error
|
||||
|
||||
return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^`
|
||||
})
|
||||
.join("\n")
|
||||
|
||||
throw new JsonError({
|
||||
path: configFilepath,
|
||||
message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${errorDetails}\n--- End ---`,
|
||||
})
|
||||
}
|
||||
|
||||
const parsed = Info.safeParse(data)
|
||||
if (parsed.success) {
|
||||
if (!parsed.data.$schema && isFile) {
|
||||
if (!parsed.data.$schema) {
|
||||
parsed.data.$schema = "https://opencode.ai/config.json"
|
||||
// Write the $schema to the original text to preserve variables like {env:VAR}
|
||||
const updated = original.replace(/^\s*\{/, '{\n "$schema": "https://opencode.ai/config.json",')
|
||||
await Bun.write(options.path, updated).catch(() => {})
|
||||
await Filesystem.write(configFilepath, updated).catch(() => {})
|
||||
}
|
||||
const data = parsed.data
|
||||
if (data.plugin && isFile) {
|
||||
if (data.plugin) {
|
||||
for (let i = 0; i < data.plugin.length; i++) {
|
||||
const plugin = data.plugin[i]
|
||||
try {
|
||||
data.plugin[i] = import.meta.resolve!(plugin, options.path)
|
||||
data.plugin[i] = import.meta.resolve!(plugin, configFilepath)
|
||||
} catch (err) {}
|
||||
}
|
||||
}
|
||||
@@ -1253,11 +1325,17 @@ export namespace Config {
|
||||
}
|
||||
|
||||
throw new InvalidError({
|
||||
path: source,
|
||||
path: configFilepath,
|
||||
issues: parsed.error.issues,
|
||||
})
|
||||
}
|
||||
export const { JsonError, InvalidError } = ConfigPaths
|
||||
export const JsonError = NamedError.create(
|
||||
"ConfigJsonError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const ConfigDirectoryTypoError = NamedError.create(
|
||||
"ConfigDirectoryTypoError",
|
||||
@@ -1268,6 +1346,15 @@ export namespace Config {
|
||||
}),
|
||||
)
|
||||
|
||||
export const InvalidError = NamedError.create(
|
||||
"ConfigInvalidError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
issues: z.custom<z.core.$ZodIssue[]>().optional(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
|
||||
export async function get() {
|
||||
return state().then((x) => x.config)
|
||||
}
|
||||
|
||||
@@ -1,155 +0,0 @@
|
||||
import path from "path"
|
||||
import { type ParseError as JsoncParseError, applyEdits, modify, parse as parseJsonc } from "jsonc-parser"
|
||||
import { unique } from "remeda"
|
||||
import z from "zod"
|
||||
import { ConfigPaths } from "./paths"
|
||||
import { TuiInfo, TuiOptions } from "./tui-schema"
|
||||
import { Instance } from "@/project/instance"
|
||||
import { Flag } from "@/flag/flag"
|
||||
import { Log } from "@/util/log"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { Global } from "@/global"
|
||||
|
||||
const log = Log.create({ service: "tui.migrate" })
|
||||
|
||||
const TUI_SCHEMA_URL = "https://opencode.ai/tui.json"
|
||||
|
||||
const LegacyTheme = TuiInfo.shape.theme.optional()
|
||||
const LegacyRecord = z.record(z.string(), z.unknown()).optional()
|
||||
|
||||
const TuiLegacy = z
|
||||
.object({
|
||||
scroll_speed: TuiOptions.shape.scroll_speed.catch(undefined),
|
||||
scroll_acceleration: TuiOptions.shape.scroll_acceleration.catch(undefined),
|
||||
diff_style: TuiOptions.shape.diff_style.catch(undefined),
|
||||
})
|
||||
.strip()
|
||||
|
||||
interface MigrateInput {
|
||||
directories: string[]
|
||||
custom?: string
|
||||
managed: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrates tui-specific keys (theme, keybinds, tui) from opencode.json files
|
||||
* into dedicated tui.json files. Migration is performed per-directory and
|
||||
* skips only locations where a tui.json already exists.
|
||||
*/
|
||||
export async function migrateTuiConfig(input: MigrateInput) {
|
||||
const opencode = await opencodeFiles(input)
|
||||
for (const file of opencode) {
|
||||
const source = await Filesystem.readText(file).catch((error) => {
|
||||
log.warn("failed to read config for tui migration", { path: file, error })
|
||||
return undefined
|
||||
})
|
||||
if (!source) continue
|
||||
const errors: JsoncParseError[] = []
|
||||
const data = parseJsonc(source, errors, { allowTrailingComma: true })
|
||||
if (errors.length || !data || typeof data !== "object" || Array.isArray(data)) continue
|
||||
|
||||
const theme = LegacyTheme.safeParse("theme" in data ? data.theme : undefined)
|
||||
const keybinds = LegacyRecord.safeParse("keybinds" in data ? data.keybinds : undefined)
|
||||
const legacyTui = LegacyRecord.safeParse("tui" in data ? data.tui : undefined)
|
||||
const extracted = {
|
||||
theme: theme.success ? theme.data : undefined,
|
||||
keybinds: keybinds.success ? keybinds.data : undefined,
|
||||
tui: legacyTui.success ? legacyTui.data : undefined,
|
||||
}
|
||||
const tui = extracted.tui ? normalizeTui(extracted.tui) : undefined
|
||||
if (extracted.theme === undefined && extracted.keybinds === undefined && !tui) continue
|
||||
|
||||
const target = path.join(path.dirname(file), "tui.json")
|
||||
const targetExists = await Filesystem.exists(target)
|
||||
if (targetExists) continue
|
||||
|
||||
const payload: Record<string, unknown> = {
|
||||
$schema: TUI_SCHEMA_URL,
|
||||
}
|
||||
if (extracted.theme !== undefined) payload.theme = extracted.theme
|
||||
if (extracted.keybinds !== undefined) payload.keybinds = extracted.keybinds
|
||||
if (tui) Object.assign(payload, tui)
|
||||
|
||||
const wrote = await Bun.write(target, JSON.stringify(payload, null, 2))
|
||||
.then(() => true)
|
||||
.catch((error) => {
|
||||
log.warn("failed to write tui migration target", { from: file, to: target, error })
|
||||
return false
|
||||
})
|
||||
if (!wrote) continue
|
||||
|
||||
const stripped = await backupAndStripLegacy(file, source)
|
||||
if (!stripped) {
|
||||
log.warn("tui config migrated but source file was not stripped", { from: file, to: target })
|
||||
continue
|
||||
}
|
||||
log.info("migrated tui config", { from: file, to: target })
|
||||
}
|
||||
}

function normalizeTui(data: Record<string, unknown>) {
  const parsed = TuiLegacy.parse(data)
  if (
    parsed.scroll_speed === undefined &&
    parsed.diff_style === undefined &&
    parsed.scroll_acceleration === undefined
  ) {
    return
  }
  return parsed
}

async function backupAndStripLegacy(file: string, source: string) {
  const backup = file + ".tui-migration.bak"
  const hasBackup = await Filesystem.exists(backup)
  const backed = hasBackup
    ? true
    : await Bun.write(backup, source)
        .then(() => true)
        .catch((error) => {
          log.warn("failed to backup source config during tui migration", { path: file, backup, error })
          return false
        })
  if (!backed) return false

  const text = ["theme", "keybinds", "tui"].reduce((acc, key) => {
    const edits = modify(acc, [key], undefined, {
      formattingOptions: {
        insertSpaces: true,
        tabSize: 2,
      },
    })
    if (!edits.length) return acc
    return applyEdits(acc, edits)
  }, source)

  return Bun.write(file, text)
    .then(() => {
      log.info("stripped tui keys from server config", { path: file, backup })
      return true
    })
    .catch((error) => {
      log.warn("failed to strip legacy tui keys from server config", { path: file, backup, error })
      return false
    })
}

async function opencodeFiles(input: { directories: string[]; managed: string }) {
  const project = Flag.OPENCODE_DISABLE_PROJECT_CONFIG
    ? []
    : await ConfigPaths.projectFiles("opencode", Instance.directory, Instance.worktree)
  const files = [...project, ...ConfigPaths.fileInDirectory(Global.Path.config, "opencode")]
  for (const dir of unique(input.directories)) {
    files.push(...ConfigPaths.fileInDirectory(dir, "opencode"))
  }
  if (Flag.OPENCODE_CONFIG) files.push(Flag.OPENCODE_CONFIG)
  files.push(...ConfigPaths.fileInDirectory(input.managed, "opencode"))

  const existing = await Promise.all(
    unique(files).map(async (file) => {
      const ok = await Filesystem.exists(file)
      return ok ? file : undefined
    }),
  )
  return existing.filter((file): file is string => !!file)
}
@@ -1,174 +0,0 @@
|
||||
import path from "path"
|
||||
import os from "os"
|
||||
import z from "zod"
|
||||
import { type ParseError as JsoncParseError, parse as parseJsonc, printParseErrorCode } from "jsonc-parser"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { Flag } from "@/flag/flag"
|
||||
import { Global } from "@/global"
|
||||
|
||||
export namespace ConfigPaths {
|
||||
export async function projectFiles(name: string, directory: string, worktree: string) {
|
||||
const files: string[] = []
|
||||
for (const file of [`${name}.jsonc`, `${name}.json`]) {
|
||||
const found = await Filesystem.findUp(file, directory, worktree)
|
||||
for (const resolved of found.toReversed()) {
|
||||
files.push(resolved)
|
||||
}
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
export async function directories(directory: string, worktree: string) {
|
||||
return [
|
||||
Global.Path.config,
|
||||
...(!Flag.OPENCODE_DISABLE_PROJECT_CONFIG
|
||||
? await Array.fromAsync(
|
||||
Filesystem.up({
|
||||
targets: [".opencode"],
|
||||
start: directory,
|
||||
stop: worktree,
|
||||
}),
|
||||
)
|
||||
: []),
|
||||
...(await Array.fromAsync(
|
||||
Filesystem.up({
|
||||
targets: [".opencode"],
|
||||
start: Global.Path.home,
|
||||
stop: Global.Path.home,
|
||||
}),
|
||||
)),
|
||||
...(Flag.OPENCODE_CONFIG_DIR ? [Flag.OPENCODE_CONFIG_DIR] : []),
|
||||
]
|
||||
}
|
||||
|
||||
export function fileInDirectory(dir: string, name: string) {
|
||||
return [path.join(dir, `${name}.jsonc`), path.join(dir, `${name}.json`)]
|
||||
}
|
||||
|
||||
export const JsonError = NamedError.create(
|
||||
"ConfigJsonError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const InvalidError = NamedError.create(
|
||||
"ConfigInvalidError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
issues: z.custom<z.core.$ZodIssue[]>().optional(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
|
||||
/** Read a config file, returning undefined for missing files and throwing JsonError for other failures. */
|
||||
export async function readFile(filepath: string) {
|
||||
return Filesystem.readText(filepath).catch((err: NodeJS.ErrnoException) => {
|
||||
if (err.code === "ENOENT") return
|
||||
throw new JsonError({ path: filepath }, { cause: err })
|
||||
})
|
||||
}
|
||||
|
||||
type ParseSource = string | { source: string; dir: string }
|
||||
|
||||
function source(input: ParseSource) {
|
||||
return typeof input === "string" ? input : input.source
|
||||
}
|
||||
|
||||
function dir(input: ParseSource) {
|
||||
return typeof input === "string" ? path.dirname(input) : input.dir
|
||||
}
|
||||
|
||||
/** Apply {env:VAR} and {file:path} substitutions to config text. */
|
||||
async function substitute(text: string, input: ParseSource, missing: "error" | "empty" = "error") {
|
||||
text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
|
||||
return process.env[varName] || ""
|
||||
})
|
||||
|
||||
const fileMatches = Array.from(text.matchAll(/\{file:[^}]+\}/g))
|
||||
if (!fileMatches.length) return text
|
||||
|
||||
const configDir = dir(input)
|
||||
const configSource = source(input)
|
||||
let out = ""
|
||||
let cursor = 0
|
||||
|
||||
for (const match of fileMatches) {
|
||||
const token = match[0]
|
||||
const index = match.index!
|
||||
out += text.slice(cursor, index)
|
||||
|
||||
const lineStart = text.lastIndexOf("\n", index - 1) + 1
|
||||
const prefix = text.slice(lineStart, index).trimStart()
|
||||
if (prefix.startsWith("//")) {
|
||||
out += token
|
||||
cursor = index + token.length
|
||||
continue
|
||||
}
|
||||
|
||||
let filePath = token.replace(/^\{file:/, "").replace(/\}$/, "")
|
||||
if (filePath.startsWith("~/")) {
|
||||
filePath = path.join(os.homedir(), filePath.slice(2))
|
||||
}
|
||||
|
||||
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
|
||||
const fileContent = (
|
||||
await Filesystem.readText(resolvedPath).catch((error: NodeJS.ErrnoException) => {
|
||||
if (missing === "empty") return ""
|
||||
|
||||
const errMsg = `bad file reference: "${token}"`
|
||||
if (error.code === "ENOENT") {
|
||||
throw new InvalidError(
|
||||
{
|
||||
path: configSource,
|
||||
message: errMsg + ` ${resolvedPath} does not exist`,
|
||||
},
|
||||
{ cause: error },
|
||||
)
|
||||
}
|
||||
throw new InvalidError({ path: configSource, message: errMsg }, { cause: error })
|
||||
})
|
||||
).trim()
|
||||
|
||||
out += JSON.stringify(fileContent).slice(1, -1)
|
||||
cursor = index + token.length
|
||||
}
|
||||
|
||||
out += text.slice(cursor)
|
||||
return out
|
||||
}
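
A rough illustration of how the substitutions above behave, assuming the semantics in the code (unset env vars become an empty string, file contents are trimmed and JSON-escaped before insertion, and {file:...} tokens on commented-out lines are skipped); the paths and values are made up:

// Hypothetical input passed through substitute()
const input = `{
  // "token": "{file:./ignored-because-commented.txt}",
  "home": "{env:HOME}",
  "token": "{file:~/secrets/token.txt}"
}`

// With HOME=/Users/me and ~/secrets/token.txt containing "abc123\n",
// the output would be roughly:
//   { "home": "/Users/me", "token": "abc123" }
// with the commented line left exactly as written. A missing file either
// throws ConfigInvalidError or substitutes "" depending on the `missing` flag.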
|
||||
|
||||
/** Substitute and parse JSONC text, throwing JsonError on syntax errors. */
|
||||
export async function parseText(text: string, input: ParseSource, missing: "error" | "empty" = "error") {
|
||||
const configSource = source(input)
|
||||
text = await substitute(text, input, missing)
|
||||
|
||||
const errors: JsoncParseError[] = []
|
||||
const data = parseJsonc(text, errors, { allowTrailingComma: true })
|
||||
if (errors.length) {
|
||||
const lines = text.split("\n")
|
||||
const errorDetails = errors
|
||||
.map((e) => {
|
||||
const beforeOffset = text.substring(0, e.offset).split("\n")
|
||||
const line = beforeOffset.length
|
||||
const column = beforeOffset[beforeOffset.length - 1].length + 1
|
||||
const problemLine = lines[line - 1]
|
||||
|
||||
const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}`
|
||||
if (!problemLine) return error
|
||||
|
||||
return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^`
|
||||
})
|
||||
.join("\n")
|
||||
|
||||
throw new JsonError({
|
||||
path: configSource,
|
||||
message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${errorDetails}\n--- End ---`,
|
||||
})
|
||||
}
|
||||
|
||||
return data
|
||||
}
|
||||
}
|
||||
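
For reference, a sketch of the error reporting parseText produces for malformed input; the error label comes from jsonc-parser's printParseErrorCode, so treat the exact wording and positions as approximate:

// Hypothetical call with a missing comma
await ConfigPaths.parseText(`{ "theme": "dark" "keybinds": {} }`, "/tmp/opencode.jsonc")

// Throws ConfigJsonError with a message along the lines of:
//   --- JSONC Input ---
//   { "theme": "dark" "keybinds": {} }
//   --- Errors ---
//   CommaExpected at line 1, column 19
//     Line 1: { "theme": "dark" "keybinds": {} }
//                               ^
//   --- End ---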
@@ -1,34 +0,0 @@
import z from "zod"
import { Config } from "./config"

const KeybindOverride = z
  .object(
    Object.fromEntries(Object.keys(Config.Keybinds.shape).map((key) => [key, z.string().optional()])) as Record<
      string,
      z.ZodOptional<z.ZodString>
    >,
  )
  .strict()

export const TuiOptions = z.object({
  scroll_speed: z.number().min(0.001).optional().describe("TUI scroll speed"),
  scroll_acceleration: z
    .object({
      enabled: z.boolean().describe("Enable scroll acceleration"),
    })
    .optional()
    .describe("Scroll acceleration settings"),
  diff_style: z
    .enum(["auto", "stacked"])
    .optional()
    .describe("Control diff rendering style: 'auto' adapts to terminal width, 'stacked' always shows single column"),
})

export const TuiInfo = z
  .object({
    $schema: z.string().optional(),
    theme: z.string().optional(),
    keybinds: KeybindOverride.optional(),
  })
  .extend(TuiOptions.shape)
  .strict()
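
Two small notes on the schema above, shown as a sketch: KeybindOverride turns every key of Config.Keybinds.shape into an optional string, and .strict() on TuiInfo rejects anything outside $schema, theme, keybinds, and the TuiOptions fields. The keybind name used here is a stand-in, not necessarily one that Config.Keybinds defines:

KeybindOverride.parse({ app_exit: "ctrl+q" })   // ok, assuming "app_exit" is a known keybind
TuiInfo.parse({ theme: "dark", font_size: 14 }) // throws: "font_size" is not part of the schema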
@@ -1,118 +0,0 @@
|
||||
import { existsSync } from "fs"
|
||||
import z from "zod"
|
||||
import { mergeDeep, unique } from "remeda"
|
||||
import { Config } from "./config"
|
||||
import { ConfigPaths } from "./paths"
|
||||
import { migrateTuiConfig } from "./migrate-tui-config"
|
||||
import { TuiInfo } from "./tui-schema"
|
||||
import { Instance } from "@/project/instance"
|
||||
import { Flag } from "@/flag/flag"
|
||||
import { Log } from "@/util/log"
|
||||
import { Global } from "@/global"
|
||||
|
||||
export namespace TuiConfig {
|
||||
const log = Log.create({ service: "tui.config" })
|
||||
|
||||
export const Info = TuiInfo
|
||||
|
||||
export type Info = z.output<typeof Info>
|
||||
|
||||
function mergeInfo(target: Info, source: Info): Info {
|
||||
return mergeDeep(target, source)
|
||||
}
|
||||
|
||||
function customPath() {
|
||||
return Flag.OPENCODE_TUI_CONFIG
|
||||
}
|
||||
|
||||
const state = Instance.state(async () => {
|
||||
let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG
|
||||
? []
|
||||
: await ConfigPaths.projectFiles("tui", Instance.directory, Instance.worktree)
|
||||
const directories = await ConfigPaths.directories(Instance.directory, Instance.worktree)
|
||||
const custom = customPath()
|
||||
const managed = Config.managedConfigDir()
|
||||
await migrateTuiConfig({ directories, custom, managed })
|
||||
// Re-compute after migration since migrateTuiConfig may have created new tui.json files
|
||||
projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG
|
||||
? []
|
||||
: await ConfigPaths.projectFiles("tui", Instance.directory, Instance.worktree)
|
||||
|
||||
let result: Info = {}
|
||||
|
||||
for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) {
|
||||
result = mergeInfo(result, await loadFile(file))
|
||||
}
|
||||
|
||||
if (custom) {
|
||||
result = mergeInfo(result, await loadFile(custom))
|
||||
log.debug("loaded custom tui config", { path: custom })
|
||||
}
|
||||
|
||||
for (const file of projectFiles) {
|
||||
result = mergeInfo(result, await loadFile(file))
|
||||
}
|
||||
|
||||
for (const dir of unique(directories)) {
|
||||
if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue
|
||||
for (const file of ConfigPaths.fileInDirectory(dir, "tui")) {
|
||||
result = mergeInfo(result, await loadFile(file))
|
||||
}
|
||||
}
|
||||
|
||||
if (existsSync(managed)) {
|
||||
for (const file of ConfigPaths.fileInDirectory(managed, "tui")) {
|
||||
result = mergeInfo(result, await loadFile(file))
|
||||
}
|
||||
}
|
||||
|
||||
result.keybinds = Config.Keybinds.parse(result.keybinds ?? {})
|
||||
|
||||
return {
|
||||
config: result,
|
||||
}
|
||||
})
|
||||
|
||||
export async function get() {
|
||||
return state().then((x) => x.config)
|
||||
}
|
||||
|
||||
async function loadFile(filepath: string): Promise<Info> {
|
||||
const text = await ConfigPaths.readFile(filepath)
|
||||
if (!text) return {}
|
||||
return load(text, filepath).catch((error) => {
|
||||
log.warn("failed to load tui config", { path: filepath, error })
|
||||
return {}
|
||||
})
|
||||
}
|
||||
|
||||
async function load(text: string, configFilepath: string): Promise<Info> {
|
||||
const data = await ConfigPaths.parseText(text, configFilepath, "empty")
|
||||
if (!data || typeof data !== "object" || Array.isArray(data)) return {}
|
||||
|
||||
// Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json
|
||||
// (mirroring the old opencode.json shape) still get their settings applied.
|
||||
const normalized = (() => {
|
||||
const copy = { ...(data as Record<string, unknown>) }
|
||||
if (!("tui" in copy)) return copy
|
||||
if (!copy.tui || typeof copy.tui !== "object" || Array.isArray(copy.tui)) {
|
||||
delete copy.tui
|
||||
return copy
|
||||
}
|
||||
const tui = copy.tui as Record<string, unknown>
|
||||
delete copy.tui
|
||||
return {
|
||||
...tui,
|
||||
...copy,
|
||||
}
|
||||
})()
|
||||
|
||||
const parsed = Info.safeParse(normalized)
|
||||
if (!parsed.success) {
|
||||
log.warn("invalid tui config", { path: configFilepath, issues: parsed.error.issues })
|
||||
return {}
|
||||
}
|
||||
|
||||
return parsed.data
|
||||
}
|
||||
}
|
||||
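
A small illustration of the flattening described inside load() above, with made-up values; nested keys are hoisted, and a key that also appears at the top level keeps its top-level value because the spread order is { ...tui, ...copy }:

// Hypothetical tui.json still written in the old opencode.json shape
const raw = { tui: { scroll_speed: 2, diff_style: "stacked" }, theme: "dark", scroll_speed: 4 }

// After normalization this is parsed as:
//   { scroll_speed: 4, diff_style: "stacked", theme: "dark" }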
@@ -166,6 +166,7 @@ export namespace File {
|
||||
"efi",
|
||||
"rom",
|
||||
"com",
|
||||
"bat",
|
||||
"cmd",
|
||||
"ps1",
|
||||
"sh",
|
||||
@@ -202,77 +203,11 @@ export namespace File {
|
||||
"x3f",
|
||||
])
|
||||
|
||||
const textExtensions = new Set([
|
||||
"ts",
|
||||
"tsx",
|
||||
"mts",
|
||||
"cts",
|
||||
"mtsx",
|
||||
"ctsx",
|
||||
"js",
|
||||
"jsx",
|
||||
"mjs",
|
||||
"cjs",
|
||||
"sh",
|
||||
"bash",
|
||||
"zsh",
|
||||
"fish",
|
||||
"ps1",
|
||||
"psm1",
|
||||
"cmd",
|
||||
"bat",
|
||||
"json",
|
||||
"jsonc",
|
||||
"json5",
|
||||
"yaml",
|
||||
"yml",
|
||||
"toml",
|
||||
"md",
|
||||
"mdx",
|
||||
"txt",
|
||||
"xml",
|
||||
"html",
|
||||
"htm",
|
||||
"css",
|
||||
"scss",
|
||||
"sass",
|
||||
"less",
|
||||
"graphql",
|
||||
"gql",
|
||||
"sql",
|
||||
"ini",
|
||||
"cfg",
|
||||
"conf",
|
||||
"env",
|
||||
])
|
||||
|
||||
const textNames = new Set([
|
||||
"dockerfile",
|
||||
"makefile",
|
||||
".gitignore",
|
||||
".gitattributes",
|
||||
".editorconfig",
|
||||
".npmrc",
|
||||
".nvmrc",
|
||||
".prettierrc",
|
||||
".eslintrc",
|
||||
])
|
||||
|
||||
function isImageByExtension(filepath: string): boolean {
|
||||
const ext = path.extname(filepath).toLowerCase().slice(1)
|
||||
return imageExtensions.has(ext)
|
||||
}
|
||||
|
||||
function isTextByExtension(filepath: string): boolean {
|
||||
const ext = path.extname(filepath).toLowerCase().slice(1)
|
||||
return textExtensions.has(ext)
|
||||
}
|
||||
|
||||
function isTextByName(filepath: string): boolean {
|
||||
const name = path.basename(filepath).toLowerCase()
|
||||
return textNames.has(name)
|
||||
}
|
||||
|
||||
function getImageMimeType(filepath: string): string {
|
||||
const ext = path.extname(filepath).toLowerCase().slice(1)
|
||||
const mimeTypes: Record<string, string> = {
|
||||
@@ -510,9 +445,7 @@ export namespace File {
|
||||
return { type: "text", content: "" }
|
||||
}
|
||||
|
||||
const text = isTextByExtension(file) || isTextByName(file)
|
||||
|
||||
if (isBinaryByExtension(file) && !text) {
|
||||
if (isBinaryByExtension(file)) {
|
||||
return { type: "binary", content: "" }
|
||||
}
|
||||
|
||||
@@ -521,7 +454,7 @@ export namespace File {
|
||||
}
|
||||
|
||||
const mimeType = Filesystem.mimeType(full)
|
||||
const encode = text ? false : await shouldEncode(mimeType)
|
||||
const encode = await shouldEncode(mimeType)
|
||||
|
||||
if (encode && !isImage(mimeType)) {
|
||||
return { type: "binary", content: "", mimeType }
|
||||
|
||||
@@ -7,7 +7,6 @@ export namespace Flag {
|
||||
export const OPENCODE_AUTO_SHARE = truthy("OPENCODE_AUTO_SHARE")
|
||||
export const OPENCODE_GIT_BASH_PATH = process.env["OPENCODE_GIT_BASH_PATH"]
|
||||
export const OPENCODE_CONFIG = process.env["OPENCODE_CONFIG"]
|
||||
export declare const OPENCODE_TUI_CONFIG: string | undefined
|
||||
export declare const OPENCODE_CONFIG_DIR: string | undefined
|
||||
export const OPENCODE_CONFIG_CONTENT = process.env["OPENCODE_CONFIG_CONTENT"]
|
||||
export const OPENCODE_DISABLE_AUTOUPDATE = truthy("OPENCODE_DISABLE_AUTOUPDATE")
|
||||
@@ -75,17 +74,6 @@ Object.defineProperty(Flag, "OPENCODE_DISABLE_PROJECT_CONFIG", {
|
||||
configurable: false,
|
||||
})
|
||||
|
||||
// Dynamic getter for OPENCODE_TUI_CONFIG
|
||||
// This must be evaluated at access time, not module load time,
|
||||
// because tests and external tooling may set this env var at runtime
|
||||
Object.defineProperty(Flag, "OPENCODE_TUI_CONFIG", {
|
||||
get() {
|
||||
return process.env["OPENCODE_TUI_CONFIG"]
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: false,
|
||||
})
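
A short sketch of the difference the dynamic getter above makes; the flag name is real, the surrounding test code is hypothetical:

// Read eagerly at module load: captures whatever was set when the module was
// first imported and never reflects later changes.
const eager = process.env["OPENCODE_TUI_CONFIG"]

// With the Object.defineProperty getter, each access re-reads the environment,
// so a test can set the variable at runtime and see it through the flag:
process.env["OPENCODE_TUI_CONFIG"] = "/tmp/tui.json"
console.log(Flag.OPENCODE_TUI_CONFIG) // "/tmp/tui.json"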
|
||||
|
||||
// Dynamic getter for OPENCODE_CONFIG_DIR
|
||||
// This must be evaluated at access time, not module load time,
|
||||
// because external tooling may set this env var at runtime
|
||||
|
||||
@@ -147,7 +147,8 @@ export namespace LSPClient {
|
||||
notify: {
|
||||
async open(input: { path: string }) {
|
||||
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path)
|
||||
const text = await Filesystem.readText(input.path)
|
||||
const file = Bun.file(input.path)
|
||||
const text = await file.text()
|
||||
const extension = path.extname(input.path)
|
||||
const languageId = LANGUAGE_EXTENSIONS[extension] ?? "plaintext"
|
||||
|
||||
|
||||
@@ -131,7 +131,7 @@ export namespace LSPServer {
|
||||
"bin",
|
||||
"vue-language-server.js",
|
||||
)
|
||||
if (!(await Filesystem.exists(js))) {
|
||||
if (!(await Bun.file(js).exists())) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
await Bun.spawn([BunProc.which(), "install", "@vue/language-server"], {
|
||||
cwd: Global.Path.bin,
|
||||
@@ -173,14 +173,14 @@ export namespace LSPServer {
|
||||
if (!eslint) return
|
||||
log.info("spawning eslint server")
|
||||
const serverPath = path.join(Global.Path.bin, "vscode-eslint", "server", "out", "eslintServer.js")
|
||||
if (!(await Filesystem.exists(serverPath))) {
|
||||
if (!(await Bun.file(serverPath).exists())) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
log.info("downloading and building VS Code ESLint server")
|
||||
const response = await fetch("https://github.com/microsoft/vscode-eslint/archive/refs/heads/main.zip")
|
||||
if (!response.ok) return
|
||||
|
||||
const zipPath = path.join(Global.Path.bin, "vscode-eslint.zip")
|
||||
if (response.body) await Filesystem.writeStream(zipPath, response.body)
|
||||
await Bun.file(zipPath).write(response)
|
||||
|
||||
const ok = await Archive.extractZip(zipPath, Global.Path.bin)
|
||||
.then(() => true)
|
||||
@@ -242,7 +242,7 @@ export namespace LSPServer {
|
||||
|
||||
const resolveBin = async (target: string) => {
|
||||
const localBin = path.join(root, target)
|
||||
if (await Filesystem.exists(localBin)) return localBin
|
||||
if (await Bun.file(localBin).exists()) return localBin
|
||||
|
||||
const candidates = Filesystem.up({
|
||||
targets: [target],
|
||||
@@ -326,7 +326,7 @@ export namespace LSPServer {
|
||||
async spawn(root) {
|
||||
const localBin = path.join(root, "node_modules", ".bin", "biome")
|
||||
let bin: string | undefined
|
||||
if (await Filesystem.exists(localBin)) bin = localBin
|
||||
if (await Bun.file(localBin).exists()) bin = localBin
|
||||
if (!bin) {
|
||||
const found = Bun.which("biome")
|
||||
if (found) bin = found
|
||||
@@ -467,7 +467,7 @@ export namespace LSPServer {
|
||||
const potentialPythonPath = isWindows
|
||||
? path.join(venvPath, "Scripts", "python.exe")
|
||||
: path.join(venvPath, "bin", "python")
|
||||
if (await Filesystem.exists(potentialPythonPath)) {
|
||||
if (await Bun.file(potentialPythonPath).exists()) {
|
||||
initialization["pythonPath"] = potentialPythonPath
|
||||
break
|
||||
}
|
||||
@@ -479,7 +479,7 @@ export namespace LSPServer {
|
||||
const potentialTyPath = isWindows
|
||||
? path.join(venvPath, "Scripts", "ty.exe")
|
||||
: path.join(venvPath, "bin", "ty")
|
||||
if (await Filesystem.exists(potentialTyPath)) {
|
||||
if (await Bun.file(potentialTyPath).exists()) {
|
||||
binary = potentialTyPath
|
||||
break
|
||||
}
|
||||
@@ -511,7 +511,7 @@ export namespace LSPServer {
|
||||
const args = []
|
||||
if (!binary) {
|
||||
const js = path.join(Global.Path.bin, "node_modules", "pyright", "dist", "pyright-langserver.js")
|
||||
if (!(await Filesystem.exists(js))) {
|
||||
if (!(await Bun.file(js).exists())) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
await Bun.spawn([BunProc.which(), "install", "pyright"], {
|
||||
cwd: Global.Path.bin,
|
||||
@@ -536,7 +536,7 @@ export namespace LSPServer {
|
||||
const potentialPythonPath = isWindows
|
||||
? path.join(venvPath, "Scripts", "python.exe")
|
||||
: path.join(venvPath, "bin", "python")
|
||||
if (await Filesystem.exists(potentialPythonPath)) {
|
||||
if (await Bun.file(potentialPythonPath).exists()) {
|
||||
initialization["pythonPath"] = potentialPythonPath
|
||||
break
|
||||
}
|
||||
@@ -571,7 +571,7 @@ export namespace LSPServer {
|
||||
process.platform === "win32" ? "language_server.bat" : "language_server.sh",
|
||||
)
|
||||
|
||||
if (!(await Filesystem.exists(binary))) {
|
||||
if (!(await Bun.file(binary).exists())) {
|
||||
const elixir = Bun.which("elixir")
|
||||
if (!elixir) {
|
||||
log.error("elixir is required to run elixir-ls")
|
||||
@@ -584,7 +584,7 @@ export namespace LSPServer {
|
||||
const response = await fetch("https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip")
|
||||
if (!response.ok) return
|
||||
const zipPath = path.join(Global.Path.bin, "elixir-ls.zip")
|
||||
if (response.body) await Filesystem.writeStream(zipPath, response.body)
|
||||
await Bun.file(zipPath).write(response)
|
||||
|
||||
const ok = await Archive.extractZip(zipPath, Global.Path.bin)
|
||||
.then(() => true)
|
||||
@@ -692,7 +692,7 @@ export namespace LSPServer {
|
||||
}
|
||||
|
||||
const tempPath = path.join(Global.Path.bin, assetName)
|
||||
if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
|
||||
await Bun.file(tempPath).write(downloadResponse)
|
||||
|
||||
if (ext === "zip") {
|
||||
const ok = await Archive.extractZip(tempPath, Global.Path.bin)
|
||||
@@ -710,7 +710,7 @@ export namespace LSPServer {
|
||||
|
||||
bin = path.join(Global.Path.bin, "zls" + (platform === "win32" ? ".exe" : ""))
|
||||
|
||||
if (!(await Filesystem.exists(bin))) {
|
||||
if (!(await Bun.file(bin).exists())) {
|
||||
log.error("Failed to extract zls binary")
|
||||
return
|
||||
}
|
||||
@@ -857,7 +857,7 @@ export namespace LSPServer {
|
||||
// Stop at filesystem root
|
||||
const cargoTomlPath = path.join(currentDir, "Cargo.toml")
|
||||
try {
|
||||
const cargoTomlContent = await Filesystem.readText(cargoTomlPath)
|
||||
const cargoTomlContent = await Bun.file(cargoTomlPath).text()
|
||||
if (cargoTomlContent.includes("[workspace]")) {
|
||||
return currentDir
|
||||
}
|
||||
@@ -907,7 +907,7 @@ export namespace LSPServer {
|
||||
|
||||
const ext = process.platform === "win32" ? ".exe" : ""
|
||||
const direct = path.join(Global.Path.bin, "clangd" + ext)
|
||||
if (await Filesystem.exists(direct)) {
|
||||
if (await Bun.file(direct).exists()) {
|
||||
return {
|
||||
process: spawn(direct, args, {
|
||||
cwd: root,
|
||||
@@ -920,7 +920,7 @@ export namespace LSPServer {
|
||||
if (!entry.isDirectory()) continue
|
||||
if (!entry.name.startsWith("clangd_")) continue
|
||||
const candidate = path.join(Global.Path.bin, entry.name, "bin", "clangd" + ext)
|
||||
if (await Filesystem.exists(candidate)) {
|
||||
if (await Bun.file(candidate).exists()) {
|
||||
return {
|
||||
process: spawn(candidate, args, {
|
||||
cwd: root,
|
||||
@@ -990,7 +990,7 @@ export namespace LSPServer {
|
||||
log.error("Failed to write clangd archive")
|
||||
return
|
||||
}
|
||||
await Filesystem.write(archive, Buffer.from(buf))
|
||||
await Bun.write(archive, buf)
|
||||
|
||||
const zip = name.endsWith(".zip")
|
||||
const tar = name.endsWith(".tar.xz")
|
||||
@@ -1014,7 +1014,7 @@ export namespace LSPServer {
|
||||
await fs.rm(archive, { force: true })
|
||||
|
||||
const bin = path.join(Global.Path.bin, "clangd_" + tag, "bin", "clangd" + ext)
|
||||
if (!(await Filesystem.exists(bin))) {
|
||||
if (!(await Bun.file(bin).exists())) {
|
||||
log.error("Failed to extract clangd binary")
|
||||
return
|
||||
}
|
||||
@@ -1045,7 +1045,7 @@ export namespace LSPServer {
|
||||
const args: string[] = []
|
||||
if (!binary) {
|
||||
const js = path.join(Global.Path.bin, "node_modules", "svelte-language-server", "bin", "server.js")
|
||||
if (!(await Filesystem.exists(js))) {
|
||||
if (!(await Bun.file(js).exists())) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
await Bun.spawn([BunProc.which(), "install", "svelte-language-server"], {
|
||||
cwd: Global.Path.bin,
|
||||
@@ -1092,7 +1092,7 @@ export namespace LSPServer {
|
||||
const args: string[] = []
|
||||
if (!binary) {
|
||||
const js = path.join(Global.Path.bin, "node_modules", "@astrojs", "language-server", "bin", "nodeServer.js")
|
||||
if (!(await Filesystem.exists(js))) {
|
||||
if (!(await Bun.file(js).exists())) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
await Bun.spawn([BunProc.which(), "install", "@astrojs/language-server"], {
|
||||
cwd: Global.Path.bin,
|
||||
@@ -1248,7 +1248,7 @@ export namespace LSPServer {
|
||||
const distPath = path.join(Global.Path.bin, "kotlin-ls")
|
||||
const launcherScript =
|
||||
process.platform === "win32" ? path.join(distPath, "kotlin-lsp.cmd") : path.join(distPath, "kotlin-lsp.sh")
|
||||
const installed = await Filesystem.exists(launcherScript)
|
||||
const installed = await Bun.file(launcherScript).exists()
|
||||
if (!installed) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
log.info("Downloading Kotlin Language Server from GitHub.")
|
||||
@@ -1307,7 +1307,7 @@ export namespace LSPServer {
|
||||
}
|
||||
log.info("Installed Kotlin Language Server", { path: launcherScript })
|
||||
}
|
||||
if (!(await Filesystem.exists(launcherScript))) {
|
||||
if (!(await Bun.file(launcherScript).exists())) {
|
||||
log.error(`Failed to locate the Kotlin LS launcher script in the installed directory: ${distPath}.`)
|
||||
return
|
||||
}
|
||||
@@ -1336,7 +1336,7 @@ export namespace LSPServer {
|
||||
"src",
|
||||
"server.js",
|
||||
)
|
||||
const exists = await Filesystem.exists(js)
|
||||
const exists = await Bun.file(js).exists()
|
||||
if (!exists) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
await Bun.spawn([BunProc.which(), "install", "yaml-language-server"], {
|
||||
@@ -1443,7 +1443,7 @@ export namespace LSPServer {
|
||||
}
|
||||
|
||||
const tempPath = path.join(Global.Path.bin, assetName)
|
||||
if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
|
||||
await Bun.file(tempPath).write(downloadResponse)
|
||||
|
||||
// Unlike zls which is a single self-contained binary,
|
||||
// lua-language-server needs supporting files (meta/, locale/, etc.)
|
||||
@@ -1482,7 +1482,7 @@ export namespace LSPServer {
|
||||
// Binary is located in bin/ subdirectory within the extracted archive
|
||||
bin = path.join(installDir, "bin", "lua-language-server" + (platform === "win32" ? ".exe" : ""))
|
||||
|
||||
if (!(await Filesystem.exists(bin))) {
|
||||
if (!(await Bun.file(bin).exists())) {
|
||||
log.error("Failed to extract lua-language-server binary")
|
||||
return
|
||||
}
|
||||
@@ -1516,7 +1516,7 @@ export namespace LSPServer {
|
||||
const args: string[] = []
|
||||
if (!binary) {
|
||||
const js = path.join(Global.Path.bin, "node_modules", "intelephense", "lib", "intelephense.js")
|
||||
if (!(await Filesystem.exists(js))) {
|
||||
if (!(await Bun.file(js).exists())) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
await Bun.spawn([BunProc.which(), "install", "intelephense"], {
|
||||
cwd: Global.Path.bin,
|
||||
@@ -1613,7 +1613,7 @@ export namespace LSPServer {
|
||||
const args: string[] = []
|
||||
if (!binary) {
|
||||
const js = path.join(Global.Path.bin, "node_modules", "bash-language-server", "out", "cli.js")
|
||||
if (!(await Filesystem.exists(js))) {
|
||||
if (!(await Bun.file(js).exists())) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
await Bun.spawn([BunProc.which(), "install", "bash-language-server"], {
|
||||
cwd: Global.Path.bin,
|
||||
@@ -1654,17 +1654,22 @@ export namespace LSPServer {
|
||||
|
||||
if (!bin) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
log.info("downloading terraform-ls from HashiCorp releases")
|
||||
log.info("downloading terraform-ls from GitHub releases")
|
||||
|
||||
const releaseResponse = await fetch("https://api.releases.hashicorp.com/v1/releases/terraform-ls/latest")
|
||||
const releaseResponse = await fetch("https://api.github.com/repos/hashicorp/terraform-ls/releases/latest")
|
||||
if (!releaseResponse.ok) {
|
||||
log.error("Failed to fetch terraform-ls release info")
|
||||
return
|
||||
}
|
||||
|
||||
const release = (await releaseResponse.json()) as {
|
||||
version?: string
|
||||
builds?: { arch?: string; os?: string; url?: string }[]
|
||||
tag_name?: string
|
||||
assets?: { name?: string; browser_download_url?: string }[]
|
||||
}
|
||||
const version = release.tag_name?.replace("v", "")
|
||||
if (!version) {
|
||||
log.error("terraform-ls release did not include a version tag")
|
||||
return
|
||||
}
|
||||
|
||||
const platform = process.platform
|
||||
@@ -1673,21 +1678,23 @@ export namespace LSPServer {
|
||||
const tfArch = arch === "arm64" ? "arm64" : "amd64"
|
||||
const tfPlatform = platform === "win32" ? "windows" : platform
|
||||
|
||||
const builds = release.builds ?? []
|
||||
const build = builds.find((b) => b.arch === tfArch && b.os === tfPlatform)
|
||||
if (!build?.url) {
|
||||
log.error(`Could not find build for ${tfPlatform}/${tfArch} terraform-ls release version ${release.version}`)
|
||||
const assetName = `terraform-ls_${version}_${tfPlatform}_${tfArch}.zip`
|
||||
|
||||
const assets = release.assets ?? []
|
||||
const asset = assets.find((a) => a.name === assetName)
|
||||
if (!asset?.browser_download_url) {
|
||||
log.error(`Could not find asset ${assetName} in terraform-ls release`)
|
||||
return
|
||||
}
|
||||
|
||||
const downloadResponse = await fetch(build.url)
|
||||
const downloadResponse = await fetch(asset.browser_download_url)
|
||||
if (!downloadResponse.ok) {
|
||||
log.error("Failed to download terraform-ls")
|
||||
return
|
||||
}
|
||||
|
||||
const tempPath = path.join(Global.Path.bin, "terraform-ls.zip")
|
||||
if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
|
||||
const tempPath = path.join(Global.Path.bin, assetName)
|
||||
await Bun.file(tempPath).write(downloadResponse)
|
||||
|
||||
const ok = await Archive.extractZip(tempPath, Global.Path.bin)
|
||||
.then(() => true)
|
||||
@@ -1700,7 +1707,7 @@ export namespace LSPServer {
|
||||
|
||||
bin = path.join(Global.Path.bin, "terraform-ls" + (platform === "win32" ? ".exe" : ""))
|
||||
|
||||
if (!(await Filesystem.exists(bin))) {
|
||||
if (!(await Bun.file(bin).exists())) {
|
||||
log.error("Failed to extract terraform-ls binary")
|
||||
return
|
||||
}
|
||||
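
In the variant above that queries api.github.com, the asset name is derived from the release tag and from the platform/arch mapping a few lines earlier; a worked example with a made-up version:

// tag_name "v0.36.0"  ->  version "0.36.0"
// darwin/arm64 looks for:  terraform-ls_0.36.0_darwin_arm64.zip
// win32/x64  looks for:  terraform-ls_0.36.0_windows_amd64.zip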
@@ -1777,7 +1784,7 @@ export namespace LSPServer {
|
||||
}
|
||||
|
||||
const tempPath = path.join(Global.Path.bin, assetName)
|
||||
if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
|
||||
await Bun.file(tempPath).write(downloadResponse)
|
||||
|
||||
if (ext === "zip") {
|
||||
const ok = await Archive.extractZip(tempPath, Global.Path.bin)
|
||||
@@ -1796,7 +1803,7 @@ export namespace LSPServer {
|
||||
|
||||
bin = path.join(Global.Path.bin, "texlab" + (platform === "win32" ? ".exe" : ""))
|
||||
|
||||
if (!(await Filesystem.exists(bin))) {
|
||||
if (!(await Bun.file(bin).exists())) {
|
||||
log.error("Failed to extract texlab binary")
|
||||
return
|
||||
}
|
||||
@@ -1825,7 +1832,7 @@ export namespace LSPServer {
|
||||
const args: string[] = []
|
||||
if (!binary) {
|
||||
const js = path.join(Global.Path.bin, "node_modules", "dockerfile-language-server-nodejs", "lib", "server.js")
|
||||
if (!(await Filesystem.exists(js))) {
|
||||
if (!(await Bun.file(js).exists())) {
|
||||
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
|
||||
await Bun.spawn([BunProc.which(), "install", "dockerfile-language-server-nodejs"], {
|
||||
cwd: Global.Path.bin,
|
||||
@@ -1983,7 +1990,7 @@ export namespace LSPServer {
|
||||
}
|
||||
|
||||
const tempPath = path.join(Global.Path.bin, assetName)
|
||||
if (downloadResponse.body) await Filesystem.writeStream(tempPath, downloadResponse.body)
|
||||
await Bun.file(tempPath).write(downloadResponse)
|
||||
|
||||
if (ext === "zip") {
|
||||
const ok = await Archive.extractZip(tempPath, Global.Path.bin)
|
||||
@@ -2001,7 +2008,7 @@ export namespace LSPServer {
|
||||
|
||||
bin = path.join(Global.Path.bin, "tinymist" + (platform === "win32" ? ".exe" : ""))
|
||||
|
||||
if (!(await Filesystem.exists(bin))) {
|
||||
if (!(await Bun.file(bin).exists())) {
|
||||
log.error("Failed to extract tinymist binary")
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import path from "path"
|
||||
import z from "zod"
|
||||
import { Global } from "../global"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
|
||||
export namespace McpAuth {
|
||||
export const Tokens = z.object({
|
||||
@@ -54,22 +53,25 @@ export namespace McpAuth {
|
||||
}
|
||||
|
||||
export async function all(): Promise<Record<string, Entry>> {
|
||||
return Filesystem.readJson<Record<string, Entry>>(filepath).catch(() => ({}))
|
||||
const file = Bun.file(filepath)
|
||||
return file.json().catch(() => ({}))
|
||||
}
|
||||
|
||||
export async function set(mcpName: string, entry: Entry, serverUrl?: string): Promise<void> {
|
||||
const file = Bun.file(filepath)
|
||||
const data = await all()
|
||||
// Always update serverUrl if provided
|
||||
if (serverUrl) {
|
||||
entry.serverUrl = serverUrl
|
||||
}
|
||||
await Filesystem.writeJson(filepath, { ...data, [mcpName]: entry }, 0o600)
|
||||
await Bun.write(file, JSON.stringify({ ...data, [mcpName]: entry }, null, 2), { mode: 0o600 })
|
||||
}
|
||||
|
||||
export async function remove(mcpName: string): Promise<void> {
|
||||
const file = Bun.file(filepath)
|
||||
const data = await all()
|
||||
delete data[mcpName]
|
||||
await Filesystem.writeJson(filepath, data, 0o600)
|
||||
await Bun.write(file, JSON.stringify(data, null, 2), { mode: 0o600 })
|
||||
}
|
||||
|
||||
export async function updateTokens(mcpName: string, tokens: Tokens, serverUrl?: string): Promise<void> {
|
||||
|
||||
@@ -86,7 +86,8 @@ export namespace Project {
|
||||
const gitBinary = Bun.which("git")
|
||||
|
||||
// cached id calculation
|
||||
let id = await Filesystem.readText(path.join(dotgit, "opencode"))
|
||||
let id = await Bun.file(path.join(dotgit, "opencode"))
|
||||
.text()
|
||||
.then((x) => x.trim())
|
||||
.catch(() => undefined)
|
||||
|
||||
@@ -124,7 +125,9 @@ export namespace Project {
|
||||
|
||||
id = roots[0]
|
||||
if (id) {
|
||||
void Filesystem.write(path.join(dotgit, "opencode"), id).catch(() => undefined)
|
||||
void Bun.file(path.join(dotgit, "opencode"))
|
||||
.write(id)
|
||||
.catch(() => undefined)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -274,9 +277,10 @@ export namespace Project {
|
||||
)
|
||||
const shortest = matches.sort((a, b) => a.length - b.length)[0]
|
||||
if (!shortest) return
|
||||
const buffer = await Filesystem.readBytes(shortest)
|
||||
const base64 = buffer.toString("base64")
|
||||
const mime = Filesystem.mimeType(shortest) || "image/png"
|
||||
const file = Bun.file(shortest)
|
||||
const buffer = await file.arrayBuffer()
|
||||
const base64 = Buffer.from(buffer).toString("base64")
|
||||
const mime = file.type || "image/png"
|
||||
const url = `data:${mime};base64,${base64}`
|
||||
await update({
|
||||
projectID: input.id,
|
||||
@@ -377,8 +381,10 @@ export namespace Project {
|
||||
const data = fromRow(row)
|
||||
const valid: string[] = []
|
||||
for (const dir of data.sandboxes) {
|
||||
const s = Filesystem.stat(dir)
|
||||
if (s?.isDirectory()) valid.push(dir)
|
||||
const stat = await Bun.file(dir)
|
||||
.stat()
|
||||
.catch(() => undefined)
|
||||
if (stat?.isDirectory()) valid.push(dir)
|
||||
}
|
||||
return valid
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ import z from "zod"
|
||||
import { Installation } from "../installation"
|
||||
import { Flag } from "../flag/flag"
|
||||
import { lazy } from "@/util/lazy"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
|
||||
// Try to import bundled snapshot (generated at build time)
|
||||
// Falls back to undefined in dev mode when snapshot doesn't exist
|
||||
@@ -86,7 +85,8 @@ export namespace ModelsDev {
|
||||
}
|
||||
|
||||
export const Data = lazy(async () => {
|
||||
const result = await Filesystem.readJson(Flag.OPENCODE_MODELS_PATH ?? filepath).catch(() => {})
|
||||
const file = Bun.file(Flag.OPENCODE_MODELS_PATH ?? filepath)
|
||||
const result = await file.json().catch(() => {})
|
||||
if (result) return result
|
||||
// @ts-ignore
|
||||
const snapshot = await import("./models-snapshot")
|
||||
@@ -104,6 +104,7 @@ export namespace ModelsDev {
|
||||
}
|
||||
|
||||
export async function refresh() {
|
||||
const file = Bun.file(filepath)
|
||||
const result = await fetch(`${url()}/api.json`, {
|
||||
headers: {
|
||||
"User-Agent": Installation.USER_AGENT,
|
||||
@@ -115,7 +116,7 @@ export namespace ModelsDev {
|
||||
})
|
||||
})
|
||||
if (result && result.ok) {
|
||||
await Filesystem.write(filepath, await result.text())
|
||||
await Bun.write(file, await result.text())
|
||||
ModelsDev.Data.reset()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +16,6 @@ import { Flag } from "../flag/flag"
|
||||
import { iife } from "@/util/iife"
|
||||
import { Global } from "../global"
|
||||
import path from "path"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
|
||||
// Direct imports for bundled providers
|
||||
import { createAmazonBedrock, type AmazonBedrockProviderSettings } from "@ai-sdk/amazon-bedrock"
|
||||
@@ -40,8 +39,6 @@ import { createTogetherAI } from "@ai-sdk/togetherai"
|
||||
import { createPerplexity } from "@ai-sdk/perplexity"
|
||||
import { createVercel } from "@ai-sdk/vercel"
|
||||
import { createGitLab, VERSION as GITLAB_PROVIDER_VERSION } from "@gitlab/gitlab-ai-provider"
|
||||
import { fromNodeProviderChain } from "@aws-sdk/credential-providers"
|
||||
import { GoogleAuth } from "google-auth-library"
|
||||
import { ProviderTransform } from "./transform"
|
||||
import { Installation } from "../installation"
|
||||
|
||||
@@ -254,6 +251,8 @@ export namespace Provider {
|
||||
// Only use credential chain if no bearer token exists
|
||||
// Bearer token takes precedence over credential chain (profiles, access keys, IAM roles, web identity tokens)
|
||||
if (!awsBearerToken) {
|
||||
const { fromNodeProviderChain } = await import(await BunProc.install("@aws-sdk/credential-providers"))
|
||||
|
||||
// Build credential provider options (only pass profile if specified)
|
||||
const credentialProviderOptions = profile ? { profile } : {}
|
||||
|
||||
@@ -396,9 +395,11 @@ export namespace Provider {
|
||||
project,
|
||||
location,
|
||||
fetch: async (input: RequestInfo | URL, init?: RequestInit) => {
|
||||
const { GoogleAuth } = await import(await BunProc.install("google-auth-library"))
|
||||
const auth = new GoogleAuth()
|
||||
const client = await auth.getApplicationDefault()
|
||||
const token = await client.credential.getAccessToken()
|
||||
const credentials = await client.credential
|
||||
const token = await credentials.getAccessToken()
|
||||
|
||||
const headers = new Headers(init?.headers)
|
||||
headers.set("Authorization", `Bearer ${token.token}`)
|
||||
@@ -1290,9 +1291,8 @@ export namespace Provider {
|
||||
if (cfg.model) return parseModel(cfg.model)
|
||||
|
||||
const providers = await list()
|
||||
const recent = (await Filesystem.readJson<{ recent?: { providerID: string; modelID: string }[] }>(
|
||||
path.join(Global.Path.state, "model.json"),
|
||||
)
|
||||
const recent = (await Bun.file(path.join(Global.Path.state, "model.json"))
|
||||
.json()
|
||||
.then((x) => (Array.isArray(x.recent) ? x.recent : []))
|
||||
.catch(() => [])) as { providerID: string; modelID: string }[]
|
||||
for (const entry of recent) {
|
||||
|
||||
@@ -18,26 +18,27 @@ export namespace Pty {
|
||||
|
||||
type Socket = {
|
||||
readyState: number
|
||||
send: (data: string | Uint8Array | ArrayBuffer) => void
|
||||
data: object
|
||||
send: (data: string | Uint8Array<ArrayBuffer> | ArrayBuffer) => void
|
||||
close: (code?: number, reason?: string) => void
|
||||
}
|
||||
|
||||
type Subscriber = {
|
||||
id: number
|
||||
// Bun's ServerWebSocket has a per-connection `.data` object (set during
|
||||
// `server.upgrade`) that changes when the underlying connection is recycled.
|
||||
// We keep a reference to a stable part of it so output can't leak even when
|
||||
// websocket objects are reused.
|
||||
const token = (ws: Socket) => {
|
||||
const data = ws.data
|
||||
const events = (data as { events?: unknown }).events
|
||||
if (events && typeof events === "object") return events
|
||||
|
||||
const url = (data as { url?: unknown }).url
|
||||
if (url && typeof url === "object") return url
|
||||
|
||||
return data
|
||||
}
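
A sketch of how the stable token above is meant to be used, mirroring the subscribe/publish code in this file; setup details are illustrative:

// On connect, remember the token derived from the socket's current .data
session.subscribers.set(ws, token(ws))

// When pty output arrives, re-derive the token and compare it with what was
// stored. If Bun recycled the connection, ws.data is a fresh object, the
// tokens differ, and the stale subscriber is dropped instead of receiving
// output that belongs to a different client.
for (const [socket, stored] of session.subscribers) {
  if (token(socket) !== stored) session.subscribers.delete(socket)
}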
|
||||
|
||||
const sockets = new WeakMap<object, number>()
|
||||
const owners = new WeakMap<object, string>()
|
||||
let socketCounter = 0
|
||||
|
||||
const tagSocket = (ws: Socket) => {
|
||||
if (!ws || typeof ws !== "object") return
|
||||
const next = (socketCounter = (socketCounter + 1) % Number.MAX_SAFE_INTEGER)
|
||||
sockets.set(ws, next)
|
||||
return next
|
||||
}
|
||||
|
||||
// WebSocket control frame: 0x00 + UTF-8 JSON.
|
||||
// WebSocket control frame: 0x00 + UTF-8 JSON (currently { cursor }).
|
||||
const meta = (cursor: number) => {
|
||||
const json = JSON.stringify({ cursor })
|
||||
const bytes = encoder.encode(json)
|
||||
@@ -101,7 +102,7 @@ export namespace Pty {
|
||||
buffer: string
|
||||
bufferCursor: number
|
||||
cursor: number
|
||||
subscribers: Map<Socket, Subscriber>
|
||||
subscribers: Map<Socket, object>
|
||||
}
|
||||
|
||||
const state = Instance.state(
|
||||
@@ -184,13 +185,13 @@ export namespace Pty {
|
||||
ptyProcess.onData((chunk) => {
|
||||
session.cursor += chunk.length
|
||||
|
||||
for (const [ws, sub] of session.subscribers) {
|
||||
for (const [ws, data] of session.subscribers) {
|
||||
if (ws.readyState !== 1) {
|
||||
session.subscribers.delete(ws)
|
||||
continue
|
||||
}
|
||||
|
||||
if (typeof ws === "object" && sockets.get(ws) !== sub.id) {
|
||||
if (token(ws) !== data) {
|
||||
session.subscribers.delete(ws)
|
||||
continue
|
||||
}
|
||||
@@ -279,25 +280,6 @@ export namespace Pty {
|
||||
}
|
||||
log.info("client connected to session", { id })
|
||||
|
||||
const socketId = tagSocket(ws)
|
||||
if (socketId === undefined) {
|
||||
ws.close()
|
||||
return
|
||||
}
|
||||
|
||||
const previous = owners.get(ws)
|
||||
if (previous && previous !== id) {
|
||||
state().get(previous)?.subscribers.delete(ws)
|
||||
}
|
||||
|
||||
owners.set(ws, id)
|
||||
session.subscribers.set(ws, { id: socketId })
|
||||
|
||||
const cleanup = () => {
|
||||
session.subscribers.delete(ws)
|
||||
if (owners.get(ws) === id) owners.delete(ws)
|
||||
}
|
||||
|
||||
const start = session.bufferCursor
|
||||
const end = session.cursor
|
||||
|
||||
@@ -318,7 +300,6 @@ export namespace Pty {
|
||||
ws.send(data.slice(i, i + BUFFER_CHUNK))
|
||||
}
|
||||
} catch {
|
||||
cleanup()
|
||||
ws.close()
|
||||
return
|
||||
}
|
||||
@@ -327,17 +308,23 @@ export namespace Pty {
|
||||
try {
|
||||
ws.send(meta(end))
|
||||
} catch {
|
||||
cleanup()
|
||||
ws.close()
|
||||
return
|
||||
}
|
||||
|
||||
if (!ws.data || typeof ws.data !== "object") {
|
||||
ws.close()
|
||||
return
|
||||
}
|
||||
|
||||
session.subscribers.set(ws, token(ws))
|
||||
return {
|
||||
onMessage: (message: string | ArrayBuffer) => {
|
||||
session.process.write(String(message))
|
||||
},
|
||||
onClose: () => {
|
||||
log.info("client disconnected from session", { id })
|
||||
cleanup()
|
||||
session.subscribers.delete(ws)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -163,13 +163,18 @@ export const PtyRoutes = lazy(() =>
|
||||
|
||||
type Socket = {
|
||||
readyState: number
|
||||
send: (data: string | Uint8Array | ArrayBuffer) => void
|
||||
data: object
|
||||
send: (data: string | Uint8Array<ArrayBuffer> | ArrayBuffer) => void
|
||||
close: (code?: number, reason?: string) => void
|
||||
}
|
||||
|
||||
const isSocket = (value: unknown): value is Socket => {
|
||||
if (!value || typeof value !== "object") return false
|
||||
if (!("readyState" in value)) return false
|
||||
if (!("data" in value)) return false
|
||||
if (!((value as { data?: unknown }).data && typeof (value as { data?: unknown }).data === "object")) {
|
||||
return false
|
||||
}
|
||||
if (!("send" in value) || typeof (value as { send?: unknown }).send !== "function") return false
|
||||
if (!("close" in value) || typeof (value as { close?: unknown }).close !== "function") return false
|
||||
return typeof (value as { readyState?: unknown }).readyState === "number"
|
||||
@@ -177,12 +182,12 @@ export const PtyRoutes = lazy(() =>
|
||||
|
||||
return {
|
||||
onOpen(_event, ws) {
|
||||
const socket = ws.raw
|
||||
if (!isSocket(socket)) {
|
||||
const raw = ws.raw
|
||||
if (!isSocket(raw)) {
|
||||
ws.close()
|
||||
return
|
||||
}
|
||||
handler = Pty.connect(id, socket, cursor)
|
||||
handler = Pty.connect(id, raw, cursor)
|
||||
},
|
||||
onMessage(event) {
|
||||
if (typeof event.data !== "string") return
|
||||
|
||||
@@ -85,7 +85,7 @@ export namespace InstructionPrompt {
|
||||
}
|
||||
|
||||
for (const file of globalFiles()) {
|
||||
if (await Filesystem.exists(file)) {
|
||||
if (await Bun.file(file).exists()) {
|
||||
paths.add(path.resolve(file))
|
||||
break
|
||||
}
|
||||
@@ -120,7 +120,9 @@ export namespace InstructionPrompt {
|
||||
const paths = await systemPaths()
|
||||
|
||||
const files = Array.from(paths).map(async (p) => {
|
||||
const content = await Filesystem.readText(p).catch(() => "")
|
||||
const content = await Bun.file(p)
|
||||
.text()
|
||||
.catch(() => "")
|
||||
return content ? "Instructions from: " + p + "\n" + content : ""
|
||||
})
|
||||
|
||||
@@ -162,7 +164,7 @@ export namespace InstructionPrompt {
|
||||
export async function find(dir: string) {
|
||||
for (const file of FILES) {
|
||||
const filepath = path.resolve(path.join(dir, file))
|
||||
if (await Filesystem.exists(filepath)) return filepath
|
||||
if (await Bun.file(filepath).exists()) return filepath
|
||||
}
|
||||
}
|
||||
|
||||
@@ -180,7 +182,9 @@ export namespace InstructionPrompt {
|
||||
|
||||
if (found && found !== target && !system.has(found) && !already.has(found) && !isClaimed(messageID, found)) {
|
||||
claim(messageID, found)
|
||||
const content = await Filesystem.readText(found).catch(() => undefined)
|
||||
const content = await Bun.file(found)
|
||||
.text()
|
||||
.catch(() => undefined)
|
||||
if (content) {
|
||||
results.push({ filepath: found, content: "Instructions from: " + found + "\n" + content })
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ import path from "path"
|
||||
import os from "os"
|
||||
import fs from "fs/promises"
|
||||
import z from "zod"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
import { Identifier } from "../id/id"
|
||||
import { MessageV2 } from "./message-v2"
|
||||
import { Log } from "../util/log"
|
||||
@@ -1083,9 +1082,11 @@ export namespace SessionPrompt {
|
||||
// have to normalize, symbol search returns absolute paths
|
||||
// Decode the pathname since URL constructor doesn't automatically decode it
|
||||
const filepath = fileURLToPath(part.url)
|
||||
const s = Filesystem.stat(filepath)
|
||||
const stat = await Bun.file(filepath)
|
||||
.stat()
|
||||
.catch(() => undefined)
|
||||
|
||||
if (s?.isDirectory()) {
|
||||
if (stat?.isDirectory()) {
|
||||
part.mime = "application/x-directory"
|
||||
}
|
||||
|
||||
@@ -1232,13 +1233,14 @@ export namespace SessionPrompt {
|
||||
]
|
||||
}
|
||||
|
||||
const file = Bun.file(filepath)
|
||||
FileTime.read(input.sessionID, filepath)
|
||||
return [
|
||||
{
|
||||
messageID: info.id,
|
||||
sessionID: input.sessionID,
|
||||
type: "text",
|
||||
text: `Called the Read tool with the following input: {"filePath":"${filepath}"}`,
|
||||
text: `Called the Read tool with the following input: {\"filePath\":\"${filepath}\"}`,
|
||||
synthetic: true,
|
||||
},
|
||||
{
|
||||
@@ -1246,7 +1248,7 @@ export namespace SessionPrompt {
|
||||
messageID: info.id,
|
||||
sessionID: input.sessionID,
|
||||
type: "file",
|
||||
url: `data:${part.mime};base64,` + (await Filesystem.readBytes(filepath)).toString("base64"),
|
||||
url: `data:${part.mime};base64,` + Buffer.from(await file.bytes()).toString("base64"),
|
||||
mime: part.mime,
|
||||
filename: part.filename!,
|
||||
source: part.source,
|
||||
@@ -1352,7 +1354,7 @@ export namespace SessionPrompt {
|
||||
// Switching from plan mode to build mode
|
||||
if (input.agent.name !== "plan" && assistantMessage?.info.agent === "plan") {
|
||||
const plan = Session.plan(input.session)
|
||||
const exists = await Filesystem.exists(plan)
|
||||
const exists = await Bun.file(plan).exists()
|
||||
if (exists) {
|
||||
const part = await Session.updatePart({
|
||||
id: Identifier.ascending("part"),
|
||||
@@ -1371,7 +1373,7 @@ export namespace SessionPrompt {
|
||||
// Entering plan mode
|
||||
if (input.agent.name === "plan" && assistantMessage?.info.agent !== "plan") {
|
||||
const plan = Session.plan(input.session)
|
||||
const exists = await Filesystem.exists(plan)
|
||||
const exists = await Bun.file(plan).exists()
|
||||
if (!exists) await fs.mkdir(path.dirname(plan), { recursive: true })
|
||||
const part = await Session.updatePart({
|
||||
id: Identifier.ascending("part"),
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { Flag } from "@/flag/flag"
|
||||
import { lazy } from "@/util/lazy"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import path from "path"
|
||||
import { spawn, type ChildProcess } from "child_process"
|
||||
|
||||
@@ -44,7 +43,7 @@ export namespace Shell {
|
||||
// git.exe is typically at: C:\Program Files\Git\cmd\git.exe
|
||||
// bash.exe is at: C:\Program Files\Git\bin\bash.exe
|
||||
const bash = path.join(git, "..", "..", "bin", "bash.exe")
|
||||
if (Filesystem.stat(bash)?.size) return bash
|
||||
if (Bun.file(bash).size) return bash
|
||||
}
|
||||
return process.env.COMSPEC || "cmd.exe"
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ import path from "path"
|
||||
import { mkdir } from "fs/promises"
|
||||
import { Log } from "../util/log"
|
||||
import { Global } from "../global"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
|
||||
export namespace Discovery {
|
||||
const log = Log.create({ service: "skill-discovery" })
|
||||
@@ -20,14 +19,14 @@ export namespace Discovery {
|
||||
}
|
||||
|
||||
async function get(url: string, dest: string): Promise<boolean> {
|
||||
if (await Filesystem.exists(dest)) return true
|
||||
if (await Bun.file(dest).exists()) return true
|
||||
return fetch(url)
|
||||
.then(async (response) => {
|
||||
if (!response.ok) {
|
||||
log.error("failed to download", { url, status: response.status })
|
||||
return false
|
||||
}
|
||||
if (response.body) await Filesystem.writeStream(dest, response.body)
|
||||
await Bun.write(dest, await response.text())
|
||||
return true
|
||||
})
|
||||
.catch((err) => {
|
||||
@@ -89,7 +88,7 @@ export namespace Discovery {
|
||||
)
|
||||
|
||||
const md = path.join(root, "SKILL.md")
|
||||
if (await Filesystem.exists(md)) result.push(root)
|
||||
if (await Bun.file(md).exists()) result.push(root)
|
||||
}),
|
||||
)
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ import { Log } from "../util/log"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import z from "zod"
|
||||
import path from "path"
|
||||
import { readFileSync, readdirSync, existsSync } from "fs"
|
||||
import { readFileSync, readdirSync } from "fs"
|
||||
import * as schema from "./schema"
|
||||
|
||||
declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined
|
||||
@@ -54,7 +54,7 @@ export namespace Database {
|
||||
const sql = dirs
|
||||
.map((name) => {
|
||||
const file = path.join(dir, name, "migration.sql")
|
||||
if (!existsSync(file)) return
|
||||
if (!Bun.file(file).size) return
|
||||
return {
|
||||
sql: readFileSync(file, "utf-8"),
|
||||
timestamp: time(name),
|
||||
|
||||
@@ -7,7 +7,6 @@ import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } fro
|
||||
import { SessionShareTable } from "../share/share.sql"
|
||||
import path from "path"
|
||||
import { existsSync } from "fs"
|
||||
import { Filesystem } from "../util/filesystem"
|
||||
|
||||
export namespace JsonMigration {
|
||||
const log = Log.create({ service: "json-migration" })
|
||||
@@ -83,7 +82,7 @@ export namespace JsonMigration {
|
||||
const count = end - start
|
||||
const tasks = new Array(count)
|
||||
for (let i = 0; i < count; i++) {
|
||||
tasks[i] = Filesystem.readJson(files[start + i])
|
||||
tasks[i] = Bun.file(files[start + i]).json()
|
||||
}
|
||||
const results = await Promise.allSettled(tasks)
|
||||
const items = new Array(count)
|
||||
|
||||
@@ -39,7 +39,7 @@ export namespace Storage {
cwd: path.join(project, projectDir),
absolute: true,
})) {
const json = await Filesystem.readJson<any>(msgFile)
const json = await Bun.file(msgFile).json()
worktree = json.path?.root
if (worktree) break
}
@@ -60,15 +60,18 @@ export namespace Storage {
if (!id) continue
projectID = id

await Filesystem.writeJson(path.join(dir, "project", projectID + ".json"), {
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
})
await Bun.write(
path.join(dir, "project", projectID + ".json"),
JSON.stringify({
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
}),
)

log.info(`migrating sessions for project ${projectID}`)
for await (const sessionFile of new Bun.Glob("storage/session/info/*.json").scan({
@@ -80,8 +83,8 @@ export namespace Storage {
sessionFile,
dest,
})
const session = await Filesystem.readJson<any>(sessionFile)
await Filesystem.writeJson(dest, session)
const session = await Bun.file(sessionFile).json()
await Bun.write(dest, JSON.stringify(session))
log.info(`migrating messages for session ${session.id}`)
for await (const msgFile of new Bun.Glob(`storage/session/message/${session.id}/*.json`).scan({
cwd: fullProjectDir,
@@ -92,8 +95,8 @@ export namespace Storage {
msgFile,
dest,
})
const message = await Filesystem.readJson<any>(msgFile)
await Filesystem.writeJson(dest, message)
const message = await Bun.file(msgFile).json()
await Bun.write(dest, JSON.stringify(message))

log.info(`migrating parts for message ${message.id}`)
for await (const partFile of new Bun.Glob(`storage/session/part/${session.id}/${message.id}/*.json`).scan(
@@ -103,12 +106,12 @@ export namespace Storage {
},
)) {
const dest = path.join(dir, "part", message.id, path.basename(partFile))
const part = await Filesystem.readJson(partFile)
const part = await Bun.file(partFile).json()
log.info("copying", {
partFile,
dest,
})
await Filesystem.writeJson(dest, part)
await Bun.write(dest, JSON.stringify(part))
}
}
}
@@ -120,32 +123,35 @@ export namespace Storage {
cwd: dir,
absolute: true,
})) {
const session = await Filesystem.readJson<any>(item)
const session = await Bun.file(item).json()
if (!session.projectID) continue
if (!session.summary?.diffs) continue
const { diffs } = session.summary
await Filesystem.write(path.join(dir, "session_diff", session.id + ".json"), JSON.stringify(diffs))
await Filesystem.writeJson(path.join(dir, "session", session.projectID, session.id + ".json"), {
...session,
summary: {
additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
},
})
await Bun.file(path.join(dir, "session_diff", session.id + ".json")).write(JSON.stringify(diffs))
await Bun.file(path.join(dir, "session", session.projectID, session.id + ".json")).write(
JSON.stringify({
...session,
summary: {
additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
},
}),
)
}
},
]

const state = lazy(async () => {
const dir = path.join(Global.Path.data, "storage")
const migration = await Filesystem.readJson<string>(path.join(dir, "migration"))
const migration = await Bun.file(path.join(dir, "migration"))
.json()
.then((x) => parseInt(x))
.catch(() => 0)
for (let index = migration; index < MIGRATIONS.length; index++) {
log.info("running migration", { index })
const migration = MIGRATIONS[index]
await migration(dir).catch(() => log.error("failed to run migration", { index }))
await Filesystem.write(path.join(dir, "migration"), (index + 1).toString())
await Bun.write(path.join(dir, "migration"), (index + 1).toString())
}
return {
dir,
@@ -165,7 +171,7 @@ export namespace Storage {
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.read(target)
const result = await Filesystem.readJson<T>(target)
const result = await Bun.file(target).json()
return result as T
})
}
@@ -175,10 +181,10 @@ export namespace Storage {
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.write(target)
const content = await Filesystem.readJson<T>(target)
fn(content as T)
await Filesystem.writeJson(target, content)
return content
const content = await Bun.file(target).json()
fn(content)
await Bun.write(target, JSON.stringify(content, null, 2))
return content as T
})
}

@@ -187,7 +193,7 @@ export namespace Storage {
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.write(target)
await Filesystem.writeJson(target, content)
await Bun.write(target, JSON.stringify(content, null, 2))
})
}

@@ -49,7 +49,7 @@ export const EditTool = Tool.define("edit", {
let contentNew = ""
await FileTime.withLock(filePath, async () => {
if (params.oldString === "") {
const existed = await Filesystem.exists(filePath)
const existed = await Bun.file(filePath).exists()
contentNew = params.newString
diff = trimDiff(createTwoFilesPatch(filePath, filePath, contentOld, contentNew))
await ctx.ask({
@@ -61,7 +61,7 @@ export const EditTool = Tool.define("edit", {
diff,
},
})
await Filesystem.write(filePath, params.newString)
await Bun.write(filePath, params.newString)
await Bus.publish(File.Event.Edited, {
file: filePath,
})
@@ -73,11 +73,12 @@ export const EditTool = Tool.define("edit", {
return
}

const stats = Filesystem.stat(filePath)
const file = Bun.file(filePath)
const stats = await file.stat().catch(() => {})
if (!stats) throw new Error(`File ${filePath} not found`)
if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filePath}`)
await FileTime.assert(ctx.sessionID, filePath)
contentOld = await Filesystem.readText(filePath)
contentOld = await file.text()
contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll)

diff = trimDiff(
@@ -93,7 +94,7 @@ export const EditTool = Tool.define("edit", {
},
})

await Filesystem.write(filePath, contentNew)
await file.write(contentNew)
await Bus.publish(File.Event.Edited, {
file: filePath,
})
@@ -101,7 +102,7 @@ export const EditTool = Tool.define("edit", {
file: filePath,
event: "change",
})
contentNew = await Filesystem.readText(filePath)
contentNew = await file.text()
diff = trimDiff(
createTwoFilesPatch(filePath, filePath, normalizeLineEndings(contentOld), normalizeLineEndings(contentNew)),
)

@@ -1,7 +1,6 @@
import z from "zod"
import path from "path"
import { Tool } from "./tool"
import { Filesystem } from "../util/filesystem"
import DESCRIPTION from "./glob.txt"
import { Ripgrep } from "../file/ripgrep"
import { Instance } from "../project/instance"
@@ -46,7 +45,10 @@ export const GlobTool = Tool.define("glob", {
break
}
const full = path.resolve(search, file)
const stats = Filesystem.stat(full)?.mtime.getTime() ?? 0
const stats = await Bun.file(full)
.stat()
.then((x) => x.mtime.getTime())
.catch(() => 0)
files.push({
path: full,
mtime: stats,

@@ -1,6 +1,5 @@
import z from "zod"
import { Tool } from "./tool"
import { Filesystem } from "../util/filesystem"
import { Ripgrep } from "../file/ripgrep"

import DESCRIPTION from "./grep.txt"
@@ -84,7 +83,8 @@ export const GrepTool = Tool.define("grep", {
const lineNum = parseInt(lineNumStr, 10)
const lineText = lineTextParts.join("|")

const stats = Filesystem.stat(filePath)
const file = Bun.file(filePath)
const stats = await file.stat().catch(() => null)
if (!stats) continue

matches.push({

@@ -6,7 +6,6 @@ import DESCRIPTION from "./lsp.txt"
import { Instance } from "../project/instance"
import { pathToFileURL } from "url"
import { assertExternalDirectory } from "./external-directory"
import { Filesystem } from "../util/filesystem"

const operations = [
"goToDefinition",
@@ -48,7 +47,7 @@ export const LspTool = Tool.define("lsp", {
const relPath = path.relative(Instance.worktree, file)
const title = `${args.operation} ${relPath}:${args.line}:${args.character}`

const exists = await Filesystem.exists(file)
const exists = await Bun.file(file).exists()
if (!exists) {
throw new Error(`File not found: ${file}`)
}

@@ -10,7 +10,6 @@ import DESCRIPTION from "./read.txt"
import { Instance } from "../project/instance"
import { assertExternalDirectory } from "./external-directory"
import { InstructionPrompt } from "../session/instruction"
import { Filesystem } from "../util/filesystem"

const DEFAULT_READ_LIMIT = 2000
const MAX_LINE_LENGTH = 2000
@@ -35,7 +34,8 @@ export const ReadTool = Tool.define("read", {
}
const title = path.relative(Instance.worktree, filepath)

const stat = Filesystem.stat(filepath)
const file = Bun.file(filepath)
const stat = await file.stat().catch(() => undefined)

await assertExternalDirectory(ctx, filepath, {
bypass: Boolean(ctx.extra?.["bypassCwdCheck"]),
@@ -118,10 +118,11 @@ export const ReadTool = Tool.define("read", {
const instructions = await InstructionPrompt.resolve(ctx.messages, filepath, ctx.messageID)

// Exclude SVG (XML-based) and vnd.fastbidsheet (.fbs extension, commonly FlatBuffers schema files)
const mime = Filesystem.mimeType(filepath)
const isImage = mime.startsWith("image/") && mime !== "image/svg+xml" && mime !== "image/vnd.fastbidsheet"
const isPdf = mime === "application/pdf"
const isImage =
file.type.startsWith("image/") && file.type !== "image/svg+xml" && file.type !== "image/vnd.fastbidsheet"
const isPdf = file.type === "application/pdf"
if (isImage || isPdf) {
const mime = file.type
const msg = `${isImage ? "Image" : "PDF"} read successfully`
return {
title,
@@ -135,13 +136,13 @@ export const ReadTool = Tool.define("read", {
{
type: "file",
mime,
url: `data:${mime};base64,${Buffer.from(await Filesystem.readBytes(filepath)).toString("base64")}`,
url: `data:${mime};base64,${Buffer.from(await file.bytes()).toString("base64")}`,
},
],
}
}

const isBinary = await isBinaryFile(filepath, Number(stat.size))
const isBinary = await isBinaryFile(filepath, stat.size)
if (isBinary) throw new Error(`Cannot read binary file: ${filepath}`)

const stream = createReadStream(filepath, { encoding: "utf8" })

@@ -5,7 +5,6 @@ import { Identifier } from "../id/id"
import { PermissionNext } from "../permission/next"
import type { Agent } from "../agent/agent"
import { Scheduler } from "../scheduler"
import { Filesystem } from "../util/filesystem"

export namespace Truncate {
export const MAX_LINES = 2000
@@ -92,7 +91,7 @@ export namespace Truncate {

const id = Identifier.ascending("tool")
const filepath = path.join(DIR, id)
await Filesystem.write(filepath, text)
await Bun.write(Bun.file(filepath), text)

const hint = hasTaskTool(agent)
? `The tool call succeeded but the output was truncated. Full output saved to: ${filepath}\nUse the Task tool to have explore agent process this file with Grep and Read (with offset/limit). Do NOT read the full file yourself - delegate to save context.`

@@ -26,8 +26,9 @@ export const WriteTool = Tool.define("write", {
const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(Instance.directory, params.filePath)
await assertExternalDirectory(ctx, filepath)

const exists = await Filesystem.exists(filepath)
const contentOld = exists ? await Filesystem.readText(filepath) : ""
const file = Bun.file(filepath)
const exists = await file.exists()
const contentOld = exists ? await file.text() : ""
if (exists) await FileTime.assert(ctx.sessionID, filepath)

const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, params.content))
@@ -41,7 +42,7 @@ export const WriteTool = Tool.define("write", {
},
})

await Filesystem.write(filepath, params.content)
await Bun.write(filepath, params.content)
await Bus.publish(File.Event.Edited, {
file: filepath,
})

@@ -1,10 +1,8 @@
import { chmod, mkdir, readFile, writeFile } from "fs/promises"
import { createWriteStream, existsSync, statSync } from "fs"
import { mkdir, readFile, writeFile } from "fs/promises"
import { existsSync, statSync } from "fs"
import { lookup } from "mime-types"
import { realpathSync } from "fs"
import { dirname, join, relative } from "path"
import { Readable } from "stream"
import { pipeline } from "stream/promises"

export namespace Filesystem {
// Fast sync version for metadata checks
@@ -41,16 +39,11 @@ export namespace Filesystem {
return readFile(p)
}

export async function readArrayBuffer(p: string): Promise<ArrayBuffer> {
const buf = await readFile(p)
return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength) as ArrayBuffer
}

function isEnoent(e: unknown): e is { code: "ENOENT" } {
return typeof e === "object" && e !== null && "code" in e && (e as { code: string }).code === "ENOENT"
}

export async function write(p: string, content: string | Buffer | Uint8Array, mode?: number): Promise<void> {
export async function write(p: string, content: string | Buffer, mode?: number): Promise<void> {
try {
if (mode) {
await writeFile(p, content, { mode })
@@ -75,25 +68,6 @@ export namespace Filesystem {
return write(p, JSON.stringify(data, null, 2), mode)
}

export async function writeStream(
p: string,
stream: ReadableStream<Uint8Array> | Readable,
mode?: number,
): Promise<void> {
const dir = dirname(p)
if (!existsSync(dir)) {
await mkdir(dir, { recursive: true })
}

const nodeStream = stream instanceof ReadableStream ? Readable.fromWeb(stream as any) : stream
const writeStream = createWriteStream(p)
await pipeline(nodeStream, writeStream)

if (mode) {
await chmod(p, mode)
}
}

export function mimeType(p: string): string {
return lookup(p) || "application/octet-stream"
}

@@ -1,6 +1,5 @@
import path from "path"
import fs from "fs/promises"
import { createWriteStream } from "fs"
import { Global } from "../global"
import z from "zod"

@@ -64,15 +63,13 @@ export namespace Log {
Global.Path.log,
options.dev ? "dev.log" : new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log",
)
const logfile = Bun.file(logpath)
await fs.truncate(logpath).catch(() => {})
const stream = createWriteStream(logpath, { flags: "a" })
const writer = logfile.writer()
write = async (msg: any) => {
return new Promise((resolve, reject) => {
stream.write(msg, (err) => {
if (err) reject(err)
else resolve(msg.length)
})
})
const num = writer.write(msg)
writer.flush()
return num
}
}

@@ -7,7 +7,6 @@ import path from "path"
import fs from "fs/promises"
import { pathToFileURL } from "url"
import { Global } from "../../src/global"
import { Filesystem } from "../../src/util/filesystem"

// Get managed config directory from environment (set in preload.ts)
const managedConfigDir = process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR!
@@ -18,11 +17,11 @@ afterEach(async () => {

async function writeManagedSettings(settings: object, filename = "opencode.json") {
await fs.mkdir(managedConfigDir, { recursive: true })
await Filesystem.write(path.join(managedConfigDir, filename), JSON.stringify(settings))
await Bun.write(path.join(managedConfigDir, filename), JSON.stringify(settings))
}

async function writeConfig(dir: string, config: object, name = "opencode.json") {
await Filesystem.write(path.join(dir, name), JSON.stringify(config))
await Bun.write(path.join(dir, name), JSON.stringify(config))
}

test("loads config with defaults when no files exist", async () => {
|
||||
@@ -56,32 +55,10 @@ test("loads JSON config file", async () => {
|
||||
})
|
||||
})
|
||||
|
||||
test("ignores legacy tui keys in opencode config", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await writeConfig(dir, {
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
model: "test/model",
|
||||
theme: "legacy",
|
||||
tui: { scroll_speed: 4 },
|
||||
})
|
||||
},
|
||||
})
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await Config.get()
|
||||
expect(config.model).toBe("test/model")
|
||||
expect((config as Record<string, unknown>).theme).toBeUndefined()
|
||||
expect((config as Record<string, unknown>).tui).toBeUndefined()
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("loads JSONC config file", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.jsonc"),
|
||||
`{
|
||||
// This is a comment
|
||||
@@ -132,14 +109,14 @@ test("merges multiple config files with correct precedence", async () => {
|
||||
|
||||
test("handles environment variable substitution", async () => {
|
||||
const originalEnv = process.env["TEST_VAR"]
|
||||
process.env["TEST_VAR"] = "test-user"
|
||||
process.env["TEST_VAR"] = "test_theme"
|
||||
|
||||
try {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await writeConfig(dir, {
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
username: "{env:TEST_VAR}",
|
||||
theme: "{env:TEST_VAR}",
|
||||
})
|
||||
},
|
||||
})
|
||||
@@ -147,7 +124,7 @@ test("handles environment variable substitution", async () => {
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await Config.get()
|
||||
expect(config.username).toBe("test-user")
|
||||
expect(config.theme).toBe("test_theme")
|
||||
},
|
||||
})
|
||||
} finally {
|
||||
@@ -167,10 +144,10 @@ test("preserves env variables when adding $schema to config", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
// Config without $schema - should trigger auto-add
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
username: "{env:PRESERVE_VAR}",
|
||||
theme: "{env:PRESERVE_VAR}",
|
||||
}),
|
||||
)
|
||||
},
|
||||
@@ -179,10 +156,10 @@ test("preserves env variables when adding $schema to config", async () => {
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await Config.get()
|
||||
expect(config.username).toBe("secret_value")
|
||||
expect(config.theme).toBe("secret_value")
|
||||
|
||||
// Read the file to verify the env variable was preserved
|
||||
const content = await Filesystem.readText(path.join(tmp.path, "opencode.json"))
|
||||
const content = await Bun.file(path.join(tmp.path, "opencode.json")).text()
|
||||
expect(content).toContain("{env:PRESERVE_VAR}")
|
||||
expect(content).not.toContain("secret_value")
|
||||
expect(content).toContain("$schema")
|
||||
@@ -200,10 +177,10 @@ test("preserves env variables when adding $schema to config", async () => {
|
||||
test("handles file inclusion substitution", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(path.join(dir, "included.txt"), "test-user")
|
||||
await Bun.write(path.join(dir, "included.txt"), "test_theme")
|
||||
await writeConfig(dir, {
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
username: "{file:included.txt}",
|
||||
theme: "{file:included.txt}",
|
||||
})
|
||||
},
|
||||
})
|
||||
@@ -211,7 +188,7 @@ test("handles file inclusion substitution", async () => {
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await Config.get()
|
||||
expect(config.username).toBe("test-user")
|
||||
expect(config.theme).toBe("test_theme")
|
||||
},
|
||||
})
|
||||
})
|
||||
@@ -219,10 +196,10 @@ test("handles file inclusion substitution", async () => {
|
||||
test("handles file inclusion with replacement tokens", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(path.join(dir, "included.md"), "const out = await Bun.$`echo hi`")
|
||||
await Bun.write(path.join(dir, "included.md"), "const out = await Bun.$`echo hi`")
|
||||
await writeConfig(dir, {
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
username: "{file:included.md}",
|
||||
theme: "{file:included.md}",
|
||||
})
|
||||
},
|
||||
})
|
||||
@@ -230,7 +207,7 @@ test("handles file inclusion with replacement tokens", async () => {
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await Config.get()
|
||||
expect(config.username).toBe("const out = await Bun.$`echo hi`")
|
||||
expect(config.theme).toBe("const out = await Bun.$`echo hi`")
|
||||
},
|
||||
})
|
||||
})
|
||||
@@ -256,7 +233,7 @@ test("validates config schema and throws on invalid fields", async () => {
|
||||
test("throws error for invalid JSON", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(path.join(dir, "opencode.json"), "{ invalid json }")
|
||||
await Bun.write(path.join(dir, "opencode.json"), "{ invalid json }")
|
||||
},
|
||||
})
|
||||
await Instance.provide({
|
||||
@@ -359,7 +336,7 @@ test("handles command configuration", async () => {
|
||||
test("migrates autoshare to share field", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -381,7 +358,7 @@ test("migrates autoshare to share field", async () => {
|
||||
test("migrates mode field to agent field", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -418,7 +395,7 @@ test("loads config from .opencode directory", async () => {
|
||||
const agentDir = path.join(opencodeDir, "agent")
|
||||
await fs.mkdir(agentDir, { recursive: true })
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(agentDir, "test.md"),
|
||||
`---
|
||||
model: test/model
|
||||
@@ -451,7 +428,7 @@ test("loads agents from .opencode/agents (plural)", async () => {
|
||||
const agentsDir = path.join(opencodeDir, "agents")
|
||||
await fs.mkdir(path.join(agentsDir, "nested"), { recursive: true })
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(agentsDir, "helper.md"),
|
||||
`---
|
||||
model: test/model
|
||||
@@ -460,7 +437,7 @@ mode: subagent
|
||||
Helper agent prompt`,
|
||||
)
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(agentsDir, "nested", "child.md"),
|
||||
`---
|
||||
model: test/model
|
||||
@@ -502,7 +479,7 @@ test("loads commands from .opencode/command (singular)", async () => {
|
||||
const commandDir = path.join(opencodeDir, "command")
|
||||
await fs.mkdir(path.join(commandDir, "nested"), { recursive: true })
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(commandDir, "hello.md"),
|
||||
`---
|
||||
description: Test command
|
||||
@@ -510,7 +487,7 @@ description: Test command
|
||||
Hello from singular command`,
|
||||
)
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(commandDir, "nested", "child.md"),
|
||||
`---
|
||||
description: Nested command
|
||||
@@ -547,7 +524,7 @@ test("loads commands from .opencode/commands (plural)", async () => {
|
||||
const commandsDir = path.join(opencodeDir, "commands")
|
||||
await fs.mkdir(path.join(commandsDir, "nested"), { recursive: true })
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(commandsDir, "hello.md"),
|
||||
`---
|
||||
description: Test command
|
||||
@@ -555,7 +532,7 @@ description: Test command
|
||||
Hello from plural commands`,
|
||||
)
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(commandsDir, "nested", "child.md"),
|
||||
`---
|
||||
description: Nested command
|
||||
@@ -591,7 +568,7 @@ test("updates config and writes to file", async () => {
|
||||
const newConfig = { model: "updated/model" }
|
||||
await Config.update(newConfig as any)
|
||||
|
||||
const writtenConfig = await Filesystem.readJson(path.join(tmp.path, "config.json"))
|
||||
const writtenConfig = JSON.parse(await Bun.file(path.join(tmp.path, "config.json")).text())
|
||||
expect(writtenConfig.model).toBe("updated/model")
|
||||
},
|
||||
})
|
||||
@@ -662,8 +639,8 @@ test("installs dependencies in writable OPENCODE_CONFIG_DIR", async () => {
|
||||
},
|
||||
})
|
||||
|
||||
expect(await Filesystem.exists(path.join(tmp.extra, "package.json"))).toBe(true)
|
||||
expect(await Filesystem.exists(path.join(tmp.extra, ".gitignore"))).toBe(true)
|
||||
expect(await Bun.file(path.join(tmp.extra, "package.json")).exists()).toBe(true)
|
||||
expect(await Bun.file(path.join(tmp.extra, ".gitignore")).exists()).toBe(true)
|
||||
} finally {
|
||||
if (prev === undefined) delete process.env.OPENCODE_CONFIG_DIR
|
||||
else process.env.OPENCODE_CONFIG_DIR = prev
|
||||
@@ -676,12 +653,12 @@ test("resolves scoped npm plugins in config", async () => {
|
||||
const pluginDir = path.join(dir, "node_modules", "@scope", "plugin")
|
||||
await fs.mkdir(pluginDir, { recursive: true })
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "package.json"),
|
||||
JSON.stringify({ name: "config-fixture", version: "1.0.0", type: "module" }, null, 2),
|
||||
)
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(pluginDir, "package.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
@@ -695,9 +672,9 @@ test("resolves scoped npm plugins in config", async () => {
|
||||
),
|
||||
)
|
||||
|
||||
await Filesystem.write(path.join(pluginDir, "index.js"), "export default {}\n")
|
||||
await Bun.write(path.join(pluginDir, "index.js"), "export default {}\n")
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({ $schema: "https://opencode.ai/config.json", plugin: ["@scope/plugin"] }, null, 2),
|
||||
)
|
||||
@@ -731,7 +708,7 @@ test("merges plugin arrays from global and local configs", async () => {
|
||||
await fs.mkdir(opencodeDir, { recursive: true })
|
||||
|
||||
// Global config with plugins
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -740,7 +717,7 @@ test("merges plugin arrays from global and local configs", async () => {
|
||||
)
|
||||
|
||||
// Local .opencode config with different plugins
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(opencodeDir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -776,7 +753,7 @@ test("does not error when only custom agent is a subagent", async () => {
|
||||
const agentDir = path.join(opencodeDir, "agent")
|
||||
await fs.mkdir(agentDir, { recursive: true })
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(agentDir, "helper.md"),
|
||||
`---
|
||||
model: test/model
|
||||
@@ -807,7 +784,7 @@ test("merges instructions arrays from global and local configs", async () => {
|
||||
const opencodeDir = path.join(projectDir, ".opencode")
|
||||
await fs.mkdir(opencodeDir, { recursive: true })
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -815,7 +792,7 @@ test("merges instructions arrays from global and local configs", async () => {
|
||||
}),
|
||||
)
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(opencodeDir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -846,7 +823,7 @@ test("deduplicates duplicate instructions from global and local configs", async
|
||||
const opencodeDir = path.join(projectDir, ".opencode")
|
||||
await fs.mkdir(opencodeDir, { recursive: true })
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -854,7 +831,7 @@ test("deduplicates duplicate instructions from global and local configs", async
|
||||
}),
|
||||
)
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(opencodeDir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -890,7 +867,7 @@ test("deduplicates duplicate plugins from global and local configs", async () =>
|
||||
await fs.mkdir(opencodeDir, { recursive: true })
|
||||
|
||||
// Global config with plugins
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -899,7 +876,7 @@ test("deduplicates duplicate plugins from global and local configs", async () =>
|
||||
)
|
||||
|
||||
// Local .opencode config with some overlapping plugins
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(opencodeDir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -938,7 +915,7 @@ test("deduplicates duplicate plugins from global and local configs", async () =>
|
||||
test("migrates legacy tools config to permissions - allow", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -969,7 +946,7 @@ test("migrates legacy tools config to permissions - allow", async () => {
|
||||
test("migrates legacy tools config to permissions - deny", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1000,7 +977,7 @@ test("migrates legacy tools config to permissions - deny", async () => {
|
||||
test("migrates legacy write tool to edit permission", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1065,6 +1042,7 @@ test("managed settings override project settings", async () => {
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
autoupdate: true,
|
||||
disabled_providers: [],
|
||||
theme: "dark",
|
||||
})
|
||||
},
|
||||
})
|
||||
@@ -1081,6 +1059,7 @@ test("managed settings override project settings", async () => {
|
||||
const config = await Config.get()
|
||||
expect(config.autoupdate).toBe(false)
|
||||
expect(config.disabled_providers).toEqual(["openai"])
|
||||
expect(config.theme).toBe("dark")
|
||||
},
|
||||
})
|
||||
})
|
||||
@@ -1107,7 +1086,7 @@ test("missing managed settings file is not an error", async () => {
|
||||
test("migrates legacy edit tool to edit permission", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1136,7 +1115,7 @@ test("migrates legacy edit tool to edit permission", async () => {
|
||||
test("migrates legacy patch tool to edit permission", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1165,7 +1144,7 @@ test("migrates legacy patch tool to edit permission", async () => {
|
||||
test("migrates legacy multiedit tool to edit permission", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1194,7 +1173,7 @@ test("migrates legacy multiedit tool to edit permission", async () => {
|
||||
test("migrates mixed legacy tools config", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1229,7 +1208,7 @@ test("migrates mixed legacy tools config", async () => {
|
||||
test("merges legacy tools with existing permission config", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1262,7 +1241,7 @@ test("merges legacy tools with existing permission config", async () => {
|
||||
test("permission config preserves key order", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1310,7 +1289,7 @@ test("project config can override MCP server enabled status", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
// Simulates a base config (like from remote .well-known) with disabled MCP
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.jsonc"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1329,7 +1308,7 @@ test("project config can override MCP server enabled status", async () => {
|
||||
}),
|
||||
)
|
||||
// Project config enables just jira
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1368,7 +1347,7 @@ test("MCP config deep merges preserving base config properties", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
// Base config with full MCP definition
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.jsonc"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1385,7 +1364,7 @@ test("MCP config deep merges preserving base config properties", async () => {
|
||||
}),
|
||||
)
|
||||
// Override just enables it, should preserve other properties
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1420,7 +1399,7 @@ test("local .opencode config can override MCP from project config", async () =>
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
// Project config with disabled MCP
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1436,7 +1415,7 @@ test("local .opencode config can override MCP from project config", async () =>
|
||||
// Local .opencode directory config enables it
|
||||
const opencodeDir = path.join(dir, ".opencode")
|
||||
await fs.mkdir(opencodeDir, { recursive: true })
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(opencodeDir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1504,7 +1483,7 @@ test("project config overrides remote well-known config", async () => {
|
||||
git: true,
|
||||
init: async (dir) => {
|
||||
// Project config enables jira (overriding remote default)
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1597,7 +1576,7 @@ describe("deduplicatePlugins", () => {
|
||||
const pluginDir = path.join(opencodeDir, "plugin")
|
||||
await fs.mkdir(pluginDir, { recursive: true })
|
||||
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1605,7 +1584,7 @@ describe("deduplicatePlugins", () => {
|
||||
}),
|
||||
)
|
||||
|
||||
await Filesystem.write(path.join(pluginDir, "my-plugin.js"), "export default {}")
|
||||
await Bun.write(path.join(pluginDir, "my-plugin.js"), "export default {}")
|
||||
},
|
||||
})
|
||||
|
||||
@@ -1632,7 +1611,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
// Create a project config that would normally be loaded
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1670,7 +1649,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
|
||||
// Create a .opencode directory with a command
|
||||
const opencodeDir = path.join(dir, ".opencode", "command")
|
||||
await fs.mkdir(opencodeDir, { recursive: true })
|
||||
await Filesystem.write(path.join(opencodeDir, "test-cmd.md"), "# Test Command\nThis is a test command.")
|
||||
await Bun.write(path.join(opencodeDir, "test-cmd.md"), "# Test Command\nThis is a test command.")
|
||||
},
|
||||
})
|
||||
await Instance.provide({
|
||||
@@ -1727,7 +1706,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
// Create a config with relative instruction path
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1735,7 +1714,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
|
||||
}),
|
||||
)
|
||||
// Create the instruction file (should be skipped)
|
||||
await Filesystem.write(path.join(dir, "CUSTOM.md"), "# Custom Instructions")
|
||||
await Bun.write(path.join(dir, "CUSTOM.md"), "# Custom Instructions")
|
||||
},
|
||||
})
|
||||
|
||||
@@ -1773,7 +1752,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
|
||||
await using configDirTmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
// Create config in the custom config dir
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1786,7 +1765,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
|
||||
await using projectTmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
// Create config in project (should be ignored)
|
||||
await Filesystem.write(
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
@@ -1821,66 +1800,3 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => {
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe("OPENCODE_CONFIG_CONTENT token substitution", () => {
|
||||
test("substitutes {env:} tokens in OPENCODE_CONFIG_CONTENT", async () => {
|
||||
const originalEnv = process.env["OPENCODE_CONFIG_CONTENT"]
|
||||
const originalTestVar = process.env["TEST_CONFIG_VAR"]
|
||||
process.env["TEST_CONFIG_VAR"] = "test_api_key_12345"
|
||||
process.env["OPENCODE_CONFIG_CONTENT"] = JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
username: "{env:TEST_CONFIG_VAR}",
|
||||
})
|
||||
|
||||
try {
|
||||
await using tmp = await tmpdir()
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await Config.get()
|
||||
expect(config.username).toBe("test_api_key_12345")
|
||||
},
|
||||
})
|
||||
} finally {
|
||||
if (originalEnv !== undefined) {
|
||||
process.env["OPENCODE_CONFIG_CONTENT"] = originalEnv
|
||||
} else {
|
||||
delete process.env["OPENCODE_CONFIG_CONTENT"]
|
||||
}
|
||||
if (originalTestVar !== undefined) {
|
||||
process.env["TEST_CONFIG_VAR"] = originalTestVar
|
||||
} else {
|
||||
delete process.env["TEST_CONFIG_VAR"]
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test("substitutes {file:} tokens in OPENCODE_CONFIG_CONTENT", async () => {
|
||||
const originalEnv = process.env["OPENCODE_CONFIG_CONTENT"]
|
||||
|
||||
try {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Filesystem.write(path.join(dir, "api_key.txt"), "secret_key_from_file")
|
||||
process.env["OPENCODE_CONFIG_CONTENT"] = JSON.stringify({
|
||||
$schema: "https://opencode.ai/config.json",
|
||||
username: "{file:./api_key.txt}",
|
||||
})
|
||||
},
|
||||
})
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await Config.get()
|
||||
expect(config.username).toBe("secret_key_from_file")
|
||||
},
|
||||
})
|
||||
} finally {
|
||||
if (originalEnv !== undefined) {
|
||||
process.env["OPENCODE_CONFIG_CONTENT"] = originalEnv
|
||||
} else {
|
||||
delete process.env["OPENCODE_CONFIG_CONTENT"]
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,510 +0,0 @@
|
||||
import { afterEach, expect, test } from "bun:test"
|
||||
import path from "path"
|
||||
import fs from "fs/promises"
|
||||
import { tmpdir } from "../fixture/fixture"
|
||||
import { Instance } from "../../src/project/instance"
|
||||
import { TuiConfig } from "../../src/config/tui"
|
||||
import { Global } from "../../src/global"
|
||||
import { Filesystem } from "../../src/util/filesystem"
|
||||
|
||||
const managedConfigDir = process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR!
|
||||
|
||||
afterEach(async () => {
|
||||
delete process.env.OPENCODE_CONFIG
|
||||
delete process.env.OPENCODE_TUI_CONFIG
|
||||
await fs.rm(path.join(Global.Path.config, "tui.json"), { force: true }).catch(() => {})
|
||||
await fs.rm(path.join(Global.Path.config, "tui.jsonc"), { force: true }).catch(() => {})
|
||||
await fs.rm(managedConfigDir, { force: true, recursive: true }).catch(() => {})
|
||||
})
|
||||
|
||||
test("loads tui config with the same precedence order as server config paths", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(path.join(Global.Path.config, "tui.json"), JSON.stringify({ theme: "global" }, null, 2))
|
||||
await Bun.write(path.join(dir, "tui.json"), JSON.stringify({ theme: "project" }, null, 2))
|
||||
await fs.mkdir(path.join(dir, ".opencode"), { recursive: true })
|
||||
await Bun.write(
|
||||
path.join(dir, ".opencode", "tui.json"),
|
||||
JSON.stringify({ theme: "local", diff_style: "stacked" }, null, 2),
|
||||
)
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.theme).toBe("local")
|
||||
expect(config.diff_style).toBe("stacked")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("migrates tui-specific keys from opencode.json when tui.json does not exist", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
theme: "migrated-theme",
|
||||
tui: { scroll_speed: 5 },
|
||||
keybinds: { app_exit: "ctrl+q" },
|
||||
},
|
||||
null,
|
||||
2,
|
||||
),
|
||||
)
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.theme).toBe("migrated-theme")
|
||||
expect(config.scroll_speed).toBe(5)
|
||||
expect(config.keybinds?.app_exit).toBe("ctrl+q")
|
||||
const text = await Filesystem.readText(path.join(tmp.path, "tui.json"))
|
||||
expect(JSON.parse(text)).toMatchObject({
|
||||
theme: "migrated-theme",
|
||||
scroll_speed: 5,
|
||||
})
|
||||
const server = JSON.parse(await Filesystem.readText(path.join(tmp.path, "opencode.json")))
|
||||
expect(server.theme).toBeUndefined()
|
||||
expect(server.keybinds).toBeUndefined()
|
||||
expect(server.tui).toBeUndefined()
|
||||
expect(await Filesystem.exists(path.join(tmp.path, "opencode.json.tui-migration.bak"))).toBe(true)
|
||||
expect(await Filesystem.exists(path.join(tmp.path, "tui.json"))).toBe(true)
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("migrates project legacy tui keys even when global tui.json already exists", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(path.join(Global.Path.config, "tui.json"), JSON.stringify({ theme: "global" }, null, 2))
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
theme: "project-migrated",
|
||||
tui: { scroll_speed: 2 },
|
||||
},
|
||||
null,
|
||||
2,
|
||||
),
|
||||
)
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.theme).toBe("project-migrated")
|
||||
expect(config.scroll_speed).toBe(2)
|
||||
expect(await Filesystem.exists(path.join(tmp.path, "tui.json"))).toBe(true)
|
||||
|
||||
const server = JSON.parse(await Filesystem.readText(path.join(tmp.path, "opencode.json")))
|
||||
expect(server.theme).toBeUndefined()
|
||||
expect(server.tui).toBeUndefined()
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("drops unknown legacy tui keys during migration", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
theme: "migrated-theme",
|
||||
tui: { scroll_speed: 2, foo: 1 },
|
||||
},
|
||||
null,
|
||||
2,
|
||||
),
|
||||
)
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.theme).toBe("migrated-theme")
|
||||
expect(config.scroll_speed).toBe(2)
|
||||
|
||||
const text = await Filesystem.readText(path.join(tmp.path, "tui.json"))
|
||||
const migrated = JSON.parse(text)
|
||||
expect(migrated.scroll_speed).toBe(2)
|
||||
expect(migrated.foo).toBeUndefined()
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("skips migration when opencode.jsonc is syntactically invalid", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.jsonc"),
|
||||
`{
|
||||
"theme": "broken-theme",
|
||||
"tui": { "scroll_speed": 2 }
|
||||
"username": "still-broken"
|
||||
}`,
|
||||
)
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.theme).toBeUndefined()
|
||||
expect(config.scroll_speed).toBeUndefined()
|
||||
expect(await Filesystem.exists(path.join(tmp.path, "tui.json"))).toBe(false)
|
||||
expect(await Filesystem.exists(path.join(tmp.path, "opencode.jsonc.tui-migration.bak"))).toBe(false)
|
||||
const source = await Filesystem.readText(path.join(tmp.path, "opencode.jsonc"))
|
||||
expect(source).toContain('"theme": "broken-theme"')
|
||||
expect(source).toContain('"tui": { "scroll_speed": 2 }')
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("skips migration when tui.json already exists", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(path.join(dir, "opencode.json"), JSON.stringify({ theme: "legacy" }, null, 2))
|
||||
await Bun.write(path.join(dir, "tui.json"), JSON.stringify({ diff_style: "stacked" }, null, 2))
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.diff_style).toBe("stacked")
|
||||
expect(config.theme).toBeUndefined()
|
||||
|
||||
const server = JSON.parse(await Filesystem.readText(path.join(tmp.path, "opencode.json")))
|
||||
expect(server.theme).toBe("legacy")
|
||||
expect(await Filesystem.exists(path.join(tmp.path, "opencode.json.tui-migration.bak"))).toBe(false)
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("continues loading tui config when legacy source cannot be stripped", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(path.join(dir, "opencode.json"), JSON.stringify({ theme: "readonly-theme" }, null, 2))
|
||||
},
|
||||
})
|
||||
|
||||
const source = path.join(tmp.path, "opencode.json")
|
||||
await fs.chmod(source, 0o444)
|
||||
|
||||
try {
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.theme).toBe("readonly-theme")
|
||||
expect(await Filesystem.exists(path.join(tmp.path, "tui.json"))).toBe(true)
|
||||
|
||||
const server = JSON.parse(await Filesystem.readText(source))
|
||||
expect(server.theme).toBe("readonly-theme")
|
||||
},
|
||||
})
|
||||
} finally {
|
||||
await fs.chmod(source, 0o644)
|
||||
}
|
||||
})
|
||||
|
||||
test("migration backup preserves JSONC comments", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(
|
||||
path.join(dir, "opencode.jsonc"),
|
||||
`{
|
||||
// top-level comment
|
||||
"theme": "jsonc-theme",
|
||||
"tui": {
|
||||
// nested comment
|
||||
"scroll_speed": 1.5
|
||||
}
|
||||
}`,
|
||||
)
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
await TuiConfig.get()
|
||||
const backup = await Filesystem.readText(path.join(tmp.path, "opencode.jsonc.tui-migration.bak"))
|
||||
expect(backup).toContain("// top-level comment")
|
||||
expect(backup).toContain("// nested comment")
|
||||
expect(backup).toContain('"theme": "jsonc-theme"')
|
||||
expect(backup).toContain('"scroll_speed": 1.5')
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("migrates legacy tui keys across multiple opencode.json levels", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
const nested = path.join(dir, "apps", "client")
|
||||
await fs.mkdir(nested, { recursive: true })
|
||||
await Bun.write(path.join(dir, "opencode.json"), JSON.stringify({ theme: "root-theme" }, null, 2))
|
||||
await Bun.write(path.join(nested, "opencode.json"), JSON.stringify({ theme: "nested-theme" }, null, 2))
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: path.join(tmp.path, "apps", "client"),
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.theme).toBe("nested-theme")
|
||||
expect(await Filesystem.exists(path.join(tmp.path, "tui.json"))).toBe(true)
|
||||
expect(await Filesystem.exists(path.join(tmp.path, "apps", "client", "tui.json"))).toBe(true)
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("flattens nested tui key inside tui.json", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(
|
||||
path.join(dir, "tui.json"),
|
||||
JSON.stringify({
|
||||
theme: "outer",
|
||||
tui: { scroll_speed: 3, diff_style: "stacked" },
|
||||
}),
|
||||
)
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.scroll_speed).toBe(3)
|
||||
expect(config.diff_style).toBe("stacked")
|
||||
// top-level keys take precedence over nested tui keys
|
||||
expect(config.theme).toBe("outer")
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("top-level keys in tui.json take precedence over nested tui key", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(
|
||||
path.join(dir, "tui.json"),
|
||||
JSON.stringify({
|
||||
diff_style: "auto",
|
||||
tui: { diff_style: "stacked", scroll_speed: 2 },
|
||||
}),
|
||||
)
|
||||
},
|
||||
})
|
||||
|
||||
await Instance.provide({
|
||||
directory: tmp.path,
|
||||
fn: async () => {
|
||||
const config = await TuiConfig.get()
|
||||
expect(config.diff_style).toBe("auto")
|
||||
expect(config.scroll_speed).toBe(2)
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
test("project config takes precedence over OPENCODE_TUI_CONFIG (matches OPENCODE_CONFIG)", async () => {
|
||||
await using tmp = await tmpdir({
|
||||
init: async (dir) => {
|
||||
await Bun.write(path.join(dir, "tui.json"), JSON.stringify({ theme: "project", diff_style: "auto" }))
|
||||
const custom = path.join(dir, "custom-tui.json")
|
||||
await Bun.write(custom, JSON.stringify({ theme: "custom", diff_style: "stacked" }))
process.env.OPENCODE_TUI_CONFIG = custom
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await TuiConfig.get()
// project tui.json overrides the custom path, same as server config precedence
expect(config.theme).toBe("project")
// project also set diff_style, so that wins
expect(config.diff_style).toBe("auto")
},
})
})

test("merges keybind overrides across precedence layers", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(path.join(Global.Path.config, "tui.json"), JSON.stringify({ keybinds: { app_exit: "ctrl+q" } }))
await Bun.write(path.join(dir, "tui.json"), JSON.stringify({ keybinds: { theme_list: "ctrl+k" } }))
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await TuiConfig.get()
expect(config.keybinds?.app_exit).toBe("ctrl+q")
expect(config.keybinds?.theme_list).toBe("ctrl+k")
},
})
})

test("OPENCODE_TUI_CONFIG provides settings when no project config exists", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
const custom = path.join(dir, "custom-tui.json")
await Bun.write(custom, JSON.stringify({ theme: "from-env", diff_style: "stacked" }))
process.env.OPENCODE_TUI_CONFIG = custom
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await TuiConfig.get()
expect(config.theme).toBe("from-env")
expect(config.diff_style).toBe("stacked")
},
})
})

test("does not derive tui path from OPENCODE_CONFIG", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
const customDir = path.join(dir, "custom")
await fs.mkdir(customDir, { recursive: true })
await Bun.write(path.join(customDir, "opencode.json"), JSON.stringify({ model: "test/model" }))
await Bun.write(path.join(customDir, "tui.json"), JSON.stringify({ theme: "should-not-load" }))
process.env.OPENCODE_CONFIG = path.join(customDir, "opencode.json")
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await TuiConfig.get()
expect(config.theme).toBeUndefined()
},
})
})

test("applies env and file substitutions in tui.json", async () => {
const original = process.env.TUI_THEME_TEST
process.env.TUI_THEME_TEST = "env-theme"
try {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(path.join(dir, "keybind.txt"), "ctrl+q")
await Bun.write(
path.join(dir, "tui.json"),
JSON.stringify({
theme: "{env:TUI_THEME_TEST}",
keybinds: { app_exit: "{file:keybind.txt}" },
}),
)
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await TuiConfig.get()
expect(config.theme).toBe("env-theme")
expect(config.keybinds?.app_exit).toBe("ctrl+q")
},
})
} finally {
if (original === undefined) delete process.env.TUI_THEME_TEST
else process.env.TUI_THEME_TEST = original
}
})

test("applies file substitutions when first identical token is in a commented line", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(path.join(dir, "theme.txt"), "resolved-theme")
await Bun.write(
path.join(dir, "tui.jsonc"),
`{
// "theme": "{file:theme.txt}",
"theme": "{file:theme.txt}"
}`,
)
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await TuiConfig.get()
expect(config.theme).toBe("resolved-theme")
},
})
})

test("loads managed tui config and gives it highest precedence", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(path.join(dir, "tui.json"), JSON.stringify({ theme: "project-theme" }, null, 2))
await fs.mkdir(managedConfigDir, { recursive: true })
await Bun.write(path.join(managedConfigDir, "tui.json"), JSON.stringify({ theme: "managed-theme" }, null, 2))
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await TuiConfig.get()
expect(config.theme).toBe("managed-theme")
},
})
})

test("loads .opencode/tui.json", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await fs.mkdir(path.join(dir, ".opencode"), { recursive: true })
await Bun.write(path.join(dir, ".opencode", "tui.json"), JSON.stringify({ diff_style: "stacked" }, null, 2))
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await TuiConfig.get()
expect(config.diff_style).toBe("stacked")
},
})
})

test("gracefully falls back when tui.json has invalid JSON", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(path.join(dir, "tui.json"), "{ invalid json }")
await fs.mkdir(managedConfigDir, { recursive: true })
await Bun.write(path.join(managedConfigDir, "tui.json"), JSON.stringify({ theme: "managed-fallback" }, null, 2))
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await TuiConfig.get()
expect(config.theme).toBe("managed-fallback")
expect(config.keybinds).toBeDefined()
},
})
})
@@ -3,12 +3,11 @@ import path from "path"
import fs from "fs/promises"
import { File } from "../../src/file"
import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture"

describe("file/index Filesystem patterns", () => {
describe("file/index Bun.file patterns", () => {
describe("File.read() - text content", () => {
test("reads text file via Filesystem.readText()", async () => {
test("reads text file via Bun.file().text()", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "test.txt")
await fs.writeFile(filepath, "Hello World", "utf-8")
@@ -23,7 +22,7 @@ describe("file/index Filesystem patterns", () => {
})
})

test("reads with Filesystem.exists() check", async () => {
test("reads with Bun.file().exists() check", async () => {
await using tmp = await tmpdir()

await Instance.provide({
@@ -82,7 +81,7 @@ describe("file/index Filesystem patterns", () => {
})

describe("File.read() - binary content", () => {
test("reads binary file via Filesystem.readArrayBuffer()", async () => {
test("reads binary file via Bun.file().arrayBuffer()", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "image.png")
const binaryContent = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])
@@ -116,8 +115,8 @@ describe("file/index Filesystem patterns", () => {
})
})

describe("File.read() - Filesystem.mimeType()", () => {
test("detects MIME type via Filesystem.mimeType()", async () => {
describe("File.read() - Bun.file().type", () => {
test("detects MIME type via Bun.file().type", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "test.json")
await fs.writeFile(filepath, '{"key": "value"}', "utf-8")
@@ -125,7 +124,8 @@ describe("file/index Filesystem patterns", () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
expect(Filesystem.mimeType(filepath)).toContain("application/json")
const bunFile = Bun.file(filepath)
expect(bunFile.type).toContain("application/json")

const result = await File.read("test.json")
expect(result.type).toBe("text")
@@ -149,15 +149,16 @@ describe("file/index Filesystem patterns", () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
expect(Filesystem.mimeType(filepath)).toContain(mime)
const bunFile = Bun.file(filepath)
expect(bunFile.type).toContain(mime)
},
})
}
})
})

describe("File.list() - Filesystem.exists() and readText()", () => {
test("reads .gitignore via Filesystem.exists() and readText()", async () => {
describe("File.list() - Bun.file().exists() and .text()", () => {
test("reads .gitignore via Bun.file().exists() and .text()", async () => {
await using tmp = await tmpdir({ git: true })

await Instance.provide({
@@ -167,9 +168,10 @@ describe("file/index Filesystem patterns", () => {
await fs.writeFile(gitignorePath, "node_modules\ndist\n", "utf-8")

// This is used internally in File.list()
expect(await Filesystem.exists(gitignorePath)).toBe(true)
const bunFile = Bun.file(gitignorePath)
expect(await bunFile.exists()).toBe(true)

const content = await Filesystem.readText(gitignorePath)
const content = await bunFile.text()
expect(content).toContain("node_modules")
},
})
@@ -184,8 +186,9 @@ describe("file/index Filesystem patterns", () => {
const ignorePath = path.join(tmp.path, ".ignore")
await fs.writeFile(ignorePath, "*.log\n.env\n", "utf-8")

expect(await Filesystem.exists(ignorePath)).toBe(true)
expect(await Filesystem.readText(ignorePath)).toContain("*.log")
const bunFile = Bun.file(ignorePath)
expect(await bunFile.exists()).toBe(true)
expect(await bunFile.text()).toContain("*.log")
},
})
})
@@ -197,7 +200,8 @@ describe("file/index Filesystem patterns", () => {
directory: tmp.path,
fn: async () => {
const gitignorePath = path.join(tmp.path, ".gitignore")
expect(await Filesystem.exists(gitignorePath)).toBe(false)
const bunFile = Bun.file(gitignorePath)
expect(await bunFile.exists()).toBe(false)

// File.list() should still work
const nodes = await File.list()
@@ -207,8 +211,8 @@ describe("file/index Filesystem patterns", () => {
})
})

describe("File.changed() - Filesystem.readText() for untracked files", () => {
test("reads untracked files via Filesystem.readText()", async () => {
describe("File.changed() - Bun.file().text() for untracked files", () => {
test("reads untracked files via Bun.file().text()", async () => {
await using tmp = await tmpdir({ git: true })

await Instance.provide({
@@ -218,7 +222,8 @@ describe("file/index Filesystem patterns", () => {
await fs.writeFile(untrackedPath, "new content\nwith multiple lines", "utf-8")

// This is how File.changed() reads untracked files
const content = await Filesystem.readText(untrackedPath)
const bunFile = Bun.file(untrackedPath)
const content = await bunFile.text()
const lines = content.split("\n").length
expect(lines).toBe(2)
},
@@ -227,7 +232,7 @@ describe("file/index Filesystem patterns", () => {
})

describe("Error handling", () => {
test("handles errors gracefully in Filesystem.readText()", async () => {
test("handles errors gracefully in Bun.file().text()", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "readonly.txt")
await fs.writeFile(filepath, "content", "utf-8")
@@ -235,9 +240,9 @@ describe("file/index Filesystem patterns", () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const nonExistentPath = path.join(tmp.path, "does-not-exist.txt")
// Filesystem.readText() on non-existent file throws
await expect(Filesystem.readText(nonExistentPath)).rejects.toThrow()
const nonExistentFile = Bun.file(path.join(tmp.path, "does-not-exist.txt"))
// Bun.file().text() on non-existent file throws
await expect(nonExistentFile.text()).rejects.toThrow()

// But File.read() handles this gracefully
const result = await File.read("does-not-exist.txt")
@@ -246,14 +251,14 @@ describe("file/index Filesystem patterns", () => {
})
})

test("handles errors in Filesystem.readArrayBuffer()", async () => {
test("handles errors in Bun.file().arrayBuffer()", async () => {
await using tmp = await tmpdir()

await Instance.provide({
directory: tmp.path,
fn: async () => {
const nonExistentPath = path.join(tmp.path, "does-not-exist.bin")
const buffer = await Filesystem.readArrayBuffer(nonExistentPath).catch(() => new ArrayBuffer(0))
const nonExistentFile = Bun.file(path.join(tmp.path, "does-not-exist.bin"))
const buffer = await nonExistentFile.arrayBuffer().catch(() => new ArrayBuffer(0))
expect(buffer.byteLength).toBe(0)
},
})
@@ -267,6 +272,7 @@ describe("file/index Filesystem patterns", () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const bunFile = Bun.file(filepath)
// File.read() handles missing images gracefully
const result = await File.read("broken.png")
expect(result.type).toBe("text")
@@ -277,66 +283,6 @@ describe("file/index Filesystem patterns", () => {
})

describe("shouldEncode() logic", () => {
test("treats .ts files as text", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "test.ts")
await fs.writeFile(filepath, "export const value = 1", "utf-8")

await Instance.provide({
directory: tmp.path,
fn: async () => {
const result = await File.read("test.ts")
expect(result.type).toBe("text")
expect(result.content).toBe("export const value = 1")
},
})
})

test("treats .mts files as text", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "test.mts")
await fs.writeFile(filepath, "export const value = 1", "utf-8")

await Instance.provide({
directory: tmp.path,
fn: async () => {
const result = await File.read("test.mts")
expect(result.type).toBe("text")
expect(result.content).toBe("export const value = 1")
},
})
})

test("treats .sh files as text", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "test.sh")
await fs.writeFile(filepath, "#!/usr/bin/env bash\necho hello", "utf-8")

await Instance.provide({
directory: tmp.path,
fn: async () => {
const result = await File.read("test.sh")
expect(result.type).toBe("text")
expect(result.content).toBe("#!/usr/bin/env bash\necho hello")
},
})
})

test("treats Dockerfile as text", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "Dockerfile")
await fs.writeFile(filepath, "FROM alpine:3.20", "utf-8")

await Instance.provide({
directory: tmp.path,
fn: async () => {
const result = await File.read("Dockerfile")
expect(result.type).toBe("text")
expect(result.content).toBe("FROM alpine:3.20")
},
})
})

test("returns encoding info for text files", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "test.txt")
@@ -3,7 +3,6 @@ import path from "path"
import fs from "fs/promises"
import { FileTime } from "../../src/file/time"
import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture"

describe("file/time", () => {
@@ -313,8 +312,8 @@ describe("file/time", () => {
})
})

describe("stat() Filesystem.stat pattern", () => {
test("reads file modification time via Filesystem.stat()", async () => {
describe("stat() Bun.file pattern", () => {
test("reads file modification time via Bun.file().stat()", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "file.txt")
await fs.writeFile(filepath, "content", "utf-8")
@@ -324,9 +323,9 @@ describe("file/time", () => {
fn: async () => {
FileTime.read(sessionID, filepath)

const stats = Filesystem.stat(filepath)
expect(stats?.mtime).toBeInstanceOf(Date)
expect(stats!.mtime.getTime()).toBeGreaterThan(0)
const stats = await Bun.file(filepath).stat()
expect(stats.mtime).toBeInstanceOf(Date)
expect(stats.mtime.getTime()).toBeGreaterThan(0)

// FileTime.assert uses this stat internally
await FileTime.assert(sessionID, filepath)
@@ -344,14 +343,14 @@ describe("file/time", () => {
fn: async () => {
FileTime.read(sessionID, filepath)

const originalStat = Filesystem.stat(filepath)
const originalStat = await Bun.file(filepath).stat()

// Wait and modify
await new Promise((resolve) => setTimeout(resolve, 100))
await fs.writeFile(filepath, "modified", "utf-8")

const newStat = Filesystem.stat(filepath)
expect(newStat!.mtime.getTime()).toBeGreaterThan(originalStat!.mtime.getTime())
const newStat = await Bun.file(filepath).stat()
expect(newStat.mtime.getTime()).toBeGreaterThan(originalStat.mtime.getTime())

await expect(FileTime.assert(sessionID, filepath)).rejects.toThrow()
},
@@ -4,7 +4,6 @@ import { Log } from "../../src/util/log"
import { $ } from "bun"
import path from "path"
import { tmpdir } from "../fixture/fixture"
import { Filesystem } from "../../src/util/filesystem"
import { GlobalBus } from "../../src/bus/global"

Log.init({ print: false })
@@ -79,7 +78,7 @@ describe("Project.fromDirectory", () => {
expect(project.worktree).toBe(tmp.path)

const opencodeFile = path.join(tmp.path, ".git", "opencode")
const fileExists = await Filesystem.exists(opencodeFile)
const fileExists = await Bun.file(opencodeFile).exists()
expect(fileExists).toBe(false)
})

@@ -95,7 +94,7 @@ describe("Project.fromDirectory", () => {
expect(project.worktree).toBe(tmp.path)

const opencodeFile = path.join(tmp.path, ".git", "opencode")
const fileExists = await Filesystem.exists(opencodeFile)
const fileExists = await Bun.file(opencodeFile).exists()
expect(fileExists).toBe(true)
})
@@ -4,7 +4,6 @@ import fs from "fs/promises"
import path from "path"
import { Instance } from "../../src/project/instance"
import { Worktree } from "../../src/worktree"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture"

describe("Worktree.remove", () => {
@@ -54,7 +53,7 @@ describe("Worktree.remove", () => {
})()

expect(ok).toBe(true)
expect(await Filesystem.exists(dir)).toBe(false)
expect(await Bun.file(dir).exists()).toBe(false)

const list = await $`git worktree list --porcelain`.cwd(root).quiet().text()
expect(list).not.toContain(`worktree ${dir}`)
@@ -7,12 +7,11 @@ import { Instance } from "../../src/project/instance"
import { Provider } from "../../src/provider/provider"
import { Env } from "../../src/env"
import { Global } from "../../src/global"
import { Filesystem } from "../../src/util/filesystem"

test("Bedrock: config region takes precedence over AWS_REGION env var", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -44,7 +43,7 @@ test("Bedrock: config region takes precedence over AWS_REGION env var", async ()
test("Bedrock: falls back to AWS_REGION env var when no config region", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -69,7 +68,7 @@ test("Bedrock: falls back to AWS_REGION env var when no config region", async ()
test("Bedrock: loads when bearer token from auth.json is present", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -90,14 +89,14 @@ test("Bedrock: loads when bearer token from auth.json is present", async () => {
// Save original auth.json if it exists
let originalAuth: string | undefined
try {
originalAuth = await Filesystem.readText(authPath)
originalAuth = await Bun.file(authPath).text()
} catch {
// File doesn't exist, that's fine
}

try {
// Write test auth.json
await Filesystem.write(
await Bun.write(
authPath,
JSON.stringify({
"amazon-bedrock": {
@@ -123,7 +122,7 @@ test("Bedrock: loads when bearer token from auth.json is present", async () => {
} finally {
// Restore original or delete
if (originalAuth !== undefined) {
await Filesystem.write(authPath, originalAuth)
await Bun.write(authPath, originalAuth)
} else {
try {
await unlink(authPath)
@@ -137,7 +136,7 @@ test("Bedrock: loads when bearer token from auth.json is present", async () => {
test("Bedrock: config profile takes precedence over AWS_PROFILE env var", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -170,7 +169,7 @@ test("Bedrock: config profile takes precedence over AWS_PROFILE env var", async
test("Bedrock: includes custom endpoint in options when specified", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -203,7 +202,7 @@ test("Bedrock: includes custom endpoint in options when specified", async () =>
test("Bedrock: autoloads when AWS_WEB_IDENTITY_TOKEN_FILE is present", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -241,7 +240,7 @@ test("Bedrock: autoloads when AWS_WEB_IDENTITY_TOKEN_FILE is present", async ()
test("Bedrock: model with us. prefix should not be double-prefixed", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -278,7 +277,7 @@ test("Bedrock: model with us. prefix should not be double-prefixed", async () =>
test("Bedrock: model with global. prefix should not be prefixed", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -314,7 +313,7 @@ test("Bedrock: model with global. prefix should not be prefixed", async () => {
test("Bedrock: model with eu. prefix should not be double-prefixed", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -350,7 +349,7 @@ test("Bedrock: model with eu. prefix should not be double-prefixed", async () =>
test("Bedrock: model without prefix in US region should get us. prefix added", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Filesystem.write(
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
@@ -18,6 +18,7 @@ describe("pty", () => {

const ws = {
readyState: 1,
data: { events: { connection: "a" } },
send: (data: unknown) => {
outA.push(typeof data === "string" ? data : Buffer.from(data as Uint8Array).toString("utf8"))
},
@@ -30,6 +31,7 @@ describe("pty", () => {
Pty.connect(a.id, ws as any)

// Now "reuse" the same ws object for another connection.
ws.data = { events: { connection: "b" } }
ws.send = (data: unknown) => {
outB.push(typeof data === "string" ? data : Buffer.from(data as Uint8Array).toString("utf8"))
}
@@ -51,4 +53,48 @@ describe("pty", () => {
},
})
})

test("does not leak output when Bun recycles websocket objects before re-connect", async () => {
await using dir = await tmpdir({ git: true })

await Instance.provide({
directory: dir.path,
fn: async () => {
const a = await Pty.create({ command: "cat", title: "a" })
try {
const outA: string[] = []
const outB: string[] = []

const ws = {
readyState: 1,
data: { events: { connection: "a" } },
send: (data: unknown) => {
outA.push(typeof data === "string" ? data : Buffer.from(data as Uint8Array).toString("utf8"))
},
close: () => {
// no-op (simulate abrupt drop)
},
}

// Connect "a" first.
Pty.connect(a.id, ws as any)
outA.length = 0

// Simulate Bun reusing the same websocket object for another connection
// before the new onOpen handler has a chance to tag it.
ws.data = { events: { connection: "b" } }
ws.send = (data: unknown) => {
outB.push(typeof data === "string" ? data : Buffer.from(data as Uint8Array).toString("utf8"))
}

Pty.write(a.id, "AAA\n")
await Bun.sleep(100)

expect(outB.join("")).not.toContain("AAA")
} finally {
await Pty.remove(a.id)
}
},
})
})
})
@@ -7,7 +7,6 @@ import { Instance } from "../../src/project/instance"
import { Provider } from "../../src/provider/provider"
import { ProviderTransform } from "../../src/provider/transform"
import { ModelsDev } from "../../src/provider/models"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture"
import type { Agent } from "../../src/agent/agent"
import type { MessageV2 } from "../../src/session/message-v2"
@@ -186,7 +185,7 @@ function createChatStream(text: string) {

async function loadFixture(providerID: string, modelID: string) {
const fixturePath = path.join(import.meta.dir, "../tool/fixtures/models-api.json")
const data = await Filesystem.readJson<Record<string, ModelsDev.Provider>>(fixturePath)
const data = (await Bun.file(fixturePath).json()) as Record<string, ModelsDev.Provider>
const provider = data[providerID]
if (!provider) {
throw new Error(`Missing provider in fixture: ${providerID}`)
@@ -1,6 +1,5 @@
import { describe, test, expect } from "bun:test"
import { Discovery } from "../../src/skill/discovery"
import { Filesystem } from "../../src/util/filesystem"
import path from "path"

const CLOUDFLARE_SKILLS_URL = "https://developers.cloudflare.com/.well-known/skills/"
@@ -12,7 +11,7 @@ describe("Discovery.pull", () => {
for (const dir of dirs) {
expect(dir).toStartWith(Discovery.dir())
const md = path.join(dir, "SKILL.md")
expect(await Filesystem.exists(md)).toBe(true)
expect(await Bun.file(md).exists()).toBe(true)
}
}, 30_000)

@@ -21,7 +20,7 @@ describe("Discovery.pull", () => {
expect(dirs.length).toBeGreaterThan(0)
for (const dir of dirs) {
const md = path.join(dir, "SKILL.md")
expect(await Filesystem.exists(md)).toBe(true)
expect(await Bun.file(md).exists()).toBe(true)
}
}, 30_000)

@@ -41,7 +40,7 @@ describe("Discovery.pull", () => {
const agentsSdk = dirs.find((d) => d.endsWith("/agents-sdk"))
if (agentsSdk) {
const refs = path.join(agentsSdk, "references")
expect(await Filesystem.exists(path.join(agentsSdk, "SKILL.md"))).toBe(true)
expect(await Bun.file(path.join(agentsSdk, "SKILL.md")).exists()).toBe(true)
// agents-sdk has reference files per the index
const refDir = await Array.fromAsync(new Bun.Glob("**/*.md").scan({ cwd: refs, onlyFiles: true }))
expect(refDir.length).toBeGreaterThan(0)
@@ -1,9 +1,7 @@
import { test, expect } from "bun:test"
import { $ } from "bun"
import fs from "fs/promises"
import { Snapshot } from "../../src/snapshot"
import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture"

async function bootstrap() {
@@ -13,8 +11,8 @@ async function bootstrap() {
const unique = Math.random().toString(36).slice(2)
const aContent = `A${unique}`
const bContent = `B${unique}`
await Filesystem.write(`${dir}/a.txt`, aContent)
await Filesystem.write(`${dir}/b.txt`, bContent)
await Bun.write(`${dir}/a.txt`, aContent)
await Bun.write(`${dir}/b.txt`, bContent)
await $`git add .`.cwd(dir).quiet()
await $`git commit --no-gpg-sign -m init`.cwd(dir).quiet()
return {
@@ -48,16 +46,11 @@ test("revert should remove new files", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/new.txt`, "NEW")
await Bun.write(`${tmp.path}/new.txt`, "NEW")

await Snapshot.revert([await Snapshot.patch(before!)])

expect(
await fs
.access(`${tmp.path}/new.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(`${tmp.path}/new.txt`).exists()).toBe(false)
},
})
})
@@ -71,16 +64,11 @@ test("revert in subdirectory", async () => {
expect(before).toBeTruthy()

await $`mkdir -p ${tmp.path}/sub`.quiet()
await Filesystem.write(`${tmp.path}/sub/file.txt`, "SUB")
await Bun.write(`${tmp.path}/sub/file.txt`, "SUB")

await Snapshot.revert([await Snapshot.patch(before!)])

expect(
await fs
.access(`${tmp.path}/sub/file.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(`${tmp.path}/sub/file.txt`).exists()).toBe(false)
// Note: revert currently only removes files, not directories
// The empty subdirectory will remain
},
@@ -96,23 +84,18 @@ test("multiple file operations", async () => {
expect(before).toBeTruthy()

await $`rm ${tmp.path}/a.txt`.quiet()
await Filesystem.write(`${tmp.path}/c.txt`, "C")
await Bun.write(`${tmp.path}/c.txt`, "C")
await $`mkdir -p ${tmp.path}/dir`.quiet()
await Filesystem.write(`${tmp.path}/dir/d.txt`, "D")
await Filesystem.write(`${tmp.path}/b.txt`, "MODIFIED")
await Bun.write(`${tmp.path}/dir/d.txt`, "D")
await Bun.write(`${tmp.path}/b.txt`, "MODIFIED")

await Snapshot.revert([await Snapshot.patch(before!)])

expect(await fs.readFile(`${tmp.path}/a.txt`, "utf-8")).toBe(tmp.extra.aContent)
expect(
await fs
.access(`${tmp.path}/c.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(`${tmp.path}/a.txt`).text()).toBe(tmp.extra.aContent)
expect(await Bun.file(`${tmp.path}/c.txt`).exists()).toBe(false)
// Note: revert currently only removes files, not directories
// The empty directory will remain
expect(await fs.readFile(`${tmp.path}/b.txt`, "utf-8")).toBe(tmp.extra.bContent)
expect(await Bun.file(`${tmp.path}/b.txt`).text()).toBe(tmp.extra.bContent)
},
})
})
@@ -140,18 +123,13 @@ test("binary file handling", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/image.png`, new Uint8Array([0x89, 0x50, 0x4e, 0x47]))
await Bun.write(`${tmp.path}/image.png`, new Uint8Array([0x89, 0x50, 0x4e, 0x47]))

const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(`${tmp.path}/image.png`)

await Snapshot.revert([patch])
expect(
await fs
.access(`${tmp.path}/image.png`)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(`${tmp.path}/image.png`).exists()).toBe(false)
},
})
})
@@ -179,7 +157,7 @@ test("large file handling", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/large.txt`, "x".repeat(1024 * 1024))
await Bun.write(`${tmp.path}/large.txt`, "x".repeat(1024 * 1024))

expect((await Snapshot.patch(before!)).files).toContain(`${tmp.path}/large.txt`)
},
@@ -195,16 +173,11 @@ test("nested directory revert", async () => {
expect(before).toBeTruthy()

await $`mkdir -p ${tmp.path}/level1/level2/level3`.quiet()
await Filesystem.write(`${tmp.path}/level1/level2/level3/deep.txt`, "DEEP")
await Bun.write(`${tmp.path}/level1/level2/level3/deep.txt`, "DEEP")

await Snapshot.revert([await Snapshot.patch(before!)])

expect(
await fs
.access(`${tmp.path}/level1/level2/level3/deep.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(`${tmp.path}/level1/level2/level3/deep.txt`).exists()).toBe(false)
},
})
})
@@ -217,9 +190,9 @@ test("special characters in filenames", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/file with spaces.txt`, "SPACES")
await Filesystem.write(`${tmp.path}/file-with-dashes.txt`, "DASHES")
await Filesystem.write(`${tmp.path}/file_with_underscores.txt`, "UNDERSCORES")
await Bun.write(`${tmp.path}/file with spaces.txt`, "SPACES")
await Bun.write(`${tmp.path}/file-with-dashes.txt`, "DASHES")
await Bun.write(`${tmp.path}/file_with_underscores.txt`, "UNDERSCORES")

const files = (await Snapshot.patch(before!)).files
expect(files).toContain(`${tmp.path}/file with spaces.txt`)
@@ -252,7 +225,7 @@ test("patch with invalid hash", async () => {
expect(before).toBeTruthy()

// Create a change
await Filesystem.write(`${tmp.path}/test.txt`, "TEST")
await Bun.write(`${tmp.path}/test.txt`, "TEST")

// Try to patch with invalid hash - should handle gracefully
const patch = await Snapshot.patch("invalid-hash-12345")
@@ -300,7 +273,7 @@ test("unicode filenames", async () => {
]

for (const file of unicodeFiles) {
await Filesystem.write(file.path, file.content)
await Bun.write(file.path, file.content)
}

const patch = await Snapshot.patch(before!)
@@ -313,12 +286,7 @@ test("unicode filenames", async () => {
await Snapshot.revert([patch])

for (const file of unicodeFiles) {
expect(
await fs
.access(file.path)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(file.path).exists()).toBe(false)
}
},
})
@@ -332,14 +300,14 @@ test.skip("unicode filenames modification and restore", async () => {
const chineseFile = `${tmp.path}/文件.txt`
const cyrillicFile = `${tmp.path}/файл.txt`

await Filesystem.write(chineseFile, "original chinese")
await Filesystem.write(cyrillicFile, "original cyrillic")
await Bun.write(chineseFile, "original chinese")
await Bun.write(cyrillicFile, "original cyrillic")

const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(chineseFile, "modified chinese")
await Filesystem.write(cyrillicFile, "modified cyrillic")
await Bun.write(chineseFile, "modified chinese")
await Bun.write(cyrillicFile, "modified cyrillic")

const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(chineseFile)
@@ -347,8 +315,8 @@ test.skip("unicode filenames modification and restore", async () => {

await Snapshot.revert([patch])

expect(await fs.readFile(chineseFile, "utf-8")).toBe("original chinese")
expect(await fs.readFile(cyrillicFile, "utf-8")).toBe("original cyrillic")
expect(await Bun.file(chineseFile).text()).toBe("original chinese")
expect(await Bun.file(cyrillicFile).text()).toBe("original cyrillic")
},
})
})
@@ -363,18 +331,13 @@ test("unicode filenames in subdirectories", async () => {

await $`mkdir -p "${tmp.path}/目录/подкаталог"`.quiet()
const deepFile = `${tmp.path}/目录/подкаталог/文件.txt`
await Filesystem.write(deepFile, "deep unicode content")
await Bun.write(deepFile, "deep unicode content")

const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(deepFile)

await Snapshot.revert([patch])
expect(
await fs
.access(deepFile)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(deepFile).exists()).toBe(false)
},
})
})
@@ -390,18 +353,13 @@ test("very long filenames", async () => {
const longName = "a".repeat(200) + ".txt"
const longFile = `${tmp.path}/${longName}`

await Filesystem.write(longFile, "long filename content")
await Bun.write(longFile, "long filename content")

const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(longFile)

await Snapshot.revert([patch])
expect(
await fs
.access(longFile)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(longFile).exists()).toBe(false)
},
})
})
@@ -414,9 +372,9 @@ test("hidden files", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/.hidden`, "hidden content")
await Filesystem.write(`${tmp.path}/.gitignore`, "*.log")
await Filesystem.write(`${tmp.path}/.config`, "config content")
await Bun.write(`${tmp.path}/.hidden`, "hidden content")
await Bun.write(`${tmp.path}/.gitignore`, "*.log")
await Bun.write(`${tmp.path}/.config`, "config content")

const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(`${tmp.path}/.hidden`)
@@ -435,7 +393,7 @@ test("nested symlinks", async () => {
expect(before).toBeTruthy()

await $`mkdir -p ${tmp.path}/sub/dir`.quiet()
await Filesystem.write(`${tmp.path}/sub/dir/target.txt`, "target content")
await Bun.write(`${tmp.path}/sub/dir/target.txt`, "target content")
await $`ln -s ${tmp.path}/sub/dir/target.txt ${tmp.path}/sub/dir/link.txt`.quiet()
await $`ln -s ${tmp.path}/sub ${tmp.path}/sub-link`.quiet()

@@ -492,9 +450,9 @@ test("gitignore changes", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/.gitignore`, "*.ignored")
await Filesystem.write(`${tmp.path}/test.ignored`, "ignored content")
await Filesystem.write(`${tmp.path}/normal.txt`, "normal content")
await Bun.write(`${tmp.path}/.gitignore`, "*.ignored")
await Bun.write(`${tmp.path}/test.ignored`, "ignored content")
await Bun.write(`${tmp.path}/normal.txt`, "normal content")

const patch = await Snapshot.patch(before!)
@@ -519,7 +477,7 @@ test("concurrent file operations during patch", async () => {
// Start creating files
const createPromise = (async () => {
for (let i = 0; i < 10; i++) {
await Filesystem.write(`${tmp.path}/concurrent${i}.txt`, `concurrent${i}`)
await Bun.write(`${tmp.path}/concurrent${i}.txt`, `concurrent${i}`)
// Small delay to simulate concurrent operations
await new Promise((resolve) => setTimeout(resolve, 1))
}
@@ -546,7 +504,7 @@ test("snapshot state isolation between projects", async () => {
directory: tmp1.path,
fn: async () => {
const before1 = await Snapshot.track()
await Filesystem.write(`${tmp1.path}/project1.txt`, "project1 content")
await Bun.write(`${tmp1.path}/project1.txt`, "project1 content")
const patch1 = await Snapshot.patch(before1!)
expect(patch1.files).toContain(`${tmp1.path}/project1.txt`)
},
@@ -556,7 +514,7 @@ test("snapshot state isolation between projects", async () => {
directory: tmp2.path,
fn: async () => {
const before2 = await Snapshot.track()
await Filesystem.write(`${tmp2.path}/project2.txt`, "project2 content")
await Bun.write(`${tmp2.path}/project2.txt`, "project2 content")
const patch2 = await Snapshot.patch(before2!)
expect(patch2.files).toContain(`${tmp2.path}/project2.txt`)

@@ -586,7 +544,7 @@ test("patch detects changes in secondary worktree", async () => {
expect(before).toBeTruthy()

const worktreeFile = `${worktreePath}/worktree.txt`
await Filesystem.write(worktreeFile, "worktree content")
await Bun.write(worktreeFile, "worktree content")

const patch = await Snapshot.patch(before!)
expect(patch.files).toContain(worktreeFile)
@@ -611,7 +569,7 @@ test("revert only removes files in invoking worktree", async () => {
},
})
const primaryFile = `${tmp.path}/worktree.txt`
await Filesystem.write(primaryFile, "primary content")
await Bun.write(primaryFile, "primary content")

await Instance.provide({
directory: worktreePath,
@@ -620,21 +578,16 @@ test("revert only removes files in invoking worktree", async () => {
expect(before).toBeTruthy()

const worktreeFile = `${worktreePath}/worktree.txt`
await Filesystem.write(worktreeFile, "worktree content")
await Bun.write(worktreeFile, "worktree content")

const patch = await Snapshot.patch(before!)
await Snapshot.revert([patch])

expect(
await fs
.access(worktreeFile)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(worktreeFile).exists()).toBe(false)
},
})

expect(await fs.readFile(primaryFile, "utf-8")).toBe("primary content")
expect(await Bun.file(primaryFile).text()).toBe("primary content")
} finally {
await $`git worktree remove --force ${worktreePath}`.cwd(tmp.path).quiet().nothrow()
await $`rm -rf ${worktreePath}`.quiet()
@@ -661,10 +614,10 @@ test("diff reports worktree-only/shared edits and ignores primary-only", async (
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${worktreePath}/worktree-only.txt`, "worktree diff content")
await Filesystem.write(`${worktreePath}/shared.txt`, "worktree edit")
await Filesystem.write(`${tmp.path}/shared.txt`, "primary edit")
await Filesystem.write(`${tmp.path}/primary-only.txt`, "primary change")
await Bun.write(`${worktreePath}/worktree-only.txt`, "worktree diff content")
await Bun.write(`${worktreePath}/shared.txt`, "worktree edit")
await Bun.write(`${tmp.path}/shared.txt`, "primary edit")
await Bun.write(`${tmp.path}/primary-only.txt`, "primary change")

const diff = await Snapshot.diff(before!)
expect(diff).toContain("worktree-only.txt")
@@ -709,8 +662,8 @@ test("diff function with various changes", async () => {

// Make various changes
await $`rm ${tmp.path}/a.txt`.quiet()
await Filesystem.write(`${tmp.path}/new.txt`, "new content")
await Filesystem.write(`${tmp.path}/b.txt`, "modified content")
await Bun.write(`${tmp.path}/new.txt`, "new content")
await Bun.write(`${tmp.path}/b.txt`, "modified content")

const diff = await Snapshot.diff(before!)
expect(diff).toContain("a.txt")
@@ -730,26 +683,16 @@ test("restore function", async () => {

// Make changes
await $`rm ${tmp.path}/a.txt`.quiet()
await Filesystem.write(`${tmp.path}/new.txt`, "new content")
await Filesystem.write(`${tmp.path}/b.txt`, "modified")
await Bun.write(`${tmp.path}/new.txt`, "new content")
await Bun.write(`${tmp.path}/b.txt`, "modified")

// Restore to original state
await Snapshot.restore(before!)

expect(
await fs
.access(`${tmp.path}/a.txt`)
.then(() => true)
.catch(() => false),
).toBe(true)
expect(await fs.readFile(`${tmp.path}/a.txt`, "utf-8")).toBe(tmp.extra.aContent)
expect(
await fs
.access(`${tmp.path}/new.txt`)
.then(() => true)
.catch(() => false),
).toBe(true) // New files should remain
expect(await fs.readFile(`${tmp.path}/b.txt`, "utf-8")).toBe(tmp.extra.bContent)
expect(await Bun.file(`${tmp.path}/a.txt`).exists()).toBe(true)
expect(await Bun.file(`${tmp.path}/a.txt`).text()).toBe(tmp.extra.aContent)
expect(await Bun.file(`${tmp.path}/new.txt`).exists()).toBe(true) // New files should remain
expect(await Bun.file(`${tmp.path}/b.txt`).text()).toBe(tmp.extra.bContent)
},
})
})
@@ -767,19 +710,14 @@ test("revert should not delete files that existed but were deleted in snapshot",
const snapshot2 = await Snapshot.track()
expect(snapshot2).toBeTruthy()

await Filesystem.write(`${tmp.path}/a.txt`, "recreated content")
await Bun.write(`${tmp.path}/a.txt`, "recreated content")

const patch = await Snapshot.patch(snapshot2!)
expect(patch.files).toContain(`${tmp.path}/a.txt`)

await Snapshot.revert([patch])

expect(
await fs
.access(`${tmp.path}/a.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(await Bun.file(`${tmp.path}/a.txt`).exists()).toBe(false)
},
})
})
@@ -789,14 +727,14 @@ test("revert preserves file that existed in snapshot when deleted then recreated
await Instance.provide({
directory: tmp.path,
fn: async () => {
await Filesystem.write(`${tmp.path}/existing.txt`, "original content")
await Bun.write(`${tmp.path}/existing.txt`, "original content")

const snapshot = await Snapshot.track()
expect(snapshot).toBeTruthy()

await $`rm ${tmp.path}/existing.txt`.quiet()
await Filesystem.write(`${tmp.path}/existing.txt`, "recreated")
await Filesystem.write(`${tmp.path}/newfile.txt`, "new")
await Bun.write(`${tmp.path}/existing.txt`, "recreated")
await Bun.write(`${tmp.path}/newfile.txt`, "new")

const patch = await Snapshot.patch(snapshot!)
expect(patch.files).toContain(`${tmp.path}/existing.txt`)
@@ -804,19 +742,9 @@ test("revert preserves file that existed in snapshot when deleted then recreated

await Snapshot.revert([patch])

expect(
await fs
.access(`${tmp.path}/newfile.txt`)
.then(() => true)
.catch(() => false),
).toBe(false)
expect(
await fs
.access(`${tmp.path}/existing.txt`)
.then(() => true)
.catch(() => false),
).toBe(true)
expect(await fs.readFile(`${tmp.path}/existing.txt`, "utf-8")).toBe("original content")
expect(await Bun.file(`${tmp.path}/newfile.txt`).exists()).toBe(false)
expect(await Bun.file(`${tmp.path}/existing.txt`).exists()).toBe(true)
expect(await Bun.file(`${tmp.path}/existing.txt`).text()).toBe("original content")
},
})
})
@@ -826,17 +754,17 @@ test("diffFull sets status based on git change type", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
await Filesystem.write(`${tmp.path}/grow.txt`, "one\n")
await Filesystem.write(`${tmp.path}/trim.txt`, "line1\nline2\n")
await Filesystem.write(`${tmp.path}/delete.txt`, "gone")
await Bun.write(`${tmp.path}/grow.txt`, "one\n")
await Bun.write(`${tmp.path}/trim.txt`, "line1\nline2\n")
await Bun.write(`${tmp.path}/delete.txt`, "gone")

const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/grow.txt`, "one\ntwo\n")
await Filesystem.write(`${tmp.path}/trim.txt`, "line1\n")
await Bun.write(`${tmp.path}/grow.txt`, "one\ntwo\n")
await Bun.write(`${tmp.path}/trim.txt`, "line1\n")
await $`rm ${tmp.path}/delete.txt`.quiet()
await Filesystem.write(`${tmp.path}/added.txt`, "new")
await Bun.write(`${tmp.path}/added.txt`, "new")

const after = await Snapshot.track()
expect(after).toBeTruthy()
@@ -875,7 +803,7 @@ test("diffFull with new file additions", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/new.txt`, "new content")
await Bun.write(`${tmp.path}/new.txt`, "new content")

const after = await Snapshot.track()
expect(after).toBeTruthy()
@@ -901,7 +829,7 @@ test("diffFull with file modifications", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/b.txt`, "modified content")
await Bun.write(`${tmp.path}/b.txt`, "modified content")

const after = await Snapshot.track()
expect(after).toBeTruthy()
@@ -953,7 +881,7 @@ test("diffFull with multiple line additions", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/multi.txt`, "line1\nline2\nline3")
await Bun.write(`${tmp.path}/multi.txt`, "line1\nline2\nline3")

const after = await Snapshot.track()
expect(after).toBeTruthy()
@@ -979,7 +907,7 @@ test("diffFull with addition and deletion", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/added.txt`, "added content")
await Bun.write(`${tmp.path}/added.txt`, "added content")
await $`rm ${tmp.path}/a.txt`.quiet()

const after = await Snapshot.track()
@@ -1013,8 +941,8 @@ test("diffFull with multiple additions and deletions", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/multi1.txt`, "line1\nline2\nline3")
await Filesystem.write(`${tmp.path}/multi2.txt`, "single line")
await Bun.write(`${tmp.path}/multi1.txt`, "line1\nline2\nline3")
await Bun.write(`${tmp.path}/multi2.txt`, "single line")
await $`rm ${tmp.path}/a.txt`.quiet()
await $`rm ${tmp.path}/b.txt`.quiet()

@@ -1072,7 +1000,7 @@ test("diffFull with binary file changes", async () => {
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/binary.bin`, new Uint8Array([0x00, 0x01, 0x02, 0x03]))
await Bun.write(`${tmp.path}/binary.bin`, new Uint8Array([0x00, 0x01, 0x02, 0x03]))

const after = await Snapshot.track()
expect(after).toBeTruthy()
@@ -1092,11 +1020,11 @@ test("diffFull with whitespace changes", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
await Filesystem.write(`${tmp.path}/whitespace.txt`, "line1\nline2")
await Bun.write(`${tmp.path}/whitespace.txt`, "line1\nline2")
const before = await Snapshot.track()
expect(before).toBeTruthy()

await Filesystem.write(`${tmp.path}/whitespace.txt`, "line1\n\nline2\n")
await Bun.write(`${tmp.path}/whitespace.txt`, "line1\n\nline2\n")

const after = await Snapshot.track()
expect(after).toBeTruthy()
@@ -2,7 +2,6 @@ import { describe, expect, test } from "bun:test"
import path from "path"
import { BashTool } from "../../src/tool/bash"
import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture"
import type { PermissionNext } from "../../src/permission/next"
import { Truncate } from "../../src/tool/truncation"
@@ -389,7 +388,7 @@ describe("tool.bash truncation", () => {
const filepath = (result.metadata as any).outputPath
expect(filepath).toBeTruthy()

const saved = await Filesystem.readText(filepath)
const saved = await Bun.file(filepath).text()
const lines = saved.trim().split("\n")
expect(lines.length).toBe(lineCount)
expect(lines[0]).toBe("1")
@@ -2,7 +2,6 @@ import { describe, expect, test } from "bun:test"
import path from "path"
import { ReadTool } from "../../src/tool/read"
import { Instance } from "../../src/project/instance"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture"
import { PermissionNext } from "../../src/permission/next"
import { Agent } from "../../src/agent/agent"
@@ -200,10 +199,10 @@ describe("tool.read truncation", () => {
test("truncates large file by bytes and sets truncated metadata", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
const base = await Filesystem.readText(path.join(FIXTURES_DIR, "models-api.json"))
const base = await Bun.file(path.join(FIXTURES_DIR, "models-api.json")).text()
const target = 60 * 1024
const content = base.length >= target ? base : base.repeat(Math.ceil(target / base.length))
await Filesystem.write(path.join(dir, "large.json"), content)
await Bun.write(path.join(dir, "large.json"), content)
},
})
await Instance.provide({
@@ -1,7 +1,6 @@
import { describe, test, expect, afterAll } from "bun:test"
import { Truncate } from "../../src/tool/truncation"
import { Identifier } from "../../src/id/id"
import { Filesystem } from "../../src/util/filesystem"
import fs from "fs/promises"
import path from "path"

@@ -10,7 +9,7 @@ const FIXTURES_DIR = path.join(import.meta.dir, "fixtures")
describe("Truncate", () => {
describe("output", () => {
test("truncates large json file by bytes", async () => {
const content = await Filesystem.readText(path.join(FIXTURES_DIR, "models-api.json"))
const content = await Bun.file(path.join(FIXTURES_DIR, "models-api.json")).text()
const result = await Truncate.output(content)

expect(result.truncated).toBe(true)
@@ -70,7 +69,7 @@ describe("Truncate", () => {
})

test("large single-line file truncates with byte message", async () => {
const content = await Filesystem.readText(path.join(FIXTURES_DIR, "models-api.json"))
const content = await Bun.file(path.join(FIXTURES_DIR, "models-api.json")).text()
const result = await Truncate.output(content)

expect(result.truncated).toBe(true)
@@ -89,7 +88,7 @@ describe("Truncate", () => {
expect(result.outputPath).toBeDefined()
expect(result.outputPath).toContain("tool_")

const written = await Filesystem.readText(result.outputPath!)
const written = await Bun.file(result.outputPath).text()
expect(written).toBe(lines)
})

@@ -140,21 +139,21 @@ describe("Truncate", () => {
const oldTimestamp = Date.now() - 10 * DAY_MS
const oldId = Identifier.create("tool", false, oldTimestamp)
oldFile = path.join(Truncate.DIR, oldId)
await Filesystem.write(oldFile, "old content")
await Bun.write(Bun.file(oldFile), "old content")

// Create a recent file (3 days ago)
const recentTimestamp = Date.now() - 3 * DAY_MS
const recentId = Identifier.create("tool", false, recentTimestamp)
recentFile = path.join(Truncate.DIR, recentId)
await Filesystem.write(recentFile, "recent content")
await Bun.write(Bun.file(recentFile), "recent content")

await Truncate.cleanup()

// Old file should be deleted
expect(await Filesystem.exists(oldFile)).toBe(false)
expect(await Bun.file(oldFile).exists()).toBe(false)

// Recent file should still exist
expect(await Filesystem.exists(recentFile)).toBe(true)
expect(await Bun.file(recentFile).exists()).toBe(true)
})
})
})
@@ -285,125 +285,4 @@ describe("filesystem", () => {
expect(Filesystem.mimeType("Makefile")).toBe("application/octet-stream")
})
})

describe("writeStream()", () => {
test("writes from Web ReadableStream", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "streamed.txt")
const content = "Hello from stream!"
const encoder = new TextEncoder()
const stream = new ReadableStream({
start(controller) {
controller.enqueue(encoder.encode(content))
controller.close()
},
})

await Filesystem.writeStream(filepath, stream)

expect(await fs.readFile(filepath, "utf-8")).toBe(content)
})

test("writes from Node.js Readable stream", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "node-streamed.txt")
const content = "Hello from Node stream!"
const { Readable } = await import("stream")
const stream = Readable.from([content])

await Filesystem.writeStream(filepath, stream)

expect(await fs.readFile(filepath, "utf-8")).toBe(content)
})

test("writes binary data from Web ReadableStream", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "binary.dat")
const binaryData = new Uint8Array([0x00, 0x01, 0x02, 0x03, 0xff])
const stream = new ReadableStream({
start(controller) {
controller.enqueue(binaryData)
controller.close()
},
})

await Filesystem.writeStream(filepath, stream)

const read = await fs.readFile(filepath)
expect(Buffer.from(read)).toEqual(Buffer.from(binaryData))
})

test("writes large content in chunks", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "large.txt")
const chunks = ["chunk1", "chunk2", "chunk3", "chunk4", "chunk5"]
const stream = new ReadableStream({
start(controller) {
for (const chunk of chunks) {
controller.enqueue(new TextEncoder().encode(chunk))
}
controller.close()
},
})

await Filesystem.writeStream(filepath, stream)

expect(await fs.readFile(filepath, "utf-8")).toBe(chunks.join(""))
})

test("creates parent directories", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "nested", "deep", "streamed.txt")
const content = "nested stream content"
const stream = new ReadableStream({
start(controller) {
controller.enqueue(new TextEncoder().encode(content))
controller.close()
},
})

await Filesystem.writeStream(filepath, stream)

expect(await fs.readFile(filepath, "utf-8")).toBe(content)
})

test("writes with permissions", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "protected-stream.txt")
const content = "secret stream content"
const stream = new ReadableStream({
start(controller) {
controller.enqueue(new TextEncoder().encode(content))
controller.close()
},
})

await Filesystem.writeStream(filepath, stream, 0o600)

const stats = await fs.stat(filepath)
if (process.platform !== "win32") {
expect(stats.mode & 0o777).toBe(0o600)
}
})

test("writes executable with permissions", async () => {
await using tmp = await tmpdir()
|
||||
const filepath = path.join(tmp.path, "script.sh")
|
||||
const content = "#!/bin/bash\necho hello"
|
||||
const stream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode(content))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
await Filesystem.writeStream(filepath, stream, 0o755)
|
||||
|
||||
const stats = await fs.stat(filepath)
|
||||
if (process.platform !== "win32") {
|
||||
expect(stats.mode & 0o777).toBe(0o755)
|
||||
}
|
||||
expect(await fs.readFile(filepath, "utf-8")).toBe(content)
|
||||
})
|
||||
})
|
||||
})
|
||||
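The `writeStream()` tests in the hunk above cover writing from Web `ReadableStream` and Node `Readable` sources, creating parent directories, and applying a file mode. A rough sketch of a helper with that shape (an illustration of what the tests exercise, not the project's actual `Filesystem.writeStream`):

```ts
import { Readable } from "stream"
import { mkdir, chmod } from "fs/promises"
import path from "path"

// Hedged sketch only; the real util/filesystem implementation may differ.
async function writeStream(filepath: string, stream: ReadableStream | Readable, mode?: number) {
  await mkdir(path.dirname(filepath), { recursive: true }) // "creates parent directories"
  const web = stream instanceof Readable ? (Readable.toWeb(stream) as ReadableStream) : stream
  await Bun.write(filepath, new Response(web)) // drains Web ReadableStreams and Node Readables alike
  if (mode !== undefined) await chmod(filepath, mode) // "writes with permissions"
}
```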
|
||||
@@ -2,7 +2,7 @@
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
gap: 0px;
|
||||
gap: 8px;
|
||||
align-self: stretch;
|
||||
|
||||
[data-slot="accordion-item"] {
|
||||
@@ -11,11 +11,7 @@
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
align-self: stretch;
|
||||
overflow: visible;
|
||||
|
||||
& + [data-slot="accordion-item"] {
|
||||
margin-top: -1px;
|
||||
}
|
||||
overflow: clip;
|
||||
|
||||
[data-slot="accordion-header"] {
|
||||
width: 100%;
|
||||
@@ -35,10 +31,9 @@
|
||||
cursor: default;
|
||||
user-select: none;
|
||||
|
||||
background-color: var(--background-stronger);
|
||||
background-color: var(--surface-base);
|
||||
border: 1px solid var(--border-weak-base);
|
||||
border-radius: 0;
|
||||
box-shadow: none;
|
||||
border-radius: var(--radius-md);
|
||||
overflow: clip;
|
||||
color: var(--text-strong);
|
||||
transition: background-color 0.15s ease;
|
||||
@@ -52,10 +47,7 @@
|
||||
letter-spacing: var(--letter-spacing-normal);
|
||||
|
||||
&:hover {
|
||||
background-color: var(--surface-base-hover);
|
||||
}
|
||||
&:active {
|
||||
background-color: var(--surface-base-active);
|
||||
background-color: var(--surface-base);
|
||||
}
|
||||
&:focus-visible {
|
||||
outline: none;
|
||||
@@ -66,40 +58,23 @@
|
||||
}
|
||||
}
|
||||
|
||||
&:first-child {
|
||||
[data-slot="accordion-header"] [data-slot="accordion-trigger"] {
|
||||
border-top-left-radius: var(--radius-lg);
|
||||
border-top-right-radius: var(--radius-lg);
|
||||
}
|
||||
}
|
||||
|
||||
&:last-child:not([data-expanded]) {
|
||||
[data-slot="accordion-header"] [data-slot="accordion-trigger"] {
|
||||
border-bottom-left-radius: var(--radius-lg);
|
||||
border-bottom-right-radius: var(--radius-lg);
|
||||
}
|
||||
}
|
||||
|
||||
&[data-expanded] {
|
||||
[data-slot="accordion-trigger"] {
|
||||
border-bottom-left-radius: 0;
|
||||
border-bottom-right-radius: 0;
|
||||
}
|
||||
|
||||
[data-slot="accordion-content"] {
|
||||
border: 1px solid var(--border-weak-base);
|
||||
border-top: 0;
|
||||
background-color: var(--background-stronger);
|
||||
}
|
||||
}
|
||||
|
||||
&:last-child[data-expanded] {
|
||||
[data-slot="accordion-content"] {
|
||||
border-bottom-left-radius: var(--radius-lg);
|
||||
border-bottom-right-radius: var(--radius-lg);
|
||||
border-top: none;
|
||||
border-bottom-left-radius: var(--radius-md);
|
||||
border-bottom-right-radius: var(--radius-md);
|
||||
}
|
||||
}
|
||||
|
||||
[data-slot="accordion-content"] {
|
||||
overflow: hidden;
|
||||
width: 100%;
|
||||
border: 0;
|
||||
background-color: transparent;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
border: none;
|
||||
transition: background-color 0.15s ease;
|
||||
border-radius: var(--radius-md);
|
||||
overflow: visible;
|
||||
overflow: clip;
|
||||
|
||||
&.tool-collapsible {
|
||||
gap: 8px;
|
||||
@@ -82,10 +82,6 @@
|
||||
overflow: hidden;
|
||||
/* animation: slideUp 250ms ease-out; */
|
||||
|
||||
&[data-expanded] {
|
||||
overflow: visible;
|
||||
}
|
||||
|
||||
/* &[data-expanded] { */
|
||||
/* animation: slideDown 250ms ease-out; */
|
||||
/* } */
|
||||
|
||||
@@ -1288,6 +1288,8 @@
|
||||
}
|
||||
|
||||
[data-component="apply-patch-file-diff"] {
|
||||
border-top: 1px solid var(--border-weaker-base);
|
||||
max-height: 420px;
|
||||
overflow-y: auto;
|
||||
scrollbar-width: none;
|
||||
-ms-overflow-style: none;
|
||||
|
||||
@@ -36,7 +36,6 @@ import { useI18n } from "../context/i18n"
|
||||
import { BasicTool } from "./basic-tool"
|
||||
import { GenericTool } from "./basic-tool"
|
||||
import { Accordion } from "./accordion"
|
||||
import { StickyAccordionHeader } from "./sticky-accordion-header"
|
||||
import { Button } from "./button"
|
||||
import { Card } from "./card"
|
||||
import { Collapsible } from "./collapsible"
|
||||
@@ -1646,7 +1645,7 @@ ToolRegistry.register({
|
||||
|
||||
return (
|
||||
<Accordion.Item value={file.filePath} data-type={file.type}>
|
||||
<StickyAccordionHeader>
|
||||
<Accordion.Header>
|
||||
<Accordion.Trigger>
|
||||
<div data-slot="apply-patch-trigger-content">
|
||||
<div data-slot="apply-patch-file-info">
|
||||
@@ -1683,7 +1682,7 @@ ToolRegistry.register({
|
||||
</div>
|
||||
</div>
|
||||
</Accordion.Trigger>
|
||||
</StickyAccordionHeader>
|
||||
</Accordion.Header>
|
||||
<Accordion.Content>
|
||||
<Show when={visible()}>
|
||||
<div data-component="apply-patch-file-diff">
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[data-component="session-review"] {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 0px;
|
||||
gap: 8px;
|
||||
height: 100%;
|
||||
overflow-y: auto;
|
||||
scrollbar-width: none;
|
||||
@@ -19,8 +19,7 @@
|
||||
top: 0;
|
||||
z-index: 20;
|
||||
background-color: var(--background-stronger);
|
||||
height: 40px;
|
||||
padding-bottom: 8px;
|
||||
height: 32px;
|
||||
flex-shrink: 0;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
@@ -58,13 +57,70 @@
|
||||
}
|
||||
|
||||
[data-component="sticky-accordion-header"] {
|
||||
--sticky-accordion-top: 40px;
|
||||
top: 40px;
|
||||
}
|
||||
|
||||
[data-slot="session-review-accordion-item"][data-selected]
|
||||
[data-slot="accordion-header"]
|
||||
[data-slot="accordion-trigger"] {
|
||||
background-color: var(--surface-base-active);
|
||||
[data-component="sticky-accordion-header"][data-expanded]::before,
|
||||
[data-slot="accordion-item"][data-expanded] [data-component="sticky-accordion-header"]::before {
|
||||
top: -40px;
|
||||
}
|
||||
|
||||
[data-slot="session-review-diffs-group"] {
|
||||
background-color: var(--background-stronger);
|
||||
border-radius: var(--radius-lg);
|
||||
border: 1px solid var(--border-weak-base);
|
||||
overflow: clip;
|
||||
|
||||
[data-component="accordion"] {
|
||||
gap: 0;
|
||||
}
|
||||
|
||||
[data-component="accordion"] [data-slot="accordion-item"] {
|
||||
overflow: visible;
|
||||
}
|
||||
|
||||
[data-component="accordion"]
|
||||
[data-slot="accordion-item"]
|
||||
[data-slot="accordion-header"]
|
||||
[data-slot="accordion-trigger"] {
|
||||
border: 0;
|
||||
border-radius: 0;
|
||||
box-shadow: none;
|
||||
background-color: transparent;
|
||||
|
||||
&:hover {
|
||||
background-color: var(--surface-base-hover);
|
||||
}
|
||||
|
||||
&:active {
|
||||
background-color: var(--surface-base-active);
|
||||
}
|
||||
}
|
||||
|
||||
[data-component="accordion"]
|
||||
[data-slot="accordion-item"]
|
||||
+ [data-slot="accordion-item"]
|
||||
[data-slot="accordion-header"]
|
||||
[data-slot="accordion-trigger"] {
|
||||
border-top: 1px solid var(--border-weak-base);
|
||||
}
|
||||
|
||||
[data-component="accordion"] [data-slot="accordion-item"][data-expanded] [data-slot="accordion-content"] {
|
||||
border: 0;
|
||||
border-top: 1px solid var(--border-weak-base);
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
[data-component="sticky-accordion-header"][data-expanded]::before,
|
||||
[data-slot="accordion-item"][data-expanded] [data-component="sticky-accordion-header"]::before {
|
||||
top: 0;
|
||||
}
|
||||
|
||||
[data-slot="session-review-accordion-item"][data-selected]
|
||||
[data-slot="accordion-header"]
|
||||
[data-slot="accordion-trigger"] {
|
||||
background-color: var(--surface-base-active);
|
||||
}
|
||||
}
|
||||
|
||||
[data-slot="accordion-item"] {
|
||||
|
||||
@@ -320,393 +320,395 @@ export const SessionReview = (props: SessionReviewProps) => {
|
||||
</div>
|
||||
<div data-slot="session-review-container" class={props.classes?.container}>
|
||||
<Show when={hasDiffs()} fallback={props.empty}>
|
||||
<Accordion multiple value={open()} onChange={handleChange}>
|
||||
<For each={props.diffs}>
|
||||
{(diff) => {
|
||||
let wrapper: HTMLDivElement | undefined
|
||||
<div data-slot="session-review-diffs-group">
|
||||
<Accordion multiple value={open()} onChange={handleChange}>
|
||||
<For each={props.diffs}>
|
||||
{(diff) => {
|
||||
let wrapper: HTMLDivElement | undefined
|
||||
|
||||
const expanded = createMemo(() => open().includes(diff.file))
|
||||
const [force, setForce] = createSignal(false)
|
||||
const expanded = createMemo(() => open().includes(diff.file))
|
||||
const [force, setForce] = createSignal(false)
|
||||
|
||||
const comments = createMemo(() => (props.comments ?? []).filter((c) => c.file === diff.file))
|
||||
const commentedLines = createMemo(() => comments().map((c) => c.selection))
|
||||
const comments = createMemo(() => (props.comments ?? []).filter((c) => c.file === diff.file))
|
||||
const commentedLines = createMemo(() => comments().map((c) => c.selection))
|
||||
|
||||
const beforeText = () => (typeof diff.before === "string" ? diff.before : "")
|
||||
const afterText = () => (typeof diff.after === "string" ? diff.after : "")
|
||||
const changedLines = () => diff.additions + diff.deletions
|
||||
const beforeText = () => (typeof diff.before === "string" ? diff.before : "")
|
||||
const afterText = () => (typeof diff.after === "string" ? diff.after : "")
|
||||
const changedLines = () => diff.additions + diff.deletions
|
||||
|
||||
const tooLarge = createMemo(() => {
|
||||
if (!expanded()) return false
|
||||
if (force()) return false
|
||||
if (isImageFile(diff.file)) return false
|
||||
return changedLines() > MAX_DIFF_CHANGED_LINES
|
||||
})
|
||||
const tooLarge = createMemo(() => {
|
||||
if (!expanded()) return false
|
||||
if (force()) return false
|
||||
if (isImageFile(diff.file)) return false
|
||||
return changedLines() > MAX_DIFF_CHANGED_LINES
|
||||
})
|
||||
|
||||
const isAdded = () => diff.status === "added" || (beforeText().length === 0 && afterText().length > 0)
|
||||
const isDeleted = () =>
|
||||
diff.status === "deleted" || (afterText().length === 0 && beforeText().length > 0)
|
||||
const isImage = () => isImageFile(diff.file)
|
||||
const isAudio = () => isAudioFile(diff.file)
|
||||
const isAdded = () => diff.status === "added" || (beforeText().length === 0 && afterText().length > 0)
|
||||
const isDeleted = () =>
|
||||
diff.status === "deleted" || (afterText().length === 0 && beforeText().length > 0)
|
||||
const isImage = () => isImageFile(diff.file)
|
||||
const isAudio = () => isAudioFile(diff.file)
|
||||
|
||||
const diffImageSrc = dataUrlFromValue(diff.after) ?? dataUrlFromValue(diff.before)
|
||||
const [imageSrc, setImageSrc] = createSignal<string | undefined>(diffImageSrc)
|
||||
const [imageStatus, setImageStatus] = createSignal<"idle" | "loading" | "error">("idle")
|
||||
const diffImageSrc = dataUrlFromValue(diff.after) ?? dataUrlFromValue(diff.before)
|
||||
const [imageSrc, setImageSrc] = createSignal<string | undefined>(diffImageSrc)
|
||||
const [imageStatus, setImageStatus] = createSignal<"idle" | "loading" | "error">("idle")
|
||||
|
||||
const diffAudioSrc = dataUrlFromValue(diff.after) ?? dataUrlFromValue(diff.before)
|
||||
const [audioSrc, setAudioSrc] = createSignal<string | undefined>(diffAudioSrc)
|
||||
const [audioStatus, setAudioStatus] = createSignal<"idle" | "loading" | "error">("idle")
|
||||
const [audioMime, setAudioMime] = createSignal<string | undefined>(undefined)
|
||||
const diffAudioSrc = dataUrlFromValue(diff.after) ?? dataUrlFromValue(diff.before)
|
||||
const [audioSrc, setAudioSrc] = createSignal<string | undefined>(diffAudioSrc)
|
||||
const [audioStatus, setAudioStatus] = createSignal<"idle" | "loading" | "error">("idle")
|
||||
const [audioMime, setAudioMime] = createSignal<string | undefined>(undefined)
|
||||
|
||||
const selectedLines = createMemo(() => {
|
||||
const current = selection()
|
||||
if (!current || current.file !== diff.file) return null
|
||||
return current.range
|
||||
})
|
||||
const selectedLines = createMemo(() => {
|
||||
const current = selection()
|
||||
if (!current || current.file !== diff.file) return null
|
||||
return current.range
|
||||
})
|
||||
|
||||
const draftRange = createMemo(() => {
|
||||
const current = commenting()
|
||||
if (!current || current.file !== diff.file) return null
|
||||
return current.range
|
||||
})
|
||||
const draftRange = createMemo(() => {
|
||||
const current = commenting()
|
||||
if (!current || current.file !== diff.file) return null
|
||||
return current.range
|
||||
})
|
||||
|
||||
const [draft, setDraft] = createSignal("")
|
||||
const [positions, setPositions] = createSignal<Record<string, number>>({})
|
||||
const [draftTop, setDraftTop] = createSignal<number | undefined>(undefined)
|
||||
const [draft, setDraft] = createSignal("")
|
||||
const [positions, setPositions] = createSignal<Record<string, number>>({})
|
||||
const [draftTop, setDraftTop] = createSignal<number | undefined>(undefined)
|
||||
|
||||
const getRoot = () => {
|
||||
const el = wrapper
|
||||
if (!el) return
|
||||
const getRoot = () => {
|
||||
const el = wrapper
|
||||
if (!el) return
|
||||
|
||||
const host = el.querySelector("diffs-container")
|
||||
if (!(host instanceof HTMLElement)) return
|
||||
return host.shadowRoot ?? undefined
|
||||
}
|
||||
|
||||
const updateAnchors = () => {
|
||||
const el = wrapper
|
||||
if (!el) return
|
||||
|
||||
const root = getRoot()
|
||||
if (!root) return
|
||||
|
||||
const next: Record<string, number> = {}
|
||||
for (const item of comments()) {
|
||||
const marker = findMarker(root, item.selection)
|
||||
if (!marker) continue
|
||||
next[item.id] = markerTop(el, marker)
|
||||
}
|
||||
setPositions(next)
|
||||
|
||||
const range = draftRange()
|
||||
if (!range) {
|
||||
setDraftTop(undefined)
|
||||
return
|
||||
const host = el.querySelector("diffs-container")
|
||||
if (!(host instanceof HTMLElement)) return
|
||||
return host.shadowRoot ?? undefined
|
||||
}
|
||||
|
||||
const marker = findMarker(root, range)
|
||||
if (!marker) {
|
||||
setDraftTop(undefined)
|
||||
return
|
||||
const updateAnchors = () => {
|
||||
const el = wrapper
|
||||
if (!el) return
|
||||
|
||||
const root = getRoot()
|
||||
if (!root) return
|
||||
|
||||
const next: Record<string, number> = {}
|
||||
for (const item of comments()) {
|
||||
const marker = findMarker(root, item.selection)
|
||||
if (!marker) continue
|
||||
next[item.id] = markerTop(el, marker)
|
||||
}
|
||||
setPositions(next)
|
||||
|
||||
const range = draftRange()
|
||||
if (!range) {
|
||||
setDraftTop(undefined)
|
||||
return
|
||||
}
|
||||
|
||||
const marker = findMarker(root, range)
|
||||
if (!marker) {
|
||||
setDraftTop(undefined)
|
||||
return
|
||||
}
|
||||
|
||||
setDraftTop(markerTop(el, marker))
|
||||
}
|
||||
|
||||
setDraftTop(markerTop(el, marker))
|
||||
}
|
||||
const scheduleAnchors = () => {
|
||||
requestAnimationFrame(updateAnchors)
|
||||
}
|
||||
|
||||
const scheduleAnchors = () => {
|
||||
requestAnimationFrame(updateAnchors)
|
||||
}
|
||||
createEffect(() => {
|
||||
comments()
|
||||
scheduleAnchors()
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
comments()
|
||||
scheduleAnchors()
|
||||
})
|
||||
createEffect(() => {
|
||||
const range = draftRange()
|
||||
if (!range) return
|
||||
setDraft("")
|
||||
scheduleAnchors()
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
const range = draftRange()
|
||||
if (!range) return
|
||||
setDraft("")
|
||||
scheduleAnchors()
|
||||
})
|
||||
createEffect(() => {
|
||||
if (!open().includes(diff.file)) return
|
||||
if (!isImage()) return
|
||||
if (imageSrc()) return
|
||||
if (imageStatus() !== "idle") return
|
||||
if (isDeleted()) return
|
||||
|
||||
createEffect(() => {
|
||||
if (!open().includes(diff.file)) return
|
||||
if (!isImage()) return
|
||||
if (imageSrc()) return
|
||||
if (imageStatus() !== "idle") return
|
||||
if (isDeleted()) return
|
||||
const reader = props.readFile
|
||||
if (!reader) return
|
||||
|
||||
const reader = props.readFile
|
||||
if (!reader) return
|
||||
|
||||
setImageStatus("loading")
|
||||
reader(diff.file)
|
||||
.then((result) => {
|
||||
const src = dataUrl(result)
|
||||
if (!src) {
|
||||
setImageStatus("loading")
|
||||
reader(diff.file)
|
||||
.then((result) => {
|
||||
const src = dataUrl(result)
|
||||
if (!src) {
|
||||
setImageStatus("error")
|
||||
return
|
||||
}
|
||||
setImageSrc(src)
|
||||
setImageStatus("idle")
|
||||
})
|
||||
.catch(() => {
|
||||
setImageStatus("error")
|
||||
return
|
||||
}
|
||||
setImageSrc(src)
|
||||
setImageStatus("idle")
|
||||
})
|
||||
.catch(() => {
|
||||
setImageStatus("error")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
if (!open().includes(diff.file)) return
|
||||
if (!isAudio()) return
|
||||
if (audioSrc()) return
|
||||
if (audioStatus() !== "idle") return
|
||||
createEffect(() => {
|
||||
if (!open().includes(diff.file)) return
|
||||
if (!isAudio()) return
|
||||
if (audioSrc()) return
|
||||
if (audioStatus() !== "idle") return
|
||||
|
||||
const reader = props.readFile
|
||||
if (!reader) return
|
||||
const reader = props.readFile
|
||||
if (!reader) return
|
||||
|
||||
setAudioStatus("loading")
|
||||
reader(diff.file)
|
||||
.then((result) => {
|
||||
const src = dataUrl(result)
|
||||
if (!src) {
|
||||
setAudioStatus("loading")
|
||||
reader(diff.file)
|
||||
.then((result) => {
|
||||
const src = dataUrl(result)
|
||||
if (!src) {
|
||||
setAudioStatus("error")
|
||||
return
|
||||
}
|
||||
setAudioMime(normalizeMimeType(result?.mimeType))
|
||||
setAudioSrc(src)
|
||||
setAudioStatus("idle")
|
||||
})
|
||||
.catch(() => {
|
||||
setAudioStatus("error")
|
||||
return
|
||||
}
|
||||
setAudioMime(normalizeMimeType(result?.mimeType))
|
||||
setAudioSrc(src)
|
||||
setAudioStatus("idle")
|
||||
})
|
||||
.catch(() => {
|
||||
setAudioStatus("error")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
const handleLineSelected = (range: SelectedLineRange | null) => {
|
||||
if (!props.onLineComment) return
|
||||
const handleLineSelected = (range: SelectedLineRange | null) => {
|
||||
if (!props.onLineComment) return
|
||||
|
||||
if (!range) {
|
||||
setSelection(null)
|
||||
return
|
||||
if (!range) {
|
||||
setSelection(null)
|
||||
return
|
||||
}
|
||||
|
||||
setSelection({ file: diff.file, range })
|
||||
}
|
||||
|
||||
setSelection({ file: diff.file, range })
|
||||
}
|
||||
const handleLineSelectionEnd = (range: SelectedLineRange | null) => {
|
||||
if (!props.onLineComment) return
|
||||
|
||||
const handleLineSelectionEnd = (range: SelectedLineRange | null) => {
|
||||
if (!props.onLineComment) return
|
||||
if (!range) {
|
||||
setCommenting(null)
|
||||
return
|
||||
}
|
||||
|
||||
if (!range) {
|
||||
setCommenting(null)
|
||||
return
|
||||
setSelection({ file: diff.file, range })
|
||||
setCommenting({ file: diff.file, range })
|
||||
}
|
||||
|
||||
setSelection({ file: diff.file, range })
|
||||
setCommenting({ file: diff.file, range })
|
||||
}
|
||||
const openComment = (comment: SessionReviewComment) => {
|
||||
setOpened({ file: comment.file, id: comment.id })
|
||||
setSelection({ file: comment.file, range: comment.selection })
|
||||
}
|
||||
|
||||
const openComment = (comment: SessionReviewComment) => {
|
||||
setOpened({ file: comment.file, id: comment.id })
|
||||
setSelection({ file: comment.file, range: comment.selection })
|
||||
}
|
||||
const isCommentOpen = (comment: SessionReviewComment) => {
|
||||
const current = opened()
|
||||
if (!current) return false
|
||||
return current.file === comment.file && current.id === comment.id
|
||||
}
|
||||
|
||||
const isCommentOpen = (comment: SessionReviewComment) => {
|
||||
const current = opened()
|
||||
if (!current) return false
|
||||
return current.file === comment.file && current.id === comment.id
|
||||
}
|
||||
|
||||
return (
|
||||
<Accordion.Item
|
||||
value={diff.file}
|
||||
id={diffId(diff.file)}
|
||||
data-file={diff.file}
|
||||
data-slot="session-review-accordion-item"
|
||||
data-selected={props.focusedFile === diff.file ? "" : undefined}
|
||||
>
|
||||
<StickyAccordionHeader>
|
||||
<Accordion.Trigger>
|
||||
<div data-slot="session-review-trigger-content">
|
||||
<div data-slot="session-review-file-info">
|
||||
<FileIcon node={{ path: diff.file, type: "file" }} />
|
||||
<div data-slot="session-review-file-name-container">
|
||||
<Show when={diff.file.includes("/")}>
|
||||
<span data-slot="session-review-directory">{`\u202A${getDirectory(diff.file)}\u202C`}</span>
|
||||
</Show>
|
||||
<span data-slot="session-review-filename">{getFilename(diff.file)}</span>
|
||||
<Show when={props.onViewFile}>
|
||||
<Tooltip value="Open file" placement="top" gutter={4}>
|
||||
<button
|
||||
data-slot="session-review-view-button"
|
||||
type="button"
|
||||
aria-label="Open file"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
props.onViewFile?.(diff.file)
|
||||
}}
|
||||
>
|
||||
<Icon name="open-file" size="small" />
|
||||
</button>
|
||||
</Tooltip>
|
||||
</Show>
|
||||
return (
|
||||
<Accordion.Item
|
||||
value={diff.file}
|
||||
id={diffId(diff.file)}
|
||||
data-file={diff.file}
|
||||
data-slot="session-review-accordion-item"
|
||||
data-selected={props.focusedFile === diff.file ? "" : undefined}
|
||||
>
|
||||
<StickyAccordionHeader>
|
||||
<Accordion.Trigger>
|
||||
<div data-slot="session-review-trigger-content">
|
||||
<div data-slot="session-review-file-info">
|
||||
<FileIcon node={{ path: diff.file, type: "file" }} />
|
||||
<div data-slot="session-review-file-name-container">
|
||||
<Show when={diff.file.includes("/")}>
|
||||
<span data-slot="session-review-directory">{`\u202A${getDirectory(diff.file)}\u202C`}</span>
|
||||
</Show>
|
||||
<span data-slot="session-review-filename">{getFilename(diff.file)}</span>
|
||||
<Show when={props.onViewFile}>
|
||||
<Tooltip value="Open file" placement="top" gutter={4}>
|
||||
<button
|
||||
data-slot="session-review-view-button"
|
||||
type="button"
|
||||
aria-label="Open file"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
props.onViewFile?.(diff.file)
|
||||
}}
|
||||
>
|
||||
<Icon name="open-file" size="small" />
|
||||
</button>
|
||||
</Tooltip>
|
||||
</Show>
|
||||
</div>
|
||||
</div>
|
||||
<div data-slot="session-review-trigger-actions">
|
||||
<Switch>
|
||||
<Match when={isAdded()}>
|
||||
<div data-slot="session-review-change-group" data-type="added">
|
||||
<span data-slot="session-review-change" data-type="added">
|
||||
{i18n.t("ui.sessionReview.change.added")}
|
||||
</span>
|
||||
<DiffChanges changes={diff} />
|
||||
</div>
|
||||
</Match>
|
||||
<Match when={isDeleted()}>
|
||||
<span data-slot="session-review-change" data-type="removed">
|
||||
{i18n.t("ui.sessionReview.change.removed")}
|
||||
</span>
|
||||
</Match>
|
||||
<Match when={isImage()}>
|
||||
<span data-slot="session-review-change" data-type="modified">
|
||||
{i18n.t("ui.sessionReview.change.modified")}
|
||||
</span>
|
||||
</Match>
|
||||
<Match when={true}>
|
||||
<DiffChanges changes={diff} />
|
||||
</Match>
|
||||
</Switch>
|
||||
<span data-slot="session-review-diff-chevron">
|
||||
<Icon name="chevron-down" size="small" />
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div data-slot="session-review-trigger-actions">
|
||||
</Accordion.Trigger>
|
||||
</StickyAccordionHeader>
|
||||
<Accordion.Content data-slot="session-review-accordion-content">
|
||||
<div
|
||||
data-slot="session-review-diff-wrapper"
|
||||
ref={(el) => {
|
||||
wrapper = el
|
||||
anchors.set(diff.file, el)
|
||||
scheduleAnchors()
|
||||
}}
|
||||
>
|
||||
<Show when={expanded()}>
|
||||
<Switch>
|
||||
<Match when={isAdded()}>
|
||||
<div data-slot="session-review-change-group" data-type="added">
|
||||
<span data-slot="session-review-change" data-type="added">
|
||||
{i18n.t("ui.sessionReview.change.added")}
|
||||
<Match when={isImage() && imageSrc()}>
|
||||
<div data-slot="session-review-image-container">
|
||||
<img data-slot="session-review-image" src={imageSrc()} alt={diff.file} />
|
||||
</div>
|
||||
</Match>
|
||||
<Match when={isImage() && isDeleted()}>
|
||||
<div data-slot="session-review-image-container" data-removed>
|
||||
<span data-slot="session-review-image-placeholder">
|
||||
{i18n.t("ui.sessionReview.change.removed")}
|
||||
</span>
|
||||
<DiffChanges changes={diff} />
|
||||
</div>
|
||||
</Match>
|
||||
<Match when={isDeleted()}>
|
||||
<span data-slot="session-review-change" data-type="removed">
|
||||
{i18n.t("ui.sessionReview.change.removed")}
|
||||
</span>
|
||||
</Match>
|
||||
<Match when={isImage()}>
|
||||
<span data-slot="session-review-change" data-type="modified">
|
||||
{i18n.t("ui.sessionReview.change.modified")}
|
||||
</span>
|
||||
</Match>
|
||||
<Match when={true}>
|
||||
<DiffChanges changes={diff} />
|
||||
</Match>
|
||||
</Switch>
|
||||
<span data-slot="session-review-diff-chevron">
|
||||
<Icon name="chevron-down" size="small" />
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</Accordion.Trigger>
|
||||
</StickyAccordionHeader>
|
||||
<Accordion.Content data-slot="session-review-accordion-content">
|
||||
<div
|
||||
data-slot="session-review-diff-wrapper"
|
||||
ref={(el) => {
|
||||
wrapper = el
|
||||
anchors.set(diff.file, el)
|
||||
scheduleAnchors()
|
||||
}}
|
||||
>
|
||||
<Show when={expanded()}>
|
||||
<Switch>
|
||||
<Match when={isImage() && imageSrc()}>
|
||||
<div data-slot="session-review-image-container">
|
||||
<img data-slot="session-review-image" src={imageSrc()} alt={diff.file} />
|
||||
</div>
|
||||
</Match>
|
||||
<Match when={isImage() && isDeleted()}>
|
||||
<div data-slot="session-review-image-container" data-removed>
|
||||
<span data-slot="session-review-image-placeholder">
|
||||
{i18n.t("ui.sessionReview.change.removed")}
|
||||
</span>
|
||||
</div>
|
||||
</Match>
|
||||
<Match when={isImage() && !imageSrc()}>
|
||||
<div data-slot="session-review-image-container">
|
||||
<span data-slot="session-review-image-placeholder">
|
||||
{imageStatus() === "loading"
|
||||
? i18n.t("ui.sessionReview.image.loading")
|
||||
: i18n.t("ui.sessionReview.image.placeholder")}
|
||||
</span>
|
||||
</div>
|
||||
</Match>
|
||||
<Match when={!isImage() && tooLarge()}>
|
||||
<div data-slot="session-review-large-diff">
|
||||
<div data-slot="session-review-large-diff-title">
|
||||
{i18n.t("ui.sessionReview.largeDiff.title")}
|
||||
<Match when={isImage() && !imageSrc()}>
|
||||
<div data-slot="session-review-image-container">
|
||||
<span data-slot="session-review-image-placeholder">
|
||||
{imageStatus() === "loading"
|
||||
? i18n.t("ui.sessionReview.image.loading")
|
||||
: i18n.t("ui.sessionReview.image.placeholder")}
|
||||
</span>
|
||||
</div>
|
||||
<div data-slot="session-review-large-diff-meta">
|
||||
{i18n.t("ui.sessionReview.largeDiff.meta", {
|
||||
limit: MAX_DIFF_CHANGED_LINES.toLocaleString(),
|
||||
current: changedLines().toLocaleString(),
|
||||
})}
|
||||
</Match>
|
||||
<Match when={!isImage() && tooLarge()}>
|
||||
<div data-slot="session-review-large-diff">
|
||||
<div data-slot="session-review-large-diff-title">
|
||||
{i18n.t("ui.sessionReview.largeDiff.title")}
|
||||
</div>
|
||||
<div data-slot="session-review-large-diff-meta">
|
||||
{i18n.t("ui.sessionReview.largeDiff.meta", {
|
||||
limit: MAX_DIFF_CHANGED_LINES.toLocaleString(),
|
||||
current: changedLines().toLocaleString(),
|
||||
})}
|
||||
</div>
|
||||
<div data-slot="session-review-large-diff-actions">
|
||||
<Button size="normal" variant="secondary" onClick={() => setForce(true)}>
|
||||
{i18n.t("ui.sessionReview.largeDiff.renderAnyway")}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<div data-slot="session-review-large-diff-actions">
|
||||
<Button size="normal" variant="secondary" onClick={() => setForce(true)}>
|
||||
{i18n.t("ui.sessionReview.largeDiff.renderAnyway")}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</Match>
|
||||
<Match when={!isImage()}>
|
||||
<Dynamic
|
||||
component={diffComponent}
|
||||
preloadedDiff={diff.preloaded}
|
||||
diffStyle={diffStyle()}
|
||||
onRendered={() => {
|
||||
props.onDiffRendered?.()
|
||||
scheduleAnchors()
|
||||
}}
|
||||
enableLineSelection={props.onLineComment != null}
|
||||
onLineSelected={handleLineSelected}
|
||||
onLineSelectionEnd={handleLineSelectionEnd}
|
||||
selectedLines={selectedLines()}
|
||||
commentedLines={commentedLines()}
|
||||
before={{
|
||||
name: diff.file!,
|
||||
contents: typeof diff.before === "string" ? diff.before : "",
|
||||
}}
|
||||
after={{
|
||||
name: diff.file!,
|
||||
contents: typeof diff.after === "string" ? diff.after : "",
|
||||
}}
|
||||
/>
|
||||
</Match>
|
||||
</Switch>
|
||||
|
||||
<For each={comments()}>
|
||||
{(comment) => (
|
||||
<LineComment
|
||||
id={comment.id}
|
||||
top={positions()[comment.id]}
|
||||
onMouseEnter={() => setSelection({ file: comment.file, range: comment.selection })}
|
||||
onClick={() => {
|
||||
if (isCommentOpen(comment)) {
|
||||
setOpened(null)
|
||||
return
|
||||
}
|
||||
|
||||
openComment(comment)
|
||||
}}
|
||||
open={isCommentOpen(comment)}
|
||||
comment={comment.comment}
|
||||
selection={selectionLabel(comment.selection)}
|
||||
/>
|
||||
)}
|
||||
</For>
|
||||
|
||||
<Show when={draftRange()}>
|
||||
{(range) => (
|
||||
<Show when={draftTop() !== undefined}>
|
||||
<LineCommentEditor
|
||||
top={draftTop()}
|
||||
value={draft()}
|
||||
selection={selectionLabel(range())}
|
||||
onInput={setDraft}
|
||||
onCancel={() => setCommenting(null)}
|
||||
onSubmit={(comment) => {
|
||||
props.onLineComment?.({
|
||||
file: diff.file,
|
||||
selection: range(),
|
||||
comment,
|
||||
preview: selectionPreview(diff, range()),
|
||||
})
|
||||
setCommenting(null)
|
||||
</Match>
|
||||
<Match when={!isImage()}>
|
||||
<Dynamic
|
||||
component={diffComponent}
|
||||
preloadedDiff={diff.preloaded}
|
||||
diffStyle={diffStyle()}
|
||||
onRendered={() => {
|
||||
props.onDiffRendered?.()
|
||||
scheduleAnchors()
|
||||
}}
|
||||
enableLineSelection={props.onLineComment != null}
|
||||
onLineSelected={handleLineSelected}
|
||||
onLineSelectionEnd={handleLineSelectionEnd}
|
||||
selectedLines={selectedLines()}
|
||||
commentedLines={commentedLines()}
|
||||
before={{
|
||||
name: diff.file!,
|
||||
contents: typeof diff.before === "string" ? diff.before : "",
|
||||
}}
|
||||
after={{
|
||||
name: diff.file!,
|
||||
contents: typeof diff.after === "string" ? diff.after : "",
|
||||
}}
|
||||
/>
|
||||
</Show>
|
||||
)}
|
||||
</Match>
|
||||
</Switch>
|
||||
|
||||
<For each={comments()}>
|
||||
{(comment) => (
|
||||
<LineComment
|
||||
id={comment.id}
|
||||
top={positions()[comment.id]}
|
||||
onMouseEnter={() => setSelection({ file: comment.file, range: comment.selection })}
|
||||
onClick={() => {
|
||||
if (isCommentOpen(comment)) {
|
||||
setOpened(null)
|
||||
return
|
||||
}
|
||||
|
||||
openComment(comment)
|
||||
}}
|
||||
open={isCommentOpen(comment)}
|
||||
comment={comment.comment}
|
||||
selection={selectionLabel(comment.selection)}
|
||||
/>
|
||||
)}
|
||||
</For>
|
||||
|
||||
<Show when={draftRange()}>
|
||||
{(range) => (
|
||||
<Show when={draftTop() !== undefined}>
|
||||
<LineCommentEditor
|
||||
top={draftTop()}
|
||||
value={draft()}
|
||||
selection={selectionLabel(range())}
|
||||
onInput={setDraft}
|
||||
onCancel={() => setCommenting(null)}
|
||||
onSubmit={(comment) => {
|
||||
props.onLineComment?.({
|
||||
file: diff.file,
|
||||
selection: range(),
|
||||
comment,
|
||||
preview: selectionPreview(diff, range()),
|
||||
})
|
||||
setCommenting(null)
|
||||
}}
|
||||
/>
|
||||
</Show>
|
||||
)}
|
||||
</Show>
|
||||
</Show>
|
||||
</Show>
|
||||
</div>
|
||||
</Accordion.Content>
|
||||
</Accordion.Item>
|
||||
)
|
||||
}}
|
||||
</For>
|
||||
</Accordion>
|
||||
</div>
|
||||
</Accordion.Content>
|
||||
</Accordion.Item>
|
||||
)
|
||||
}}
|
||||
</For>
|
||||
</Accordion>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -129,6 +129,49 @@
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
[data-slot="session-turn-diffs-group"] {
|
||||
background-color: var(--background-stronger);
|
||||
border-radius: var(--radius-lg);
|
||||
border: 1px solid var(--border-weak-base);
|
||||
overflow: clip;
|
||||
|
||||
[data-component="accordion"] {
|
||||
gap: 0;
|
||||
}
|
||||
|
||||
[data-component="accordion"]
|
||||
[data-slot="accordion-item"]
|
||||
[data-slot="accordion-header"]
|
||||
[data-slot="accordion-trigger"] {
|
||||
border: 0;
|
||||
border-radius: 0;
|
||||
box-shadow: none;
|
||||
background-color: transparent;
|
||||
|
||||
&:hover {
|
||||
background-color: var(--surface-base-hover);
|
||||
}
|
||||
|
||||
&:active {
|
||||
background-color: var(--surface-base-active);
|
||||
}
|
||||
}
|
||||
|
||||
[data-component="accordion"]
|
||||
[data-slot="accordion-item"]
|
||||
+ [data-slot="accordion-item"]
|
||||
[data-slot="accordion-header"]
|
||||
[data-slot="accordion-trigger"] {
|
||||
border-top: 1px solid var(--border-weak-base);
|
||||
}
|
||||
|
||||
[data-component="accordion"] [data-slot="accordion-item"][data-expanded] [data-slot="accordion-content"] {
|
||||
border: 0;
|
||||
border-top: 1px solid var(--border-weak-base);
|
||||
border-radius: 0;
|
||||
}
|
||||
}
|
||||
|
||||
[data-slot="session-turn-diff-trigger"] {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
@@ -187,6 +230,7 @@
|
||||
background-color: var(--surface-inset-base);
|
||||
width: 100%;
|
||||
min-width: 0;
|
||||
max-height: 420px;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
scrollbar-width: none;
|
||||
|
||||
@@ -9,7 +9,6 @@ import { Dynamic } from "solid-js/web"
|
||||
import { AssistantParts, Message } from "./message-part"
|
||||
import { Card } from "./card"
|
||||
import { Accordion } from "./accordion"
|
||||
import { StickyAccordionHeader } from "./sticky-accordion-header"
|
||||
import { Collapsible } from "./collapsible"
|
||||
import { DiffChanges } from "./diff-changes"
|
||||
import { Icon } from "./icon"
|
||||
@@ -316,76 +315,78 @@ export function SessionTurn(
|
||||
<Collapsible.Content>
|
||||
<Show when={open()}>
|
||||
<div data-component="session-turn-diffs-content">
|
||||
<Accordion
|
||||
multiple
|
||||
value={expanded()}
|
||||
onChange={(value) => setExpanded(Array.isArray(value) ? value : value ? [value] : [])}
|
||||
>
|
||||
<For each={diffs()}>
|
||||
{(diff) => {
|
||||
const active = createMemo(() => expanded().includes(diff.file))
|
||||
const [visible, setVisible] = createSignal(false)
|
||||
<div data-slot="session-turn-diffs-group">
|
||||
<Accordion
|
||||
multiple
|
||||
value={expanded()}
|
||||
onChange={(value) => setExpanded(Array.isArray(value) ? value : value ? [value] : [])}
|
||||
>
|
||||
<For each={diffs()}>
|
||||
{(diff) => {
|
||||
const active = createMemo(() => expanded().includes(diff.file))
|
||||
const [visible, setVisible] = createSignal(false)
|
||||
|
||||
createEffect(
|
||||
on(
|
||||
active,
|
||||
(value) => {
|
||||
if (!value) {
|
||||
setVisible(false)
|
||||
return
|
||||
}
|
||||
createEffect(
|
||||
on(
|
||||
active,
|
||||
(value) => {
|
||||
if (!value) {
|
||||
setVisible(false)
|
||||
return
|
||||
}
|
||||
|
||||
requestAnimationFrame(() => {
|
||||
if (!active()) return
|
||||
setVisible(true)
|
||||
})
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
requestAnimationFrame(() => {
|
||||
if (!active()) return
|
||||
setVisible(true)
|
||||
})
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
|
||||
return (
|
||||
<Accordion.Item value={diff.file}>
|
||||
<StickyAccordionHeader>
|
||||
<Accordion.Trigger>
|
||||
<div data-slot="session-turn-diff-trigger">
|
||||
<span data-slot="session-turn-diff-path">
|
||||
<Show when={diff.file.includes("/")}>
|
||||
<span data-slot="session-turn-diff-directory">
|
||||
{`\u202A${getDirectory(diff.file)}\u202C`}
|
||||
return (
|
||||
<Accordion.Item value={diff.file}>
|
||||
<Accordion.Header>
|
||||
<Accordion.Trigger>
|
||||
<div data-slot="session-turn-diff-trigger">
|
||||
<span data-slot="session-turn-diff-path">
|
||||
<Show when={diff.file.includes("/")}>
|
||||
<span data-slot="session-turn-diff-directory">
|
||||
{`\u202A${getDirectory(diff.file)}\u202C`}
|
||||
</span>
|
||||
</Show>
|
||||
<span data-slot="session-turn-diff-filename">
|
||||
{getFilename(diff.file)}
|
||||
</span>
|
||||
</Show>
|
||||
<span data-slot="session-turn-diff-filename">
|
||||
{getFilename(diff.file)}
|
||||
</span>
|
||||
</span>
|
||||
<div data-slot="session-turn-diff-meta">
|
||||
<span data-slot="session-turn-diff-changes">
|
||||
<DiffChanges changes={diff} />
|
||||
</span>
|
||||
<span data-slot="session-turn-diff-chevron">
|
||||
<Icon name="chevron-down" size="small" />
|
||||
</span>
|
||||
<div data-slot="session-turn-diff-meta">
|
||||
<span data-slot="session-turn-diff-changes">
|
||||
<DiffChanges changes={diff} />
|
||||
</span>
|
||||
<span data-slot="session-turn-diff-chevron">
|
||||
<Icon name="chevron-down" size="small" />
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Accordion.Trigger>
|
||||
</StickyAccordionHeader>
|
||||
<Accordion.Content>
|
||||
<Show when={visible()}>
|
||||
<div data-slot="session-turn-diff-view" data-scrollable>
|
||||
<Dynamic
|
||||
component={diffComponent}
|
||||
before={{ name: diff.file, contents: diff.before }}
|
||||
after={{ name: diff.file, contents: diff.after }}
|
||||
/>
|
||||
</div>
|
||||
</Show>
|
||||
</Accordion.Content>
|
||||
</Accordion.Item>
|
||||
)
|
||||
}}
|
||||
</For>
|
||||
</Accordion>
|
||||
</Accordion.Trigger>
|
||||
</Accordion.Header>
|
||||
<Accordion.Content>
|
||||
<Show when={visible()}>
|
||||
<div data-slot="session-turn-diff-view" data-scrollable>
|
||||
<Dynamic
|
||||
component={diffComponent}
|
||||
before={{ name: diff.file, contents: diff.before }}
|
||||
after={{ name: diff.file, contents: diff.after }}
|
||||
/>
|
||||
</div>
|
||||
</Show>
|
||||
</Accordion.Content>
|
||||
</Accordion.Item>
|
||||
)
|
||||
}}
|
||||
</For>
|
||||
</Accordion>
|
||||
</div>
|
||||
</div>
|
||||
</Show>
|
||||
</Collapsible.Content>
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
[data-component="sticky-accordion-header"] {
|
||||
--sticky-accordion-top: 0px;
|
||||
position: sticky;
|
||||
top: var(--sticky-accordion-top);
|
||||
}
|
||||
|
||||
[data-slot="accordion-item"]:first-child [data-component="sticky-accordion-header"] {
|
||||
background-color: var(--background-stronger);
|
||||
top: 0px;
|
||||
}
|
||||
|
||||
[data-component="sticky-accordion-header"][data-expanded],
|
||||
[data-slot="accordion-item"][data-expanded] [data-component="sticky-accordion-header"] {
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
[data-component="sticky-accordion-header"][data-expanded]::before,
|
||||
[data-slot="accordion-item"][data-expanded] [data-component="sticky-accordion-header"]::before {
|
||||
content: "";
|
||||
z-index: -10;
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
background-color: var(--background-stronger);
|
||||
}
|
||||
|
||||
@@ -104,8 +104,7 @@ const unsafeCSS = `
|
||||
}
|
||||
|
||||
[data-diff-header],
|
||||
[data-diff],
|
||||
[data-file] {
|
||||
[data-diff] {
|
||||
[data-separator] {
|
||||
height: 24px;
|
||||
}
|
||||
@@ -123,7 +122,6 @@ const unsafeCSS = `
|
||||
}
|
||||
[data-code] {
|
||||
overflow-x: auto !important;
|
||||
overflow-y: hidden !important;
|
||||
}
|
||||
}`
|
||||
|
||||
|
||||
@@ -558,7 +558,6 @@ OpenCode can be configured using environment variables.
| `OPENCODE_AUTO_SHARE` | boolean | Automatically share sessions |
| `OPENCODE_GIT_BASH_PATH` | string | Path to Git Bash executable on Windows |
| `OPENCODE_CONFIG` | string | Path to config file |
| `OPENCODE_TUI_CONFIG` | string | Path to TUI config file |
| `OPENCODE_CONFIG_DIR` | string | Path to config directory |
| `OPENCODE_CONFIG_CONTENT` | string | Inline JSON config content |
| `OPENCODE_DISABLE_AUTOUPDATE` | boolean | Disable automatic update checks |

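For illustration, a loader honoring the config-related variables in this table might look roughly like the following. This is a sketch only; in particular, the precedence between inline content and a file path is an assumption, not documented behavior.

```ts
import path from "path"
import os from "os"

// Hedged sketch of how OPENCODE_CONFIG / OPENCODE_CONFIG_CONTENT could be consumed.
const inline = process.env["OPENCODE_CONFIG_CONTENT"] // inline JSON config content
const file =
  process.env["OPENCODE_CONFIG"] ?? path.join(os.homedir(), ".config", "opencode", "opencode.json")

const config = inline ? JSON.parse(inline) : JSON.parse(await Bun.file(file).text())
```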
@@ -14,11 +14,10 @@ OpenCode supports both **JSON** and **JSONC** (JSON with Comments) formats.
```jsonc title="opencode.jsonc"
{
"$schema": "https://opencode.ai/config.json",
// Theme configuration
"theme": "opencode",
"model": "anthropic/claude-sonnet-4-5",
"autoupdate": true,
"server": {
"port": 4096,
},
}
```

@@ -35,7 +34,7 @@ Configuration files are **merged together**, not replaced.

Configuration files are merged together, not replaced. Settings from the following config locations are combined. Later configs override earlier ones only for conflicting keys. Non-conflicting settings from all configs are preserved.

For example, if your global config sets `autoupdate: true` and your project config sets `model: "anthropic/claude-sonnet-4-5"`, the final configuration will include both settings.
For example, if your global config sets `theme: "opencode"` and `autoupdate: true`, and your project config sets `model: "anthropic/claude-sonnet-4-5"`, the final configuration will include all three settings.
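A rough sketch of the merge described above, using a plain object spread for illustration (the real loader may merge nested keys more carefully):

```ts
// Later configs override earlier ones only where keys collide.
const remote = {} // organizational defaults, if any
const global = { theme: "opencode", autoupdate: true } // ~/.config/opencode/opencode.json
const project = { model: "anthropic/claude-sonnet-4-5" } // ./opencode.json

const effective = { ...remote, ...global, ...project }
// => { theme: "opencode", autoupdate: true, model: "anthropic/claude-sonnet-4-5" }
```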
|
---

@@ -96,9 +95,7 @@ You can enable specific servers in your local config:

### Global

Place your global OpenCode config in `~/.config/opencode/opencode.json`. Use global config for user-wide server/runtime preferences like providers, models, and permissions.

For TUI-specific settings, use `~/.config/opencode/tui.json`.
Place your global OpenCode config in `~/.config/opencode/opencode.json`. Use global config for user-wide preferences like themes, providers, or keybinds.

Global config overrides remote organizational defaults.

@@ -108,8 +105,6 @@ Global config overrides remote organizational defaults.

Add `opencode.json` in your project root. Project config has the highest precedence among standard config files - it overrides both global and remote configs.

For project-specific TUI settings, add `tui.json` alongside it.

:::tip
Place project specific config in the root of your project.
:::
@@ -151,9 +146,7 @@ The custom directory is loaded after the global config and `.opencode` directori

## Schema

The server/runtime config schema is defined in [**`opencode.ai/config.json`**](https://opencode.ai/config.json).

TUI config uses [**`opencode.ai/tui.json`**](https://opencode.ai/tui.json).
The config file has a schema that's defined in [**`opencode.ai/config.json`**](https://opencode.ai/config.json).

Your editor should be able to validate and autocomplete based on the schema.

@@ -161,24 +154,28 @@ Your editor should be able to validate and autocomplete based on the schema.

### TUI

Use a dedicated `tui.json` (or `tui.jsonc`) file for TUI-specific settings.
You can configure TUI-specific settings through the `tui` option.

```json title="tui.json"
```json title="opencode.json"
{
"$schema": "https://opencode.ai/tui.json",
"scroll_speed": 3,
"scroll_acceleration": {
"enabled": true
},
"diff_style": "auto"
"$schema": "https://opencode.ai/config.json",
"tui": {
"scroll_speed": 3,
"scroll_acceleration": {
"enabled": true
},
"diff_style": "auto"
}
}
```

Use `OPENCODE_TUI_CONFIG` to point to a custom TUI config file.
Available options:

Legacy `theme`, `keybinds`, and `tui` keys in `opencode.json` are deprecated and automatically migrated when possible.
- `scroll_acceleration.enabled` - Enable macOS-style scroll acceleration. **Takes precedence over `scroll_speed`.**
- `scroll_speed` - Custom scroll speed multiplier (default: `3`, minimum: `1`). Ignored if `scroll_acceleration.enabled` is `true`.
- `diff_style` - Control diff rendering. `"auto"` adapts to terminal width, `"stacked"` always shows single column.

[Learn more about TUI configuration here](/docs/tui#configure).
[Learn more about using the TUI here](/docs/tui).

---
|
||||
@@ -304,12 +301,12 @@ Bearer tokens (`AWS_BEARER_TOKEN_BEDROCK` or `/connect`) take precedence over pr

### Themes

Set your UI theme in `tui.json`.
You can configure the theme you want to use in your OpenCode config through the `theme` option.

```json title="tui.json"
```json title="opencode.json"
{
"$schema": "https://opencode.ai/tui.json",
"theme": "tokyonight"
"$schema": "https://opencode.ai/config.json",
"theme": ""
}
```

@@ -409,11 +406,11 @@ You can also define commands using markdown files in `~/.config/opencode/command

### Keybinds

Customize keybinds in `tui.json`.
You can customize your keybinds through the `keybinds` option.

```json title="tui.json"
```json title="opencode.json"
{
"$schema": "https://opencode.ai/tui.json",
"$schema": "https://opencode.ai/config.json",
"keybinds": {}
}
```

@@ -3,11 +3,11 @@ title: Keybinds
description: Customize your keybinds.
---

OpenCode has a list of keybinds that you can customize through `tui.json`.
OpenCode has a list of keybinds that you can customize through the OpenCode config.

```json title="tui.json"
```json title="opencode.json"
{
"$schema": "https://opencode.ai/tui.json",
"$schema": "https://opencode.ai/config.json",
"keybinds": {
"leader": "ctrl+x",
"app_exit": "ctrl+c,ctrl+d,<leader>q",
@@ -117,11 +117,11 @@ You don't need to use a leader key for your keybinds but we recommend doing so.

## Disable keybind

You can disable a keybind by adding the key to `tui.json` with a value of "none".
You can disable a keybind by adding the key to your config with a value of "none".

```json title="tui.json"
```json title="opencode.json"
{
"$schema": "https://opencode.ai/tui.json",
"$schema": "https://opencode.ai/config.json",
"keybinds": {
"session_compact": "none"
}

@@ -61,11 +61,11 @@ The system theme is for users who:

## Using a theme

You can select a theme by bringing up the theme select with the `/theme` command. Or you can specify it in `tui.json`.
You can select a theme by bringing up the theme select with the `/theme` command. Or you can specify it in your [config](/docs/config).

```json title="tui.json" {3}
```json title="opencode.json" {3}
{
"$schema": "https://opencode.ai/tui.json",
"$schema": "https://opencode.ai/config.json",
"theme": "tokyonight"
}
```

@@ -355,34 +355,24 @@ Some editors need command-line arguments to run in blocking mode. The `--wait` f

## Configure

You can customize TUI behavior through `tui.json` (or `tui.jsonc`).
You can customize TUI behavior through your OpenCode config file.

```json title="tui.json"
```json title="opencode.json"
{
"$schema": "https://opencode.ai/tui.json",
"theme": "opencode",
"keybinds": {
"leader": "ctrl+x"
},
"scroll_speed": 3,
"scroll_acceleration": {
"enabled": true
},
"diff_style": "auto"
"$schema": "https://opencode.ai/config.json",
"tui": {
"scroll_speed": 3,
"scroll_acceleration": {
"enabled": true
}
}
}
```

This is separate from `opencode.json`, which configures server/runtime behavior.

### Options

- `theme` - Sets your UI theme. [Learn more](/docs/themes).
- `keybinds` - Customizes keyboard shortcuts. [Learn more](/docs/keybinds).
- `scroll_acceleration.enabled` - Enable macOS-style scroll acceleration for smooth, natural scrolling. When enabled, scroll speed increases with rapid scrolling gestures and stays precise for slower movements. **This setting takes precedence over `scroll_speed` and overrides it when enabled.**
- `scroll_speed` - Controls how fast the TUI scrolls when using scroll commands (minimum: `0.001`, supports decimal values). Defaults to `3`. **Note: This is ignored if `scroll_acceleration.enabled` is set to `true`.**
- `diff_style` - Controls diff rendering. `"auto"` adapts to terminal width, `"stacked"` always shows a single-column layout.

Use `OPENCODE_TUI_CONFIG` to load a custom TUI config path.
- `scroll_acceleration` - Enable macOS-style scroll acceleration for smooth, natural scrolling. When enabled, scroll speed increases with rapid scrolling gestures and stays precise for slower movements. **This setting takes precedence over `scroll_speed` and overrides it when enabled.**
- `scroll_speed` - Controls how fast the TUI scrolls when using scroll commands (minimum: `1`). Defaults to `3`. **Note: This is ignored if `scroll_acceleration.enabled` is set to `true`.**
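Both variants of the bullets above describe the same precedence rule: `scroll_acceleration.enabled` wins over `scroll_speed`. A small sketch of that rule with a hypothetical helper (not the TUI's actual code; the acceleration curve is a placeholder):

```ts
type TuiScrollConfig = { scroll_speed?: number; scroll_acceleration?: { enabled?: boolean } }

// Placeholder for the macOS-style acceleration curve; the real curve is not shown here.
const accelerate = (delta: number) => delta

function scrollDelta(config: TuiScrollConfig, rawDelta: number) {
  if (config.scroll_acceleration?.enabled) return accelerate(rawDelta) // acceleration overrides scroll_speed
  return rawDelta * (config.scroll_speed ?? 3) // plain multiplier, default 3
}
```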
|
---

40
patches/ghostty-web@0.3.0.patch
Normal file
@@ -0,0 +1,40 @@
diff --git a/dist/ghostty-web.js b/dist/ghostty-web.js
index 7c9d64a617bbeb29d757a1acd54686e582868313..2d61098cdb77fa66cbb162897c5590f35cfcf791 100644
--- a/dist/ghostty-web.js
+++ b/dist/ghostty-web.js
@@ -1285,7 +1285,7 @@ const e = class H {
continue;
}
const C = g.getCodepoint();
- C === 0 || C < 32 ? B.push(" ") : B.push(String.fromCodePoint(C));
+ C === 0 || C < 32 || C > 1114111 || (C >= 55296 && C <= 57343) ? B.push(" ") : B.push(String.fromCodePoint(C));
}
return B.join("");
}
@@ -1484,7 +1484,7 @@ class _ {
return;
let J = "";
A.flags & U.ITALIC && (J += "italic "), A.flags & U.BOLD && (J += "bold "), this.ctx.font = `${J}${this.fontSize}px ${this.fontFamily}`, this.ctx.fillStyle = this.rgbToCSS(w, o, i), A.flags & U.FAINT && (this.ctx.globalAlpha = 0.5);
- const s = g, F = C + this.metrics.baseline, a = String.fromCodePoint(A.codepoint || 32);
+ const s = g, F = C + this.metrics.baseline, a = (A.codepoint === 0 || A.codepoint == null || A.codepoint < 0 || A.codepoint > 1114111 || (A.codepoint >= 55296 && A.codepoint <= 57343)) ? " " : String.fromCodePoint(A.codepoint);
if (this.ctx.fillText(a, s, F), A.flags & U.FAINT && (this.ctx.globalAlpha = 1), A.flags & U.UNDERLINE) {
const N = C + this.metrics.baseline + 2;
this.ctx.strokeStyle = this.ctx.fillStyle, this.ctx.lineWidth = 1, this.ctx.beginPath(), this.ctx.moveTo(g, N), this.ctx.lineTo(g + I, N), this.ctx.stroke();
@@ -1730,7 +1730,7 @@ const L = class R {
let G = "";
for (let J = M; J <= k; J++) {
const s = o[J];
- if (s && s.codepoint !== 0) {
+ if (s && s.codepoint !== 0 && s.codepoint <= 1114111 && !(s.codepoint >= 55296 && s.codepoint <= 57343)) {
const F = String.fromCodePoint(s.codepoint);
G += F, F.trim() && (i = G.length);
} else
@@ -1995,7 +1995,7 @@ const L = class R {
if (!Q)
return null;
const g = (w) => {
- if (!w || w.codepoint === 0)
+ if (!w || w.codepoint === 0 || w.codepoint > 1114111 || (w.codepoint >= 55296 && w.codepoint <= 57343))
return !1;
const o = String.fromCodePoint(w.codepoint);
return /[\w-]/.test(o);
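Each hunk in this patch adds essentially the same guard: a cell's codepoint is only passed to `String.fromCodePoint` when it is a valid, non-surrogate Unicode scalar value (at most 0x10FFFF and outside U+D800-U+DFFF); anything else is rendered as a space. A standalone TypeScript sketch of that check, for reference only:

```ts
// Mirrors the bounds the patch checks: 1114111 === 0x10FFFF, 55296-57343 === U+D800-U+DFFF (surrogates).
function isRenderableCodepoint(codepoint: number | null | undefined): codepoint is number {
  if (codepoint == null || codepoint === 0) return false
  if (codepoint < 0 || codepoint > 0x10ffff) return false // outside the Unicode range
  if (codepoint >= 0xd800 && codepoint <= 0xdfff) return false // lone surrogate, not encodable
  return true
}

const cellText = (codepoint: number | null | undefined) =>
  isRenderableCodepoint(codepoint) ? String.fromCodePoint(codepoint) : " "
```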