Mirror of https://github.com/anomalyco/opencode.git (synced 2026-02-11 03:14:29 +00:00)

Compare commits: github-v1. ... github-v1.

11 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 31e6ed6806 | |
| | da56319af4 | |
| | 2198f9400f | |
| | ffc4d53923 | |
| | 18d3c054a3 | |
| | 59c5da9b6c | |
| | 15880195a2 | |
| | 117de64f39 | |
| | 388156704a | |
| | faf443132f | |
| | 36a9be040b | |
bun.lock
@@ -241,7 +241,7 @@
"@opencode-ai/script": "workspace:*",
"@opencode-ai/sdk": "workspace:*",
"@opencode-ai/util": "workspace:*",
- "@openrouter/ai-sdk-provider": "1.2.8",
+ "@openrouter/ai-sdk-provider": "1.5.2",
"@opentui/core": "0.1.60",
"@opentui/solid": "0.1.60",
"@parcel/watcher": "2.5.1",

@@ -1141,7 +1141,7 @@

"@opencode-ai/web": ["@opencode-ai/web@workspace:packages/web"],

- "@openrouter/ai-sdk-provider": ["@openrouter/ai-sdk-provider@1.2.8", "", { "dependencies": { "@openrouter/sdk": "^0.1.8" }, "peerDependencies": { "ai": "^5.0.0", "zod": "^3.24.1 || ^v4" } }, "sha512-pQT8AzZBKg9f4bkt4doF486ZlhK0XjKkevrLkiqYgfh1Jplovieu28nK4Y+xy3sF18/mxjqh9/2y6jh01qzLrA=="],
+ "@openrouter/ai-sdk-provider": ["@openrouter/ai-sdk-provider@1.5.2", "", { "dependencies": { "@openrouter/sdk": "^0.1.27" }, "peerDependencies": { "@toon-format/toon": "^2.0.0", "ai": "^5.0.0", "zod": "^3.24.1 || ^v4" }, "optionalPeers": ["@toon-format/toon"] }, "sha512-3Th0vmJ9pjnwcPc2H1f59Mb0LFvwaREZAScfOQIpUxAHjZ7ZawVKDP27qgsteZPmMYqccNMy4r4Y3kgUnNcKAg=="],

"@openrouter/sdk": ["@openrouter/sdk@0.1.27", "", { "dependencies": { "zod": "^3.25.0 || ^4.0.0" } }, "sha512-RH//L10bSmc81q25zAZudiI4kNkLgxF2E+WU42vghp3N6TEvZ6F0jK7uT3tOxkEn91gzmMw9YVmDENy7SJsajQ=="],
@@ -1,3 +1,3 @@
{
- "nodeModules": "sha256-pwjePZClFKDhDr5xrTNCBlO0xTwxLQYiQYaIEd0FdQQ="
+ "nodeModules": "sha256-JT8J+Nd2kk0x46BcyotmBbM39tuKOW7VzXfOV3R3sqQ="
}
@@ -70,7 +70,7 @@
"@opencode-ai/script": "workspace:*",
"@opencode-ai/sdk": "workspace:*",
"@opencode-ai/util": "workspace:*",
- "@openrouter/ai-sdk-provider": "1.2.8",
+ "@openrouter/ai-sdk-provider": "1.5.2",
"@opentui/core": "0.1.60",
"@opentui/solid": "0.1.60",
"@parcel/watcher": "2.5.1",
@@ -403,12 +403,12 @@ export const GithubRunCommand = cmd({
let appToken: string
let octoRest: Octokit
let octoGraph: typeof graphql
let commentId: number
let gitConfig: string
let session: { id: string; title: string; version: string }
let shareId: string | undefined
let exitCode = 0
type PromptFiles = Awaited<ReturnType<typeof getUserPrompt>>["promptFiles"]
+ const triggerCommentId = payload.comment.id

try {
const actionToken = isMock ? args.token! : await getOidcToken()

@@ -422,8 +422,7 @@ export const GithubRunCommand = cmd({
await configureGit(appToken)
await assertPermissions()

- const comment = await createComment()
- commentId = comment.data.id
+ await addReaction("eyes")

// Setup opencode session
const repoData = await fetchRepo()

@@ -455,7 +454,8 @@ export const GithubRunCommand = cmd({
await pushToLocalBranch(summary, uncommittedChanges)
}
const hasShared = prData.comments.nodes.some((c) => c.body.includes(`${shareBaseUrl}/s/${shareId}`))
- await updateComment(`${response}${footer({ image: !hasShared })}`)
+ await createComment(`${response}${footer({ image: !hasShared })}`)
+ await removeReaction()
}
// Fork PR
else {

@@ -469,7 +469,8 @@ export const GithubRunCommand = cmd({
await pushToForkBranch(summary, prData, uncommittedChanges)
}
const hasShared = prData.comments.nodes.some((c) => c.body.includes(`${shareBaseUrl}/s/${shareId}`))
- await updateComment(`${response}${footer({ image: !hasShared })}`)
+ await createComment(`${response}${footer({ image: !hasShared })}`)
+ await removeReaction()
}
}
// Issue

@@ -489,9 +490,11 @@ export const GithubRunCommand = cmd({
summary,
`${response}\n\nCloses #${issueId}${footer({ image: true })}`,
)
- await updateComment(`Created PR #${pr}${footer({ image: true })}`)
+ await createComment(`Created PR #${pr}${footer({ image: true })}`)
+ await removeReaction()
} else {
- await updateComment(`${response}${footer({ image: true })}`)
+ await createComment(`${response}${footer({ image: true })}`)
+ await removeReaction()
}
}
} catch (e: any) {

@@ -503,7 +506,8 @@ export const GithubRunCommand = cmd({
} else if (e instanceof Error) {
msg = e.message
}
- await updateComment(`${msg}${footer()}`)
+ await createComment(`${msg}${footer()}`)
+ await removeReaction()
core.setFailed(msg)
// Also output the clean error message for the action to capture
//core.setOutput("prepare_error", e.message);
@@ -931,24 +935,41 @@ Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"`
if (!["admin", "write"].includes(permission)) throw new Error(`User ${actor} does not have write permissions`)
}

async function createComment() {
async function addReaction(reaction: "eyes") {
console.log("Adding reaction...")
return await octoRest.rest.reactions.createForIssueComment({
owner,
repo,
comment_id: triggerCommentId,
content: reaction,
})
}

async function removeReaction() {
console.log("Removing reaction...")
const reactions = await octoRest.rest.reactions.listForIssueComment({
owner,
repo,
comment_id: triggerCommentId,
})

const eyesReaction = reactions.data.find((r) => r.content === "eyes")
if (!eyesReaction) return

await octoRest.rest.reactions.deleteForIssueComment({
owner,
repo,
comment_id: triggerCommentId,
reaction_id: eyesReaction.id,
})
}

async function createComment(body: string) {
console.log("Creating comment...")
return await octoRest.rest.issues.createComment({
owner,
repo,
issue_number: issueId,
body: `[Working...](${runUrl})`,
})
}

async function updateComment(body: string) {
if (!commentId) return

console.log("Updating comment...")
return await octoRest.rest.issues.updateComment({
owner,
repo,
comment_id: commentId,
body,
})
}
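Taken together, these hunks swap the old progress pattern (post a placeholder comment, then edit it in place via `updateComment`) for a reaction-based one: the triggering comment gets an "eyes" reaction while the run is in flight, and the result lands in a fresh comment. A minimal sketch of that lifecycle with Octokit, assuming an authenticated client and the `owner`/`repo`/`triggerCommentId`/`issueId` values the command already tracks; the `work` callback and `runWithReaction` wrapper are hypothetical stand-ins, not the actual command code:

```ts
import { Octokit } from "@octokit/rest"

// Sketch of the reaction-based progress flow; error handling simplified to a finally block.
async function runWithReaction(
  octokit: Octokit,
  params: { owner: string; repo: string; triggerCommentId: number; issueId: number },
  work: () => Promise<string>,
) {
  const { owner, repo, triggerCommentId, issueId } = params

  // Acknowledge the trigger comment with an "eyes" reaction.
  const reaction = await octokit.rest.reactions.createForIssueComment({
    owner,
    repo,
    comment_id: triggerCommentId,
    content: "eyes",
  })

  try {
    // Run the task and post the result as a new comment.
    const body = await work()
    await octokit.rest.issues.createComment({ owner, repo, issue_number: issueId, body })
  } finally {
    // Clear the progress indicator whether the run succeeded or failed.
    await octokit.rest.reactions.deleteForIssueComment({
      owner,
      repo,
      comment_id: triggerCommentId,
      reaction_id: reaction.data.id,
    })
  }
}
```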
@@ -1029,7 +1050,7 @@ query($owner: String!, $repo: String!, $number: Int!) {
const comments = (issue.comments?.nodes || [])
.filter((c) => {
const id = parseInt(c.databaseId)
- return id !== commentId && id !== payload.comment.id
+ return id !== payload.comment.id
})
.map((c) => ` - ${c.author.login} at ${c.createdAt}: ${c.body}`)

@@ -1148,7 +1169,7 @@ query($owner: String!, $repo: String!, $number: Int!) {
const comments = (pr.comments?.nodes || [])
.filter((c) => {
const id = parseInt(c.databaseId)
- return id !== commentId && id !== payload.comment.id
+ return id !== payload.comment.id
})
.map((c) => `- ${c.author.login} at ${c.createdAt}: ${c.body}`)
@@ -12,6 +12,7 @@ export namespace ModelsDev {
export const Model = z.object({
id: z.string(),
name: z.string(),
+ family: z.string().optional(),
release_date: z.string(),
attachment: z.boolean(),
reasoning: z.boolean(),

@@ -330,6 +330,7 @@ export namespace Provider {
npm: z.string(),
}),
name: z.string(),
+ family: z.string().optional(),
capabilities: z.object({
temperature: z.boolean(),
reasoning: z.boolean(),

@@ -407,6 +408,7 @@ export namespace Provider {
id: model.id,
providerID: provider.id,
name: model.name,
+ family: model.family,
api: {
id: model.id,
url: provider.api!,
@@ -74,23 +74,23 @@ export namespace ProviderTransform {
return result
}

// DeepSeek: Handle reasoning_content for tool call continuations
// - With tool calls: Include reasoning_content in providerOptions so model can continue reasoning
// - Without tool calls: Strip reasoning (new turn doesn't need previous reasoning)
// See: https://api-docs.deepseek.com/guides/thinking_mode
- if (model.providerID === "deepseek" || model.api.id.toLowerCase().includes("deepseek")) {
+ if (
+ model.providerID === "deepseek" ||
+ model.api.id.toLowerCase().includes("deepseek") ||
+ (model.capabilities.interleaved &&
+ typeof model.capabilities.interleaved === "object" &&
+ model.capabilities.interleaved.field === "reasoning_content")
+ ) {
return msgs.map((msg) => {
if (msg.role === "assistant" && Array.isArray(msg.content)) {
const reasoningParts = msg.content.filter((part: any) => part.type === "reasoning")
const hasToolCalls = msg.content.some((part: any) => part.type === "tool-call")
const reasoningText = reasoningParts.map((part: any) => part.text).join("")

// Filter out reasoning parts from content
const filteredContent = msg.content.filter((part: any) => part.type !== "reasoning")

- // If this message has tool calls and reasoning, include reasoning_content
- // so DeepSeek can continue reasoning after tool execution
- if (hasToolCalls && reasoningText) {
+ // Include reasoning_content directly on the message for all assistant messages
+ if (reasoningText) {
return {
...msg,
content: filteredContent,

@@ -104,12 +104,12 @@ export namespace ProviderTransform {
}
}

// For final answers (no tool calls), just strip reasoning
return {
...msg,
content: filteredContent,
}
}

return msg
})
}
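The behavioral change: reasoning parts are still stripped out of `content`, but `reasoning_content` is now attached for every assistant message that has reasoning text, not just ones with tool calls, and the same path now covers any model whose `interleaved` capability declares a `reasoning_content` field. A rough sketch of the expected message shape before and after the transform, using the `openaiCompatible` provider-options namespace seen in the tests below (values illustrative):

```ts
// Assistant message as stored in the session (sketch):
const before = {
  role: "assistant",
  content: [
    { type: "reasoning", text: "Let me think about this..." },
    { type: "text", text: "Final answer" },
  ],
}

// Expected shape after ProviderTransform.message() under the new rule:
// reasoning is removed from content and carried in providerOptions instead.
const after = {
  role: "assistant",
  content: [{ type: "text", text: "Final answer" }],
  providerOptions: {
    openaiCompatible: { reasoning_content: "Let me think about this..." },
  },
}
```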
@@ -212,22 +212,26 @@ export namespace ProviderTransform {
): Record<string, any> {
const result: Record<string, any> = {}

// switch to providerID later, for now use this
if (model.api.npm === "@openrouter/ai-sdk-provider") {
result["usage"] = {
include: true,
}
if (model.api.id.includes("gemini-3")) {
result["reasoning"] = { effort: "high" }
}
}

if (model.providerID === "baseten") {
result["chat_template_args"] = { enable_thinking: true }
}

if (model.providerID === "openai" || providerOptions?.setCacheKey) {
result["promptCacheKey"] = sessionID
}

if (
model.providerID === "google" ||
(model.providerID.startsWith("opencode") && model.api.id.includes("gemini-3"))
) {
if (model.api.npm === "@ai-sdk/google" || model.api.npm === "@ai-sdk/google-vertex") {
result["thinkingConfig"] = {
thinkingLevel: "high",
includeThoughts: true,
}
}
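For orientation, this means a Gemini 3 model routed through `@openrouter/ai-sdk-provider` now gets a high reasoning effort alongside the usage-accounting flag, while models served via `@ai-sdk/google` or `@ai-sdk/google-vertex` keep the `thinkingConfig` block. A sketch of the two resulting option objects (values read from the code above, the model pairing is illustrative):

```ts
// Gemini 3 via OpenRouter (model.api.npm === "@openrouter/ai-sdk-provider"):
const openrouterResult = {
  usage: { include: true },
  reasoning: { effort: "high" },
}

// Gemini via @ai-sdk/google or @ai-sdk/google-vertex:
const googleResult = {
  thinkingConfig: {
    thinkingLevel: "high",
    includeThoughts: true,
  },
}
```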
@@ -273,23 +277,7 @@ export namespace ProviderTransform {
return options
}

- export function providerOptions(model: Provider.Model, options: { [x: string]: any }, messages: ModelMessage[]) {
- if (model.capabilities.interleaved && typeof model.capabilities.interleaved === "object") {
- const cot = []
- const assistantMessages = messages.filter((msg) => msg.role === "assistant")
- for (const msg of assistantMessages) {
- for (const part of msg.content) {
- if (typeof part === "string") {
- continue
- }
- if (part.type === "reasoning") {
- cot.push(part)
- }
- }
- }
- options[model.capabilities.interleaved.field] = cot
- }
-
+ export function providerOptions(model: Provider.Model, options: { [x: string]: any }) {
switch (model.api.npm) {
case "@ai-sdk/openai":
case "@ai-sdk/azure":
@@ -1460,12 +1460,15 @@ export namespace Server {
}
}

- const providers = mapValues(filteredProviders, (x) => Provider.fromModelsDevProvider(x))
- const connected = await Provider.list().then((x) => Object.keys(x))
+ const connected = await Provider.list()
+ const providers = Object.assign(
+ mapValues(filteredProviders, (x) => Provider.fromModelsDevProvider(x)),
+ connected,
+ )
return c.json({
all: Object.values(providers),
default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id),
- connected,
+ connected: Object.keys(connected),
})
},
)
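The upshot for this endpoint: providers returned by `Provider.list()` (the ones actually connected locally) are merged into the models.dev catalog instead of being reported only as a key list, and `connected` becomes the keys of that result. A hedged sketch of the response shape, with placeholder provider and model IDs:

```ts
// Placeholder IDs; the real payload comes from models.dev plus Provider.list().
const response = {
  // every provider object; connected entries override catalog entries with the same key
  all: [
    { id: "example-catalog-provider", models: { "example-model": { id: "example-model" } } },
    { id: "example-connected-provider", models: { "local-model": { id: "local-model" } } },
  ],
  // first model per provider after Provider.sort()
  default: {
    "example-catalog-provider": "example-model",
    "example-connected-provider": "local-model",
  },
  // now derived from Object.keys(connected)
  connected: ["example-connected-provider"],
}
```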
@@ -143,7 +143,6 @@ export namespace SessionCompaction {
providerOptions: ProviderTransform.providerOptions(
model,
pipe({}, mergeDeep(ProviderTransform.options(model, input.sessionID)), mergeDeep(model.options)),
- [],
),
headers: model.headers,
abortSignal: input.abort,

@@ -593,7 +593,7 @@ export namespace SessionPrompt {
OUTPUT_TOKEN_MAX,
),
abortSignal: abort,
- providerOptions: ProviderTransform.providerOptions(model, params.options, messages),
+ providerOptions: ProviderTransform.providerOptions(model, params.options),
stopWhen: stepCountIs(1),
temperature: params.temperature,
topP: params.topP,

@@ -1473,7 +1473,7 @@ export namespace SessionPrompt {
await generateText({
// use higher # for reasoning models since reasoning tokens eat up a lot of the budget
maxOutputTokens: small.capabilities.reasoning ? 3000 : 20,
- providerOptions: ProviderTransform.providerOptions(small, options, []),
+ providerOptions: ProviderTransform.providerOptions(small, options),
messages: [
...SystemPrompt.title(small.providerID).map(
(x): ModelMessage => ({

@@ -91,7 +91,7 @@ export namespace SessionSummary {
if (textPart && !userMsg.summary?.title) {
const result = await generateText({
maxOutputTokens: small.capabilities.reasoning ? 1500 : 20,
- providerOptions: ProviderTransform.providerOptions(small, options, []),
+ providerOptions: ProviderTransform.providerOptions(small, options),
messages: [
...SystemPrompt.title(small.providerID).map(
(x): ModelMessage => ({

@@ -144,7 +144,7 @@ export namespace SessionSummary {
const result = await generateText({
model: language,
maxOutputTokens: 100,
- providerOptions: ProviderTransform.providerOptions(small, options, []),
+ providerOptions: ProviderTransform.providerOptions(small, options),
messages: [
...SystemPrompt.summarize(small.providerID).map(
(x): ModelMessage => ({
@@ -158,54 +158,6 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => {
expect(result[0].providerOptions?.openaiCompatible?.reasoning_content).toBe("Let me think about this...")
})

- test("DeepSeek without tool calls strips reasoning from content", () => {
- const msgs = [
- {
- role: "assistant",
- content: [
- { type: "reasoning", text: "Let me think about this..." },
- { type: "text", text: "Final answer" },
- ],
- },
- ] as any[]
-
- const result = ProviderTransform.message(msgs, {
- id: "deepseek/deepseek-chat",
- providerID: "deepseek",
- api: {
- id: "deepseek-chat",
- url: "https://api.deepseek.com",
- npm: "@ai-sdk/openai-compatible",
- },
- name: "DeepSeek Chat",
- capabilities: {
- temperature: true,
- reasoning: true,
- attachment: false,
- toolcall: true,
- input: { text: true, audio: false, image: false, video: false, pdf: false },
- output: { text: true, audio: false, image: false, video: false, pdf: false },
- interleaved: false,
- },
- cost: {
- input: 0.001,
- output: 0.002,
- cache: { read: 0.0001, write: 0.0002 },
- },
- limit: {
- context: 128000,
- output: 8192,
- },
- status: "active",
- options: {},
- headers: {},
- })
-
- expect(result).toHaveLength(1)
- expect(result[0].content).toEqual([{ type: "text", text: "Final answer" }])
- expect(result[0].providerOptions?.openaiCompatible?.reasoning_content).toBeUndefined()
- })

test("DeepSeek model ID containing 'deepseek' matches (case insensitive)", () => {
const msgs = [
{
@@ -1035,6 +1035,7 @@ export type ProviderConfig = {
[key: string]: {
id?: string
name?: string
+ family?: string
release_date?: string
attachment?: boolean
reasoning?: boolean

@@ -1461,6 +1462,7 @@ export type Model = {
npm: string
}
name: string
+ family?: string
capabilities: {
temperature: boolean
reasoning: boolean

@@ -3027,6 +3029,7 @@ export type ProviderListResponses = {
[key: string]: {
id: string
name: string
+ family?: string
release_date: string
attachment: boolean
reasoning: boolean
@@ -2788,6 +2788,9 @@
"name": {
"type": "string"
},
+ "family": {
+ "type": "string"
+ },
"release_date": {
"type": "string"
},

@@ -7300,6 +7303,9 @@
"name": {
"type": "string"
},
+ "family": {
+ "type": "string"
+ },
"release_date": {
"type": "string"
},

@@ -8287,6 +8293,9 @@
"name": {
"type": "string"
},
+ "family": {
+ "type": "string"
+ },
"capabilities": {
"type": "object",
"properties": {
@@ -15,14 +15,15 @@ You can also check out [awesome-opencode](https://github.com/awesome-opencode/aw

## Plugins

- | Name | Description |
- | --- | --- |
- | [opencode-skills](https://github.com/malhashemi/opencode-skills) | Manage and organize OpenCode skills and capabilities |
- | [opencode-openai-codex-auth](https://github.com/numman-ali/opencode-openai-codex-auth) | Use your ChatGPT Plus/Pro subscription instead of API credits |
- | [opencode-gemini-auth](https://github.com/jenslys/opencode-gemini-auth) | Use your existing Gemini plan instead of API billing |
- | [opencode-antigravity-auth](https://github.com/NoeFabris/opencode-antigravity-auth) | Use Antigravity's free models instead of API billing |
- | [opencode-dynamic-context-pruning](https://github.com/Tarquinen/opencode-dynamic-context-pruning) | Optimize token usage by pruning obsolete tool outputs |
- | [opencode-wakatime](https://github.com/angristan/opencode-wakatime) | Track OpenCode usage with Wakatime |
+ | Name | Description |
+ | --- | --- |
+ | [opencode-skills](https://github.com/malhashemi/opencode-skills) | Manage and organize OpenCode skills and capabilities |
+ | [opencode-type-inject](https://github.com/nick-vi/opencode-type-inject) | Auto-inject TypeScript/Svelte types into file reads with lookup tools |
+ | [opencode-openai-codex-auth](https://github.com/numman-ali/opencode-openai-codex-auth) | Use your ChatGPT Plus/Pro subscription instead of API credits |
+ | [opencode-gemini-auth](https://github.com/jenslys/opencode-gemini-auth) | Use your existing Gemini plan instead of API billing |
+ | [opencode-antigravity-auth](https://github.com/NoeFabris/opencode-antigravity-auth) | Use Antigravity's free models instead of API billing |
+ | [opencode-dynamic-context-pruning](https://github.com/Tarquinen/opencode-dynamic-context-pruning) | Optimize token usage by pruning obsolete tool outputs |
+ | [opencode-wakatime](https://github.com/angristan/opencode-wakatime) | Track OpenCode usage with Wakatime |

---