From 5f57cee8e46abc6b1368c10991a22b2d676cf2a4 Mon Sep 17 00:00:00 2001
From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com>
Date: Tue, 16 Dec 2025 13:42:21 -0800
Subject: [PATCH] =?UTF-8?q?fix:=20user=20invoked=20subtasks=20causing=20to?=
 =?UTF-8?q?ol=5Fuse=20or=20missing=20thinking=20signa=E2=80=A6=20(#5650)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 .opencode/command/commit.md             |  1 +
 packages/opencode/src/session/prompt.ts | 24 ++++++++++++++++++++++++
 2 files changed, 25 insertions(+)

diff --git a/.opencode/command/commit.md b/.opencode/command/commit.md
index c318ed54b1..8e9346ebc8 100644
--- a/.opencode/command/commit.md
+++ b/.opencode/command/commit.md
@@ -1,6 +1,7 @@
 ---
 description: git commit and push
 model: opencode/glm-4.6
+subtask: true
 ---
 
 commit and push
diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts
index 3be4c45fd6..4ae7469a30 100644
--- a/packages/opencode/src/session/prompt.ts
+++ b/packages/opencode/src/session/prompt.ts
@@ -396,6 +396,30 @@ export namespace SessionPrompt {
           },
         } satisfies MessageV2.ToolPart)
       }
+
+      // Add synthetic user message to prevent certain reasoning models from erroring
+      // If we create assistant messages w/ out user ones following mid loop thinking signatures
+      // will be missing and it can cause errors for models like gemini for example
+      const summaryUserMsg: MessageV2.User = {
+        id: Identifier.ascending("message"),
+        sessionID,
+        role: "user",
+        time: {
+          created: Date.now(),
+        },
+        agent: lastUser.agent,
+        model: lastUser.model,
+      }
+      await Session.updateMessage(summaryUserMsg)
+      await Session.updatePart({
+        id: Identifier.ascending("part"),
+        messageID: summaryUserMsg.id,
+        sessionID,
+        type: "text",
+        text: "Summarize the task tool output above and continue with your task.",
+        synthetic: true,
+      } satisfies MessageV2.TextPart)
+
       continue
     }