refactor(session): effectify session processor (#19485)

This commit is contained in:
Kit Langton
2026-03-28 12:09:47 -04:00
committed by GitHub
parent 2b86b36c8c
commit 860531c275
11 changed files with 2159 additions and 593 deletions

View File

@@ -12,6 +12,83 @@ import { tmpdir } from "../fixture/fixture"
// Silence log output for the whole test file so test results stay readable.
Log.init({ print: false })
/**
 * Creates a "deferred": a promise paired with its resolver, so test code can
 * settle it manually from outside the executor (used to synchronize with the
 * mock server below).
 */
function defer<T>() {
  let settle!: (value: T | PromiseLike<T>) => void
  const promise = new Promise<T>((res) => (settle = res))
  return { promise, resolve: settle }
}
/**
 * Builds a ReadableStream that mimics an OpenAI-style chat-completions SSE
 * response: an assistant role chunk, one content chunk carrying `text`, a
 * finish chunk, then the `[DONE]` sentinel. The whole payload is enqueued as
 * a single Uint8Array and the stream is closed immediately.
 */
function chat(text: string) {
  // One SSE "data:" frame per chunk; key order matches the real API shape.
  const frame = (choice: object) =>
    `data: ${JSON.stringify({
      id: "chatcmpl-1",
      object: "chat.completion.chunk",
      choices: [choice],
    })}`
  const body =
    [
      frame({ delta: { role: "assistant" } }),
      frame({ delta: { content: text } }),
      frame({ delta: {}, finish_reason: "stop" }),
      "data: [DONE]",
    ].join("\n\n") + "\n\n"
  const bytes = new TextEncoder().encode(body)
  return new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(bytes)
      controller.close()
    },
  })
}
/**
 * Builds an SSE stream that emits only the assistant role chunk, invokes
 * `ready` (so the test knows streaming has started), then stalls for 10s
 * before delivering the rest — long enough that a test must cancel first.
 * Cancelling the stream clears the pending timer so nothing leaks.
 */
function hanging(ready: () => void) {
  const frame = (choice: object) =>
    `data: ${JSON.stringify({
      id: "chatcmpl-1",
      object: "chat.completion.chunk",
      choices: [choice],
    })}`
  const opening = frame({ delta: { role: "assistant" } }) + "\n\n"
  const remainder =
    [
      frame({ delta: { content: "late" } }),
      frame({ delta: {}, finish_reason: "stop" }),
      "data: [DONE]",
    ].join("\n\n") + "\n\n"
  const encoder = new TextEncoder()
  let pending: ReturnType<typeof setTimeout> | undefined
  return new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(encoder.encode(opening))
      ready()
      // Deliberately far longer than any test timeout: forces cancellation.
      pending = setTimeout(() => {
        controller.enqueue(encoder.encode(remainder))
        controller.close()
      }, 10000)
    },
    cancel() {
      if (pending !== undefined) clearTimeout(pending)
    },
  })
}
describe("session.prompt missing file", () => {
test("does not fail the prompt when a file part is missing", async () => {
await using tmp = await tmpdir({
@@ -149,6 +226,159 @@ describe("session.prompt special characters", () => {
})
})
// Regression tests for the session prompt processor against a local mock
// OpenAI-compatible server (Bun.serve on an ephemeral port). Each test points
// the "alibaba" provider's baseURL at the mock via a generated opencode.json.
describe("session.prompt regression", () => {
  // Regression: a single plain reply must produce exactly one assistant
  // message and exactly one upstream API call — no empty-turn looping.
  test("does not loop empty assistant turns for a simple reply", async () => {
    let calls = 0 // counts upstream /chat/completions hits
    const server = Bun.serve({
      port: 0, // ephemeral port; real origin read from server.url below
      fetch(req) {
        const url = new URL(req.url)
        if (!url.pathname.endsWith("/chat/completions")) {
          return new Response("not found", { status: 404 })
        }
        calls++
        // Stream a canned completion whose text contains "processor.ts".
        return new Response(chat("packages/opencode/src/session/processor.ts"), {
          status: 200,
          headers: { "Content-Type": "text/event-stream" },
        })
      },
    })
    try {
      // Temp git workspace with a config routing the "build" agent to the mock.
      await using tmp = await tmpdir({
        git: true,
        init: async (dir) => {
          await Bun.write(
            path.join(dir, "opencode.json"),
            JSON.stringify({
              $schema: "https://opencode.ai/config.json",
              enabled_providers: ["alibaba"],
              provider: {
                alibaba: {
                  options: {
                    apiKey: "test-key",
                    baseURL: `${server.url.origin}/v1`,
                  },
                },
              },
              agent: {
                build: {
                  model: "alibaba/qwen-plus",
                },
              },
            }),
          )
        },
      })
      await Instance.provide({
        directory: tmp.path,
        fn: async () => {
          const session = await Session.create({ title: "Prompt regression" })
          const result = await SessionPrompt.prompt({
            sessionID: session.id,
            agent: "build",
            parts: [{ type: "text", text: "Where is SessionProcessor?" }],
          })
          expect(result.info.role).toBe("assistant")
          // The mock's canned reply must surface as a text part.
          expect(result.parts.some((part) => part.type === "text" && part.text.includes("processor.ts"))).toBe(true)
          const msgs = await Session.messages({ sessionID: session.id })
          // Exactly one assistant turn and one upstream call — no looping.
          expect(msgs.filter((msg) => msg.info.role === "assistant")).toHaveLength(1)
          expect(calls).toBe(1)
        },
      })
    } finally {
      server.stop(true) // force-close so the hanging SSE response cannot leak
    }
  })
  // Regression: cancelling while the provider stream is stalled must record a
  // MessageAbortedError on the assistant message rather than hanging or losing it.
  test("records aborted errors when prompt is cancelled mid-stream", async () => {
    // Resolved by the mock stream once the first chunk has been sent, so the
    // test only cancels after streaming has demonstrably started.
    const ready = defer<void>()
    const server = Bun.serve({
      port: 0,
      fetch(req) {
        const url = new URL(req.url)
        if (!url.pathname.endsWith("/chat/completions")) {
          return new Response("not found", { status: 404 })
        }
        // Stream that stalls after the first chunk (see hanging() above).
        return new Response(
          hanging(() => ready.resolve()),
          {
            status: 200,
            headers: { "Content-Type": "text/event-stream" },
          },
        )
      },
    })
    try {
      await using tmp = await tmpdir({
        git: true,
        init: async (dir) => {
          await Bun.write(
            path.join(dir, "opencode.json"),
            JSON.stringify({
              $schema: "https://opencode.ai/config.json",
              enabled_providers: ["alibaba"],
              provider: {
                alibaba: {
                  options: {
                    apiKey: "test-key",
                    baseURL: `${server.url.origin}/v1`,
                  },
                },
              },
              agent: {
                build: {
                  model: "alibaba/qwen-plus",
                },
              },
            }),
          )
        },
      })
      await Instance.provide({
        directory: tmp.path,
        fn: async () => {
          const session = await Session.create({ title: "Prompt cancel regression" })
          // Start the prompt WITHOUT awaiting — it will block on the stalled stream.
          const run = SessionPrompt.prompt({
            sessionID: session.id,
            agent: "build",
            parts: [{ type: "text", text: "Cancel me" }],
          })
          await ready.promise // first chunk delivered; stream now mid-flight
          await SessionPrompt.cancel(session.id)
          // The prompt must settle promptly after cancel; 1s guards a hang.
          const result = await Promise.race([
            run,
            new Promise<never>((_, reject) =>
              setTimeout(() => reject(new Error("timed out waiting for cancel")), 1000),
            ),
          ])
          expect(result.info.role).toBe("assistant")
          if (result.info.role === "assistant") {
            expect(result.info.error?.name).toBe("MessageAbortedError")
          }
          // The persisted assistant message must carry the same aborted error.
          const msgs = await Session.messages({ sessionID: session.id })
          const last = msgs.findLast((msg) => msg.info.role === "assistant")
          expect(last?.info.role).toBe("assistant")
          if (last?.info.role === "assistant") {
            expect(last.info.error?.name).toBe("MessageAbortedError")
          }
        },
      })
    } finally {
      server.stop(true)
    }
  })
})
describe("session.prompt agent variant", () => {
test("applies agent variant only when using agent model", async () => {
const prev = process.env.OPENAI_API_KEY