Compare commits

...

2 Commits

Author         SHA1        Message                                   Date
aibrahim-oai   89cee48442  fix: render streaming updates             2025-07-12 23:44:18 -07:00
aibrahim-oai   afcb2f4f82  feat(cli): stream delta response items    2025-07-12 19:05:27 -07:00
3 changed files with 93 additions and 48 deletions


@@ -255,7 +255,18 @@ export default function TerminalChat({
       onItem: (item) => {
         log(`onItem: ${JSON.stringify(item)}`);
         setItems((prev) => {
-          const updated = uniqueById([...prev, item as ResponseItem]);
+          let updated = prev;
+          if (item.id) {
+            const idx = prev.findIndex((i) => i.id === item.id);
+            if (idx !== -1) {
+              updated = [...prev];
+              updated[idx] = item as ResponseItem;
+            } else {
+              updated = uniqueById([...prev, item as ResponseItem]);
+            }
+          } else {
+            updated = uniqueById([...prev, item as ResponseItem]);
+          }
           saveRollout(sessionId, updated);
           return updated;
         });
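The net effect of the new branch is an upsert keyed by item id: a streamed update replaces its earlier snapshot in place, while unkeyed or previously unseen items are appended and de-duplicated as before. A minimal sketch of that behavior as a pure function (the upsertById name, the simplified Item type, and the uniqueById stand-in are illustrative, not taken from the diff):

    type Item = { id?: string };

    // Stand-in for the uniqueById helper referenced above (assumed to
    // de-duplicate by id, keeping the first occurrence).
    function uniqueById<T extends Item>(items: Array<T>): Array<T> {
      const seen = new Set<string>();
      return items.filter((i) => {
        if (!i.id) return true;
        if (seen.has(i.id)) return false;
        seen.add(i.id);
        return true;
      });
    }

    // The onItem update as a pure function: replace in place when the id
    // already exists, otherwise append and de-duplicate.
    function upsertById<T extends Item>(prev: Array<T>, item: T): Array<T> {
      if (item.id) {
        const idx = prev.findIndex((i) => i.id === item.id);
        if (idx !== -1) {
          const next = [...prev];
          next[idx] = item;
          return next;
        }
      }
      return uniqueById([...prev, item]);
    }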


@@ -6,7 +6,7 @@
 import type { FileOpenerScheme } from "src/utils/config.js";
 import TerminalChatResponseItem from "./terminal-chat-response-item.js";
 import TerminalHeader from "./terminal-header.js";
-import { Box, Static } from "ink";
+import { Box } from "ink";
 import React, { useMemo } from "react";

 // A batch entry can either be a standalone response item or a grouped set of
@@ -42,50 +42,41 @@ const TerminalMessageHistory: React.FC<TerminalMessageHistoryProps> = ({
   return (
     <Box flexDirection="column">
       {/* The dedicated thinking indicator in the input area now displays the
          elapsed time, so we no longer render a separate counter here. */}
-      <Static items={["header", ...messages]}>
-        {(item, index) => {
-          if (item === "header") {
-            return <TerminalHeader key="header" {...headerProps} />;
-          }
+      {/* Render header first so subsequent updates do not cause it to reappear */}
+      <TerminalHeader {...headerProps} />
+      {messages.map((message, index) => {
+        // Suppress empty reasoning updates (i.e. items with an empty summary).
+        const msg = message as unknown as { summary?: Array<unknown> };
+        if (msg.summary?.length === 0) {
+          return null;
+        }
-          // After the guard above, item is a ResponseItem
-          const message = item as ResponseItem;
-          // Suppress empty reasoning updates (i.e. items with an empty summary).
-          const msg = message as unknown as { summary?: Array<unknown> };
-          if (msg.summary?.length === 0) {
-            return null;
-          }
-          return (
-            <Box
-              key={`${message.id}-${index}`}
-              flexDirection="column"
-              marginLeft={
-                message.type === "message" &&
-                (message.role === "user" || message.role === "assistant")
-                  ? 0
-                  : 4
-              }
-              marginTop={
-                message.type === "message" && message.role === "user" ? 0 : 1
-              }
-              marginBottom={
-                message.type === "message" && message.role === "assistant"
-                  ? 1
-                  : 0
-              }
-            >
-              <TerminalChatResponseItem
-                item={message}
-                fullStdout={fullStdout}
-                setOverlayMode={setOverlayMode}
-                fileOpener={fileOpener}
-              />
-            </Box>
-          );
-        }}
-      </Static>
+        return (
+          <Box
+            key={`${message.id}-${index}`}
+            flexDirection="column"
+            marginLeft={
+              message.type === "message" &&
+              (message.role === "user" || message.role === "assistant")
+                ? 0
+                : 4
+            }
+            marginTop={
+              message.type === "message" && message.role === "user" ? 0 : 1
+            }
+            marginBottom={
+              message.type === "message" && message.role === "assistant" ? 1 : 0
+            }
+          >
+            <TerminalChatResponseItem
+              item={message}
+              fullStdout={fullStdout}
+              setOverlayMode={setOverlayMode}
+              fileOpener={fileOpener}
+            />
+          </Box>
+        );
+      })}
     </Box>
   );
 };
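The import change above and this rewrite are two halves of one fix: Ink's <Static> writes each item to the terminal exactly once and never re-renders it, so a message first staged as in_progress would stay frozen at its initial text. Rendering the header once and mapping over messages inside a regular <Box> lets each streamed update repaint. A reduced illustration of the difference (component names and the string items are illustrative):

    import { Box, Static, Text } from "ink";
    import React from "react";

    // With <Static>, entries are painted once and appended above the live
    // output; later changes to an entry are never reflected on screen.
    const Frozen = ({ items }: { items: Array<string> }) => (
      <Static items={items}>
        {(text, index) => <Text key={index}>{text}</Text>}
      </Static>
    );

    // A plain map re-renders on every state change, so in-place updates
    // (e.g. a growing assistant message) show up as they stream in.
    const Live = ({ items }: { items: Array<string> }) => (
      <Box flexDirection="column">
        {items.map((text, index) => (
          <Text key={index}>{text}</Text>
        ))}
      </Box>
    );

The trade-off is that the whole history re-renders on each update, which <Static> was originally there to avoid; for a chat transcript that is usually an acceptable cost.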


@@ -669,10 +669,16 @@ export class AgentLoop {
         }

         // Skip items we've already processed to avoid staging duplicates
-        if (item.id && alreadyStagedItemIds.has(item.id)) {
+        if (
+          item.id &&
+          alreadyStagedItemIds.has(item.id) &&
+          item.status !== "in_progress"
+        ) {
           return;
         }
-        alreadyStagedItemIds.add(item.id);
+        if (item.id && item.status !== "in_progress") {
+          alreadyStagedItemIds.add(item.id);
+        }

         // Store the item so the final flush can still operate on a complete list.
         // We'll nil out entries once they're delivered.
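Taken together, the two changes define the staging policy for streamed items: repeated in_progress snapshots of the same id keep flowing to the UI, and an id is only remembered as staged once its item has left the in_progress state, so the completed version is still delivered exactly once. A condensed sketch of that policy (shouldStage and the simplified Staged type are illustrative, not from the diff):

    type Staged = { id?: string; status?: string };

    const alreadyStagedItemIds = new Set<string>();

    // Returns true when the item should be surfaced to the UI.
    function shouldStage(item: Staged): boolean {
      // Skip only finalized duplicates; in_progress updates pass through.
      if (
        item.id &&
        alreadyStagedItemIds.has(item.id) &&
        item.status !== "in_progress"
      ) {
        return false;
      }
      // Record an id only once its item is final.
      if (item.id && item.status !== "in_progress") {
        alreadyStagedItemIds.add(item.id);
      }
      return true;
    }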
@@ -1035,11 +1041,42 @@
       try {
         let newTurnInput: Array<ResponseInputItem> = [];
+        const partials = new Map<string, string>();
         // eslint-disable-next-line no-await-in-loop
         for await (const event of stream as AsyncIterable<ResponseEvent>) {
           log(`AgentLoop.run(): response event ${event.type}`);
-          // process and surface each item (no-op until we can depend on streaming events)
+          if (event.type === "response.output_text.delta") {
+            const id = event.item_id;
+            const soFar = partials.get(id) ?? "";
+            const text = soFar + event.delta;
+            partials.set(id, text);
+            stageItem({
+              id,
+              type: "message",
+              role: "assistant",
+              status: "in_progress",
+              content: [{ type: "output_text", text }],
+            } as ResponseItem);
+            continue;
+          }
+
+          if (event.type === "response.output_text.done") {
+            const id = event.item_id;
+            const text = event.text;
+            partials.set(id, text);
+            stageItem({
+              id,
+              type: "message",
+              role: "assistant",
+              status: "completed",
+              content: [{ type: "output_text", text }],
+            } as ResponseItem);
+            continue;
+          }
+
+          // process and surface each item when completed
           if (event.type === "response.output_item.done") {
             const item = event.item;
             // 1) if it's a reasoning item, annotate it
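The two new handlers implement incremental text assembly: each response.output_text.delta appends to a per-item buffer and restages the message as in_progress, and response.output_text.done overwrites the buffer with the final text and stages the message as completed. A self-contained sketch of that fold, with a simplified TextEvent type and an emit callback standing in for stageItem:

    type TextEvent =
      | { type: "delta"; item_id: string; delta: string }
      | { type: "done"; item_id: string; text: string };

    // Fold streamed text events into per-item buffers, emitting a snapshot
    // (id, accumulated text, completed flag) after every event.
    function foldText(
      events: Iterable<TextEvent>,
      emit: (id: string, text: string, completed: boolean) => void,
    ): void {
      const partials = new Map<string, string>();
      for (const event of events) {
        if (event.type === "delta") {
          const text = (partials.get(event.item_id) ?? "") + event.delta;
          partials.set(event.item_id, text);
          emit(event.item_id, text, false);
        } else {
          // The done event carries the authoritative final text.
          partials.set(event.item_id, event.text);
          emit(event.item_id, event.text, true);
        }
      }
    }

Because every snapshot reuses the same item id, the renderer's upsert-by-id path (first file above) replaces the partial message in place instead of appending a new one.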
@@ -1062,6 +1099,12 @@
               if (callId) {
                 this.pendingAborts.add(callId);
               }
+            } else if (
+              item.type === "message" &&
+              (item as { role?: string }).role === "assistant"
+            ) {
+              // Final message already emitted via output_text.done
+              continue;
             } else {
               stageItem(item as ResponseItem);
             }
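With streaming enabled, the final assistant message reaches this loop twice: once assembled from output_text deltas (staged as completed by the done handler above) and once more inside response.output_item.done. The new else-if drops the second copy. A compact sketch of that dispatch decision (simplified types; the real branch sits in a longer chain that also handles reasoning and function_call items):

    type DoneItem = { type: string; role?: string };

    // Decide whether a completed output item still needs to be staged.
    function handleOutputItemDone(
      item: DoneItem,
      stageItem: (i: DoneItem) => void,
    ): void {
      if (item.type === "message" && item.role === "assistant") {
        // Already emitted incrementally via output_text.done; skip the
        // duplicate to avoid rendering the message twice.
        return;
      }
      stageItem(item);
    }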