chore: generate

opencode-agent[bot]
2026-05-11 02:11:07 +00:00
parent 942630eb4a
commit 02cb7e7b71
7 changed files with 14 additions and 46 deletions

View File

@@ -82,12 +82,12 @@ LLM.request({
 ### Provider behavior table
-| Protocol | `cache: "auto"` |
-|---|---|
-| Anthropic Messages | emits up to 3 `cache_control` markers (4-breakpoint cap enforced) |
-| Bedrock Converse | emits up to 3 `cachePoint` blocks (4-breakpoint cap enforced) |
-| OpenAI Chat / Responses | no-op (implicit caching above 1024 tokens) |
-| Gemini | no-op (implicit caching on 2.5+; explicit `CachedContent` is out-of-band) |
+| Protocol                | `cache: "auto"`                                                           |
+| ----------------------- | ------------------------------------------------------------------------- |
+| Anthropic Messages      | emits up to 3 `cache_control` markers (4-breakpoint cap enforced)        |
+| Bedrock Converse        | emits up to 3 `cachePoint` blocks (4-breakpoint cap enforced)            |
+| OpenAI Chat / Responses | no-op (implicit caching above 1024 tokens)                                |
+| Gemini                  | no-op (implicit caching on 2.5+; explicit `CachedContent` is out-of-band) |
 Normalized cache usage is read back into `response.usage.cacheReadInputTokens` and `cacheWriteInputTokens` across every provider.
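
As a reading aid (not part of this commit): a minimal sketch of the call shape the table documents, assuming the `LLM.request` surface visible in this hunk and in the test fixture below; the model handle and prompt strings are illustrative.

```ts
// Sketch only: with cache: "auto", the adapter decides per protocol whether to
// emit explicit breakpoints (Anthropic/Bedrock) or rely on implicit caching.
const response = await LLM.request({
  model: anthropicModel, // hypothetical model handle
  system: "Long, stable system prompt that benefits from caching.",
  messages: [LLM.user("Summarize the design doc.")],
  cache: "auto",
})

// Regardless of the provider's wire format, usage comes back normalized:
console.log(response.usage.cacheReadInputTokens, response.usage.cacheWriteInputTokens)
```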

View File

@@ -44,10 +44,7 @@ const RESPECTS_INLINE_HINTS = new Set(["anthropic-messages", "bedrock-converse"]
 const makeHint = (ttlSeconds: number | undefined): CacheHint =>
   ttlSeconds !== undefined ? new CacheHint({ type: "ephemeral", ttlSeconds }) : new CacheHint({ type: "ephemeral" })
-const markLastTool = (
-  tools: ReadonlyArray<ToolDefinition>,
-  hint: CacheHint,
-): ReadonlyArray<ToolDefinition> => {
+const markLastTool = (tools: ReadonlyArray<ToolDefinition>, hint: CacheHint): ReadonlyArray<ToolDefinition> => {
   if (tools.length === 0) return tools
   const last = tools.length - 1
   if (tools[last]!.cache) return tools
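
The hunk cuts off before `markLastTool` returns. A plausible completion, assuming `ToolDefinition` is copy-constructed the same way `CacheHint` and `Message` are elsewhere in this file:

```ts
// Assumed completion (not in this diff): replace only the final tool with a
// copy carrying the hint; an existing inline hint short-circuits above.
const markLastToolSketch = (tools: ReadonlyArray<ToolDefinition>, hint: CacheHint): ReadonlyArray<ToolDefinition> => {
  if (tools.length === 0) return tools
  const last = tools.length - 1
  if (tools[last]!.cache) return tools
  return tools.map((tool, i) => (i === last ? new ToolDefinition({ ...tool, cache: hint }) : tool))
}
```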
@@ -67,11 +64,7 @@ const lastIndexOfRole = (messages: ReadonlyArray<Message>, role: Message["role"]
 // Mark the last text part of `messages[index]`. If no text part exists, mark
 // the last content part regardless of type — that's the breakpoint position
 // in tool-result-only messages too.
-const markMessageAt = (
-  messages: ReadonlyArray<Message>,
-  index: number,
-  hint: CacheHint,
-): ReadonlyArray<Message> => {
+const markMessageAt = (messages: ReadonlyArray<Message>, index: number, hint: CacheHint): ReadonlyArray<Message> => {
   if (index < 0 || index >= messages.length) return messages
   const target = messages[index]!
   if (target.content.length === 0) return messages
@@ -79,9 +72,7 @@ const markMessageAt = (
   const markAt = lastTextIndex >= 0 ? lastTextIndex : target.content.length - 1
   const existing = target.content[markAt]!
   if ("cache" in existing && existing.cache) return messages
-  const nextContent = target.content.map((part, i) =>
-    i === markAt ? ({ ...part, cache: hint } as ContentPart) : part,
-  )
+  const nextContent = target.content.map((part, i) => (i === markAt ? ({ ...part, cache: hint } as ContentPart) : part))
   const next = new Message({ ...target, content: nextContent })
   // Single pass over `messages`, substituting the one updated entry. Long
   // conversations call this on every request, so avoid `.map()` here — its
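
The trailing comment is cut off by the hunk boundary; the substitution it describes can be sketched as follows, as an assumption about the lines below the fold rather than the committed code:

```ts
// Assumed shape of the single-pass substitution: copy the array once,
// overwrite one slot, return. No per-element callback as with .map().
const out = messages.slice() // ReadonlyArray#slice returns a fresh mutable copy
out[index] = next
return out
```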

View File

@@ -226,9 +226,5 @@ export const CachePolicyObject = Schema.Struct({
 })
 export type CachePolicyObject = Schema.Schema.Type<typeof CachePolicyObject>
-export const CachePolicy = Schema.Union([
-  Schema.Literal("auto"),
-  Schema.Literal("none"),
-  CachePolicyObject,
-])
+export const CachePolicy = Schema.Union([Schema.Literal("auto"), Schema.Literal("none"), CachePolicyObject])
 export type CachePolicy = Schema.Schema.Type<typeof CachePolicy>
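
For orientation, a hedged sketch of what the union admits at the boundary, assuming an effect-style `Schema.decodeUnknownSync` entry point (the codebase's actual decoding helper may differ):

```ts
const decodeCachePolicy = Schema.decodeUnknownSync(CachePolicy)

decodeCachePolicy("auto") // -> "auto"
decodeCachePolicy("none") // -> "none"
// Anything else must validate against CachePolicyObject (its fields are elided in this hunk).
```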

View File

@@ -56,11 +56,7 @@ describe("applyCachePolicy", () => {
       model: anthropicModel,
       system: "Sys A",
       tools: [{ name: "t1", description: "t1", inputSchema: { type: "object", properties: {} } }],
-      messages: [
-        LLM.user("first user"),
-        LLM.assistant("assistant reply"),
-        LLM.user("latest user message"),
-      ],
+      messages: [LLM.user("first user"), LLM.assistant("assistant reply"), LLM.user("latest user message")],
       cache: "auto",
     }),
   )
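
A hypothetical continuation of the test above; `applyCachePolicy`'s call signature and these assertions are inferred from the module code earlier in the commit, not taken from the recorded test body:

```ts
const marked = applyCachePolicy(request) // `request` = the LLM.request(...) value above
// On an Anthropic-protocol model, "auto" should place ephemeral hints on the
// last tool and on the trailing message breakpoints.
expect(marked.tools.at(-1)?.cache).toBeDefined()
expect(marked.messages.at(-1)?.content.at(-1)?.cache).toBeDefined()
```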

View File

@@ -3,12 +3,7 @@
"metadata": {
"name": "anthropic-messages-cache/writes-then-reads-cache-control-on-identical-second-call",
"recordedAt": "2026-05-11T01:52:54.319Z",
"tags": [
"prefix:anthropic-messages-cache",
"provider:anthropic",
"protocol:anthropic-messages",
"cache"
]
"tags": ["prefix:anthropic-messages-cache", "provider:anthropic", "protocol:anthropic-messages", "cache"]
},
"interactions": [
{

View File

@@ -3,12 +3,7 @@
"metadata": {
"name": "gemini-cache/reports-cachedcontenttokencount-on-identical-second-call",
"recordedAt": "2026-05-11T01:55:40.600Z",
"tags": [
"prefix:gemini-cache",
"provider:google",
"protocol:gemini",
"cache"
]
"tags": ["prefix:gemini-cache", "provider:google", "protocol:gemini", "cache"]
},
"interactions": [
{

View File

@@ -3,12 +3,7 @@
"metadata": {
"name": "openai-responses-cache/reports-cached-tokens-on-identical-second-call",
"recordedAt": "2026-05-11T01:41:58.951Z",
"tags": [
"prefix:openai-responses-cache",
"provider:openai",
"protocol:openai-responses",
"cache"
]
"tags": ["prefix:openai-responses-cache", "provider:openai", "protocol:openai-responses", "cache"]
},
"interactions": [
{