Propagate cache key and service tiers in compact (#21249)

## Why

`/responses/compact` should preserve the request-affinity fields that
apply to the active auth mode. ChatGPT-auth compact requests need the
effective `service_tier`, and compact requests for every auth mode need
the stable `prompt_cache_key`, so compaction does not quietly lose
routing or cache behavior that normal sampling already has.

This follows the request-parity direction from #20719, but keeps the net
change focused on the compact payload fields needed here.

## What changed

- Add `service_tier` and `prompt_cache_key` to the compact endpoint
input payload.
- Build the remote compact payload from the existing responses request
builder output so `Fast` still maps to `priority` when compact sends a
service tier.
- Pass the turn service tier into remote compaction, but only include it
in compact payloads for ChatGPT-backed auth.
- Keep `prompt_cache_key` on compact payloads for all auth modes.
- Add request-body diff snapshot coverage in
`core/tests/suite/compact_remote.rs` for:
- API-key auth reusing `prompt_cache_key` while omitting `service_tier`
even when `Fast` is configured.
  - ChatGPT auth reusing both `service_tier` and `prompt_cache_key`.
- Drive the snapshot coverage through five varied turns: plain text,
multi-part text, tool-call continuation, image+text input with a
local-shell continuation, and final-turn reasoning output.

## Verification

- Added insta snapshots for compact request-body parity against the last
normal `/responses` request after five varied turns.
- Not run locally per repo guidance; relying on GitHub CI for test
execution.

---------

Co-authored-by: Codex <noreply@openai.com>
This commit is contained in:
Ahmed Ibrahim
2026-05-06 13:38:43 +03:00
committed by Channing Conger
parent 39937b074f
commit 2eb396deb5
8 changed files with 502 additions and 8 deletions

View File

@@ -32,6 +32,10 @@ pub struct CompactionInput<'a> {
#[serde(skip_serializing_if = "Option::is_none")]
pub reasoning: Option<Reasoning>,
#[serde(skip_serializing_if = "Option::is_none")]
pub service_tier: Option<&'a str>,
#[serde(skip_serializing_if = "Option::is_none")]
pub prompt_cache_key: Option<&'a str>,
#[serde(skip_serializing_if = "Option::is_none")]
pub text: Option<TextControls>,
}

View File

@@ -147,6 +147,12 @@ const MEMORIES_SUMMARIZE_ENDPOINT: &str = "/memories/trace_summarize";
pub(crate) const WEBSOCKET_CONNECT_TIMEOUT: Duration =
Duration::from_millis(DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS);
/// Per-request settings for a remote `/responses/compact` call.
///
/// Bundles the reasoning configuration with the effective service tier so
/// `compact_conversation_history` receives one settings value instead of a
/// growing list of loose parameters.
pub(crate) struct CompactConversationRequestSettings {
    /// Reasoning effort forwarded to the responses request builder.
    pub(crate) effort: Option<ReasoningEffortConfig>,
    /// Reasoning summary mode forwarded to the responses request builder.
    pub(crate) summary: ReasoningSummaryConfig,
    /// Service tier to apply; `None` leaves the tier unset so the compact
    /// payload omits the field (serialization skips `None` downstream).
    pub(crate) service_tier: Option<ServiceTier>,
}
/// Session-scoped state shared by all [`ModelClient`] clones.
///
/// This is intentionally kept minimal so `ModelClient` does not need to hold a full `Config`. Most
@@ -414,12 +420,11 @@ impl ModelClient {
///
/// The model selection and telemetry context are passed explicitly to keep `ModelClient`
/// session-scoped.
pub async fn compact_conversation_history(
pub(crate) async fn compact_conversation_history(
&self,
prompt: &Prompt,
model_info: &ModelInfo,
effort: Option<ReasoningEffortConfig>,
summary: ReasoningSummaryConfig,
settings: CompactConversationRequestSettings,
session_telemetry: &SessionTelemetry,
compaction_trace: &CompactionTraceContext,
) -> Result<Vec<ResponseItem>> {
@@ -442,9 +447,9 @@ impl ModelClient {
&client_setup.api_provider,
prompt,
model_info,
effort,
summary,
/*service_tier*/ None,
settings.effort,
settings.summary,
settings.service_tier,
)?;
let ResponsesApiRequest {
model,
@@ -453,6 +458,8 @@ impl ModelClient {
tools,
parallel_tool_calls,
reasoning,
service_tier,
prompt_cache_key,
text,
..
} = request;
@@ -466,6 +473,8 @@ impl ModelClient {
tools,
parallel_tool_calls,
reasoning,
service_tier: service_tier.as_deref(),
prompt_cache_key: prompt_cache_key.as_deref(),
text,
};

View File

@@ -2,6 +2,7 @@ use std::collections::HashSet;
use std::sync::Arc;
use crate::Prompt;
use crate::client::CompactConversationRequestSettings;
use crate::compact::CompactionAnalyticsAttempt;
use crate::compact::InitialContextInjection;
use crate::compact::compaction_status_from_result;
@@ -170,8 +171,11 @@ async fn run_remote_compact_task_inner_impl(
.compact_conversation_history(
&prompt,
&turn_context.model_info,
turn_context.reasoning_effort,
turn_context.reasoning_summary,
CompactConversationRequestSettings {
effort: turn_context.reasoning_effort,
summary: turn_context.reasoning_summary,
service_tier: turn_context.config.service_tier,
},
&turn_context.session_telemetry,
&compaction_trace,
)

View File

@@ -7,4 +7,7 @@ codex_rust_crate(
lib_data_extra = [
"//codex-rs/core:model_availability_nux_fixtures",
],
deps_extra = [
"@crates//:similar",
],
)

View File

@@ -1,5 +1,7 @@
use regex_lite::Regex;
use serde_json::Value;
use similar::ChangeTag;
use similar::TextDiff;
use std::sync::OnceLock;
use crate::responses::ResponsesRequest;
@@ -242,6 +244,102 @@ pub fn format_labeled_items_snapshot(
format!("Scenario: {scenario}\n\n{sections}")
}
/// Render changed JSON lines between two captured `/responses` request bodies.
///
/// Request-parity tests call this to compare whole request payloads while
/// surfacing only the lines that differ, applying the same redactions as the
/// other context snapshots.
pub fn format_request_body_diff_snapshot(
    scenario: &str,
    before_title: &str,
    before_request: &ResponsesRequest,
    after_title: &str,
    after_request: &ResponsesRequest,
    options: &ContextSnapshotOptions,
) -> String {
    let rendered_before = format_request_body_snapshot(before_request, options);
    let rendered_after = format_request_body_snapshot(after_request, options);
    format!(
        "Scenario: {scenario}\n\n{}",
        format_changed_lines_diff(before_title, &rendered_before, after_title, &rendered_after)
    )
}
/// Pretty-print one request body after canonicalizing it for snapshotting.
fn format_request_body_snapshot(
    request: &ResponsesRequest,
    options: &ContextSnapshotOptions,
) -> String {
    // Canonicalize (sorted keys, redacted strings) before rendering so the
    // snapshot text is deterministic.
    let mut payload = request.body_json();
    canonicalize_json_snapshot_value(&mut payload, options);
    serde_json::to_string_pretty(&payload).expect("request body should serialize")
}
/// Recursively normalize a JSON value for snapshotting: object keys are
/// re-inserted in sorted order and every string scalar goes through the
/// snapshot redactions.
fn canonicalize_json_snapshot_value(value: &mut Value, options: &ContextSnapshotOptions) {
    match value {
        Value::Null | Value::Bool(_) | Value::Number(_) => {}
        Value::String(text) => *text = format_snapshot_json_string(text, options),
        Value::Array(items) => {
            for item in items.iter_mut() {
                canonicalize_json_snapshot_value(item, options);
            }
        }
        Value::Object(map) => {
            // serde_json may preserve insertion order; re-insert in sorted key
            // order so request-body snapshots stay stable.
            let mut sorted = std::mem::take(map).into_iter().collect::<Vec<_>>();
            sorted.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            for (key, mut entry) in sorted {
                canonicalize_json_snapshot_value(&mut entry, options);
                map.insert(key, entry);
            }
        }
    }
}
/// Apply the configured redactions to a single JSON string scalar.
///
/// `FullText` only normalizes line endings; the redacted modes additionally
/// canonicalize the text and replace UUIDs, and the prefix mode truncates the
/// result to `max_chars` characters (appending `...`) when it is longer.
fn format_snapshot_json_string(text: &str, options: &ContextSnapshotOptions) -> String {
    match options.render_mode {
        // JSON request-body snapshots never render in kind-only mode.
        ContextSnapshotRenderMode::KindOnly => unreachable!(),
        ContextSnapshotRenderMode::FullText => normalize_snapshot_line_endings(text),
        ContextSnapshotRenderMode::RedactedText => normalize_snapshot_uuids(
            &normalize_snapshot_line_endings(&canonicalize_snapshot_text(text)),
        ),
        ContextSnapshotRenderMode::KindWithTextPrefix { max_chars } => {
            let redacted = normalize_snapshot_uuids(
                &normalize_snapshot_line_endings(&canonicalize_snapshot_text(text)),
            );
            if redacted.chars().count() > max_chars {
                let truncated = redacted.chars().take(max_chars).collect::<String>();
                format!("{truncated}...")
            } else {
                redacted
            }
        }
    }
}
/// Produce a unified-diff-style listing of only the added and removed lines
/// between two rendered snapshot strings; unchanged lines are dropped.
fn format_changed_lines_diff(
    before_title: &str,
    before: &str,
    after_title: &str,
    after: &str,
) -> String {
    let mut rendered = format!("--- {before_title}\n+++ {after_title}\n");
    for change in TextDiff::from_lines(before, after).iter_all_changes() {
        // `change.value()` keeps its trailing newline, so only the +/- marker
        // needs to be prepended.
        let marker = match change.tag() {
            ChangeTag::Equal => continue,
            ChangeTag::Delete => '-',
            ChangeTag::Insert => '+',
        };
        rendered.push(marker);
        rendered.push_str(change.value());
    }
    rendered
}
fn format_snapshot_text(text: &str, options: &ContextSnapshotOptions) -> String {
match options.render_mode {
ContextSnapshotRenderMode::RedactedText => {
@@ -342,6 +440,17 @@ fn normalize_dynamic_snapshot_paths(text: &str) -> String {
.into_owned()
}
/// Replace every UUID-shaped token in `text` with `<UUID>` so snapshots stay
/// stable across runs.
fn normalize_snapshot_uuids(text: &str) -> String {
    const UUID_PATTERN: &str =
        r"\b[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\b";
    // Compile the regex once per process.
    static UUID_RE: OnceLock<Regex> = OnceLock::new();
    UUID_RE
        .get_or_init(|| Regex::new(UUID_PATTERN).expect("uuid regex should compile"))
        .replace_all(text, "<UUID>")
        .into_owned()
}
#[cfg(test)]
mod tests {
use super::ContextSnapshotOptions;

View File

@@ -7,6 +7,7 @@ use anyhow::Result;
use codex_core::compact::SUMMARY_PREFIX;
use codex_features::Feature;
use codex_login::CodexAuth;
use codex_protocol::config_types::ServiceTier;
use codex_protocol::dynamic_tools::DynamicToolSpec;
use codex_protocol::items::TurnItem;
use codex_protocol::models::ContentItem;
@@ -105,6 +106,23 @@ fn contains_defer_loading(value: &Value) -> bool {
}
}
/// Return a deep copy of `value` with every object's keys sorted, so two
/// structurally-equal JSON values compare equal regardless of key order.
fn canonical_json(value: &Value) -> Value {
    match value {
        Value::Array(items) => Value::Array(items.iter().map(canonical_json).collect()),
        Value::Object(map) => {
            let mut fields = map.iter().collect::<Vec<_>>();
            fields.sort_by(|(lhs, _), (rhs, _)| lhs.cmp(rhs));
            Value::Object(
                fields
                    .into_iter()
                    .map(|(key, nested)| (key.clone(), canonical_json(nested)))
                    .collect(),
            )
        }
        // Scalars (null/bool/number/string) are already canonical.
        scalar => scalar.clone(),
    }
}
const PRETURN_CONTEXT_DIFF_CWD: &str = "/tmp/PRETURN_CONTEXT_DIFF_CWD";
const DUMMY_FUNCTION_NAME: &str = "test_tool";
const REMOTE_COMPACT_TURN_COMPLETE_TIMEOUT: Duration = Duration::from_secs(30);
@@ -417,6 +435,267 @@ async fn remote_compact_replaces_history_for_followups() -> Result<()> {
Ok(())
}
/// Shared driver for the remote manual-compact request-parity tests.
///
/// Plays five varied turns against a mocked `/responses` endpoint (plain
/// text, multi-part text, a function-call continuation, image+text with a
/// local-shell continuation, and a final reasoning turn), then submits
/// `Op::Compact` and asserts that the single `/responses/compact` request
/// carries the same shared fields as the last normal `/responses` request.
/// `expected_service_tier` is the `service_tier` value the compact body must
/// contain (`None` means the field must be absent). An insta snapshot named
/// `snapshot_name` records the body diff for the given `scenario`.
async fn assert_remote_manual_compact_request_parity(
    auth: CodexAuth,
    configured_service_tier: Option<ServiceTier>,
    expected_service_tier: Option<&str>,
    snapshot_name: &str,
    scenario: &str,
) -> Result<()> {
    let mut builder = test_codex().with_auth(auth);
    // Only touch the config when a tier is supplied so the unset default is
    // exercised as-is.
    if let Some(service_tier) = configured_service_tier {
        builder = builder.with_config(move |config| {
            config.service_tier = Some(service_tier);
        });
    }
    let harness = TestCodexHarness::with_builder(builder).await?;
    let codex = harness.test().codex.clone();
    // Tiny 1x1 PNG data URL used as the image part of turn four.
    let image_url =
        "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR4nGNgYAAAAAMAASsJTYQAAAAASUVORK5CYII="
            .to_string();
    // Seven scripted SSE responses: five user turns plus the two extra
    // `/responses` POSTs triggered by the function-call and local-shell
    // continuations in turns three and four.
    let responses_mock = responses::mount_sse_sequence(
        harness.server(),
        vec![
            // Turn one: plain assistant message.
            responses::sse(vec![
                responses::ev_assistant_message("turn-one-assistant", "TURN_ONE_ASSISTANT"),
                responses::ev_completed("turn-one-response"),
            ]),
            // Turn two: reasoning item followed by an assistant message.
            responses::sse(vec![
                responses::ev_reasoning_item(
                    "turn-two-reasoning",
                    &["TURN_TWO_REASONING"],
                    &["turn two raw content"],
                ),
                responses::ev_assistant_message("turn-two-assistant", "TURN_TWO_ASSISTANT"),
                responses::ev_completed("turn-two-response"),
            ]),
            // Turn three: a function call, then (after the tool output is fed
            // back) the final assistant message.
            responses::sse(vec![
                responses::ev_function_call("turn-three-call", DUMMY_FUNCTION_NAME, "{}"),
                responses::ev_completed("turn-three-call-response"),
            ]),
            responses::sse(vec![
                responses::ev_assistant_message("turn-three-assistant", "TURN_THREE_ASSISTANT"),
                responses::ev_completed("turn-three-final-response"),
            ]),
            // Turn four: a local-shell call, then the final assistant message.
            responses::sse(vec![
                responses::ev_local_shell_call(
                    "turn-four-local-shell",
                    "completed",
                    vec!["/bin/echo", "TURN_FOUR_LOCAL_SHELL"],
                ),
                responses::ev_completed("turn-four-local-shell-response"),
            ]),
            responses::sse(vec![
                responses::ev_assistant_message("turn-four-assistant", "TURN_FOUR_ASSISTANT"),
                responses::ev_completed("turn-four-final-response"),
            ]),
            // Turn five: reasoning output plus a final assistant message.
            responses::sse(vec![
                responses::ev_reasoning_item(
                    "turn-five-reasoning",
                    &["TURN_FIVE_REASONING"],
                    &["turn five raw content"],
                ),
                responses::ev_assistant_message("turn-five-assistant", "TURN_FIVE_ASSISTANT"),
                responses::ev_completed("turn-five-response"),
            ]),
        ],
    )
    .await;
    // The compact endpoint is mocked once; the test later asserts it was hit
    // exactly one time.
    let compact_mock = responses::mount_compact_user_history_with_summary_once(
        harness.server(),
        "REMOTE_CACHE_TIER_SUMMARY",
    )
    .await;
    // Turn one: single plain-text input.
    codex
        .submit(Op::UserInput {
            environments: None,
            items: vec![UserInput::Text {
                text: "TURN_ONE_USER".to_string(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
            responsesapi_client_metadata: None,
        })
        .await?;
    wait_for_turn_complete(&codex).await;
    // Turn two: multi-part text input.
    codex
        .submit(Op::UserInput {
            environments: None,
            items: vec![
                UserInput::Text {
                    text: "TURN_TWO_PREFIX".to_string(),
                    text_elements: Vec::new(),
                },
                UserInput::Text {
                    text: "TURN_TWO_SUFFIX".to_string(),
                    text_elements: Vec::new(),
                },
            ],
            final_output_json_schema: None,
            responsesapi_client_metadata: None,
        })
        .await?;
    wait_for_turn_complete(&codex).await;
    // Turn three: prompts the scripted function call above.
    codex
        .submit(Op::UserInput {
            environments: None,
            items: vec![UserInput::Text {
                text: "TURN_THREE_TOOL_USER".to_string(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
            responsesapi_client_metadata: None,
        })
        .await?;
    wait_for_turn_complete(&codex).await;
    // Turn four: image plus text input.
    codex
        .submit(Op::UserInput {
            environments: None,
            items: vec![
                UserInput::Image { image_url },
                UserInput::Text {
                    text: "TURN_FOUR_IMAGE_USER".to_string(),
                    text_elements: Vec::new(),
                },
            ],
            final_output_json_schema: None,
            responsesapi_client_metadata: None,
        })
        .await?;
    wait_for_turn_complete(&codex).await;
    // Turn five: final plain-text turn that yields reasoning output.
    codex
        .submit(Op::UserInput {
            environments: None,
            items: vec![UserInput::Text {
                text: "TURN_FIVE_USER".to_string(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
            responsesapi_client_metadata: None,
        })
        .await?;
    wait_for_turn_complete(&codex).await;
    // Manual compaction: should produce exactly one /responses/compact POST.
    codex.submit(Op::Compact).await?;
    wait_for_turn_complete(&codex).await;
    let response_requests = responses_mock.requests();
    assert_eq!(
        response_requests.len(),
        7,
        "expected five turns with one unsupported tool continuation and one local shell continuation"
    );
    assert_eq!(
        compact_mock.requests().len(),
        1,
        "expected exactly one remote compact request"
    );
    // Compare the compact body against the last normal /responses body.
    let normal_request = response_requests
        .last()
        .cloned()
        .expect("last turn request missing");
    let compact_request = compact_mock.single_request();
    let normal_body = normal_request.body_json();
    let compact_body = compact_request.body_json();
    // Build the expected compact body from the normal body by removing the
    // fields that only apply to full /responses sampling requests.
    let mut expected_compact_body_without_input = normal_body.clone();
    let expected_compact_object = expected_compact_body_without_input
        .as_object_mut()
        .expect("responses request body should be an object");
    for field in [
        "input",
        "client_metadata",
        "include",
        "store",
        "stream",
        "tool_choice",
    ] {
        expected_compact_object.remove(field);
    }
    // When no tier is expected the compact body must omit the field entirely.
    if expected_service_tier.is_none() {
        expected_compact_object.remove("service_tier");
    }
    let mut compact_body_without_input = compact_body.clone();
    compact_body_without_input
        .as_object_mut()
        .expect("compact request body should be an object")
        .remove("input");
    // Canonicalize both sides so the comparison is key-order-insensitive.
    let canonical_compact_body_without_input = canonical_json(&compact_body_without_input);
    let canonical_expected_compact_body_without_input =
        canonical_json(&expected_compact_body_without_input);
    // One json! comparison keeps the failure output readable: the left side
    // holds the actual values, the right side the expectations.
    assert_eq!(
        json!({
            "compact_body_without_input": canonical_compact_body_without_input,
            "expected_compact_body_without_input": canonical_expected_compact_body_without_input,
            "prompt_cache_key_matches_responses": compact_body["prompt_cache_key"] == normal_body["prompt_cache_key"],
            "prompt_cache_key_present": compact_body["prompt_cache_key"].is_string(),
            "service_tier": compact_body.get("service_tier").and_then(Value::as_str),
        }),
        json!({
            "compact_body_without_input": canonical_expected_compact_body_without_input,
            "expected_compact_body_without_input": canonical_expected_compact_body_without_input,
            "prompt_cache_key_matches_responses": true,
            "prompt_cache_key_present": true,
            "service_tier": expected_service_tier,
        }),
        "compact requests should carry the same shared request fields as /responses"
    );
    // Snapshot the redacted changed-lines diff between the two request bodies.
    insta::assert_snapshot!(
        snapshot_name,
        context_snapshot::format_request_body_diff_snapshot(
            scenario,
            "Last Normal /responses Request",
            &normal_request,
            "Remote /responses/compact Request",
            &compact_request,
            &ContextSnapshotOptions::default(),
        )
    );
    Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn remote_manual_compact_api_auth_reuses_prompt_cache_key() -> Result<()> {
    skip_if_no_network!(Ok(()));
    // API-key auth with `Fast` configured: the compact payload is checked
    // against the last /responses body, expecting `service_tier: "priority"`
    // and a matching `prompt_cache_key`.
    assert_remote_manual_compact_request_parity(
        CodexAuth::from_api_key("dummy"),
        Some(ServiceTier::Fast),
        Some("priority"),
        "remote_manual_compact_api_auth_prompt_cache_key_request_diff",
        "After five varied API-key-auth turns, remote manual compaction reuses the normal responses service_tier and prompt_cache_key while omitting responses-only fields.",
    )
    .await
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn remote_manual_compact_chatgpt_auth_reuses_service_tier_and_prompt_cache_key() -> Result<()>
{
    skip_if_no_network!(Ok(()));
    // ChatGPT-backed auth with `Fast` configured: the compact payload must
    // carry both `service_tier: "priority"` and the shared `prompt_cache_key`.
    assert_remote_manual_compact_request_parity(
        CodexAuth::create_dummy_chatgpt_auth_for_testing(),
        Some(ServiceTier::Fast),
        Some("priority"),
        "remote_manual_compact_chatgpt_auth_service_tier_prompt_cache_key_request_diff",
        "After five varied ChatGPT-auth turns, remote manual compaction reuses the normal responses service_tier and prompt_cache_key while omitting responses-only fields.",
    )
    .await
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn remote_compact_v2_reuses_context_compaction_for_followups() -> Result<()> {
skip_if_no_network!(Ok(()));

View File

@@ -0,0 +1,43 @@
---
source: core/tests/suite/compact_remote.rs
expression: "context_snapshot::format_request_body_diff_snapshot(scenario,\n\"Last Normal /responses Request\", &normal_request,\n\"Remote /responses/compact Request\", &compact_request,\n&ContextSnapshotOptions::default(),)"
---
Scenario: After five varied API-key-auth turns, remote manual compaction reuses the normal responses service_tier and prompt_cache_key while omitting responses-only fields.
--- Last Normal /responses Request
+++ Remote /responses/compact Request
- "client_metadata": {
- "x-codex-installation-id": "<UUID>"
- },
- "include": [
- "reasoning.encrypted_content"
- ],
+ },
+ {
+ "content": [
+ {
+ "text": "turn five raw content",
+ "type": "reasoning_text"
+ }
+ ],
+ "encrypted_content": "YmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYnR1cm4gZml2ZSByYXcgY29udGVudA==",
+ "summary": [
+ {
+ "text": "TURN_FIVE_REASONING",
+ "type": "summary_text"
+ }
+ ],
+ "type": "reasoning"
+ },
+ {
+ "content": [
+ {
+ "text": "TURN_FIVE_ASSISTANT",
+ "type": "output_text"
+ }
+ ],
+ "role": "assistant",
+ "type": "message"
- "store": false,
- "stream": true,
- "tool_choice": "auto",

View File

@@ -0,0 +1,43 @@
---
source: core/tests/suite/compact_remote.rs
expression: "context_snapshot::format_request_body_diff_snapshot(scenario,\n\"Last Normal /responses Request\", &normal_request,\n\"Remote /responses/compact Request\", &compact_request,\n&ContextSnapshotOptions::default(),)"
---
Scenario: After five varied ChatGPT-auth turns, remote manual compaction reuses the normal responses service_tier and prompt_cache_key while omitting responses-only fields.
--- Last Normal /responses Request
+++ Remote /responses/compact Request
- "client_metadata": {
- "x-codex-installation-id": "<UUID>"
- },
- "include": [
- "reasoning.encrypted_content"
- ],
+ },
+ {
+ "content": [
+ {
+ "text": "turn five raw content",
+ "type": "reasoning_text"
+ }
+ ],
+ "encrypted_content": "YmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYnR1cm4gZml2ZSByYXcgY29udGVudA==",
+ "summary": [
+ {
+ "text": "TURN_FIVE_REASONING",
+ "type": "summary_text"
+ }
+ ],
+ "type": "reasoning"
+ },
+ {
+ "content": [
+ {
+ "text": "TURN_FIVE_ASSISTANT",
+ "type": "output_text"
+ }
+ ],
+ "role": "assistant",
+ "type": "message"
- "store": false,
- "stream": true,
- "tool_choice": "auto",