Allow clients not to send summary as an option (#12950)

Summary is currently a required parameter on UserTurn. Ideally, the core
should decide the appropriate summary level when the client does not specify one.

Make the summary parameter optional and omit it from the request when the client does not set it.
This commit is contained in:
pakrym-oai
2026-02-26 14:37:38 -08:00
committed by GitHub
parent c1afb8815a
commit 951a389654
38 changed files with 233 additions and 175 deletions

View File

@@ -5,7 +5,6 @@ use codex_core::built_in_model_providers;
use codex_core::compact::SUMMARIZATION_PROMPT;
use codex_core::compact::SUMMARY_PREFIX;
use codex_core::config::Config;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::items::TurnItem;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelsResponse;
@@ -1659,7 +1658,7 @@ async fn auto_compact_runs_after_resume_when_token_usage_is_over_limit() {
sandbox_policy: SandboxPolicy::DangerFullAccess,
model: resumed.session_configured.model.clone(),
effort: None,
summary: ReasoningSummary::Auto,
summary: None,
collaboration_mode: None,
personality: None,
})
@@ -1748,7 +1747,7 @@ async fn pre_sampling_compact_runs_on_switch_to_smaller_context_model() {
sandbox_policy: SandboxPolicy::DangerFullAccess,
model: previous_model.to_string(),
effort: None,
summary: ReasoningSummary::Auto,
summary: None,
collaboration_mode: None,
personality: None,
})
@@ -1771,7 +1770,7 @@ async fn pre_sampling_compact_runs_on_switch_to_smaller_context_model() {
sandbox_policy: SandboxPolicy::DangerFullAccess,
model: next_model.to_string(),
effort: None,
summary: ReasoningSummary::Auto,
summary: None,
collaboration_mode: None,
personality: None,
})
@@ -1880,7 +1879,7 @@ async fn pre_sampling_compact_runs_after_resume_and_switch_to_smaller_model() {
sandbox_policy: SandboxPolicy::DangerFullAccess,
model: previous_model.to_string(),
effort: None,
summary: ReasoningSummary::Auto,
summary: None,
collaboration_mode: None,
personality: None,
})
@@ -1927,7 +1926,7 @@ async fn pre_sampling_compact_runs_after_resume_and_switch_to_smaller_model() {
sandbox_policy: SandboxPolicy::DangerFullAccess,
model: next_model.to_string(),
effort: None,
summary: ReasoningSummary::Auto,
summary: None,
collaboration_mode: None,
personality: None,
})
@@ -3128,7 +3127,7 @@ async fn snapshot_request_shape_pre_turn_compaction_strips_incoming_model_switch
sandbox_policy: SandboxPolicy::DangerFullAccess,
model: previous_model.to_string(),
effort: None,
summary: ReasoningSummary::Auto,
summary: None,
collaboration_mode: None,
personality: None,
})
@@ -3151,7 +3150,7 @@ async fn snapshot_request_shape_pre_turn_compaction_strips_incoming_model_switch
sandbox_policy: SandboxPolicy::DangerFullAccess,
model: next_model.to_string(),
effort: None,
summary: ReasoningSummary::Auto,
summary: None,
collaboration_mode: None,
personality: None,
})