add fast mode toggle (#13212)

- add a local Fast mode setting in codex-core (similar to how model id
is currently stored on disk locally)
- send `service_tier=priority` on requests when Fast is enabled
- add `/fast` in the TUI and persist it locally
- gate the new behavior behind a feature flag
This commit is contained in:
pash-openai
2026-03-02 20:29:33 -08:00
committed by GitHub
parent 56cc2c71f4
commit 2f5b01abd6
69 changed files with 929 additions and 127 deletions

View File

@@ -413,6 +413,7 @@ async fn overrides_turn_context_but_keeps_cached_prefix_and_key_constant() -> an
model: None,
effort: Some(Some(ReasoningEffort::High)),
summary: Some(ReasoningSummary::Detailed),
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -494,6 +495,7 @@ async fn override_before_first_turn_emits_environment_context() -> anyhow::Resul
model: Some("gpt-5.1-codex".to_string()),
effort: Some(Some(ReasoningEffort::Low)),
summary: None,
service_tier: None,
collaboration_mode: Some(collaboration_mode),
personality: None,
})
@@ -680,6 +682,7 @@ async fn per_turn_overrides_keep_cached_prefix_and_key_constant() -> anyhow::Res
model: "o3".to_string(),
effort: Some(ReasoningEffort::High),
summary: Some(ReasoningSummary::Detailed),
service_tier: None,
collaboration_mode: None,
final_output_json_schema: None,
personality: None,
@@ -788,6 +791,7 @@ async fn send_user_turn_with_no_changes_does_not_send_environment_context() -> a
model: default_model.clone(),
effort: default_effort,
summary: Some(default_summary.unwrap_or(ReasoningSummary::Auto)),
service_tier: None,
collaboration_mode: None,
final_output_json_schema: None,
personality: None,
@@ -807,6 +811,7 @@ async fn send_user_turn_with_no_changes_does_not_send_environment_context() -> a
model: default_model.clone(),
effort: default_effort,
summary: Some(default_summary.unwrap_or(ReasoningSummary::Auto)),
service_tier: None,
collaboration_mode: None,
final_output_json_schema: None,
personality: None,
@@ -907,6 +912,7 @@ async fn send_user_turn_with_changes_sends_environment_context() -> anyhow::Resu
model: default_model,
effort: default_effort,
summary: Some(default_summary.unwrap_or(ReasoningSummary::Auto)),
service_tier: None,
collaboration_mode: None,
final_output_json_schema: None,
personality: None,
@@ -926,6 +932,7 @@ async fn send_user_turn_with_changes_sends_environment_context() -> anyhow::Resu
model: "o3".to_string(),
effort: Some(ReasoningEffort::High),
summary: Some(ReasoningSummary::Detailed),
service_tier: None,
collaboration_mode: None,
final_output_json_schema: None,
personality: None,