add fast mode toggle (#13212)

- add a local Fast mode setting in codex-core (similar to how model id
is currently stored on disk locally)
- send `service_tier=priority` on requests when Fast is enabled
- add `/fast` in the TUI and persist it locally
- gate the new behavior behind a feature flag
This commit is contained in:
pash-openai
2026-03-02 20:29:33 -08:00
committed by GitHub
parent 56cc2c71f4
commit 2f5b01abd6
69 changed files with 929 additions and 127 deletions

View File

@@ -209,6 +209,7 @@ async fn unified_exec_intercepts_apply_patch_exec_command() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -338,6 +339,7 @@ async fn unified_exec_emits_exec_command_begin_event() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -416,6 +418,7 @@ async fn unified_exec_resolves_relative_workdir() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -497,6 +500,7 @@ async fn unified_exec_respects_workdir_override() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -590,6 +594,7 @@ async fn unified_exec_emits_exec_command_end_event() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -665,6 +670,7 @@ async fn unified_exec_emits_output_delta_for_exec_command() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -741,6 +747,7 @@ async fn unified_exec_full_lifecycle_with_background_end_event() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -871,6 +878,7 @@ async fn unified_exec_emits_terminal_interaction_for_write_stdin() -> Result<()>
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -1008,6 +1016,7 @@ async fn unified_exec_terminal_interaction_captures_delayed_output() -> Result<(
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -1168,6 +1177,7 @@ async fn unified_exec_emits_one_begin_and_one_end_event() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -1266,6 +1276,7 @@ async fn exec_command_reports_chunk_and_exit_metadata() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -1384,6 +1395,7 @@ async fn unified_exec_defaults_to_pipe() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -1474,6 +1486,7 @@ async fn unified_exec_can_enable_tty() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -1555,6 +1568,7 @@ async fn unified_exec_respects_early_exit_notifications() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -1686,6 +1700,7 @@ async fn write_stdin_returns_exit_metadata_and_clears_session() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -1854,6 +1869,7 @@ async fn unified_exec_emits_end_event_when_session_dies_via_stdin() -> Result<()
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -1931,6 +1947,7 @@ async fn unified_exec_keeps_long_running_session_after_turn_end() -> Result<()>
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -2019,6 +2036,7 @@ async fn unified_exec_interrupt_terminates_long_running_session() -> Result<()>
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -2116,6 +2134,7 @@ async fn unified_exec_reuses_session_via_stdin() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -2251,6 +2270,7 @@ PY
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -2365,6 +2385,7 @@ async fn unified_exec_timeout_and_followup_poll() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -2461,6 +2482,7 @@ PY
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -2543,6 +2565,7 @@ async fn unified_exec_runs_under_sandbox() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -2647,6 +2670,7 @@ async fn unified_exec_python_prompt_under_seatbelt() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -2742,6 +2766,7 @@ async fn unified_exec_runs_on_all_platforms() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})
@@ -2877,6 +2902,7 @@ async fn unified_exec_prunes_exited_sessions_first() -> Result<()> {
model: session_model,
effort: None,
summary: None,
service_tier: None,
collaboration_mode: None,
personality: None,
})