Compare commits

...

3 Commits
pr12438 ... oss

Author SHA1 Message Date
Ahmed Ibrahim
4e6bc85fc4 oss 2025-09-02 15:19:29 -07:00
Ahmed Ibrahim
8bb57afc84 oss 2025-09-02 15:18:04 -07:00
Ahmed Ibrahim
a8324c5d94 progress 2025-09-02 14:52:44 -07:00
3 changed files with 39 additions and 4 deletions

View File

@@ -19,6 +19,7 @@ use crate::ModelProviderInfo;
 use crate::client_common::Prompt;
 use crate::client_common::ResponseEvent;
 use crate::client_common::ResponseStream;
+use crate::config::Config;
 use crate::error::CodexErr;
 use crate::error::Result;
 use crate::model_family::ModelFamily;
@@ -34,6 +35,7 @@ pub(crate) async fn stream_chat_completions(
     model_family: &ModelFamily,
     client: &reqwest::Client,
     provider: &ModelProviderInfo,
+    config: &Config,
 ) -> Result<ResponseStream> {
     // Build messages array
     let mut messages = Vec::<serde_json::Value>::new();
@@ -129,10 +131,26 @@ pub(crate) async fn stream_chat_completions(
                     "content": output,
                 }));
             }
-            ResponseItem::Reasoning { .. }
-            | ResponseItem::WebSearchCall { .. }
-            | ResponseItem::Other => {
-                // Omit these items from the conversation history.
+            ResponseItem::Reasoning {
+                id: _,
+                summary,
+                content,
+                encrypted_content: _,
+            } => {
+                if !config.skip_reasoning_in_chat_completions {
+                    // There is no clear way of sending reasoning items over chat completions.
+                    // We are sending it as an assistant message.
+                    tracing::info!("reasoning item: {:?}", item);
+                    let reasoning =
+                        format!("Reasoning Summary: {summary:?}, Reasoning Content: {content:?}");
+                    messages.push(json!({
+                        "role": "assistant",
+                        "content": reasoning,
+                    }));
+                }
+            }
+            ResponseItem::WebSearchCall { .. } | ResponseItem::Other => {
+                tracing::info!("omitting item from chat completions: {:?}", item);
                 continue;
             }
         }
@@ -350,6 +368,8 @@ async fn process_chat_sse<S>(
             }
             if let Some(reasoning) = maybe_text {
+                // Accumulate so we can emit a terminal Reasoning item at end-of-turn.
+                reasoning_text.push_str(&reasoning);
                 let _ = tx_event
                     .send(Ok(ResponseEvent::ReasoningContentDelta(reasoning)))
                     .await;

View File

@@ -110,6 +110,7 @@ impl ModelClient {
             &self.config.model_family,
             &self.client,
             &self.provider,
+            &self.config,
         )
         .await?;

View File

@@ -185,6 +185,10 @@ pub struct Config {
     /// All characters are inserted as they are received, and no buffering
     /// or placeholder replacement will occur for fast keypress bursts.
     pub disable_paste_burst: bool,
+
+    /// When `true`, reasoning items in Chat Completions input will be skipped.
+    /// Defaults to `false`.
+    pub skip_reasoning_in_chat_completions: bool,
 }
impl Config {
@@ -497,6 +501,10 @@ pub struct ConfigToml {
     /// All characters are inserted as they are received, and no buffering
     /// or placeholder replacement will occur for fast keypress bursts.
     pub disable_paste_burst: Option<bool>,
+
+    /// When set to `true`, reasoning items will be skipped from Chat Completions input.
+    /// Defaults to `false`.
+    pub skip_reasoning_in_chat_completions: Option<bool>,
 }
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
@@ -807,6 +815,9 @@ impl Config {
                 .unwrap_or(false),
             include_view_image_tool,
             disable_paste_burst: cfg.disable_paste_burst.unwrap_or(false),
+            skip_reasoning_in_chat_completions: cfg
+                .skip_reasoning_in_chat_completions
+                .unwrap_or(false),
         };
         Ok(config)
     }
@@ -1177,6 +1188,7 @@ disable_response_storage = true
             use_experimental_streamable_shell_tool: false,
             include_view_image_tool: true,
             disable_paste_burst: false,
+            skip_reasoning_in_chat_completions: false,
         },
         o3_profile_config
     );
@@ -1235,6 +1247,7 @@ disable_response_storage = true
         use_experimental_streamable_shell_tool: false,
         include_view_image_tool: true,
         disable_paste_burst: false,
+        skip_reasoning_in_chat_completions: false,
     };
     assert_eq!(expected_gpt3_profile_config, gpt3_profile_config);
@@ -1308,6 +1321,7 @@ disable_response_storage = true
         use_experimental_streamable_shell_tool: false,
         include_view_image_tool: true,
         disable_paste_burst: false,
+        skip_reasoning_in_chat_completions: false,
     };
     assert_eq!(expected_zdr_profile_config, zdr_profile_config);