This commit is contained in:
jif-oai
2025-10-31 14:24:09 +01:00
parent 423c40d9b8
commit 6e936f92a4
5 changed files with 5 additions and 15 deletions

View File

@@ -14,7 +14,6 @@ pub struct Prompt {
     pub output_schema: Option<Value>,
     pub reasoning: Option<Reasoning>,
     pub text_controls: Option<TextControls>,
-    pub store_response: bool,
     pub prompt_cache_key: Option<String>,
     pub previous_response_id: Option<String>,
     pub session_source: Option<SessionSource>,
@@ -30,7 +29,6 @@ impl Prompt {
         output_schema: Option<Value>,
         reasoning: Option<Reasoning>,
         text_controls: Option<TextControls>,
-        store_response: bool,
         prompt_cache_key: Option<String>,
         previous_response_id: Option<String>,
         session_source: Option<SessionSource>,
@@ -43,7 +41,6 @@ impl Prompt {
             output_schema,
             reasoning,
             text_controls,
-            store_response,
             prompt_cache_key,
             previous_response_id,
             session_source,

View File

@@ -114,7 +114,7 @@ impl ResponsesApiClient {
             "tools": prompt.tools,
             "tool_choice": "auto",
             "parallel_tool_calls": prompt.parallel_tool_calls,
-            "store": azure_workaround || prompt.store_response,
+            "store": azure_workaround,
             "stream": true,
             "prompt_cache_key": prompt
                 .prompt_cache_key

View File

@@ -280,7 +280,6 @@ impl ModelClient {
             crate::client_common::compute_full_instructions(None, &self.config.model_family, false)
                 .into_owned();
         prompt.instructions = instructions;
-        prompt.store_response = false;
         prompt.previous_response_id = None;
         self.stream(&prompt).await
     }

View File

@@ -981,14 +981,9 @@ impl Session {
     async fn update_responses_api_chain_state(
         &self,
-        supports_responses_api_chaining: bool,
         response_id: Option<String>,
     ) {
         let mut state = self.state.lock().await;
-        if !supports_responses_api_chaining {
-            state.reset_responses_api_chain();
-            return;
-        }
         let Some(response_id) = response_id.filter(|id| !id.is_empty()) else {
             state.reset_responses_api_chain();
@@ -2028,8 +2023,6 @@ async fn try_run_turn(
     prompt: Prompt,
     cancellation_token: CancellationToken,
 ) -> CodexResult<TurnRunResult> {
-    let supports_responses_api_chaining = prompt.store_response;
     let rollout_item = RolloutItem::TurnContext(TurnContextItem {
         cwd: turn_context.cwd.clone(),
         approval_policy: turn_context.approval_policy,
@@ -2187,7 +2180,6 @@ async fn try_run_turn(
         tracker.get_unified_diff()
     };
     sess.update_responses_api_chain_state(
-        supports_responses_api_chaining,
         Some(response_id.clone()),
     )
     .await;

View File

@@ -8,6 +8,7 @@ use crate::codex::SessionConfiguration;
 use crate::conversation_history::ConversationHistory;
 use crate::conversation_history::ResponsesApiChainState;
 use crate::conversation_history::format_prompt_items;
+use crate::features::Feature;
 use crate::model_family::ModelFamily;
 use crate::protocol::RateLimitSnapshot;
 use crate::protocol::TokenUsage;
@@ -61,7 +62,9 @@ impl SessionState {
     }
     pub(crate) fn set_responses_api_chain(&mut self, chain: ResponsesApiChainState) {
-        self.history.set_responses_api_chain(chain);
+        if self.session_configuration.features.enabled(Feature::ResponsesApiChaining) {
+            self.history.set_responses_api_chain(chain);
+        }
     }
     // Token/rate limit helpers
@@ -142,7 +145,6 @@ pub(crate) fn build_prompt_from_items(
     chain_state: Option<&ResponsesApiChainState>,
 ) -> (Prompt, bool) {
     let mut prompt = Prompt {
-        store_response: chain_state.is_some(),
         ..Prompt::default()
     };