commit 69341a04f4 (parent 3315336366)
Author: jif-oai
Date: 2025-10-30 21:16:14 +00:00
4 changed files with 44 additions and 71 deletions

View File

@@ -3,7 +3,6 @@ use std::sync::Arc;
 use super::Session;
 use super::TurnContext;
 use super::get_last_assistant_message_from_turn;
-use crate::Prompt;
 use crate::client::StreamPayload;
 use crate::client_common::ResponseEvent;
 use crate::error::CodexErr;
@@ -84,28 +83,9 @@ async fn run_compact_task_inner(
     sess.persist_rollout_items(&[rollout_item]).await;
     loop {
-        let mut turn_input = history.get_history_for_prompt();
+        let turn_input = history.get_history_for_prompt();
         let turn_input_len = turn_input.len();
-        crate::conversation_history::format_prompt_items(&mut turn_input, false);
-        let prompt = Prompt {
-            input: turn_input,
-            tools: Vec::new(),
-            parallel_tool_calls: false,
-            output_schema: None,
-            store_response: false,
-            ..Default::default()
-        };
-        let instructions = crate::client_common::compute_full_instructions(
-            turn_context.base_instructions.as_deref(),
-            &turn_context.client.get_model_family(),
-            false,
-        )
-        .into_owned();
-        let prompt = Prompt {
-            instructions: instructions.clone(),
-            previous_response_id: None,
-            ..prompt
-        };
+        let (prompt, _) = crate::state::build_prompt_from_items(turn_input, None);
         let payload = StreamPayload { prompt };
         let attempt_result = drain_to_completed(&sess, turn_context.as_ref(), payload).await;
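The compact task in `run_compact_task_inner` passes `None` for the chain state, so the new helper reduces to exactly what the removed block built by hand: a default `Prompt` with the full item list as input, no `previous_response_id`, and `store_response` off. A minimal sketch of that no-chain path, using simplified stand-ins for the crate's `Prompt` and `ResponseItem` types (the real definitions carry more fields):

```rust
// Stand-ins for the crate's types; the real Prompt also carries tools,
// parallel_tool_calls, output_schema, and so on, all defaulted on this path.
#[derive(Default, Debug)]
struct Prompt {
    input: Vec<String>, // stands in for Vec<ResponseItem>
    previous_response_id: Option<String>,
    store_response: bool,
}

// With chain_state = None the helper is just "defaults plus the full input".
fn build_prompt_from_items(items: Vec<String>) -> (Prompt, bool) {
    let mut prompt = Prompt::default();
    prompt.store_response = false; // chain_state.is_some() with no chain
    prompt.input = items;
    (prompt, false) // the reset flag is never raised on this path
}

fn main() {
    let (prompt, reset) = build_prompt_from_items(vec!["summary request".into()]);
    assert_eq!(prompt.input.len(), 1);
    assert!(prompt.previous_response_id.is_none() && !prompt.store_response);
    assert!(!reset);
}
```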

View File

@@ -7,7 +7,6 @@ use std::time::Instant;
 use crate::AuthManager;
 use crate::client::ModelClient;
 use crate::client::StreamPayload;
-use crate::client_common::Prompt;
 use crate::client_common::ResponseEvent;
 use crate::config::Config;
 use crate::protocol::SandboxPolicy;
@@ -121,32 +120,19 @@ pub(crate) async fn assess_command(
         .trim()
         .to_string();
-    let mut prompt_items = vec![ResponseItem::Message {
+    let prompt_items = vec![ResponseItem::Message {
         id: None,
         role: "user".to_string(),
         content: vec![ContentItem::InputText { text: user_prompt }],
     }];
-    crate::conversation_history::format_prompt_items(&mut prompt_items, false);
-    let prompt = Prompt {
-        input: prompt_items,
-        tools: Vec::new(),
-        parallel_tool_calls: false,
-        output_schema: Some(sandbox_assessment_schema()),
-        store_response: false,
-        ..Default::default()
-    };
-    let instructions = crate::client_common::compute_full_instructions(
+    let (mut prompt, _) = crate::state::build_prompt_from_items(prompt_items, None);
+    prompt.output_schema = Some(sandbox_assessment_schema());
+    prompt.instructions = crate::client_common::compute_full_instructions(
         Some(system_prompt.as_str()),
         &config.model_family,
         false,
     )
     .into_owned();
-    let prompt = Prompt {
-        instructions: instructions.clone(),
-        ..prompt
-    };
     let payload = StreamPayload { prompt };
     let child_otel =
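In `assess_command` the helper supplies only the generic plumbing; the request-specific pieces, the sandbox-assessment `output_schema` and the composed `instructions`, are written onto the returned `Prompt` afterwards. A hedged sketch of that build-then-override pattern, again with stand-in types rather than the crate's real ones:

```rust
// Stand-in Prompt; in the crate, output_schema holds a JSON schema value.
#[derive(Default)]
struct Prompt {
    input: Vec<String>,
    instructions: String,
    output_schema: Option<String>,
}

// No-chain helper as in the sketch above, reduced to what this call site uses.
fn build_prompt_from_items(items: Vec<String>) -> (Prompt, bool) {
    (Prompt { input: items, ..Default::default() }, false)
}

fn main() {
    // Mirror of the new assess_command body: build the generic prompt first,
    // then layer the request-specific fields on top of it.
    let (mut prompt, _) = build_prompt_from_items(vec!["assess this command".into()]);
    prompt.output_schema = Some(r#"{"type":"object"}"#.into());
    prompt.instructions = "system prompt composed per model family".into();
    assert_eq!(prompt.input.len(), 1);
    assert!(prompt.output_schema.is_some() && !prompt.instructions.is_empty());
}
```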

View File

@@ -4,6 +4,7 @@ mod turn;
 pub(crate) use service::SessionServices;
 pub(crate) use session::SessionState;
+pub(crate) use session::build_prompt_from_items;
 pub(crate) use session::response_item_id;
 pub(crate) use turn::ActiveTurn;
 pub(crate) use turn::RunningTask;

View File

@@ -80,40 +80,12 @@ impl SessionState {
     }
     pub(crate) fn prompt_for_turn(&mut self, supports_responses_api_chaining: bool) -> Prompt {
-        let mut prompt = Prompt::default();
-        prompt.store_response = supports_responses_api_chaining;
         let prompt_items = self.history.get_history_for_prompt();
-        if !supports_responses_api_chaining {
-            prompt.input = prompt_items;
-            return prompt;
-        }
-        if let Some(chain_state) = self.history.responses_api_chain() {
-            let previous_response_id = chain_state.last_response_id.clone();
-            if let Some(last_message_id) = chain_state.last_message_id.as_ref() {
-                if let Some(position) = prompt_items
-                    .iter()
-                    .position(|item| response_item_id(item) == Some(last_message_id))
-                {
-                    prompt.previous_response_id = previous_response_id;
-                    prompt.input = prompt_items.iter().skip(position + 1).cloned().collect();
-                    return prompt;
-                }
-                // Cache marker no longer present; fall back to full prompt and clear chain info.
-                self.reset_responses_api_chain();
-                prompt.input = prompt_items;
-                return prompt;
-            }
-            prompt.previous_response_id = previous_response_id;
-            prompt.input = prompt_items;
-            return prompt;
-        }
-        prompt.input = prompt_items;
+        let chain_state = self.history.responses_api_chain();
+        let (prompt, reset_chain) = build_prompt_from_items(prompt_items, chain_state.as_ref());
+        if reset_chain {
+            self.reset_responses_api_chain();
+        }
         prompt
     }
 }
@@ -129,3 +101,37 @@ pub(crate) fn response_item_id(item: &ResponseItem) -> Option<&str> {
         _ => None,
     }
 }
+pub(crate) fn build_prompt_from_items(
+    prompt_items: Vec<ResponseItem>,
+    chain_state: Option<&ResponsesApiChainState>,
+) -> (Prompt, bool) {
+    let mut prompt = Prompt::default();
+    prompt.store_response = chain_state.is_some();
+    if let Some(state) = chain_state {
+        if let Some(last_message_id) = state.last_message_id.as_ref() {
+            if let Some(position) = prompt_items
+                .iter()
+                .position(|item| response_item_id(item) == Some(last_message_id.as_str()))
+            {
+                if let Some(previous_response_id) = state.last_response_id.clone() {
+                    prompt.previous_response_id = Some(previous_response_id);
+                }
+                prompt.input = prompt_items.into_iter().skip(position + 1).collect();
+                return (prompt, false);
+            }
+            prompt.input = prompt_items;
+            return (prompt, true);
+        }
+        if let Some(previous_response_id) = state.last_response_id.clone() {
+            prompt.previous_response_id = Some(previous_response_id);
+        }
+        prompt.input = prompt_items;
+        return (prompt, false);
+    }
+    prompt.input = prompt_items;
+    (prompt, false)
+}
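Read as a whole, the extracted `build_prompt_from_items` has three outcomes: the chain's cache marker is still present in history (send only the items after it, chained via `previous_response_id`), the marker has vanished (send the full history and report `true` so the caller resets the chain), or there is no chain state at all (send the full history with `store_response` off). A self-contained sketch of that contract, with hypothetical stand-ins for the crate's `ResponseItem`, `ResponsesApiChainState`, and `Prompt` types:

```rust
// Hypothetical stand-ins; the real types live in the codex crates and are richer.
#[derive(Clone, Debug, PartialEq)]
struct Item(&'static str); // the &str stands in for the item's response id

struct ChainState {
    last_message_id: Option<String>,
    last_response_id: Option<String>,
}

#[derive(Default, Debug)]
struct Prompt {
    input: Vec<Item>,
    previous_response_id: Option<String>,
    store_response: bool,
}

// Condensed mirror of the diff's build_prompt_from_items.
fn build_prompt_from_items(items: Vec<Item>, chain: Option<&ChainState>) -> (Prompt, bool) {
    let mut prompt = Prompt::default();
    prompt.store_response = chain.is_some();
    if let Some(state) = chain {
        if let Some(last_id) = state.last_message_id.as_ref() {
            if let Some(pos) = items.iter().position(|i| i.0 == last_id.as_str()) {
                // Marker found: chain on the previous response, send only the suffix.
                prompt.previous_response_id = state.last_response_id.clone();
                prompt.input = items.into_iter().skip(pos + 1).collect();
                return (prompt, false);
            }
            // Marker gone (e.g. history compacted): full prompt, caller resets chain.
            prompt.input = items;
            return (prompt, true);
        }
        prompt.previous_response_id = state.last_response_id.clone();
        prompt.input = items;
        return (prompt, false);
    }
    prompt.input = items;
    (prompt, false)
}

fn main() {
    let history = vec![Item("a"), Item("b"), Item("c")];
    let chain = ChainState {
        last_message_id: Some("b".into()),
        last_response_id: Some("resp_1".into()),
    };

    // Marker present: only "c" is sent, chained on resp_1.
    let (p, reset) = build_prompt_from_items(history.clone(), Some(&chain));
    assert_eq!(p.input, vec![Item("c")]);
    assert_eq!(p.previous_response_id.as_deref(), Some("resp_1"));
    assert!(p.store_response && !reset);

    // Marker missing: full prompt, and the caller is told to reset the chain.
    let stale = ChainState { last_message_id: Some("gone".into()), ..chain };
    let (p, reset) = build_prompt_from_items(history.clone(), Some(&stale));
    assert_eq!(p.input.len(), 3);
    assert!(reset);

    // No chain at all: full prompt, store_response stays off.
    let (p, reset) = build_prompt_from_items(history, None);
    assert!(!p.store_response && !reset);
}
```

Returning the reset flag instead of clearing state inside the helper keeps it a free function with no `&mut self`, which is what lets `prompt_for_turn`, the compact task, and `assess_command` all share it.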