//! Remote conversation compaction: builds a prompt from the current session
//! history, asks the model client to compact it, and swaps the compacted
//! result back into the session while preserving `/undo` ghost snapshots.

use std::sync::Arc;

use crate::Prompt;
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::context_manager::ContextManager;
use crate::context_manager::is_codex_generated_item;
use crate::error::Result as CodexResult;
use crate::protocol::CompactedItem;
use crate::protocol::EventMsg;
use crate::protocol::RolloutItem;
use crate::protocol::TurnStartedEvent;
use codex_protocol::items::ContextCompactionItem;
use codex_protocol::items::TurnItem;
use codex_protocol::models::BaseInstructions;
use codex_protocol::models::ResponseItem;
use tracing::info;

/// Runs remote compaction inline within the caller's current turn; unlike
/// `run_remote_compact_task`, no `TurnStarted` event is emitted first.
pub(crate) async fn run_inline_remote_auto_compact_task(
    sess: Arc<Session>,
    turn_context: Arc<TurnContext>,
) {
    run_remote_compact_task_inner(&sess, &turn_context).await;
}

/// Runs remote compaction as its own turn: announces a `TurnStarted` event,
/// then delegates to the shared inner task.
pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Arc<TurnContext>) {
    let start_event = EventMsg::TurnStarted(TurnStartedEvent {
        model_context_window: turn_context.model_context_window(),
        collaboration_mode_kind: turn_context.collaboration_mode.mode,
    });
    sess.send_event(&turn_context, start_event).await;

    run_remote_compact_task_inner(&sess, &turn_context).await;
}

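// Hypothetical call-site sketch (not part of this file): both entry points
// take owned `Arc`s so a caller can hand the task off to the runtime. The
// `tokio::spawn` usage below is an assumption about the surrounding
// executor, shown only for illustration:
//
//     tokio::spawn(run_remote_compact_task(
//         Arc::clone(&sess),
//         Arc::clone(&turn_context),
//     ));
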
async fn run_remote_compact_task_inner(sess: &Arc<Session>, turn_context: &Arc<TurnContext>) {
    // Compaction is best-effort from the caller's point of view: report
    // failures to the client as an error event instead of propagating them.
    if let Err(err) = run_remote_compact_task_inner_impl(sess, turn_context).await {
        let event = EventMsg::Error(
            err.to_error_event(Some("Error running remote compact task".to_string())),
        );
        sess.send_event(turn_context, event).await;
    }
}

/// Performs the remote compaction round-trip and commits the result.
async fn run_remote_compact_task_inner_impl(
    sess: &Arc<Session>,
    turn_context: &Arc<TurnContext>,
) -> CodexResult<()> {
    let compaction_item = TurnItem::ContextCompaction(ContextCompactionItem::new());
    sess.emit_turn_item_started(turn_context, &compaction_item)
        .await;
    let mut history = sess.clone_history().await;
    let base_instructions = sess.get_base_instructions().await;
    // Make sure the compaction request itself fits in the model's context
    // window by dropping codex-generated items from the end of the history.
    let deleted_items = trim_function_call_history_to_fit_context_window(
        &mut history,
        turn_context.as_ref(),
        &base_instructions,
    );
    if deleted_items > 0 {
        info!(
            turn_id = %turn_context.sub_id,
            deleted_items,
            "trimmed history items before remote compaction"
        );
    }

    // Required to keep `/undo` available after compaction.
    let ghost_snapshots: Vec<ResponseItem> = history
        .raw_items()
        .iter()
        .filter(|item| matches!(item, ResponseItem::GhostSnapshot { .. }))
        .cloned()
        .collect();

    let prompt = Prompt {
        input: history.for_prompt(),
        tools: vec![],
        parallel_tool_calls: false,
        base_instructions,
        personality: turn_context.personality,
        output_schema: None,
    };

    let mut new_history = sess
        .services
        .model_client
        .compact_conversation_history(
            &prompt,
            &turn_context.model_info,
            &turn_context.otel_manager,
        )
        .await?;
    new_history = sess
        .process_compacted_history(turn_context.as_ref(), new_history)
        .await;

    // Re-append the captured ghost snapshots so undo points survive the swap.
    if !ghost_snapshots.is_empty() {
        new_history.extend(ghost_snapshots);
    }
    sess.replace_history(new_history.clone()).await;
    sess.recompute_token_usage(turn_context).await;

    // Persist the compacted replacement history to the rollout so the
    // session can be resumed from the compacted state.
    let compacted_item = CompactedItem {
        message: String::new(),
        replacement_history: Some(new_history),
    };
    sess.persist_rollout_items(&[RolloutItem::Compacted(compacted_item)])
        .await;

    sess.emit_turn_item_completed(turn_context, compaction_item)
        .await;
    Ok(())
}

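// A self-contained sketch (illustrative only) of the snapshot-preservation
// step above: ghost snapshots are captured from the pre-compaction history
// and re-appended to the compacted replacement. `Item` is a toy stand-in
// for `ResponseItem`, whose variants carry session-specific payloads.
#[cfg(test)]
mod ghost_snapshot_sketch {
    #[derive(Clone, Debug, PartialEq)]
    enum Item {
        Message(&'static str),
        GhostSnapshot(&'static str),
    }

    #[test]
    fn snapshots_survive_compaction() {
        let old_history = vec![
            Item::Message("long transcript"),
            Item::GhostSnapshot("undo-point"),
        ];
        // Capture snapshots before the raw history is discarded...
        let snapshots: Vec<Item> = old_history
            .iter()
            .filter(|item| matches!(item, Item::GhostSnapshot(_)))
            .cloned()
            .collect();
        // ...then extend the compacted history with them, as the real
        // implementation does with `new_history.extend(ghost_snapshots)`.
        let mut new_history = vec![Item::Message("compacted summary")];
        new_history.extend(snapshots);
        assert_eq!(
            new_history,
            vec![
                Item::Message("compacted summary"),
                Item::GhostSnapshot("undo-point"),
            ]
        );
    }
}
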
/// Drops codex-generated items from the end of `history` until the estimated
/// token count fits within the model's context window. Returns the number of
/// items removed; returns 0 immediately when no window is known.
fn trim_function_call_history_to_fit_context_window(
    history: &mut ContextManager,
    turn_context: &TurnContext,
    base_instructions: &BaseInstructions,
) -> usize {
    let mut deleted_items = 0usize;
    let Some(context_window) = turn_context.model_context_window() else {
        return deleted_items;
    };

    while history
        .estimate_token_count_with_base_instructions(base_instructions)
        .is_some_and(|estimated_tokens| estimated_tokens > context_window)
    {
        let Some(last_item) = history.raw_items().last() else {
            break;
        };
        // Never trim user-authored items; stop at the first one from the end.
        if !is_codex_generated_item(last_item) {
            break;
        }
        if !history.remove_last_item() {
            break;
        }
        deleted_items += 1;
    }

    deleted_items
}

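// A minimal, self-contained sketch of the trimming loop above, written
// against toy types because `ContextManager` and `TurnContext` require a
// live session to construct. `ToyItem` and `trim_newest_generated` are
// illustrative assumptions, not items in this crate.
#[cfg(test)]
mod trim_strategy_sketch {
    #[derive(Clone, Debug, PartialEq)]
    enum ToyItem {
        User(usize),      // user-authored item, with a token cost
        Generated(usize), // codex-generated item, with a token cost
    }

    // Same shape as `trim_function_call_history_to_fit_context_window`:
    // pop generated items off the tail until the estimate fits the window,
    // stopping at the first user-authored item.
    fn trim_newest_generated(history: &mut Vec<ToyItem>, context_window: usize) -> usize {
        let estimate = |h: &[ToyItem]| -> usize {
            h.iter()
                .map(|item| match item {
                    ToyItem::User(t) | ToyItem::Generated(t) => *t,
                })
                .sum()
        };
        let mut deleted = 0usize;
        while estimate(history) > context_window {
            match history.last() {
                Some(ToyItem::Generated(_)) => {
                    history.pop();
                    deleted += 1;
                }
                // Never trim past a user-authored item (or an empty history).
                _ => break,
            }
        }
        deleted
    }

    #[test]
    fn trims_generated_tail_but_never_user_items() {
        let mut history = vec![
            ToyItem::User(50),
            ToyItem::Generated(40),
            ToyItem::Generated(40),
        ];
        // 130 estimated tokens against a 40-token window: both generated
        // items are dropped, then the loop stops at the user item even
        // though the history is still over budget.
        assert_eq!(trim_newest_generated(&mut history, 40), 2);
        assert_eq!(history, vec![ToyItem::User(50)]);
    }
}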