mirror of
https://github.com/openai/codex.git
synced 2026-05-01 01:47:18 +00:00
Improve compact (#6692)
This PR does the following:
- Add a compact prefix to the summary
- Change the compaction prompt
- Allow multiple compactions for long-running tasks
- Filter out summary messages on the following compaction

Considerations:
- Filtering out the summary message isn't the cleanest approach.
- Theoretically, we could end up in an infinite compaction loop if the user messages exceed the compaction limit. However, that's not possible in today's code because we have a hard cap on user messages.
- We need to address having multiple user messages, because it confuses the model.

Testing:
- Verified that after compaction we always end up with one user message (the task) and one summary, even after multiple compactions.
This commit is contained in:
@@ -1819,7 +1819,6 @@ pub(crate) async fn run_task(
|
||||
// Although from the perspective of codex.rs, TurnDiffTracker has the lifecycle of a Task which contains
|
||||
// many turns, from the perspective of the user, it is a single turn.
|
||||
let turn_diff_tracker = Arc::new(tokio::sync::Mutex::new(TurnDiffTracker::new()));
|
||||
let mut auto_compact_recently_attempted = false;
|
||||
|
||||
loop {
|
||||
// Note that pending_input would be something like a message the user
|
||||
@@ -1874,27 +1873,12 @@ pub(crate) async fn run_task(
|
||||
let (responses, items_to_record_in_conversation_history) =
|
||||
process_items(processed_items, &sess, &turn_context).await;
|
||||
|
||||
// as long as compaction works well in getting us way below the token limit, we shouldn't worry about being in an infinite loop.
|
||||
if token_limit_reached {
|
||||
if auto_compact_recently_attempted {
|
||||
let limit_str = limit.to_string();
|
||||
let current_tokens = total_usage_tokens
|
||||
.map(|tokens| tokens.to_string())
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
let event = EventMsg::Error(ErrorEvent {
|
||||
message: format!(
|
||||
"Conversation is still above the token limit after automatic summarization (limit {limit_str}, current {current_tokens}). Please start a new session or trim your input."
|
||||
),
|
||||
});
|
||||
sess.send_event(&turn_context, event).await;
|
||||
break;
|
||||
}
|
||||
auto_compact_recently_attempted = true;
|
||||
compact::run_inline_auto_compact_task(sess.clone(), turn_context.clone()).await;
|
||||
continue;
|
||||
}
|
||||
|
||||
auto_compact_recently_attempted = false;
|
||||
|
||||
if responses.is_empty() {
|
||||
last_agent_message = get_last_assistant_message_from_turn(
|
||||
&items_to_record_in_conversation_history,
|
||||
|
||||
Reference in New Issue
Block a user