Mirror of https://github.com/openai/codex.git, synced 2026-02-02 23:13:37 +00:00.
Compare commits
5 Commits
models-not
...
compact
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bd2a53d1cd | ||
|
|
e744548aae | ||
|
|
8b23e160c4 | ||
|
|
3df732caa1 | ||
|
|
3a4f5435e8 |
2
codex-rs/Cargo.lock
generated
2
codex-rs/Cargo.lock
generated
@@ -856,6 +856,8 @@ dependencies = [
|
||||
"ratatui",
|
||||
"ratatui-image",
|
||||
"regex-lite",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shlex",
|
||||
"strum 0.27.2",
|
||||
|
||||
@@ -219,6 +219,23 @@ impl Session {
|
||||
.map(PathBuf::from)
|
||||
.map_or_else(|| self.cwd.clone(), |p| self.cwd.join(p))
|
||||
}
|
||||
/// Erases all previous messages from the conversation history (zdr_transcript), if present.
|
||||
pub fn erase_conversation_history(&self) {
|
||||
let mut state = self.state.lock().unwrap();
|
||||
if let Some(transcript) = state.zdr_transcript.as_mut() {
|
||||
transcript.clear();
|
||||
}
|
||||
|
||||
// When using the experimental OpenAI Responses API with server-side
|
||||
// storage enabled, `previous_response_id` is used to let the model
|
||||
// access the earlier part of the conversation **without** having to
|
||||
// resend the full transcript. To truly wipe all historical context
|
||||
// we must drop this identifier as well, otherwise the backend will
|
||||
// still be able to retrieve the prior messages via the ID even
|
||||
// though our local transcript has been cleared. See
|
||||
// https://platform.openai.com/docs/guides/responses for details.
|
||||
state.previous_response_id = None;
|
||||
}
|
||||
}
|
||||
|
||||
/// Mutable state of the agent
|
||||
@@ -558,6 +575,11 @@ async fn submission_loop(
|
||||
|
||||
debug!(?sub, "Submission");
|
||||
match sub.op {
|
||||
Op::EraseConversationHistory => {
|
||||
if let Some(sess) = sess.as_ref() {
|
||||
sess.erase_conversation_history();
|
||||
}
|
||||
}
|
||||
Op::Interrupt => {
|
||||
let sess = match sess.as_ref() {
|
||||
Some(sess) => sess,
|
||||
|
||||
@@ -30,6 +30,11 @@ impl ConversationHistory {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Clears the conversation history.
|
||||
pub(crate) fn clear(&mut self) {
|
||||
self.items.clear();
|
||||
}
|
||||
}
|
||||
|
||||
/// Anything that is not a system message or "reasoning" message is considered
|
||||
@@ -44,3 +49,31 @@ fn is_api_message(message: &ResponseItem) -> bool {
|
||||
ResponseItem::Other => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::models::ContentItem;
    use crate::models::ResponseItem;

    #[test]
    fn clear_removes_all_items() {
        let mut history = ConversationHistory::new();

        // Record one user message, then wipe the history and check that
        // nothing survives.
        let recorded = vec![ResponseItem::Message {
            role: "user".into(),
            content: vec![ContentItem::InputText {
                text: "hello".into(),
            }],
        }];
        history.record_items(recorded.iter());
        assert_eq!(history.contents().len(), 1, "sanity – item should be present");

        history.clear();
        assert!(history.contents().is_empty(), "all items should be removed");
    }
}
|
||||
|
||||
@@ -35,6 +35,8 @@ pub struct Submission {
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[non_exhaustive]
|
||||
pub enum Op {
|
||||
/// Erase all conversation history for the current session.
|
||||
EraseConversationHistory,
|
||||
/// Configure the model session.
|
||||
ConfigureSession {
|
||||
/// Provider identifier ("openai", "openrouter", ...).
|
||||
|
||||
@@ -61,6 +61,8 @@ tui-textarea = "0.7.0"
|
||||
unicode-segmentation = "1.12.0"
|
||||
unicode-width = "0.1"
|
||||
uuid = "1"
|
||||
reqwest = { version = "0.12", features = ["json"] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
|
||||
[dev-dependencies]
|
||||
insta = "1.43.1"
|
||||
|
||||
@@ -329,6 +329,11 @@ impl App<'_> {
|
||||
widget.add_diff_output(text);
|
||||
}
|
||||
}
|
||||
SlashCommand::Compact => {
|
||||
if let AppState::Chat { widget } = &mut self.app_state {
|
||||
widget.start_compact();
|
||||
}
|
||||
}
|
||||
},
|
||||
AppEvent::StartFileSearch(query) => {
|
||||
self.file_search.on_user_query(query);
|
||||
@@ -338,6 +343,11 @@ impl App<'_> {
|
||||
widget.apply_file_search_result(query, matches);
|
||||
}
|
||||
}
|
||||
AppEvent::CompactComplete(result) => {
|
||||
if let AppState::Chat { widget } = &mut self.app_state {
|
||||
widget.apply_compact_summary(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
terminal.clear()?;
|
||||
|
||||
@@ -51,5 +51,10 @@ pub(crate) enum AppEvent {
|
||||
matches: Vec<FileMatch>,
|
||||
},
|
||||
|
||||
/// Result of the asynchronous `/compact` summarization.
|
||||
CompactComplete(Result<String, String>),
|
||||
|
||||
/// Insert the most recently appended history entry directly into the
|
||||
/// terminal scrollback. Carries already formatted lines.
|
||||
InsertHistory(Vec<Line<'static>>),
|
||||
}
|
||||
|
||||
@@ -36,6 +36,9 @@ use crate::bottom_pane::BottomPane;
|
||||
use crate::bottom_pane::BottomPaneParams;
|
||||
use crate::bottom_pane::CancellationEvent;
|
||||
use crate::bottom_pane::InputResult;
|
||||
use crate::compact::Role;
|
||||
use crate::compact::TranscriptEntry;
|
||||
use crate::compact::generate_compact_summary;
|
||||
use crate::conversation_history_widget::ConversationHistoryWidget;
|
||||
use crate::exec_command::strip_bash_lc_and_escape;
|
||||
use crate::history_cell::PatchEventType;
|
||||
@@ -50,11 +53,12 @@ pub(crate) struct ChatWidget<'a> {
|
||||
config: Config,
|
||||
initial_user_message: Option<UserMessage>,
|
||||
token_usage: TokenUsage,
|
||||
// Buffer for streaming assistant reasoning text; emitted on final event.
|
||||
reasoning_buffer: String,
|
||||
// Buffer for streaming assistant answer text; we do not surface partial
|
||||
// We wait for the final AgentMessage event and then emit the full text
|
||||
// at once into scrollback so the history contains a single message.
|
||||
// Buffer for streaming assistant answer text; emitted on final event.
|
||||
answer_buffer: String,
|
||||
// Transcript of chat for `/compact` summarization.
|
||||
transcript: Vec<TranscriptEntry>,
|
||||
}
|
||||
|
||||
struct UserMessage {
|
||||
@@ -140,6 +144,7 @@ impl ChatWidget<'_> {
|
||||
token_usage: TokenUsage::default(),
|
||||
reasoning_buffer: String::new(),
|
||||
answer_buffer: String::new(),
|
||||
transcript: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -198,8 +203,14 @@ impl ChatWidget<'_> {
|
||||
|
||||
// Only show text portion in conversation history for now.
|
||||
if !text.is_empty() {
|
||||
// Forward a copy for history and emit into scrollback.
|
||||
self.conversation_history.add_user_message(text.clone());
|
||||
self.emit_last_history_entry();
|
||||
// Record in transcript for `/compact`.
|
||||
self.transcript.push(TranscriptEntry {
|
||||
role: Role::User,
|
||||
text,
|
||||
});
|
||||
}
|
||||
self.conversation_history.scroll_to_bottom();
|
||||
}
|
||||
@@ -230,10 +241,7 @@ impl ChatWidget<'_> {
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentMessage(AgentMessageEvent { message }) => {
|
||||
// Final assistant answer. Prefer the fully provided message
|
||||
// from the event; if it is empty fall back to any accumulated
|
||||
// delta buffer (some providers may only stream deltas and send
|
||||
// an empty final message).
|
||||
// Final assistant answer. Prefer the fully provided message.
|
||||
let full = if message.is_empty() {
|
||||
std::mem::take(&mut self.answer_buffer)
|
||||
} else {
|
||||
@@ -242,8 +250,13 @@ impl ChatWidget<'_> {
|
||||
};
|
||||
if !full.is_empty() {
|
||||
self.conversation_history
|
||||
.add_agent_message(&self.config, full);
|
||||
.add_agent_message(&self.config, full.clone());
|
||||
self.emit_last_history_entry();
|
||||
// Record final answer in transcript for `/compact`.
|
||||
self.transcript.push(TranscriptEntry {
|
||||
role: Role::Assistant,
|
||||
text: full,
|
||||
});
|
||||
}
|
||||
self.request_redraw();
|
||||
}
|
||||
@@ -469,6 +482,88 @@ impl ChatWidget<'_> {
|
||||
self.bottom_pane.on_file_search_result(query, matches);
|
||||
}
|
||||
|
||||
// (removed deprecated synchronous `compact` implementation)
|
||||
|
||||
/// Kick off an asynchronous summarization of the current transcript.
|
||||
/// Returns immediately so the UI stays responsive.
|
||||
pub(crate) fn start_compact(&mut self) {
|
||||
// Show status indicator immediately.
|
||||
self.bottom_pane.set_task_running(true);
|
||||
self.bottom_pane
|
||||
.update_status_text("Summarizing context…".to_string());
|
||||
self.request_redraw();
|
||||
|
||||
// Clone data required for the background task.
|
||||
let transcript = self.transcript.clone();
|
||||
let model = self.config.model.clone();
|
||||
let config_clone = self.config.clone();
|
||||
let app_event_tx = self.app_event_tx.clone();
|
||||
|
||||
// Spawn the summarization on a blocking thread to avoid CPU-bound work
|
||||
// stalling the async runtime (and thus the UI).
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let rt = tokio::runtime::Handle::current();
|
||||
rt.block_on(async move {
|
||||
let result = generate_compact_summary(&transcript, &model, &config_clone).await;
|
||||
let evt = match result {
|
||||
Ok(summary) => AppEvent::CompactComplete(Ok(summary)),
|
||||
Err(e) => AppEvent::CompactComplete(Err(format!("{e}"))),
|
||||
};
|
||||
app_event_tx.send(evt);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/// Apply the completed summary returned by the background task.
|
||||
pub(crate) fn apply_compact_summary(&mut self, result: Result<String, String>) {
|
||||
match result {
|
||||
Ok(summary) => {
|
||||
self.conversation_history.clear_agent_history();
|
||||
self.transcript.clear();
|
||||
// clear session history in backend
|
||||
self.submit_op(Op::EraseConversationHistory);
|
||||
self.conversation_history
|
||||
.add_agent_message(&self.config, summary.clone());
|
||||
self.transcript = vec![TranscriptEntry {
|
||||
role: Role::Assistant,
|
||||
text: summary,
|
||||
}];
|
||||
|
||||
// Re-configure the Codex session so that the backend agent starts with
|
||||
// a clean conversation context.
|
||||
let op = Op::ConfigureSession {
|
||||
provider: self.config.model_provider.clone(),
|
||||
model: self.config.model.clone(),
|
||||
model_reasoning_effort: self.config.model_reasoning_effort,
|
||||
model_reasoning_summary: self.config.model_reasoning_summary,
|
||||
user_instructions: self.config.user_instructions.clone(),
|
||||
base_instructions: self.config.base_instructions.clone(),
|
||||
approval_policy: self.config.approval_policy,
|
||||
sandbox_policy: self.config.sandbox_policy.clone(),
|
||||
disable_response_storage: self.config.disable_response_storage,
|
||||
notify: self.config.notify.clone(),
|
||||
cwd: self.config.cwd.clone(),
|
||||
resume_path: None,
|
||||
};
|
||||
self.submit_op(op);
|
||||
|
||||
// Reset the recorded token usage because we start a fresh
|
||||
// conversation context. This ensures the *context remaining*
|
||||
// indicator in the composer is updated immediately.
|
||||
self.token_usage = TokenUsage::default();
|
||||
self.bottom_pane
|
||||
.set_token_usage(self.token_usage.clone(), self.config.model_context_window);
|
||||
}
|
||||
Err(msg) => {
|
||||
self.conversation_history.add_error(msg);
|
||||
}
|
||||
}
|
||||
|
||||
// Hide status indicator and refresh UI.
|
||||
self.bottom_pane.set_task_running(false);
|
||||
self.request_redraw();
|
||||
}
|
||||
|
||||
/// Handle Ctrl-C key press.
|
||||
/// Returns CancellationEvent::Handled if the event was consumed by the UI, or
|
||||
/// CancellationEvent::Ignored if the caller should handle it (e.g. exit).
|
||||
|
||||
91
codex-rs/tui/src/compact.rs
Normal file
91
codex-rs/tui/src/compact.rs
Normal file
@@ -0,0 +1,91 @@
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::openai_api_key::get_openai_api_key;
|
||||
use serde::Serialize;
|
||||
|
||||
/// Who produced a transcript entry.
// `Copy`/`Debug`/`PartialEq`/`Eq` are cheap wins on a fieldless enum and
// make the type usable in assertions and diagnostics.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Role {
    User,
    Assistant,
}

/// One chat turn retained in memory for `/compact` summarization.
#[derive(Clone, Debug)]
pub struct TranscriptEntry {
    pub role: Role,
    pub text: String,
}

impl TranscriptEntry {
    /// Wire-format role string used when flattening the transcript into a
    /// prompt for the chat completions API.
    fn role_str(&self) -> &'static str {
        match self.role {
            Role::User => "user",
            Role::Assistant => "assistant",
        }
    }
}
|
||||
|
||||
/// One message in the chat completions request body.
#[derive(Serialize)]
struct Message<'a> {
    // Chat role string (e.g. "user", "assistant").
    role: &'a str,
    content: String,
}

/// JSON request body for the `/chat/completions` endpoint.
#[derive(Serialize)]
struct Payload<'a> {
    model: &'a str,
    messages: Vec<Message<'a>>,
}
|
||||
|
||||
/// Generate a concise summary of the provided transcript using the OpenAI chat
|
||||
/// completions API.
|
||||
pub async fn generate_compact_summary(
|
||||
transcript: &[TranscriptEntry],
|
||||
model: &str,
|
||||
config: &Config,
|
||||
) -> Result<String> {
|
||||
let conversation_text = transcript
|
||||
.iter()
|
||||
.map(|e| format!("{}: {}", e.role_str(), e.text))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
let messages = vec![
|
||||
Message {
|
||||
role: "assistant",
|
||||
content: "You are an expert coding assistant. Your goal is to generate a concise, structured summary of the conversation below that captures all essential information needed to continue development after context replacement. Include tasks performed, code areas modified or reviewed, key decisions or assumptions, test results or errors, and outstanding tasks or next steps.".to_string(),
|
||||
},
|
||||
Message {
|
||||
role: "user",
|
||||
content: format!(
|
||||
"Here is the conversation so far:\n{conversation_text}\n\nPlease summarize this conversation, covering:\n1. Tasks performed and outcomes\n2. Code files, modules, or functions modified or examined\n3. Important decisions or assumptions made\n4. Errors encountered and test or build results\n5. Remaining tasks, open questions, or next steps\nProvide the summary in a clear, concise format."
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
let api_key = get_openai_api_key().ok_or_else(|| anyhow!("OpenAI API key not set"))?;
|
||||
let client = reqwest::Client::new();
|
||||
let base = config.model_provider.base_url.trim_end_matches('/');
|
||||
let url = format!("{}/chat/completions", base);
|
||||
|
||||
let payload = Payload { model, messages };
|
||||
let res = client
|
||||
.post(url)
|
||||
.bearer_auth(api_key)
|
||||
.json(&payload)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
let body: serde_json::Value = res.json().await?;
|
||||
if let Some(summary) = body
|
||||
.get("choices")
|
||||
.and_then(|c| c.get(0))
|
||||
.and_then(|c| c.get("message"))
|
||||
.and_then(|m| m.get("content"))
|
||||
.and_then(|v| v.as_str())
|
||||
{
|
||||
Ok(summary.to_string())
|
||||
} else {
|
||||
Ok("Unable to generate summary.".to_string())
|
||||
}
|
||||
}
|
||||
@@ -122,6 +122,10 @@ impl ConversationHistoryWidget {
|
||||
self.add_to_history(HistoryCell::new_agent_message(config, message));
|
||||
}
|
||||
|
||||
/// Removes every rendered entry from the conversation history, e.g. after
/// `/compact` replaces the conversation with a summary.
pub fn clear_agent_history(&mut self) {
    self.clear_all();
}
|
||||
|
||||
pub fn add_agent_reasoning(&mut self, config: &Config, text: String) {
|
||||
self.add_to_history(HistoryCell::new_agent_reasoning(config, text));
|
||||
}
|
||||
@@ -173,6 +177,10 @@ impl ConversationHistoryWidget {
|
||||
});
|
||||
}
|
||||
|
||||
/// Drops all stored history entries from the widget's backing storage.
fn clear_all(&mut self) {
    self.entries.clear();
}
|
||||
|
||||
/// Return the lines for the most recently appended entry (if any) so the
|
||||
/// parent widget can surface them via the new scrollback insertion path.
|
||||
pub(crate) fn last_entry_plain_lines(&self) -> Option<Vec<Line<'static>>> {
|
||||
|
||||
@@ -27,6 +27,7 @@ mod cell_widget;
|
||||
mod chatwidget;
|
||||
mod citation_regex;
|
||||
mod cli;
|
||||
mod compact;
|
||||
mod conversation_history_widget;
|
||||
mod exec_command;
|
||||
mod file_search;
|
||||
|
||||
@@ -14,6 +14,7 @@ pub enum SlashCommand {
|
||||
// more frequently used commands should be listed first.
|
||||
New,
|
||||
Diff,
|
||||
Compact,
|
||||
Quit,
|
||||
}
|
||||
|
||||
@@ -26,6 +27,7 @@ impl SlashCommand {
|
||||
SlashCommand::Diff => {
|
||||
"Show git diff of the working directory (including untracked files)"
|
||||
}
|
||||
SlashCommand::Compact => "Condense context into a summary.",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user