Files
codex/codex-rs/core/src/memory_trace.rs
Ahmed Ibrahim 6b004b74ca Preserve memory trace I/O errors
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:44:23 +00:00

47 lines
1.7 KiB
Rust

use std::io;
use std::path::PathBuf;
use crate::ModelClient;
use crate::error::CodexErr;
use crate::error::Result;
pub use codex_memories::memory_trace::BuiltMemory;
use codex_otel::SessionTelemetry;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
/// Loads raw trace files, normalizes items, and builds memory summaries.
///
/// The request/response wiring mirrors the memory summarize E2E flow:
/// `/v1/memories/trace_summarize` with one output object per input raw memory.
///
/// The caller provides the model selection, reasoning effort, and telemetry context explicitly so
/// the session-scoped [`ModelClient`] can be reused across turns.
pub async fn build_memories_from_trace_files(
client: &ModelClient,
trace_paths: &[PathBuf],
model_info: &ModelInfo,
effort: Option<ReasoningEffortConfig>,
session_telemetry: &SessionTelemetry,
) -> Result<Vec<BuiltMemory>> {
if trace_paths.is_empty() {
return Ok(Vec::new());
}
let prepared = codex_memories::memory_trace::load_trace_requests(trace_paths)
.await
.map_err(map_trace_load_error)?;
let raw_memories = prepared.iter().map(|trace| trace.payload.clone()).collect();
let output = client
.summarize_memories(raw_memories, model_info, effort, session_telemetry)
.await?;
codex_memories::memory_trace::build_memories_from_output(prepared, output)
.map_err(|err| CodexErr::InvalidRequest(err.to_string()))
}
/// Converts a trace-load failure into a [`CodexErr`], keeping I/O errors
/// as [`CodexErr::Io`] so callers can inspect the underlying `io::Error`;
/// anything else degrades to [`CodexErr::InvalidRequest`] with its message.
fn map_trace_load_error(err: anyhow::Error) -> CodexErr {
    err.downcast::<io::Error>()
        .map(CodexErr::Io)
        .unwrap_or_else(|other| CodexErr::InvalidRequest(other.to_string()))
}