mirror of
https://github.com/openai/codex.git
synced 2026-04-29 17:06:51 +00:00
feat: add phase 1 mem client (#10629)
Adding a client on top of https://github.com/openai/openai/pull/672176
This commit is contained in:
@@ -6,6 +6,7 @@ use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
|
||||
use codex_protocol::protocol::RateLimitSnapshot;
|
||||
use codex_protocol::protocol::TokenUsage;
|
||||
use futures::Stream;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use serde_json::Value;
|
||||
use std::pin::Pin;
|
||||
@@ -37,6 +38,33 @@ pub struct CompactionInput<'a> {
|
||||
pub instructions: &'a str,
|
||||
}
|
||||
|
||||
/// Canonical input payload for the memory trace summarize endpoint.
///
/// Serialized (via `serde`) as the request body sent to the summarization
/// service; it is write-only from this client's perspective (no `Deserialize`).
#[derive(Debug, Clone, Serialize)]
pub struct MemoryTraceSummarizeInput {
    /// Name of the model to run summarization with.
    pub model: String,
    /// The traces to summarize; see [`MemoryTrace`].
    pub traces: Vec<MemoryTrace>,
    /// Optional reasoning configuration. When `None`, the field is omitted
    /// from the serialized JSON entirely rather than sent as `null`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<Reasoning>,
}
|
||||
|
||||
/// A single trace submitted for summarization as part of a
/// [`MemoryTraceSummarizeInput`] request.
#[derive(Debug, Clone, Serialize)]
pub struct MemoryTrace {
    /// Caller-assigned identifier for this trace.
    pub id: String,
    /// Metadata describing the origin of this trace.
    pub metadata: MemoryTraceMetadata,
    /// The trace contents, kept as opaque JSON values so this client does not
    /// need to know the item schema — they are forwarded as-is.
    pub items: Vec<Value>,
}
|
||||
|
||||
/// Metadata attached to a single memory trace.
#[derive(Debug, Clone, Serialize)]
pub struct MemoryTraceMetadata {
    /// Path identifying where the trace came from
    /// (presumably a filesystem path to the recorded session — TODO confirm
    /// against the producing caller).
    pub source_path: String,
}
|
||||
|
||||
/// Response payload returned by the memory trace summarize endpoint.
///
/// Read-only from this client's perspective (`Deserialize` but no
/// `Serialize`). Derives `PartialEq`/`Eq` so results can be compared
/// directly, e.g. in tests.
#[derive(Debug, Clone, Deserialize, PartialEq, Eq)]
pub struct MemoryTraceSummaryOutput {
    /// Summary text for the trace itself.
    pub trace_summary: String,
    /// Summary text for the memory derived from the trace.
    pub memory_summary: String,
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ResponseEvent {
|
||||
Created,
|
||||
|
||||
Reference in New Issue
Block a user