mirror of https://github.com/openai/codex.git
synced 2026-05-13 07:42:40 +00:00

Compare commits
4 Commits
text-eleme ... fix/symlin

| Author | SHA1 | Date |
|---|---|---|
| | 7992cd07ee | |
| | f1653dd4d3 | |
| | e893e83eb9 | |
| | f89a40a849 | |

8 .github/workflows/rust-ci.yml (vendored)
@@ -59,7 +59,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.90
- uses: dtolnay/rust-toolchain@1.92
with:
components: rustfmt
- name: cargo fmt

@@ -77,7 +77,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.90
- uses: dtolnay/rust-toolchain@1.92
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-shear

@@ -177,7 +177,7 @@ jobs:
steps:
- uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.90
- uses: dtolnay/rust-toolchain@1.92
with:
targets: ${{ matrix.target }}
components: clippy

@@ -416,7 +416,7 @@ jobs:
- name: Install DotSlash
uses: facebook/install-dotslash@v2
- uses: dtolnay/rust-toolchain@1.90
- uses: dtolnay/rust-toolchain@1.92
with:
targets: ${{ matrix.target }}

2 .github/workflows/rust-release.yml (vendored)
@@ -80,7 +80,7 @@ jobs:
steps:
- uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.90
- uses: dtolnay/rust-toolchain@1.92
with:
targets: ${{ matrix.target }}

2 .github/workflows/sdk.yml (vendored)
@@ -24,7 +24,7 @@ jobs:
node-version: 22
cache: pnpm
- uses: dtolnay/rust-toolchain@1.90
- uses: dtolnay/rust-toolchain@1.92
- name: build codex
run: cargo build --bin codex

2 .github/workflows/shell-tool-mcp.yml (vendored)
@@ -93,7 +93,7 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.90
- uses: dtolnay/rust-toolchain@1.92
with:
targets: ${{ matrix.target }}
@@ -128,6 +128,7 @@ pub struct ConversationSummary {
pub path: PathBuf,
pub preview: String,
pub timestamp: Option<String>,
pub updated_at: Option<String>,
pub model_provider: String,
pub cwd: PathBuf,
pub cli_version: String,

@@ -1182,11 +1182,21 @@ pub struct ThreadListParams {
pub cursor: Option<String>,
/// Optional page size; defaults to a reasonable server-side value.
pub limit: Option<u32>,
/// Optional sort key; defaults to created_at.
pub sort_key: Option<ThreadSortKey>,
/// Optional provider filter; when set, only sessions recorded under these
/// providers are returned. When present but empty, includes all providers.
pub model_providers: Option<Vec<String>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export_to = "v2/")]
pub enum ThreadSortKey {
CreatedAt,
UpdatedAt,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]

@@ -1359,6 +1369,9 @@ pub struct Thread {
/// Unix timestamp (in seconds) when the thread was created.
#[ts(type = "number")]
pub created_at: i64,
/// Unix timestamp (in seconds) when the thread was last updated.
#[ts(type = "number")]
pub updated_at: i64,
/// [UNSTABLE] Path to the thread on disk.
pub path: PathBuf,
/// Working directory captured for the thread.
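For context on how the new `sort_key` field travels over the wire: the `snake_case` rename on `ThreadSortKey` is what makes clients pass `"created_at"` / `"updated_at"` as string values (see the `thread/list` docs below). A minimal standalone sketch of that serde behavior, using mirror types rather than the crate's actual definitions:

```rust
use serde::{Deserialize, Serialize};

// Standalone mirror of the wire enum, for illustration only.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all = "snake_case")]
enum ThreadSortKey {
    CreatedAt,
    UpdatedAt,
}

fn main() {
    // `rename_all = "snake_case"` means the API accepts/emits "created_at" / "updated_at".
    assert_eq!(
        serde_json::to_string(&ThreadSortKey::UpdatedAt).unwrap(),
        "\"updated_at\""
    );
    let parsed: ThreadSortKey = serde_json::from_str("\"created_at\"").unwrap();
    assert_eq!(parsed, ThreadSortKey::CreatedAt);
}
```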
@@ -138,10 +138,11 @@ To branch from a stored session, call `thread/fork` with the `thread.id`. This c

### Example: List threads (with pagination & filters)

`thread/list` lets you render a history UI. Pass any combination of:
`thread/list` lets you render a history UI. Results are sorted by `createdAt` (newest first) by default. Pass any combination of:

- `cursor` — opaque string from a prior response; omit for the first page.
- `limit` — server defaults to a reasonable page size if unset.
- `sortKey` — `created_at` (default) or `updated_at`.
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.

Example:

@@ -150,11 +151,12 @@ Example:
{ "method": "thread/list", "id": 20, "params": {
  "cursor": null,
  "limit": 25,
  "sortKey": "created_at"
} }
{ "id": 20, "result": {
  "data": [
    { "id": "thr_a", "preview": "Create a TUI", "modelProvider": "openai", "createdAt": 1730831111 },
    { "id": "thr_b", "preview": "Fix tests", "modelProvider": "openai", "createdAt": 1730750000 }
    { "id": "thr_a", "preview": "Create a TUI", "modelProvider": "openai", "createdAt": 1730831111, "updatedAt": 1730831111 },
    { "id": "thr_b", "preview": "Fix tests", "modelProvider": "openai", "createdAt": 1730750000, "updatedAt": 1730750000 }
  ],
  "nextCursor": "opaque-token-or-null"
} }
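Passing `"sortKey": "updated_at"` returns the same response shape, ordered by each thread's last update (derived from the rollout file's modification time) instead of its creation time, newest first; cursor pagination works the same way.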
@@ -6,6 +6,7 @@ use crate::models::supported_models;
use crate::outgoing_message::OutgoingMessageSender;
use crate::outgoing_message::OutgoingNotification;
use chrono::DateTime;
use chrono::SecondsFormat;
use chrono::Utc;
use codex_app_server_protocol::Account;
use codex_app_server_protocol::AccountLoginCompletedNotification;

@@ -99,6 +100,7 @@ use codex_app_server_protocol::ThreadLoadedListResponse;
use codex_app_server_protocol::ThreadResumeParams;
use codex_app_server_protocol::ThreadResumeResponse;
use codex_app_server_protocol::ThreadRollbackParams;
use codex_app_server_protocol::ThreadSortKey;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStartedNotification;

@@ -123,6 +125,7 @@ use codex_core::NewThread;
use codex_core::RolloutRecorder;
use codex_core::SessionMeta;
use codex_core::ThreadManager;
use codex_core::ThreadSortKey as CoreThreadSortKey;
use codex_core::auth::CLIENT_ID;
use codex_core::auth::login_with_api_key;
use codex_core::config::Config;

@@ -1598,6 +1601,7 @@ impl CodexMessageProcessor {
let ThreadListParams {
cursor,
limit,
sort_key,
model_providers,
} = params;

@@ -1605,8 +1609,12 @@ impl CodexMessageProcessor {
.map(|value| value as usize)
.unwrap_or(THREAD_LIST_DEFAULT_LIMIT)
.clamp(1, THREAD_LIST_MAX_LIMIT);
let core_sort_key = match sort_key.unwrap_or(ThreadSortKey::CreatedAt) {
ThreadSortKey::CreatedAt => CoreThreadSortKey::CreatedAt,
ThreadSortKey::UpdatedAt => CoreThreadSortKey::UpdatedAt,
};
let (summaries, next_cursor) = match self
.list_threads_common(requested_page_size, cursor, model_providers)
.list_threads_common(requested_page_size, cursor, model_providers, core_sort_key)
.await
{
Ok(r) => r,

@@ -2171,7 +2179,12 @@ impl CodexMessageProcessor {
.clamp(1, THREAD_LIST_MAX_LIMIT);

match self
.list_threads_common(requested_page_size, cursor, model_providers)
.list_threads_common(
requested_page_size,
cursor,
model_providers,
CoreThreadSortKey::UpdatedAt,
)
.await
{
Ok((items, next_cursor)) => {

@@ -2189,8 +2202,18 @@ impl CodexMessageProcessor {
requested_page_size: usize,
cursor: Option<String>,
model_providers: Option<Vec<String>>,
sort_key: CoreThreadSortKey,
) -> Result<(Vec<ConversationSummary>, Option<String>), JSONRPCErrorError> {
let mut cursor_obj: Option<RolloutCursor> = cursor.as_ref().and_then(|s| parse_cursor(s));
let mut cursor_obj: Option<RolloutCursor> = match cursor.as_ref() {
Some(cursor_str) => {
Some(parse_cursor(cursor_str).ok_or_else(|| JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: format!("invalid cursor: {cursor_str}"),
data: None,
})?)
}
None => None,
};
let mut last_cursor = cursor_obj.clone();
let mut remaining = requested_page_size;
let mut items = Vec::with_capacity(requested_page_size);

@@ -2214,6 +2237,7 @@ impl CodexMessageProcessor {
&self.config.codex_home,
page_size,
cursor_obj.as_ref(),
sort_key,
INTERACTIVE_SESSION_SOURCES,
model_provider_filter.as_deref(),
fallback_provider.as_str(),

@@ -2229,6 +2253,7 @@ impl CodexMessageProcessor {
.items
.into_iter()
.filter_map(|it| {
let updated_at = it.updated_at.clone();
let session_meta_line = it.head.first().and_then(|first| {
serde_json::from_value::<SessionMetaLine>(first.clone()).ok()
})?;

@@ -2238,6 +2263,7 @@ impl CodexMessageProcessor {
&session_meta_line.meta,
session_meta_line.git.as_ref(),
fallback_provider.as_str(),
updated_at,
)
})
.collect::<Vec<_>>();

@@ -3982,12 +4008,19 @@ pub(crate) async fn read_summary_from_rollout(
git,
} = session_meta_line;

let created_at = if session_meta.timestamp.is_empty() {
None
} else {
Some(session_meta.timestamp.as_str())
};
let updated_at = read_updated_at(path, created_at).await;
if let Some(summary) = extract_conversation_summary(
path.to_path_buf(),
&head,
&session_meta,
git.as_ref(),
fallback_provider,
updated_at.clone(),
) {
return Ok(summary);
}

@@ -4002,10 +4035,12 @@ pub(crate) async fn read_summary_from_rollout(
.clone()
.unwrap_or_else(|| fallback_provider.to_string());
let git_info = git.as_ref().map(map_git_info);
let updated_at = updated_at.or_else(|| timestamp.clone());

Ok(ConversationSummary {
conversation_id: session_meta.id,
timestamp,
updated_at,
path: path.to_path_buf(),
preview: String::new(),
model_provider,

@@ -4040,6 +4075,7 @@ fn extract_conversation_summary(
session_meta: &SessionMeta,
git: Option<&CoreGitInfo>,
fallback_provider: &str,
updated_at: Option<String>,
) -> Option<ConversationSummary> {
let preview = head
.iter()

@@ -4065,10 +4101,12 @@ fn extract_conversation_summary(
.clone()
.unwrap_or_else(|| fallback_provider.to_string());
let git_info = git.map(map_git_info);
let updated_at = updated_at.or_else(|| timestamp.clone());

Some(ConversationSummary {
conversation_id,
timestamp,
updated_at,
path,
preview: preview.to_string(),
model_provider,

@@ -4095,12 +4133,25 @@ fn parse_datetime(timestamp: Option<&str>) -> Option<DateTime<Utc>> {
})
}

async fn read_updated_at(path: &Path, created_at: Option<&str>) -> Option<String> {
let updated_at = tokio::fs::metadata(path)
.await
.ok()
.and_then(|meta| meta.modified().ok())
.map(|modified| {
let updated_at: DateTime<Utc> = modified.into();
updated_at.to_rfc3339_opts(SecondsFormat::Secs, true)
});
updated_at.or_else(|| created_at.map(str::to_string))
}

pub(crate) fn summary_to_thread(summary: ConversationSummary) -> Thread {
let ConversationSummary {
conversation_id,
path,
preview,
timestamp,
updated_at,
model_provider,
cwd,
cli_version,

@@ -4109,6 +4160,7 @@ pub(crate) fn summary_to_thread(summary: ConversationSummary) -> Thread {
} = summary;

let created_at = parse_datetime(timestamp.as_deref());
let updated_at = parse_datetime(updated_at.as_deref()).or(created_at);
let git_info = git_info.map(|info| ApiGitInfo {
sha: info.sha,
branch: info.branch,

@@ -4120,6 +4172,7 @@ pub(crate) fn summary_to_thread(summary: ConversationSummary) -> Thread {
preview,
model_provider,
created_at: created_at.map(|dt| dt.timestamp()).unwrap_or(0),
updated_at: updated_at.map(|dt| dt.timestamp()).unwrap_or(0),
path,
cwd,
cli_version,

@@ -4174,13 +4227,20 @@ mod tests {

let session_meta = serde_json::from_value::<SessionMeta>(head[0].clone())?;

let summary =
extract_conversation_summary(path.clone(), &head, &session_meta, None, "test-provider")
.expect("summary");
let summary = extract_conversation_summary(
path.clone(),
&head,
&session_meta,
None,
"test-provider",
timestamp.clone(),
)
.expect("summary");

let expected = ConversationSummary {
conversation_id,
timestamp,
timestamp: timestamp.clone(),
updated_at: timestamp,
path,
preview: "Count to 5".to_string(),
model_provider: "test-provider".to_string(),

@@ -4200,6 +4260,7 @@ mod tests {
use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionMetaLine;
use std::fs;
use std::fs::FileTimes;

let temp_dir = TempDir::new()?;
let path = temp_dir.path().join("rollout.jsonl");

@@ -4223,12 +4284,19 @@ mod tests {
};

fs::write(&path, format!("{}\n", serde_json::to_string(&line)?))?;
let parsed = chrono::DateTime::parse_from_rfc3339(&timestamp)?.with_timezone(&Utc);
let times = FileTimes::new().set_modified(parsed.into());
std::fs::OpenOptions::new()
.append(true)
.open(&path)?
.set_times(times)?;

let summary = read_summary_from_rollout(path.as_path(), "fallback").await?;

let expected = ConversationSummary {
conversation_id,
timestamp: Some(timestamp),
timestamp: Some(timestamp.clone()),
updated_at: Some("2025-09-05T16:53:11Z".to_string()),
path: path.clone(),
preview: String::new(),
model_provider: "fallback".to_string(),
@@ -30,6 +30,7 @@ pub use responses::create_final_assistant_message_sse_response;
pub use responses::create_shell_command_sse_response;
pub use rollout::create_fake_rollout;
pub use rollout::create_fake_rollout_with_text_elements;
pub use rollout::rollout_path;
use serde::de::DeserializeOwned;

pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {

@@ -6,10 +6,23 @@ use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::SessionSource;
use serde_json::json;
use std::fs;
use std::fs::FileTimes;
use std::path::Path;
use std::path::PathBuf;
use uuid::Uuid;

pub fn rollout_path(codex_home: &Path, filename_ts: &str, thread_id: &str) -> PathBuf {
let year = &filename_ts[0..4];
let month = &filename_ts[5..7];
let day = &filename_ts[8..10];
codex_home
.join("sessions")
.join(year)
.join(month)
.join(day)
.join(format!("rollout-{filename_ts}-{thread_id}.jsonl"))
}
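As a quick illustration of the layout `rollout_path` produces, here is a sketch of a check one could add next to the helper; the thread id is a placeholder, not a value from this change:

```rust
#[cfg(test)]
mod rollout_path_example {
    use super::rollout_path;
    use std::path::Path;

    #[test]
    fn builds_dated_session_path() {
        // "some-uuid" is a placeholder; real callers pass the rollout's UUID string.
        let home = Path::new("/tmp/codex-home");
        let path = rollout_path(home, "2025-01-02T12-00-00", "some-uuid");
        assert_eq!(
            path,
            home.join("sessions/2025/01/02/rollout-2025-01-02T12-00-00-some-uuid.jsonl")
        );
    }
}
```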
/// Create a minimal rollout file under `CODEX_HOME/sessions/YYYY/MM/DD/`.
///
/// - `filename_ts` is the filename timestamp component in `YYYY-MM-DDThh-mm-ss` format.

@@ -30,14 +43,11 @@ pub fn create_fake_rollout(
let uuid_str = uuid.to_string();
let conversation_id = ThreadId::from_string(&uuid_str)?;

// sessions/YYYY/MM/DD derived from filename_ts (YYYY-MM-DDThh-mm-ss)
let year = &filename_ts[0..4];
let month = &filename_ts[5..7];
let day = &filename_ts[8..10];
let dir = codex_home.join("sessions").join(year).join(month).join(day);
fs::create_dir_all(&dir)?;

let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
let file_path = rollout_path(codex_home, filename_ts, &uuid_str);
let dir = file_path
.parent()
.ok_or_else(|| anyhow::anyhow!("missing rollout parent directory"))?;
fs::create_dir_all(dir)?;

// Build JSONL lines
let meta = SessionMeta {

@@ -84,7 +94,13 @@ pub fn create_fake_rollout(
.to_string(),
];

fs::write(file_path, lines.join("\n") + "\n")?;
fs::write(&file_path, lines.join("\n") + "\n")?;
let parsed = chrono::DateTime::parse_from_rfc3339(meta_rfc3339)?.with_timezone(&chrono::Utc);
let times = FileTimes::new().set_modified(parsed.into());
std::fs::OpenOptions::new()
.append(true)
.open(&file_path)?
.set_times(times)?;
Ok(uuid_str)
}

@@ -1,17 +1,27 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout;
use app_test_support::rollout_path;
use app_test_support::to_response;
use chrono::DateTime;
use chrono::Utc;
use codex_app_server_protocol::GitInfo as ApiGitInfo;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SessionSource;
use codex_app_server_protocol::ThreadListResponse;
use codex_app_server_protocol::ThreadSortKey;
use codex_protocol::protocol::GitInfo as CoreGitInfo;
use pretty_assertions::assert_eq;
use std::cmp::Reverse;
use std::fs::FileTimes;
use std::fs::OpenOptions;
use std::path::Path;
use std::path::PathBuf;
use tempfile::TempDir;
use tokio::time::timeout;
use uuid::Uuid;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

@@ -26,11 +36,22 @@ async fn list_threads(
cursor: Option<String>,
limit: Option<u32>,
providers: Option<Vec<String>>,
) -> Result<ThreadListResponse> {
list_threads_with_sort(mcp, cursor, limit, providers, None).await
}

async fn list_threads_with_sort(
mcp: &mut McpProcess,
cursor: Option<String>,
limit: Option<u32>,
providers: Option<Vec<String>>,
sort_key: Option<ThreadSortKey>,
) -> Result<ThreadListResponse> {
let request_id = mcp
.send_thread_list_request(codex_app_server_protocol::ThreadListParams {
cursor,
limit,
sort_key,
model_providers: providers,
})
.await?;

@@ -82,6 +103,16 @@ fn timestamp_at(
)
}

fn set_rollout_mtime(path: &Path, updated_at_rfc3339: &str) -> Result<()> {
let parsed = DateTime::parse_from_rfc3339(updated_at_rfc3339)?.with_timezone(&Utc);
let times = FileTimes::new().set_modified(parsed.into());
OpenOptions::new()
.append(true)
.open(path)?
.set_times(times)?;
Ok(())
}

#[tokio::test]
async fn thread_list_basic_empty() -> Result<()> {
let codex_home = TempDir::new()?;

@@ -163,6 +194,7 @@ async fn thread_list_pagination_next_cursor_none_on_last_page() -> Result<()> {
assert_eq!(thread.preview, "Hello");
assert_eq!(thread.model_provider, "mock_provider");
assert!(thread.created_at > 0);
assert_eq!(thread.updated_at, thread.created_at);
assert_eq!(thread.cwd, PathBuf::from("/"));
assert_eq!(thread.cli_version, "0.0.0");
assert_eq!(thread.source, SessionSource::Cli);

@@ -186,6 +218,7 @@ async fn thread_list_pagination_next_cursor_none_on_last_page() -> Result<()> {
assert_eq!(thread.preview, "Hello");
assert_eq!(thread.model_provider, "mock_provider");
assert!(thread.created_at > 0);
assert_eq!(thread.updated_at, thread.created_at);
assert_eq!(thread.cwd, PathBuf::from("/"));
assert_eq!(thread.cli_version, "0.0.0");
assert_eq!(thread.source, SessionSource::Cli);

@@ -236,6 +269,7 @@ async fn thread_list_respects_provider_filter() -> Result<()> {
assert_eq!(thread.model_provider, "other_provider");
let expected_ts = chrono::DateTime::parse_from_rfc3339("2025-01-02T11:00:00Z")?.timestamp();
assert_eq!(thread.created_at, expected_ts);
assert_eq!(thread.updated_at, expected_ts);
assert_eq!(thread.cwd, PathBuf::from("/"));
assert_eq!(thread.cli_version, "0.0.0");
assert_eq!(thread.source, SessionSource::Cli);

@@ -429,3 +463,351 @@ async fn thread_list_includes_git_info() -> Result<()> {

Ok(())
}

#[tokio::test]
async fn thread_list_default_sorts_by_created_at() -> Result<()> {
let codex_home = TempDir::new()?;
create_minimal_config(codex_home.path())?;

let id_a = create_fake_rollout(
codex_home.path(),
"2025-01-02T12-00-00",
"2025-01-02T12:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;
let id_b = create_fake_rollout(
codex_home.path(),
"2025-01-01T13-00-00",
"2025-01-01T13:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;
let id_c = create_fake_rollout(
codex_home.path(),
"2025-01-01T12-00-00",
"2025-01-01T12:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;

let mut mcp = init_mcp(codex_home.path()).await?;

let ThreadListResponse { data, .. } = list_threads_with_sort(
&mut mcp,
None,
Some(10),
Some(vec!["mock_provider".to_string()]),
None,
)
.await?;

let ids: Vec<_> = data.iter().map(|thread| thread.id.as_str()).collect();
assert_eq!(ids, vec![id_a.as_str(), id_b.as_str(), id_c.as_str()]);

Ok(())
}

#[tokio::test]
async fn thread_list_sort_updated_at_orders_by_mtime() -> Result<()> {
let codex_home = TempDir::new()?;
create_minimal_config(codex_home.path())?;

let id_old = create_fake_rollout(
codex_home.path(),
"2025-01-01T10-00-00",
"2025-01-01T10:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;
let id_mid = create_fake_rollout(
codex_home.path(),
"2025-01-01T11-00-00",
"2025-01-01T11:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;
let id_new = create_fake_rollout(
codex_home.path(),
"2025-01-01T12-00-00",
"2025-01-01T12:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;

set_rollout_mtime(
rollout_path(codex_home.path(), "2025-01-01T10-00-00", &id_old).as_path(),
"2025-01-03T00:00:00Z",
)?;
set_rollout_mtime(
rollout_path(codex_home.path(), "2025-01-01T11-00-00", &id_mid).as_path(),
"2025-01-02T00:00:00Z",
)?;
set_rollout_mtime(
rollout_path(codex_home.path(), "2025-01-01T12-00-00", &id_new).as_path(),
"2025-01-01T00:00:00Z",
)?;

let mut mcp = init_mcp(codex_home.path()).await?;

let ThreadListResponse { data, .. } = list_threads_with_sort(
&mut mcp,
None,
Some(10),
Some(vec!["mock_provider".to_string()]),
Some(ThreadSortKey::UpdatedAt),
)
.await?;

let ids: Vec<_> = data.iter().map(|thread| thread.id.as_str()).collect();
assert_eq!(ids, vec![id_old.as_str(), id_mid.as_str(), id_new.as_str()]);

Ok(())
}

#[tokio::test]
async fn thread_list_updated_at_paginates_with_cursor() -> Result<()> {
let codex_home = TempDir::new()?;
create_minimal_config(codex_home.path())?;

let id_a = create_fake_rollout(
codex_home.path(),
"2025-02-01T10-00-00",
"2025-02-01T10:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;
let id_b = create_fake_rollout(
codex_home.path(),
"2025-02-01T11-00-00",
"2025-02-01T11:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;
let id_c = create_fake_rollout(
codex_home.path(),
"2025-02-01T12-00-00",
"2025-02-01T12:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;

set_rollout_mtime(
rollout_path(codex_home.path(), "2025-02-01T10-00-00", &id_a).as_path(),
"2025-02-03T00:00:00Z",
)?;
set_rollout_mtime(
rollout_path(codex_home.path(), "2025-02-01T11-00-00", &id_b).as_path(),
"2025-02-02T00:00:00Z",
)?;
set_rollout_mtime(
rollout_path(codex_home.path(), "2025-02-01T12-00-00", &id_c).as_path(),
"2025-02-01T00:00:00Z",
)?;

let mut mcp = init_mcp(codex_home.path()).await?;

let ThreadListResponse {
data: page1,
next_cursor: cursor1,
} = list_threads_with_sort(
&mut mcp,
None,
Some(2),
Some(vec!["mock_provider".to_string()]),
Some(ThreadSortKey::UpdatedAt),
)
.await?;
let ids_page1: Vec<_> = page1.iter().map(|thread| thread.id.as_str()).collect();
assert_eq!(ids_page1, vec![id_a.as_str(), id_b.as_str()]);
let cursor1 = cursor1.expect("expected nextCursor on first page");

let ThreadListResponse {
data: page2,
next_cursor: cursor2,
} = list_threads_with_sort(
&mut mcp,
Some(cursor1),
Some(2),
Some(vec!["mock_provider".to_string()]),
Some(ThreadSortKey::UpdatedAt),
)
.await?;
let ids_page2: Vec<_> = page2.iter().map(|thread| thread.id.as_str()).collect();
assert_eq!(ids_page2, vec![id_c.as_str()]);
assert_eq!(cursor2, None);

Ok(())
}

#[tokio::test]
async fn thread_list_created_at_tie_breaks_by_uuid() -> Result<()> {
let codex_home = TempDir::new()?;
create_minimal_config(codex_home.path())?;

let id_a = create_fake_rollout(
codex_home.path(),
"2025-02-01T10-00-00",
"2025-02-01T10:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;
let id_b = create_fake_rollout(
codex_home.path(),
"2025-02-01T10-00-00",
"2025-02-01T10:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;

let mut mcp = init_mcp(codex_home.path()).await?;

let ThreadListResponse { data, .. } = list_threads(
&mut mcp,
None,
Some(10),
Some(vec!["mock_provider".to_string()]),
)
.await?;

let ids: Vec<_> = data.iter().map(|thread| thread.id.as_str()).collect();
let mut expected = [id_a, id_b];
expected.sort_by_key(|id| Reverse(Uuid::parse_str(id).expect("uuid should parse")));
let expected: Vec<_> = expected.iter().map(String::as_str).collect();
assert_eq!(ids, expected);

Ok(())
}

#[tokio::test]
async fn thread_list_updated_at_tie_breaks_by_uuid() -> Result<()> {
let codex_home = TempDir::new()?;
create_minimal_config(codex_home.path())?;

let id_a = create_fake_rollout(
codex_home.path(),
"2025-02-01T10-00-00",
"2025-02-01T10:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;
let id_b = create_fake_rollout(
codex_home.path(),
"2025-02-01T11-00-00",
"2025-02-01T11:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;

let updated_at = "2025-02-03T00:00:00Z";
set_rollout_mtime(
rollout_path(codex_home.path(), "2025-02-01T10-00-00", &id_a).as_path(),
updated_at,
)?;
set_rollout_mtime(
rollout_path(codex_home.path(), "2025-02-01T11-00-00", &id_b).as_path(),
updated_at,
)?;

let mut mcp = init_mcp(codex_home.path()).await?;

let ThreadListResponse { data, .. } = list_threads_with_sort(
&mut mcp,
None,
Some(10),
Some(vec!["mock_provider".to_string()]),
Some(ThreadSortKey::UpdatedAt),
)
.await?;

let ids: Vec<_> = data.iter().map(|thread| thread.id.as_str()).collect();
let mut expected = [id_a, id_b];
expected.sort_by_key(|id| Reverse(Uuid::parse_str(id).expect("uuid should parse")));
let expected: Vec<_> = expected.iter().map(String::as_str).collect();
assert_eq!(ids, expected);

Ok(())
}

#[tokio::test]
async fn thread_list_updated_at_uses_mtime() -> Result<()> {
let codex_home = TempDir::new()?;
create_minimal_config(codex_home.path())?;

let thread_id = create_fake_rollout(
codex_home.path(),
"2025-02-01T10-00-00",
"2025-02-01T10:00:00Z",
"Hello",
Some("mock_provider"),
None,
)?;

set_rollout_mtime(
rollout_path(codex_home.path(), "2025-02-01T10-00-00", &thread_id).as_path(),
"2025-02-05T00:00:00Z",
)?;

let mut mcp = init_mcp(codex_home.path()).await?;

let ThreadListResponse { data, .. } = list_threads_with_sort(
&mut mcp,
None,
Some(10),
Some(vec!["mock_provider".to_string()]),
Some(ThreadSortKey::UpdatedAt),
)
.await?;

let thread = data
.iter()
.find(|item| item.id == thread_id)
.expect("expected thread for created rollout");
let expected_created =
chrono::DateTime::parse_from_rfc3339("2025-02-01T10:00:00Z")?.timestamp();
let expected_updated =
chrono::DateTime::parse_from_rfc3339("2025-02-05T00:00:00Z")?.timestamp();
assert_eq!(thread.created_at, expected_created);
assert_eq!(thread.updated_at, expected_updated);

Ok(())
}

#[tokio::test]
async fn thread_list_invalid_cursor_returns_error() -> Result<()> {
let codex_home = TempDir::new()?;
create_minimal_config(codex_home.path())?;

let mut mcp = init_mcp(codex_home.path()).await?;

let request_id = mcp
.send_thread_list_request(codex_app_server_protocol::ThreadListParams {
cursor: Some("not-a-cursor".to_string()),
limit: Some(2),
sort_key: None,
model_providers: Some(vec!["mock_provider".to_string()]),
})
.await?;
let error: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await??;
assert_eq!(error.error.code, -32600);
assert_eq!(error.error.message, "invalid cursor: not-a-cursor");

Ok(())
}
@@ -42,9 +42,12 @@ impl RateLimitStatusPayload {
}
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
#[derive(
Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, Default,
)]
pub enum PlanType {
#[serde(rename = "guest")]
#[default]
Guest,
#[serde(rename = "free")]
Free,

@@ -71,9 +74,3 @@ pub enum PlanType {
#[serde(rename = "edu")]
Edu,
}

impl Default for PlanType {
fn default() -> PlanType {
Self::Guest
}
}
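This commit swaps the hand-written `impl Default` for a derived `Default` with a `#[default]`-tagged variant, which Rust supports natively for enums. A minimal sketch of the same idiom on a stand-in enum (not the real `PlanType`):

```rust
// Stand-in enum for illustration: `#[derive(Default)]` on an enum
// requires exactly one variant to be marked `#[default]`.
#[derive(Debug, PartialEq, Default)]
enum Plan {
    #[default]
    Guest,
    Free,
}

fn main() {
    assert_eq!(Plan::default(), Plan::Guest);
}
```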
@@ -423,10 +423,11 @@ impl Default for Notifications {
/// Terminals generally encode both mouse wheels and trackpads as the same "scroll up/down" mouse
/// button events, without a magnitude. This setting controls whether Codex uses a heuristic to
/// infer wheel vs trackpad per stream, or forces a specific behavior.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum ScrollInputMode {
/// Infer wheel vs trackpad behavior per scroll stream.
#[default]
Auto,
/// Always treat scroll events as mouse-wheel input (fixed lines per tick).
Wheel,

@@ -434,12 +435,6 @@ pub enum ScrollInputMode {
Trackpad,
}

impl Default for ScrollInputMode {
fn default() -> Self {
Self::Auto
}
}

/// Collection of settings that are specific to the TUI.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]

@@ -100,6 +100,7 @@ pub use rollout::find_conversation_path_by_id_str;
pub use rollout::find_thread_path_by_id_str;
pub use rollout::list::Cursor;
pub use rollout::list::ThreadItem;
pub use rollout::list::ThreadSortKey;
pub use rollout::list::ThreadsPage;
pub use rollout::list::parse_cursor;
pub use rollout::list::read_head_for_summary;

@@ -1,11 +1,13 @@
use std::cmp::Reverse;
use std::io::{self};
use std::num::NonZero;
use std::ops::ControlFlow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

use async_trait::async_trait;
use time::OffsetDateTime;
use time::PrimitiveDateTime;
use time::format_description::FormatItem;

@@ -41,8 +43,10 @@ pub struct ThreadItem {
/// First up to `HEAD_RECORD_LIMIT` JSONL records parsed as JSON (includes meta line).
pub head: Vec<serde_json::Value>,
/// RFC3339 timestamp string for when the session was created, if available.
/// created_at comes from the filename timestamp with second precision.
pub created_at: Option<String>,
/// RFC3339 timestamp string for the most recent update (from file mtime).
/// updated_at is truncated to second precision to match created_at.
pub updated_at: Option<String>,
}

@@ -68,6 +72,12 @@ struct HeadTailSummary {
const MAX_SCAN_FILES: usize = 10000;
const HEAD_RECORD_LIMIT: usize = 10;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ThreadSortKey {
CreatedAt,
UpdatedAt,
}

/// Pagination cursor identifying a file by timestamp and UUID.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Cursor {

@@ -81,6 +91,135 @@ impl Cursor {
}
}

/// Keeps track of where a paginated listing left off. As the file scan goes newest -> oldest,
/// it ignores everything until it reaches the last seen item from the previous page, then
/// starts returning results after that. This makes paging stable even if new files show up during
/// pagination.
struct AnchorState {
ts: OffsetDateTime,
id: Uuid,
passed: bool,
}

impl AnchorState {
fn new(anchor: Option<Cursor>) -> Self {
match anchor {
Some(cursor) => Self {
ts: cursor.ts,
id: cursor.id,
passed: false,
},
None => Self {
ts: OffsetDateTime::UNIX_EPOCH,
id: Uuid::nil(),
passed: true,
},
}
}

fn should_skip(&mut self, ts: OffsetDateTime, id: Uuid) -> bool {
if self.passed {
return false;
}
if ts < self.ts || (ts == self.ts && id < self.id) {
self.passed = true;
false
} else {
true
}
}
}
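A small illustration of the anchor semantics described above, written as if inside the same module (a sketch with synthetic values, assuming the `time` macros feature used elsewhere in these tests): entries at or newer than the anchor are skipped, and the first strictly older `(ts, id)` pair flips the state so everything after it is returned.

```rust
use time::macros::datetime;
use uuid::Uuid;

fn anchor_example() {
    // Resume strictly after the last item of the previous page: (12:00, id 9).
    let anchor = Cursor::new(datetime!(2025-01-01 12:00 UTC), Uuid::from_u128(9));
    let mut state = AnchorState::new(Some(anchor));

    // Newest-first scan: newer entries and the anchor item itself are skipped...
    assert!(state.should_skip(datetime!(2025-01-01 13:00 UTC), Uuid::from_u128(1)));
    assert!(state.should_skip(datetime!(2025-01-01 12:00 UTC), Uuid::from_u128(9)));
    // ...and everything strictly older is returned from then on.
    assert!(!state.should_skip(datetime!(2025-01-01 11:00 UTC), Uuid::from_u128(7)));
    assert!(!state.should_skip(datetime!(2025-01-01 10:00 UTC), Uuid::from_u128(2)));
}
```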
/// Visitor interface to customize behavior when visiting each rollout file
/// in `walk_rollout_files`.
///
/// We need to apply different logic if we're ultimately going to be returning
/// threads ordered by created_at or updated_at.
#[async_trait]
trait RolloutFileVisitor {
async fn visit(
&mut self,
ts: OffsetDateTime,
id: Uuid,
path: PathBuf,
scanned: usize,
) -> ControlFlow<()>;
}

/// Collects thread items during directory traversal in created_at order,
/// applying pagination and filters inline.
struct FilesByCreatedAtVisitor<'a> {
items: &'a mut Vec<ThreadItem>,
page_size: usize,
anchor_state: AnchorState,
more_matches_available: bool,
allowed_sources: &'a [SessionSource],
provider_matcher: Option<&'a ProviderMatcher<'a>>,
}

#[async_trait]
impl<'a> RolloutFileVisitor for FilesByCreatedAtVisitor<'a> {
async fn visit(
&mut self,
ts: OffsetDateTime,
id: Uuid,
path: PathBuf,
scanned: usize,
) -> ControlFlow<()> {
if scanned >= MAX_SCAN_FILES && self.items.len() >= self.page_size {
self.more_matches_available = true;
return ControlFlow::Break(());
}
if self.anchor_state.should_skip(ts, id) {
return ControlFlow::Continue(());
}
if self.items.len() == self.page_size {
self.more_matches_available = true;
return ControlFlow::Break(());
}
let updated_at = file_modified_time(&path)
.await
.unwrap_or(None)
.and_then(format_rfc3339);
if let Some(item) = build_thread_item(
path,
self.allowed_sources,
self.provider_matcher,
updated_at,
)
.await
{
self.items.push(item);
}
ControlFlow::Continue(())
}
}

/// Collects lightweight file candidates (path + id + mtime).
/// Sorting by mtime happens after all files are collected.
struct FilesByUpdatedAtVisitor<'a> {
candidates: &'a mut Vec<ThreadCandidate>,
}

#[async_trait]
impl<'a> RolloutFileVisitor for FilesByUpdatedAtVisitor<'a> {
async fn visit(
&mut self,
_ts: OffsetDateTime,
id: Uuid,
path: PathBuf,
_scanned: usize,
) -> ControlFlow<()> {
let updated_at = file_modified_time(&path).await.unwrap_or(None);
self.candidates.push(ThreadCandidate {
path,
id,
updated_at,
});
ControlFlow::Continue(())
}
}

impl serde::Serialize for Cursor {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where

@@ -108,11 +247,13 @@ impl<'de> serde::Deserialize<'de> for Cursor {

/// Retrieve recorded thread file paths with token pagination. The returned `next_cursor`
/// can be supplied on the next call to resume after the last returned item, resilient to
/// concurrent new sessions being appended. Ordering is stable by timestamp desc, then UUID desc.
/// concurrent new sessions being appended. Ordering is stable by the requested sort key
/// (timestamp desc, then UUID desc).
pub(crate) async fn get_threads(
codex_home: &Path,
page_size: usize,
cursor: Option<&Cursor>,
sort_key: ThreadSortKey,
allowed_sources: &[SessionSource],
model_providers: Option<&[String]>,
default_provider: &str,

@@ -138,6 +279,7 @@ pub(crate) async fn get_threads(
root.clone(),
page_size,
anchor,
sort_key,
allowed_sources,
provider_matcher.as_ref(),
)

@@ -148,8 +290,46 @@ pub(crate) async fn get_threads(
/// Load thread file paths from disk using directory traversal.
///
/// Directory layout: `~/.codex/sessions/YYYY/MM/DD/rollout-YYYY-MM-DDThh-mm-ss-<uuid>.jsonl`
/// Returned newest (latest) first.
/// Returned newest (based on sort key) first.
async fn traverse_directories_for_paths(
root: PathBuf,
page_size: usize,
anchor: Option<Cursor>,
sort_key: ThreadSortKey,
allowed_sources: &[SessionSource],
provider_matcher: Option<&ProviderMatcher<'_>>,
) -> io::Result<ThreadsPage> {
match sort_key {
ThreadSortKey::CreatedAt => {
traverse_directories_for_paths_created(
root,
page_size,
anchor,
allowed_sources,
provider_matcher,
)
.await
}
ThreadSortKey::UpdatedAt => {
traverse_directories_for_paths_updated(
root,
page_size,
anchor,
allowed_sources,
provider_matcher,
)
.await
}
}
}

/// Walk the rollout directory tree in reverse chronological order and
/// collect items until the page fills or the scan cap is hit.
///
/// Ordering comes from directory/filename sorting, so created_at is derived
/// from the filename timestamp. Pagination is handled by the anchor cursor
/// so we resume strictly after the last returned `(ts, id)` pair.
async fn traverse_directories_for_paths_created(
root: PathBuf,
page_size: usize,
anchor: Option<Cursor>,

@@ -158,98 +338,17 @@ async fn traverse_directories_for_paths(
) -> io::Result<ThreadsPage> {
let mut items: Vec<ThreadItem> = Vec::with_capacity(page_size);
let mut scanned_files = 0usize;
let mut anchor_passed = anchor.is_none();
let (anchor_ts, anchor_id) = match anchor {
Some(c) => (c.ts, c.id),
None => (OffsetDateTime::UNIX_EPOCH, Uuid::nil()),
};
let mut more_matches_available = false;

let year_dirs = collect_dirs_desc(&root, |s| s.parse::<u16>().ok()).await?;

'outer: for (_year, year_path) in year_dirs.iter() {
if scanned_files >= MAX_SCAN_FILES {
break;
}
let month_dirs = collect_dirs_desc(year_path, |s| s.parse::<u8>().ok()).await?;
for (_month, month_path) in month_dirs.iter() {
if scanned_files >= MAX_SCAN_FILES {
break 'outer;
}
let day_dirs = collect_dirs_desc(month_path, |s| s.parse::<u8>().ok()).await?;
for (_day, day_path) in day_dirs.iter() {
if scanned_files >= MAX_SCAN_FILES {
break 'outer;
}
let mut day_files = collect_files(day_path, |name_str, path| {
if !name_str.starts_with("rollout-") || !name_str.ends_with(".jsonl") {
return None;
}

parse_timestamp_uuid_from_filename(name_str)
.map(|(ts, id)| (ts, id, name_str.to_string(), path.to_path_buf()))
})
.await?;
// Stable ordering within the same second: (timestamp desc, uuid desc)
day_files.sort_by_key(|(ts, sid, _name_str, _path)| (Reverse(*ts), Reverse(*sid)));
for (ts, sid, _name_str, path) in day_files.into_iter() {
scanned_files += 1;
if scanned_files >= MAX_SCAN_FILES && items.len() >= page_size {
more_matches_available = true;
break 'outer;
}
if !anchor_passed {
if ts < anchor_ts || (ts == anchor_ts && sid < anchor_id) {
anchor_passed = true;
} else {
continue;
}
}
if items.len() == page_size {
more_matches_available = true;
break 'outer;
}
// Read head and detect message events; stop once meta + user are found.
let summary = read_head_summary(&path, HEAD_RECORD_LIMIT)
.await
.unwrap_or_default();
if !allowed_sources.is_empty()
&& !summary
.source
.is_some_and(|source| allowed_sources.iter().any(|s| s == &source))
{
continue;
}
if let Some(matcher) = provider_matcher
&& !matcher.matches(summary.model_provider.as_deref())
{
continue;
}
// Apply filters: must have session meta and at least one user message event
if summary.saw_session_meta && summary.saw_user_event {
let HeadTailSummary {
head,
created_at,
mut updated_at,
..
} = summary;
if updated_at.is_none() {
updated_at = file_modified_rfc3339(&path)
.await
.unwrap_or(None)
.or_else(|| created_at.clone());
}
items.push(ThreadItem {
path,
head,
created_at,
updated_at,
});
}
}
}
}
}
let mut visitor = FilesByCreatedAtVisitor {
items: &mut items,
page_size,
anchor_state: AnchorState::new(anchor),
more_matches_available,
allowed_sources,
provider_matcher,
};
walk_rollout_files(&root, &mut scanned_files, &mut visitor).await?;
more_matches_available = visitor.more_matches_available;

let reached_scan_cap = scanned_files >= MAX_SCAN_FILES;
if reached_scan_cap && !items.is_empty() {

@@ -257,7 +356,7 @@ async fn traverse_directories_for_paths(
}

let next = if more_matches_available {
build_next_cursor(&items)
build_next_cursor(&items, ThreadSortKey::CreatedAt)
} else {
None
};

@@ -269,9 +368,77 @@ async fn traverse_directories_for_paths(
})
}

/// Pagination cursor token format: "<file_ts>|<uuid>" where `file_ts` matches the
/// filename timestamp portion (YYYY-MM-DDThh-mm-ss) used in rollout filenames.
/// The cursor orders files by timestamp desc, then UUID desc.
/// Walk the rollout directory tree to collect files by updated_at, then sort by
/// file mtime (updated_at) and apply pagination/filtering in that order.
///
/// Because updated_at is not encoded in filenames, this path must scan all
/// files up to the scan cap, then sort and filter by the anchor cursor.
///
/// NOTE: This can be optimized in the future if we store additional state on disk
/// to cache updated_at timestamps.
async fn traverse_directories_for_paths_updated(
root: PathBuf,
page_size: usize,
anchor: Option<Cursor>,
allowed_sources: &[SessionSource],
provider_matcher: Option<&ProviderMatcher<'_>>,
) -> io::Result<ThreadsPage> {
let mut items: Vec<ThreadItem> = Vec::with_capacity(page_size);
let mut scanned_files = 0usize;
let mut anchor_state = AnchorState::new(anchor);
let mut more_matches_available = false;

let candidates = collect_files_by_updated_at(&root, &mut scanned_files).await?;
let mut candidates = candidates;
candidates.sort_by_key(|candidate| {
let ts = candidate.updated_at.unwrap_or(OffsetDateTime::UNIX_EPOCH);
(Reverse(ts), Reverse(candidate.id))
});

for candidate in candidates.into_iter() {
let ts = candidate.updated_at.unwrap_or(OffsetDateTime::UNIX_EPOCH);
if anchor_state.should_skip(ts, candidate.id) {
continue;
}
if items.len() == page_size {
more_matches_available = true;
break;
}

let updated_at_fallback = candidate.updated_at.and_then(format_rfc3339);
if let Some(item) = build_thread_item(
candidate.path,
allowed_sources,
provider_matcher,
updated_at_fallback,
)
.await
{
items.push(item);
}
}

let reached_scan_cap = scanned_files >= MAX_SCAN_FILES;
if reached_scan_cap && !items.is_empty() {
more_matches_available = true;
}

let next = if more_matches_available {
build_next_cursor(&items, ThreadSortKey::UpdatedAt)
} else {
None
};
Ok(ThreadsPage {
items,
next_cursor: next,
num_scanned_files: scanned_files,
reached_scan_cap,
})
}

/// Pagination cursor token format: "<ts>|<uuid>" where `ts` uses
/// YYYY-MM-DDThh-mm-ss (UTC, second precision).
/// The cursor orders files by the requested sort key (timestamp desc, then UUID desc).
pub fn parse_cursor(token: &str) -> Option<Cursor> {
let (file_ts, uuid_str) = token.split_once('|')?;

@@ -286,13 +453,63 @@ pub fn parse_cursor(token: &str) -> Option<Cursor> {
Some(Cursor::new(ts, uuid))
}

fn build_next_cursor(items: &[ThreadItem]) -> Option<Cursor> {
fn build_next_cursor(items: &[ThreadItem], sort_key: ThreadSortKey) -> Option<Cursor> {
let last = items.last()?;
let file_name = last.path.file_name()?.to_string_lossy();
let (ts, id) = parse_timestamp_uuid_from_filename(&file_name)?;
let (created_ts, id) = parse_timestamp_uuid_from_filename(&file_name)?;
let ts = match sort_key {
ThreadSortKey::CreatedAt => created_ts,
ThreadSortKey::UpdatedAt => {
let updated_at = last.updated_at.as_deref()?;
OffsetDateTime::parse(updated_at, &Rfc3339).ok()?
}
};
Some(Cursor::new(ts, id))
}
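Putting the cursor pieces together, a round-trip through the documented token format looks roughly like this (a sketch; the UUID is a placeholder):

```rust
fn cursor_token_example() {
    // "<ts>|<uuid>" with the timestamp in YYYY-MM-DDThh-mm-ss (UTC, second precision).
    let token = "2025-02-01T10-00-00|67e55044-10b1-426f-9247-bb680e5fe0c8";
    let cursor = parse_cursor(token).expect("well-formed token should parse");
    let _ = cursor;

    // A malformed token simply fails to parse; the app-server handler surfaces
    // that as an invalid-request error rather than returning an empty page.
    assert!(parse_cursor("not-a-cursor").is_none());
}
```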
async fn build_thread_item(
path: PathBuf,
allowed_sources: &[SessionSource],
provider_matcher: Option<&ProviderMatcher<'_>>,
updated_at: Option<String>,
) -> Option<ThreadItem> {
// Read head and detect message events; stop once meta + user are found.
let summary = read_head_summary(&path, HEAD_RECORD_LIMIT)
.await
.unwrap_or_default();
if !allowed_sources.is_empty()
&& !summary
.source
.is_some_and(|source| allowed_sources.contains(&source))
{
return None;
}
if let Some(matcher) = provider_matcher
&& !matcher.matches(summary.model_provider.as_deref())
{
return None;
}
// Apply filters: must have session meta and at least one user message event
if summary.saw_session_meta && summary.saw_user_event {
let HeadTailSummary {
head,
created_at,
updated_at: mut summary_updated_at,
..
} = summary;
if summary_updated_at.is_none() {
summary_updated_at = updated_at.or_else(|| created_at.clone());
}
return Some(ThreadItem {
path,
head,
created_at,
updated_at: summary_updated_at,
});
}
None
}

/// Collects immediate subdirectories of `parent`, parses their (string) names with `parse`,
/// and returns them sorted descending by the parsed key.
async fn collect_dirs_desc<T, F>(parent: &Path, parse: F) -> io::Result<Vec<(T, PathBuf)>>

@@ -340,6 +557,22 @@ where
Ok(collected)
}

async fn collect_rollout_day_files(
day_path: &Path,
) -> io::Result<Vec<(OffsetDateTime, Uuid, PathBuf)>> {
let mut day_files = collect_files(day_path, |name_str, path| {
if !name_str.starts_with("rollout-") || !name_str.ends_with(".jsonl") {
return None;
}

parse_timestamp_uuid_from_filename(name_str).map(|(ts, id)| (ts, id, path.to_path_buf()))
})
.await?;
// Stable ordering within the same second: (timestamp desc, uuid desc)
day_files.sort_by_key(|(ts, sid, _path)| (Reverse(*ts), Reverse(*sid)));
Ok(day_files)
}

fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uuid)> {
// Expected: rollout-YYYY-MM-DDThh-mm-ss-<uuid>.jsonl
let core = name.strip_prefix("rollout-")?.strip_suffix(".jsonl")?;

@@ -357,6 +590,65 @@ fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uui
Some((ts, uuid))
}

struct ThreadCandidate {
path: PathBuf,
id: Uuid,
updated_at: Option<OffsetDateTime>,
}

async fn collect_files_by_updated_at(
root: &Path,
scanned_files: &mut usize,
) -> io::Result<Vec<ThreadCandidate>> {
let mut candidates = Vec::new();
let mut visitor = FilesByUpdatedAtVisitor {
candidates: &mut candidates,
};
walk_rollout_files(root, scanned_files, &mut visitor).await?;

Ok(candidates)
}

async fn walk_rollout_files(
root: &Path,
scanned_files: &mut usize,
visitor: &mut impl RolloutFileVisitor,
) -> io::Result<()> {
let year_dirs = collect_dirs_desc(root, |s| s.parse::<u16>().ok()).await?;

'outer: for (_year, year_path) in year_dirs.iter() {
if *scanned_files >= MAX_SCAN_FILES {
break;
}
let month_dirs = collect_dirs_desc(year_path, |s| s.parse::<u8>().ok()).await?;
for (_month, month_path) in month_dirs.iter() {
if *scanned_files >= MAX_SCAN_FILES {
break 'outer;
}
let day_dirs = collect_dirs_desc(month_path, |s| s.parse::<u8>().ok()).await?;
for (_day, day_path) in day_dirs.iter() {
if *scanned_files >= MAX_SCAN_FILES {
break 'outer;
}
let day_files = collect_rollout_day_files(day_path).await?;
for (ts, id, path) in day_files.into_iter() {
*scanned_files += 1;
if *scanned_files > MAX_SCAN_FILES {
break 'outer;
}
if let ControlFlow::Break(()) =
visitor.visit(ts, id, path, *scanned_files).await
{
break 'outer;
}
}
}
}
}

Ok(())
}
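Because `walk_rollout_files` is generic over the visitor, other consumers could reuse the same capped, newest-first traversal. A hypothetical visitor that just counts the files it is shown, to illustrate the shape of that seam (not part of this change):

```rust
struct CountingVisitor {
    seen: usize,
}

#[async_trait]
impl RolloutFileVisitor for CountingVisitor {
    async fn visit(
        &mut self,
        _ts: OffsetDateTime,
        _id: Uuid,
        _path: PathBuf,
        _scanned: usize,
    ) -> ControlFlow<()> {
        // Count every rollout file the walk yields; never stop early.
        self.seen += 1;
        ControlFlow::Continue(())
    }
}

// Usage sketch:
// let mut scanned = 0usize;
// let mut visitor = CountingVisitor { seen: 0 };
// walk_rollout_files(&root, &mut scanned, &mut visitor).await?;
```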
struct ProviderMatcher<'a> {
filters: &'a [String],
matches_default_provider: bool,

@@ -452,14 +744,24 @@ pub async fn read_head_for_summary(path: &Path) -> io::Result<Vec<serde_json::Va
Ok(summary.head)
}

async fn file_modified_rfc3339(path: &Path) -> io::Result<Option<String>> {
async fn file_modified_time(path: &Path) -> io::Result<Option<OffsetDateTime>> {
let meta = tokio::fs::metadata(path).await?;
let modified = meta.modified().ok();
let Some(modified) = modified else {
return Ok(None);
};
let dt = OffsetDateTime::from(modified);
Ok(dt.format(&Rfc3339).ok())
// Truncate to seconds so ordering and cursor comparisons align with the
// cursor timestamp format (which exposes seconds), keeping pagination stable.
Ok(truncate_to_seconds(dt))
}

fn format_rfc3339(dt: OffsetDateTime) -> Option<String> {
dt.format(&Rfc3339).ok()
}

fn truncate_to_seconds(dt: OffsetDateTime) -> Option<OffsetDateTime> {
dt.replace_nanosecond(0).ok()
}

/// Locate a recorded thread rollout file by its UUID string using the existing
@@ -22,6 +22,7 @@ use tracing::warn;

use super::SESSIONS_SUBDIR;
use super::list::Cursor;
use super::list::ThreadSortKey;
use super::list::ThreadsPage;
use super::list::get_threads;
use super::policy::is_persisted_response_item;

@@ -99,6 +100,7 @@ impl RolloutRecorder {
codex_home: &Path,
page_size: usize,
cursor: Option<&Cursor>,
sort_key: ThreadSortKey,
allowed_sources: &[SessionSource],
model_providers: Option<&[String]>,
default_provider: &str,

@@ -107,6 +109,7 @@ impl RolloutRecorder {
codex_home,
page_size,
cursor,
sort_key,
allowed_sources,
model_providers,
default_provider,

@@ -115,19 +118,24 @@ impl RolloutRecorder {
}

/// Find the newest recorded thread path, optionally filtering to a matching cwd.
#[allow(clippy::too_many_arguments)]
pub async fn find_latest_thread_path(
codex_home: &Path,
page_size: usize,
cursor: Option<&Cursor>,
sort_key: ThreadSortKey,
allowed_sources: &[SessionSource],
model_providers: Option<&[String]>,
default_provider: &str,
filter_cwd: Option<&Path>,
) -> std::io::Result<Option<PathBuf>> {
let mut cursor: Option<Cursor> = None;
let mut cursor = cursor.cloned();
loop {
let page = Self::list_threads(
codex_home,
25,
page_size,
cursor.as_ref(),
sort_key,
allowed_sources,
model_providers,
default_provider,
@@ -1,11 +1,13 @@
#![allow(clippy::unwrap_used, clippy::expect_used)]

use std::fs::File;
use std::fs::FileTimes;
use std::fs::{self};
use std::io::Write;
use std::path::Path;

use tempfile::TempDir;
use time::Duration;
use time::OffsetDateTime;
use time::PrimitiveDateTime;
use time::format_description::FormatItem;
@@ -15,6 +17,7 @@ use uuid::Uuid;
use crate::rollout::INTERACTIVE_SESSION_SOURCES;
use crate::rollout::list::Cursor;
use crate::rollout::list::ThreadItem;
use crate::rollout::list::ThreadSortKey;
use crate::rollout::list::ThreadsPage;
use crate::rollout::list::get_threads;
use anyhow::Result;
@@ -122,6 +125,8 @@ fn write_session_file_with_provider(
        });
        writeln!(file, "{rec}")?;
    }
    let times = FileTimes::new().set_modified(dt.into());
    file.set_times(times)?;
    Ok((dt, uuid))
}

@@ -166,6 +171,7 @@ async fn test_list_conversations_latest_first() {
        home,
        10,
        None,
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
@@ -315,6 +321,7 @@ async fn test_pagination_cursor() {
        home,
        2,
        None,
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
@@ -382,6 +389,7 @@ async fn test_pagination_cursor() {
        home,
        2,
        page1.next_cursor.as_ref(),
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
@@ -449,6 +457,7 @@ async fn test_pagination_cursor() {
        home,
        2,
        page2.next_cursor.as_ref(),
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
@@ -501,6 +510,7 @@ async fn test_get_thread_contents() {
        home,
        1,
        None,
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
@@ -567,6 +577,52 @@ async fn test_get_thread_contents() {
    assert_eq!(content, expected_content);
}

#[tokio::test]
async fn test_created_at_sort_uses_file_mtime_for_updated_at() -> Result<()> {
    let temp = TempDir::new().unwrap();
    let home = temp.path();

    let ts = "2025-06-01T08-00-00";
    let uuid = Uuid::from_u128(43);
    write_session_file(home, ts, uuid, 0, Some(SessionSource::VSCode)).unwrap();

    let created = PrimitiveDateTime::parse(
        ts,
        format_description!("[year]-[month]-[day]T[hour]-[minute]-[second]"),
    )?
    .assume_utc();
    let updated = created + Duration::hours(2);
    let expected_updated = updated.format(&time::format_description::well_known::Rfc3339)?;

    let file_path = home
        .join("sessions")
        .join("2025")
        .join("06")
        .join("01")
        .join(format!("rollout-{ts}-{uuid}.jsonl"));
    let file = std::fs::OpenOptions::new().write(true).open(&file_path)?;
    let times = FileTimes::new().set_modified(updated.into());
    file.set_times(times)?;

    let provider_filter = provider_vec(&[TEST_PROVIDER]);
    let page = get_threads(
        home,
        1,
        None,
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
    )
    .await?;

    let item = page.items.first().expect("conversation item");
    assert_eq!(item.created_at.as_deref(), Some(ts));
    assert_eq!(item.updated_at.as_deref(), Some(expected_updated.as_str()));

    Ok(())
}

#[tokio::test]
async fn test_updated_at_uses_file_mtime() -> Result<()> {
    let temp = TempDir::new().unwrap();
@@ -630,6 +686,7 @@ async fn test_updated_at_uses_file_mtime() -> Result<()> {
        home,
        1,
        None,
        ThreadSortKey::UpdatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
@@ -669,6 +726,7 @@ async fn test_stable_ordering_same_second_pagination() {
        home,
        2,
        None,
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
@@ -728,6 +786,7 @@ async fn test_stable_ordering_same_second_pagination() {
        home,
        2,
        page1.next_cursor.as_ref(),
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
@@ -786,6 +845,7 @@ async fn test_source_filter_excludes_non_matching_sessions() {
        home,
        10,
        None,
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        TEST_PROVIDER,
@@ -803,9 +863,17 @@ async fn test_source_filter_excludes_non_matching_sessions() {
        path.ends_with("rollout-2025-08-02T10-00-00-00000000-0000-0000-0000-00000000002a.jsonl")
    }));

    let all_sessions = get_threads(home, 10, None, NO_SOURCE_FILTER, None, TEST_PROVIDER)
        .await
        .unwrap();
    let all_sessions = get_threads(
        home,
        10,
        None,
        ThreadSortKey::CreatedAt,
        NO_SOURCE_FILTER,
        None,
        TEST_PROVIDER,
    )
    .await
    .unwrap();
    let all_paths: Vec<_> = all_sessions
        .items
        .into_iter()
@@ -861,6 +929,7 @@ async fn test_model_provider_filter_selects_only_matching_sessions() -> Result<(
        home,
        10,
        None,
        ThreadSortKey::CreatedAt,
        NO_SOURCE_FILTER,
        Some(openai_filter.as_slice()),
        "openai",
@@ -886,6 +955,7 @@ async fn test_model_provider_filter_selects_only_matching_sessions() -> Result<(
        home,
        10,
        None,
        ThreadSortKey::CreatedAt,
        NO_SOURCE_FILTER,
        Some(beta_filter.as_slice()),
        "openai",
@@ -906,6 +976,7 @@ async fn test_model_provider_filter_selects_only_matching_sessions() -> Result<(
        home,
        10,
        None,
        ThreadSortKey::CreatedAt,
        NO_SOURCE_FILTER,
        Some(unknown_filter.as_slice()),
        "openai",
@@ -913,7 +984,16 @@ async fn test_model_provider_filter_selects_only_matching_sessions() -> Result<(
    .await?;
    assert!(unknown_sessions.items.is_empty());

    let all_sessions = get_threads(home, 10, None, NO_SOURCE_FILTER, None, "openai").await?;
    let all_sessions = get_threads(
        home,
        10,
        None,
        ThreadSortKey::CreatedAt,
        NO_SOURCE_FILTER,
        None,
        "openai",
    )
    .await?;
    assert_eq!(all_sessions.items.len(), 3);

    Ok(())

@@ -340,6 +340,7 @@ mod detect_shell_type_tests {
#[cfg(unix)]
mod tests {
    use super::*;
    use std::path::Path;
    use std::path::PathBuf;
    use std::process::Command;

@@ -350,7 +351,7 @@ mod tests {

        let shell_path = zsh_shell.shell_path;

        assert_eq!(shell_path, PathBuf::from("/bin/zsh"));
        assert_eq!(shell_path, Path::new("/bin/zsh"));
    }

    #[test]
@@ -360,7 +361,7 @@ mod tests {

        let shell_path = zsh_shell.shell_path;

        assert_eq!(shell_path, PathBuf::from("/bin/zsh"));
        assert_eq!(shell_path, Path::new("/bin/zsh"));
    }

    #[test]
@@ -369,9 +370,9 @@ mod tests {
        let shell_path = bash_shell.shell_path;

        assert!(
            shell_path == PathBuf::from("/bin/bash")
                || shell_path == PathBuf::from("/usr/bin/bash")
                || shell_path == PathBuf::from("/usr/local/bin/bash"),
            shell_path == Path::new("/bin/bash")
                || shell_path == Path::new("/usr/bin/bash")
                || shell_path == Path::new("/usr/local/bin/bash"),
            "shell path: {shell_path:?}",
        );
    }
@@ -381,7 +382,7 @@ mod tests {
        let sh_shell = get_shell(ShellType::Sh, None).unwrap();
        let shell_path = sh_shell.shell_path;
        assert!(
            shell_path == PathBuf::from("/bin/sh") || shell_path == PathBuf::from("/usr/bin/sh"),
            shell_path == Path::new("/bin/sh") || shell_path == Path::new("/usr/bin/sh"),
            "shell path: {shell_path:?}",
        );
    }

@@ -282,6 +282,10 @@ fn discover_skills_under_root(root: &Path, scope: SkillScope, outcome: &mut Skil
                continue;
            }

            if metadata.is_file() && file_name == SKILLS_FILENAME {
                record_skill_from_path(&path, scope, outcome);
            }

            continue;
        }

@@ -300,19 +304,7 @@ fn discover_skills_under_root(root: &Path, scope: SkillScope, outcome: &mut Skil
        }

        if file_type.is_file() && file_name == SKILLS_FILENAME {
            match parse_skill_file(&path, scope) {
                Ok(skill) => {
                    outcome.skills.push(skill);
                }
                Err(err) => {
                    if scope != SkillScope::System {
                        outcome.errors.push(SkillError {
                            path,
                            message: err.to_string(),
                        });
                    }
                }
            }
            record_skill_from_path(&path, scope, outcome);
        }
    }
}
@@ -326,6 +318,22 @@ fn discover_skills_under_root(root: &Path, scope: SkillScope, outcome: &mut Skil
    }
}

fn record_skill_from_path(path: &Path, scope: SkillScope, outcome: &mut SkillLoadOutcome) {
    match parse_skill_file(path, scope) {
        Ok(skill) => {
            outcome.skills.push(skill);
        }
        Err(err) => {
            if scope != SkillScope::System {
                outcome.errors.push(SkillError {
                    path: path.to_path_buf(),
                    message: err.to_string(),
                });
            }
        }
    }
}

fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, SkillParseError> {
    let contents = fs::read_to_string(path).map_err(SkillParseError::Read)?;

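For context (not part of these commits): the new symlink branch works because `std::fs::metadata` follows symlinks while `DirEntry::file_type` and `fs::symlink_metadata` describe the link itself. A small Unix-only sketch with made-up paths:

```rust
use std::fs;
use std::os::unix::fs::symlink;

fn main() -> std::io::Result<()> {
    // Hypothetical scratch layout for the demo.
    let dir = std::env::temp_dir().join("skill-symlink-demo");
    fs::create_dir_all(&dir)?;
    let target = dir.join("SKILL.real.md");
    fs::write(&target, "---\nname: demo\ndescription: demo skill\n---\n")?;
    let link = dir.join("SKILL.md");
    let _ = fs::remove_file(&link);
    symlink(&target, &link)?;

    // symlink_metadata (like DirEntry::file_type) reports the link itself...
    assert!(fs::symlink_metadata(&link)?.file_type().is_symlink());
    // ...while metadata follows it and sees a regular file, which is what the
    // symlink branch checks before recording the skill.
    assert!(fs::metadata(&link)?.is_file());
    Ok(())
}
```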
@@ -40,9 +40,10 @@ struct ReadFileArgs {
    indentation: Option<IndentationArgs>,
}

#[derive(Deserialize)]
#[derive(Deserialize, Default)]
#[serde(rename_all = "snake_case")]
enum ReadMode {
    #[default]
    Slice,
    Indentation,
}
@@ -461,12 +462,6 @@ mod defaults {
        }
    }

    impl Default for ReadMode {
        fn default() -> Self {
            Self::Slice
        }
    }

    pub fn offset() -> usize {
        1
    }

@@ -88,6 +88,7 @@ async fn chat_mode_stream_cli() {
        home.path(),
        10,
        None,
        codex_core::ThreadSortKey::UpdatedAt,
        &[],
        Some(provider_filter.as_slice()),
        "mock",

@@ -15,6 +15,7 @@ use core_test_support::responses::start_mock_server;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use std::fs;
use std::os::unix::fs::symlink;
use std::path::Path;

fn write_skill(home: &Path, name: &str, description: &str, body: &str) -> std::path::PathBuf {
@@ -154,6 +155,57 @@ async fn skill_load_errors_surface_in_session_configured() -> Result<()> {
    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_skills_includes_symlinked_skill_md() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_pre_build_hook(|home| {
        let external_dir = home.join("external-skills").join("demo-symlink");
        fs::create_dir_all(&external_dir).unwrap();
        let contents = "---\nname: demo-symlink\ndescription: demo skill\n---\n\nbody\n";
        let external_skill_path = external_dir.join("SKILL.md");
        fs::write(&external_skill_path, contents).unwrap();

        let skill_dir = home.join("skills").join("demo-symlink");
        fs::create_dir_all(&skill_dir).unwrap();
        symlink(&external_skill_path, skill_dir.join("SKILL.md")).unwrap();
    });
    let test = builder.build(&server).await?;

    test.codex
        .submit(Op::ListSkills {
            cwds: Vec::new(),
            force_reload: true,
        })
        .await?;
    let response =
        core_test_support::wait_for_event_match(test.codex.as_ref(), |event| match event {
            codex_core::protocol::EventMsg::ListSkillsResponse(response) => Some(response.clone()),
            _ => None,
        })
        .await;

    let cwd = test.cwd_path();
    let (skills, errors) = response
        .skills
        .iter()
        .find(|entry| entry.cwd.as_path() == cwd)
        .map(|entry| (entry.skills.clone(), entry.errors.clone()))
        .unwrap_or_default();

    assert!(
        errors.is_empty(),
        "expected no load errors for symlinked skill, got {errors:?}"
    );
    assert!(
        skills.iter().any(|skill| skill.name == "demo-symlink"),
        "expected symlinked skill to be listed, got {skills:?}"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_skills_includes_system_cache_entries() -> Result<()> {
    skip_if_no_network!(Ok(()));

@@ -171,6 +171,7 @@ impl AppServerClient {
            params: ThreadListParams {
                cursor,
                limit: None,
                sort_key: None,
                model_providers: None,
            },
        };

@@ -514,6 +514,9 @@ async fn resolve_resume_path(
    };
    match codex_core::RolloutRecorder::find_latest_thread_path(
        &config.codex_home,
        1,
        None,
        codex_core::ThreadSortKey::UpdatedAt,
        &[],
        Some(default_provider_filter.as_slice()),
        &config.model_provider_id,

@@ -1656,21 +1656,18 @@ pub struct ReviewLineRange {
    pub end: u32,
}

#[derive(Debug, Clone, Copy, Display, Deserialize, Serialize, PartialEq, Eq, JsonSchema, TS)]
#[derive(
    Debug, Clone, Copy, Display, Deserialize, Serialize, PartialEq, Eq, JsonSchema, TS, Default,
)]
#[serde(rename_all = "snake_case")]
pub enum ExecCommandSource {
    #[default]
    Agent,
    UserShell,
    UnifiedExecStartup,
    UnifiedExecInteraction,
}

impl Default for ExecCommandSource {
    fn default() -> Self {
        Self::Agent
    }
}

#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS)]
pub struct ExecCommandBeginEvent {
    /// Identifier so this can be paired with the ExecCommandEnd event.

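For context (not part of these commits): the `ReadMode` and `ExecCommandSource` changes both replace a hand-written `impl Default` with the derived form, where `#[derive(Default)]` on an enum returns the variant marked `#[default]` (stable since Rust 1.62). A standalone sketch with a hypothetical enum:

```rust
#[derive(Debug, PartialEq, Default)]
enum Source {
    #[default]
    Agent,
    UserShell,
}

fn main() {
    // The derived Default yields the #[default] variant.
    assert_eq!(Source::default(), Source::Agent);
}
```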
@@ -1,3 +1,3 @@
[toolchain]
channel = "1.90.0"
channel = "1.92.0"
components = ["clippy", "rustfmt", "rust-src"]

@@ -179,7 +179,7 @@ if (-not (Ensure-Command 'cargo')) {
Write-Host "==> Configuring Rust toolchain per rust-toolchain.toml" -ForegroundColor Cyan

# Pin to the workspace toolchain and install components
$toolchain = '1.90.0'
$toolchain = '1.92.0'
& rustup toolchain install $toolchain --profile minimal | Out-Host
& rustup default $toolchain | Out-Host
& rustup component add clippy rustfmt rust-src --toolchain $toolchain | Out-Host

@@ -758,73 +758,61 @@ impl App {
                // Leaving alt-screen may blank the inline viewport; force a redraw either way.
                tui.frame_requester().schedule_frame();
            }
            AppEvent::OpenForkPicker => {
                match crate::resume_picker::run_fork_picker(
                    tui,
                    &self.config.codex_home,
                    &self.config.model_provider_id,
                    false,
                )
                .await?
                {
                    SessionSelection::Fork(path) => {
                        let summary = session_summary(
                            self.chat_widget.token_usage(),
                            self.chat_widget.thread_id(),
                        );
                        match self
                            .server
                            .fork_thread(usize::MAX, self.config.clone(), path.clone())
                            .await
                        {
                            Ok(forked) => {
                                self.shutdown_current_thread().await;
                                let init = crate::chatwidget::ChatWidgetInit {
                                    config: self.config.clone(),
                                    frame_requester: tui.frame_requester(),
                                    app_event_tx: self.app_event_tx.clone(),
                                    initial_prompt: None,
                                    initial_images: Vec::new(),
                                    enhanced_keys_supported: self.enhanced_keys_supported,
                                    auth_manager: self.auth_manager.clone(),
                                    models_manager: self.server.get_models_manager(),
                                    feedback: self.feedback.clone(),
                                    is_first_run: false,
                                    model: Some(self.current_model.clone()),
                                };
                                self.chat_widget = ChatWidget::new_from_existing(
                                    init,
                                    forked.thread,
                                    forked.session_configured,
                                );
                                self.current_model = model_info.slug.clone();
                                if let Some(summary) = summary {
                                    let mut lines: Vec<Line<'static>> =
                                        vec![summary.usage_line.clone().into()];
                                    if let Some(command) = summary.resume_command {
                                        let spans = vec![
                                            "To continue this session, run ".into(),
                                            command.cyan(),
                                        ];
                                        lines.push(spans.into());
                                    }
                                    self.chat_widget.add_plain_history_lines(lines);
            AppEvent::ForkCurrentSession => {
                let summary =
                    session_summary(self.chat_widget.token_usage(), self.chat_widget.thread_id());
                if let Some(path) = self.chat_widget.rollout_path() {
                    match self
                        .server
                        .fork_thread(usize::MAX, self.config.clone(), path.clone())
                        .await
                    {
                        Ok(forked) => {
                            self.shutdown_current_thread().await;
                            let init = crate::chatwidget::ChatWidgetInit {
                                config: self.config.clone(),
                                frame_requester: tui.frame_requester(),
                                app_event_tx: self.app_event_tx.clone(),
                                initial_prompt: None,
                                initial_images: Vec::new(),
                                enhanced_keys_supported: self.enhanced_keys_supported,
                                auth_manager: self.auth_manager.clone(),
                                models_manager: self.server.get_models_manager(),
                                feedback: self.feedback.clone(),
                                is_first_run: false,
                                model: Some(self.current_model.clone()),
                            };
                            self.chat_widget = ChatWidget::new_from_existing(
                                init,
                                forked.thread,
                                forked.session_configured,
                            );
                            self.current_model = model_info.slug.clone();
                            if let Some(summary) = summary {
                                let mut lines: Vec<Line<'static>> =
                                    vec![summary.usage_line.clone().into()];
                                if let Some(command) = summary.resume_command {
                                    let spans = vec![
                                        "To continue this session, run ".into(),
                                        command.cyan(),
                                    ];
                                    lines.push(spans.into());
                                }
                            }
                            Err(err) => {
                                let path_display = path.display();
                                self.chat_widget.add_error_message(format!(
                                    "Failed to fork session from {path_display}: {err}"
                                ));
                                self.chat_widget.add_plain_history_lines(lines);
                            }
                        }
                        Err(err) => {
                            let path_display = path.display();
                            self.chat_widget.add_error_message(format!(
                                "Failed to fork current session from {path_display}: {err}"
                            ));
                        }
                    }
                    SessionSelection::Exit
                    | SessionSelection::StartFresh
                    | SessionSelection::Resume(_) => {}
                } else {
                    self.chat_widget
                        .add_error_message("Current session is not ready to fork yet.".to_string());
                }

                // Leaving alt-screen may blank the inline viewport; force a redraw either way.
                tui.frame_requester().schedule_frame();
            }
            AppEvent::InsertHistoryCell(cell) => {

@@ -53,8 +53,8 @@ pub(crate) enum AppEvent {
    /// Open the resume picker inside the running TUI session.
    OpenResumePicker,

    /// Open the fork picker inside the running TUI session.
    OpenForkPicker,
    /// Fork the current session into a new thread.
    ForkCurrentSession,

    /// Request to exit the application.
    ///

@@ -1994,7 +1994,7 @@ impl ChatWidget {
                self.app_event_tx.send(AppEvent::OpenResumePicker);
            }
            SlashCommand::Fork => {
                self.app_event_tx.send(AppEvent::OpenForkPicker);
                self.app_event_tx.send(AppEvent::ForkCurrentSession);
            }
            SlashCommand::Init => {
                let init_target = self.config.cwd.join(DEFAULT_PROJECT_DOC_FILENAME);

@@ -1532,12 +1532,12 @@ async fn slash_resume_opens_picker() {
}

#[tokio::test]
async fn slash_fork_opens_picker() {
async fn slash_fork_requests_current_fork() {
    let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;

    chat.dispatch_command(SlashCommand::Fork);

    assert_matches!(rx.try_recv(), Ok(AppEvent::OpenForkPicker));
    assert_matches!(rx.try_recv(), Ok(AppEvent::ForkCurrentSession));
}

#[tokio::test]

@@ -15,6 +15,7 @@ use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_core::RolloutRecorder;
use codex_core::ThreadSortKey;
use codex_core::auth::enforce_login_restrictions;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
@@ -478,6 +479,7 @@ async fn run_ratatui_app(
        &config.codex_home,
        1,
        None,
        ThreadSortKey::UpdatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        &config.model_provider_id,
@@ -529,6 +531,9 @@ async fn run_ratatui_app(
    };
    match RolloutRecorder::find_latest_thread_path(
        &config.codex_home,
        1,
        None,
        ThreadSortKey::UpdatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        &config.model_provider_id,

@@ -275,7 +275,6 @@ impl<'a> FlexRenderable<'a> {
            };
            let child_size = child.desired_height(area.width).min(max_child_extent);
            child_sizes[i] = child_size;
            allocated_size += child_size;
            allocated_flex_space += child_size;
        }
    }

@@ -9,6 +9,7 @@ use codex_core::Cursor;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_core::RolloutRecorder;
use codex_core::ThreadItem;
use codex_core::ThreadSortKey;
use codex_core::ThreadsPage;
use codex_core::path_utils;
use codex_protocol::items::TurnItem;
@@ -156,6 +157,7 @@ async fn run_session_picker(
        &request.codex_home,
        PAGE_SIZE,
        request.cursor.as_ref(),
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        request.default_provider.as_str(),
@@ -1422,6 +1424,7 @@ mod tests {
        &state.codex_home,
        PAGE_SIZE,
        None,
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(&[String::from("openai")]),
        "openai",

@@ -48,7 +48,7 @@ impl SlashCommand {
            SlashCommand::Compact => "summarize conversation to prevent hitting the context limit",
            SlashCommand::Review => "review my current changes and find issues",
            SlashCommand::Resume => "resume a saved chat",
            SlashCommand::Fork => "fork a saved chat",
            SlashCommand::Fork => "fork the current chat",
            // SlashCommand::Undo => "ask Codex to undo a turn",
            SlashCommand::Quit | SlashCommand::Exit => "exit Codex",
            SlashCommand::Diff => "show git diff (including untracked files)",

@@ -6,7 +6,7 @@ Use /approvals to control when Codex asks for confirmation.
Run /review to get a code review of your current changes.
Use /skills to list available skills or ask Codex to use one.
Use /status to see the current model, approvals, and token usage.
Use /fork to branch a saved chat into a new thread.
Use /fork to branch the current chat into a new thread.
Use /init to create an AGENTS.md with project-specific guidance.
Use /mcp to list configured MCP tools.
You can run any shell command from Codex using `!` (e.g. `!ls`)

@@ -1531,72 +1531,62 @@ impl App {
                // Leaving alt-screen may blank the inline viewport; force a redraw either way.
                tui.frame_requester().schedule_frame();
            }
            AppEvent::OpenForkPicker => {
                match crate::resume_picker::run_fork_picker(
                    tui,
                    &self.config.codex_home,
                    &self.config.model_provider_id,
                    false,
                )
                .await?
                {
                    SessionSelection::Fork(path) => {
                        let summary = session_summary(
                            self.chat_widget.token_usage(),
                            self.chat_widget.conversation_id(),
                        );
                        match self
                            .server
                            .fork_thread(usize::MAX, self.config.clone(), path.clone())
                            .await
                        {
                            Ok(forked) => {
                                self.shutdown_current_conversation().await;
                                let init = crate::chatwidget::ChatWidgetInit {
                                    config: self.config.clone(),
                                    frame_requester: tui.frame_requester(),
                                    app_event_tx: self.app_event_tx.clone(),
                                    initial_prompt: None,
                                    initial_images: Vec::new(),
                                    enhanced_keys_supported: self.enhanced_keys_supported,
                                    auth_manager: self.auth_manager.clone(),
                                    models_manager: self.server.get_models_manager(),
                                    feedback: self.feedback.clone(),
                                    is_first_run: false,
                                    model: Some(self.current_model.clone()),
                                };
                                self.chat_widget = ChatWidget::new_from_existing(
                                    init,
                                    forked.thread,
                                    forked.session_configured,
                                );
                                if let Some(summary) = summary {
                                    let mut lines: Vec<Line<'static>> =
                                        vec![summary.usage_line.clone().into()];
                                    if let Some(command) = summary.resume_command {
                                        let spans = vec![
                                            "To continue this session, run ".into(),
                                            command.cyan(),
                                        ];
                                        lines.push(spans.into());
                                    }
                                    self.chat_widget.add_plain_history_lines(lines);
            AppEvent::ForkCurrentSession => {
                let summary = session_summary(
                    self.chat_widget.token_usage(),
                    self.chat_widget.conversation_id(),
                );
                if let Some(path) = self.chat_widget.rollout_path() {
                    match self
                        .server
                        .fork_thread(usize::MAX, self.config.clone(), path.clone())
                        .await
                    {
                        Ok(forked) => {
                            self.shutdown_current_conversation().await;
                            let init = crate::chatwidget::ChatWidgetInit {
                                config: self.config.clone(),
                                frame_requester: tui.frame_requester(),
                                app_event_tx: self.app_event_tx.clone(),
                                initial_prompt: None,
                                initial_images: Vec::new(),
                                enhanced_keys_supported: self.enhanced_keys_supported,
                                auth_manager: self.auth_manager.clone(),
                                models_manager: self.server.get_models_manager(),
                                feedback: self.feedback.clone(),
                                is_first_run: false,
                                model: Some(self.current_model.clone()),
                            };
                            self.chat_widget = ChatWidget::new_from_existing(
                                init,
                                forked.thread,
                                forked.session_configured,
                            );
                            if let Some(summary) = summary {
                                let mut lines: Vec<Line<'static>> =
                                    vec![summary.usage_line.clone().into()];
                                if let Some(command) = summary.resume_command {
                                    let spans = vec![
                                        "To continue this session, run ".into(),
                                        command.cyan(),
                                    ];
                                    lines.push(spans.into());
                                }
                            }
                            Err(err) => {
                                let path_display = path.display();
                                self.chat_widget.add_error_message(format!(
                                    "Failed to fork session from {path_display}: {err}"
                                ));
                                self.chat_widget.add_plain_history_lines(lines);
                            }
                        }
                        Err(err) => {
                            let path_display = path.display();
                            self.chat_widget.add_error_message(format!(
                                "Failed to fork current session from {path_display}: {err}"
                            ));
                        }
                    }
                    SessionSelection::Exit
                    | SessionSelection::StartFresh
                    | SessionSelection::Resume(_) => {}
                } else {
                    self.chat_widget
                        .add_error_message("Current session is not ready to fork yet.".to_string());
                }

                // Leaving alt-screen may blank the inline viewport; force a redraw either way.
                tui.frame_requester().schedule_frame();
            }
            AppEvent::InsertHistoryCell(cell) => {

@@ -47,8 +47,8 @@ pub(crate) enum AppEvent {
    /// Open the resume picker inside the running TUI session.
    OpenResumePicker,

    /// Open the fork picker inside the running TUI session.
    OpenForkPicker,
    /// Fork the current session into a new thread.
    ForkCurrentSession,

    /// Request to exit the application.
    ///

@@ -1769,7 +1769,7 @@ impl ChatWidget {
                self.app_event_tx.send(AppEvent::OpenResumePicker);
            }
            SlashCommand::Fork => {
                self.app_event_tx.send(AppEvent::OpenForkPicker);
                self.app_event_tx.send(AppEvent::ForkCurrentSession);
            }
            SlashCommand::Init => {
                let init_target = self.config.cwd.join(DEFAULT_PROJECT_DOC_FILENAME);

@@ -1338,12 +1338,12 @@ async fn slash_resume_opens_picker() {
}

#[tokio::test]
async fn slash_fork_opens_picker() {
async fn slash_fork_requests_current_fork() {
    let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;

    chat.dispatch_command(SlashCommand::Fork);

    assert_matches!(rx.try_recv(), Ok(AppEvent::OpenForkPicker));
    assert_matches!(rx.try_recv(), Ok(AppEvent::ForkCurrentSession));
}

#[tokio::test]

@@ -15,6 +15,7 @@ use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_core::RolloutRecorder;
use codex_core::ThreadSortKey;
use codex_core::auth::enforce_login_restrictions;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
@@ -500,6 +501,7 @@ async fn run_ratatui_app(
        &config.codex_home,
        1,
        None,
        ThreadSortKey::UpdatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        &config.model_provider_id,
@@ -552,6 +554,9 @@ async fn run_ratatui_app(
    };
    match RolloutRecorder::find_latest_thread_path(
        &config.codex_home,
        1,
        None,
        ThreadSortKey::UpdatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        &config.model_provider_id,

@@ -275,7 +275,6 @@ impl<'a> FlexRenderable<'a> {
            };
            let child_size = child.desired_height(area.width).min(max_child_extent);
            child_sizes[i] = child_size;
            allocated_size += child_size;
            allocated_flex_space += child_size;
        }
    }

@@ -9,6 +9,7 @@ use codex_core::Cursor;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_core::RolloutRecorder;
use codex_core::ThreadItem;
use codex_core::ThreadSortKey;
use codex_core::ThreadsPage;
use codex_core::path_utils;
use codex_protocol::items::TurnItem;
@@ -156,6 +157,7 @@ async fn run_session_picker(
        &request.codex_home,
        PAGE_SIZE,
        request.cursor.as_ref(),
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(provider_filter.as_slice()),
        request.default_provider.as_str(),
@@ -1422,6 +1424,7 @@ mod tests {
        &state.codex_home,
        PAGE_SIZE,
        None,
        ThreadSortKey::CreatedAt,
        INTERACTIVE_SESSION_SOURCES,
        Some(&[String::from("openai")]),
        "openai",

@@ -46,7 +46,7 @@ impl SlashCommand {
            SlashCommand::Compact => "summarize conversation to prevent hitting the context limit",
            SlashCommand::Review => "review my current changes and find issues",
            SlashCommand::Resume => "resume a saved chat",
            SlashCommand::Fork => "fork a saved chat",
            SlashCommand::Fork => "fork the current chat",
            // SlashCommand::Undo => "ask Codex to undo a turn",
            SlashCommand::Quit | SlashCommand::Exit => "exit Codex",
            SlashCommand::Diff => "show git diff (including untracked files)",

@@ -6,7 +6,7 @@ Use /approvals to control when Codex asks for confirmation.
Run /review to get a code review of your current changes.
Use /skills to list available skills or ask Codex to use one.
Use /status to see the current model, approvals, and token usage.
Use /fork to branch a saved chat into a new thread.
Use /fork to branch the current chat into a new thread.
Use /init to create an AGENTS.md with project-specific guidance.
Use /mcp to list configured MCP tools.
You can run any shell command from Codex using `!` (e.g. `!ls`)

@@ -76,19 +76,14 @@ struct Payload {
    refresh_only: bool,
}

#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, Eq, Default)]
#[serde(rename_all = "kebab-case")]
enum SetupMode {
    #[default]
    Full,
    ReadAclsOnly,
}

impl Default for SetupMode {
    fn default() -> Self {
        Self::Full
    }
}

fn log_line(log: &mut File, msg: &str) -> Result<()> {
    let ts = chrono::Utc::now().to_rfc3339();
    writeln!(log, "[{ts}] {msg}")?;