mirror of
https://github.com/openai/codex.git
synced 2026-04-24 22:54:54 +00:00
init, ugly
This commit is contained in:
6
codex-rs/Cargo.lock
generated
6
codex-rs/Cargo.lock
generated
@@ -698,19 +698,24 @@ dependencies = [
|
||||
"codex-cloud-tasks-client",
|
||||
"codex-common",
|
||||
"codex-core",
|
||||
"codex-file-search",
|
||||
"codex-login",
|
||||
"codex-tui",
|
||||
"crossterm",
|
||||
"image",
|
||||
"mime_guess",
|
||||
"ratatui",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"throbber-widgets-tui",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"unicode-width 0.1.14",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -723,6 +728,7 @@ dependencies = [
|
||||
"codex-backend-client",
|
||||
"codex-git-apply",
|
||||
"diffy",
|
||||
"dirs",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
||||
@@ -27,3 +27,4 @@ thiserror = "2.0.12"
|
||||
tokio = { version = "1", features = ["macros", "rt-multi-thread"], optional = true }
|
||||
codex-backend-client = { path = "../backend-client", optional = true }
|
||||
codex-git-apply = { path = "../git-apply" }
|
||||
dirs = { workspace = true }
|
||||
|
||||
@@ -94,6 +94,32 @@ pub struct CreatedTask {
|
||||
pub id: TaskId,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum AttachmentKind {
|
||||
File,
|
||||
Image,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct AttachmentReference {
|
||||
pub sediment_id: String,
|
||||
pub asset_pointer: String,
|
||||
pub path: Option<String>,
|
||||
pub display_name: Option<String>,
|
||||
pub kind: AttachmentKind,
|
||||
pub size_bytes: Option<u64>,
|
||||
pub width: Option<u32>,
|
||||
pub height: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||
pub struct FileServiceConfig {
|
||||
pub base_url: String,
|
||||
pub bearer_token: Option<String>,
|
||||
pub chatgpt_account_id: Option<String>,
|
||||
pub user_agent: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
|
||||
pub struct DiffSummary {
|
||||
pub files_changed: usize,
|
||||
@@ -153,5 +179,10 @@ pub trait CloudBackend: Send + Sync {
|
||||
prompt: &str,
|
||||
git_ref: &str,
|
||||
qa_mode: bool,
|
||||
attachments: &[AttachmentReference],
|
||||
) -> Result<CreatedTask>;
|
||||
|
||||
fn file_service_config(&self) -> Option<FileServiceConfig> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,6 +16,8 @@ use chrono::Utc;
|
||||
use serde_json::Value;
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_backend_client as backend;
|
||||
use codex_backend_client::CodeTaskDetailsResponseExt;
|
||||
@@ -24,27 +26,45 @@ use codex_backend_client::CodeTaskDetailsResponseExt;
|
||||
pub struct HttpClient {
|
||||
pub base_url: String,
|
||||
backend: backend::Client,
|
||||
bearer_token: Option<String>,
|
||||
chatgpt_account_id: Option<String>,
|
||||
user_agent: Option<String>,
|
||||
}
|
||||
|
||||
impl HttpClient {
|
||||
pub fn new(base_url: impl Into<String>) -> anyhow::Result<Self> {
|
||||
let base_url = base_url.into();
|
||||
let backend = backend::Client::new(base_url.clone())?;
|
||||
Ok(Self { base_url, backend })
|
||||
Ok(Self {
|
||||
base_url,
|
||||
backend,
|
||||
bearer_token: None,
|
||||
chatgpt_account_id: None,
|
||||
user_agent: None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
|
||||
self.backend = self.backend.clone().with_bearer_token(token);
|
||||
let token = token.into();
|
||||
self.backend = self.backend.clone().with_bearer_token(token.clone());
|
||||
self.bearer_token = Some(token);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_user_agent(mut self, ua: impl Into<String>) -> Self {
|
||||
self.backend = self.backend.clone().with_user_agent(ua);
|
||||
let ua = ua.into();
|
||||
self.backend = self.backend.clone().with_user_agent(ua.clone());
|
||||
self.user_agent = Some(ua);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_chatgpt_account_id(mut self, account_id: impl Into<String>) -> Self {
|
||||
self.backend = self.backend.clone().with_chatgpt_account_id(account_id);
|
||||
let account_id = account_id.into();
|
||||
self.backend = self
|
||||
.backend
|
||||
.clone()
|
||||
.with_chatgpt_account_id(account_id.clone());
|
||||
self.chatgpt_account_id = Some(account_id);
|
||||
self
|
||||
}
|
||||
}
|
||||
@@ -219,6 +239,7 @@ impl CloudBackend for HttpClient {
|
||||
prompt: &str,
|
||||
git_ref: &str,
|
||||
qa_mode: bool,
|
||||
attachments: &[crate::AttachmentReference],
|
||||
) -> Result<crate::CreatedTask> {
|
||||
// Build request payload patterned after VSCode/newtask.rs
|
||||
let mut input_items: Vec<serde_json::Value> = Vec::new();
|
||||
@@ -228,6 +249,47 @@ impl CloudBackend for HttpClient {
|
||||
"content": [{ "content_type": "text", "text": prompt }]
|
||||
}));
|
||||
|
||||
for attachment in attachments {
|
||||
match attachment.kind {
|
||||
crate::AttachmentKind::Image => {
|
||||
if let (Some(width), Some(height), Some(size_bytes)) =
|
||||
(attachment.width, attachment.height, attachment.size_bytes)
|
||||
{
|
||||
input_items.push(serde_json::json!({
|
||||
"type": "image_asset_pointer",
|
||||
"asset_pointer": attachment.asset_pointer,
|
||||
"width": width,
|
||||
"height": height,
|
||||
"size_bytes": size_bytes,
|
||||
}));
|
||||
continue;
|
||||
}
|
||||
// Fallback to container when metadata is missing
|
||||
}
|
||||
crate::AttachmentKind::File => {}
|
||||
}
|
||||
|
||||
let default_path = attachment
|
||||
.path
|
||||
.clone()
|
||||
.or_else(|| attachment.display_name.clone())
|
||||
.unwrap_or_else(|| attachment.sediment_id.clone());
|
||||
|
||||
let file_entry = serde_json::json!({
|
||||
"type": "file",
|
||||
"sediment_id": attachment.sediment_id,
|
||||
"path": default_path.clone(),
|
||||
});
|
||||
|
||||
let mut container = serde_json::json!({
|
||||
"type": "container_file",
|
||||
"file_ids": [file_entry],
|
||||
"body": "",
|
||||
});
|
||||
container["path"] = serde_json::Value::String(default_path);
|
||||
input_items.push(container);
|
||||
}
|
||||
|
||||
if let Ok(diff) = std::env::var("CODEX_STARTING_DIFF")
|
||||
&& !diff.is_empty()
|
||||
{
|
||||
@@ -250,23 +312,34 @@ impl CloudBackend for HttpClient {
|
||||
match self.backend.create_task(request_body).await {
|
||||
Ok(id) => {
|
||||
append_error_log(&format!(
|
||||
"new_task: created id={id} env={} prompt_chars={}",
|
||||
"new_task: created id={id} env={} prompt_chars={} attachments={}",
|
||||
env_id,
|
||||
prompt.chars().count()
|
||||
prompt.chars().count(),
|
||||
attachments.len()
|
||||
));
|
||||
Ok(crate::CreatedTask { id: TaskId(id) })
|
||||
}
|
||||
Err(e) => {
|
||||
append_error_log(&format!(
|
||||
"new_task: create failed env={} prompt_chars={}: {}",
|
||||
"new_task: create failed env={} prompt_chars={} attachments={}: {}",
|
||||
env_id,
|
||||
prompt.chars().count(),
|
||||
attachments.len(),
|
||||
e
|
||||
));
|
||||
Err(CloudTaskError::Http(format!("create_task failed: {e}")))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn file_service_config(&self) -> Option<crate::FileServiceConfig> {
|
||||
Some(crate::FileServiceConfig {
|
||||
base_url: self.base_url.clone(),
|
||||
bearer_token: self.bearer_token.clone(),
|
||||
chatgpt_account_id: self.chatgpt_account_id.clone(),
|
||||
user_agent: self.user_agent.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Best-effort extraction of assistant text messages from a raw `get_task_details` body.
|
||||
@@ -725,13 +798,52 @@ fn summarize_patch_for_logging(patch: &str) -> String {
|
||||
}
|
||||
|
||||
fn append_error_log(message: &str) {
|
||||
let ts = Utc::now().to_rfc3339();
|
||||
if let Ok(mut f) = std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open("error.log")
|
||||
let timestamp = Utc::now().to_rfc3339();
|
||||
|
||||
if let Some(path) = log_file_path()
|
||||
&& write_log_line(&path, ×tamp, message)
|
||||
{
|
||||
use std::io::Write as _;
|
||||
let _ = writeln!(f, "[{ts}] {message}");
|
||||
return;
|
||||
}
|
||||
|
||||
let fallback = Path::new("error.log");
|
||||
let _ = write_log_line(fallback, ×tamp, message);
|
||||
}
|
||||
|
||||
fn log_file_path() -> Option<PathBuf> {
|
||||
let mut codex_home = codex_home_dir()?;
|
||||
codex_home.push("log");
|
||||
std::fs::create_dir_all(&codex_home).ok()?;
|
||||
Some(codex_home.join("codex-cloud-tasks.log"))
|
||||
}
|
||||
|
||||
fn codex_home_dir() -> Option<PathBuf> {
|
||||
if let Ok(val) = std::env::var("CODEX_HOME")
|
||||
&& !val.is_empty()
|
||||
{
|
||||
let path = PathBuf::from(val);
|
||||
return path.canonicalize().ok().or(Some(path));
|
||||
}
|
||||
dirs::home_dir().map(|mut home| {
|
||||
home.push(".codex");
|
||||
home
|
||||
})
|
||||
}
|
||||
|
||||
fn write_log_line(path: &Path, timestamp: &str, message: &str) -> bool {
|
||||
let mut opts = std::fs::OpenOptions::new();
|
||||
opts.create(true).append(true);
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::fs::OpenOptionsExt;
|
||||
opts.mode(0o600);
|
||||
}
|
||||
|
||||
match opts.open(path) {
|
||||
Ok(mut file) => {
|
||||
use std::io::Write as _;
|
||||
writeln!(file, "[{timestamp}] {message}").is_ok()
|
||||
}
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,11 +2,14 @@ mod api;
|
||||
|
||||
pub use api::ApplyOutcome;
|
||||
pub use api::ApplyStatus;
|
||||
pub use api::AttachmentKind;
|
||||
pub use api::AttachmentReference;
|
||||
pub use api::AttemptStatus;
|
||||
pub use api::CloudBackend;
|
||||
pub use api::CloudTaskError;
|
||||
pub use api::CreatedTask;
|
||||
pub use api::DiffSummary;
|
||||
pub use api::FileServiceConfig;
|
||||
pub use api::Result;
|
||||
pub use api::TaskId;
|
||||
pub use api::TaskStatus;
|
||||
|
||||
@@ -129,8 +129,9 @@ impl CloudBackend for MockClient {
|
||||
prompt: &str,
|
||||
git_ref: &str,
|
||||
qa_mode: bool,
|
||||
attachments: &[crate::AttachmentReference],
|
||||
) -> Result<crate::CreatedTask> {
|
||||
let _ = (env_id, prompt, git_ref, qa_mode);
|
||||
let _ = (env_id, prompt, git_ref, qa_mode, attachments);
|
||||
let id = format!("task_local_{}", chrono::Utc::now().timestamp_millis());
|
||||
Ok(crate::CreatedTask { id: TaskId(id) })
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ workspace = true
|
||||
anyhow = "1"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
codex-common = { path = "../common", features = ["cli"] }
|
||||
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
|
||||
tokio = { version = "1", features = ["fs", "macros", "rt-multi-thread"] }
|
||||
tracing = { version = "0.1.41", features = ["log"] }
|
||||
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
|
||||
codex-cloud-tasks-client = { path = "../cloud-tasks-client", features = ["mock", "online"] }
|
||||
@@ -32,9 +32,14 @@ reqwest = { version = "0.12", features = ["json"] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
unicode-width = "0.1"
|
||||
codex-tui = { path = "../tui" }
|
||||
codex-file-search = { path = "../file-search" }
|
||||
mime_guess = "2"
|
||||
url = "2"
|
||||
image = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
async-trait = "0.1"
|
||||
tempfile = "3"
|
||||
|
||||
[[bin]]
|
||||
name = "conncheck"
|
||||
@@ -47,3 +52,7 @@ path = "src/bin/newtask.rs"
|
||||
[[bin]]
|
||||
name = "envcheck"
|
||||
path = "src/bin/envcheck.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "upload-debug"
|
||||
path = "src/bin/upload_debug.rs"
|
||||
|
||||
@@ -7,6 +7,7 @@ pub struct EnvironmentRow {
|
||||
pub label: Option<String>,
|
||||
pub is_pinned: bool,
|
||||
pub repo_hints: Option<String>, // e.g., "openai/codex"
|
||||
pub default_branch: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
@@ -440,6 +441,7 @@ mod tests {
|
||||
_prompt: &str,
|
||||
_git_ref: &str,
|
||||
_qa_mode: bool,
|
||||
_attachments: &[codex_cloud_tasks_client::AttachmentReference],
|
||||
) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::CreatedTask> {
|
||||
Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
|
||||
"not used in test",
|
||||
|
||||
226
codex-rs/cloud-tasks/src/attachments/mod.rs
Normal file
226
codex-rs/cloud-tasks/src/attachments/mod.rs
Normal file
@@ -0,0 +1,226 @@
|
||||
pub mod upload;
|
||||
|
||||
pub use upload::AttachmentAssetPointer;
|
||||
pub use upload::AttachmentId;
|
||||
pub use upload::AttachmentUploadError;
|
||||
pub use upload::AttachmentUploadMode;
|
||||
pub use upload::AttachmentUploadProgress;
|
||||
pub use upload::AttachmentUploadState;
|
||||
pub use upload::AttachmentUploadUpdate;
|
||||
pub use upload::AttachmentUploader;
|
||||
pub use upload::HttpConfig as AttachmentUploadHttpConfig;
|
||||
pub use upload::pointer_id_from_value;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
const MAX_SUGGESTIONS: usize = 5;
|
||||
|
||||
/// The type of attachment included alongside a composer submission.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum AttachmentKind {
|
||||
File,
|
||||
Image,
|
||||
}
|
||||
|
||||
/// Metadata describing a file or asset attached via an `@` mention.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct ComposerAttachment {
|
||||
pub kind: AttachmentKind,
|
||||
pub label: String,
|
||||
pub path: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub fs_path: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub start_line: Option<u32>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub end_line: Option<u32>,
|
||||
#[serde(skip, default)]
|
||||
pub id: AttachmentId,
|
||||
#[serde(skip_serializing, skip_deserializing)]
|
||||
pub upload: AttachmentUploadState,
|
||||
}
|
||||
|
||||
impl ComposerAttachment {
|
||||
pub fn from_suggestion(id: AttachmentId, suggestion: &MentionSuggestion) -> Self {
|
||||
Self {
|
||||
kind: AttachmentKind::File,
|
||||
label: suggestion.label.clone(),
|
||||
path: suggestion.path.clone(),
|
||||
fs_path: suggestion.fs_path.clone(),
|
||||
start_line: suggestion.start_line,
|
||||
end_line: suggestion.end_line,
|
||||
id,
|
||||
upload: AttachmentUploadState::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// UI state for the active `@` mention query inside the composer.
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||
pub struct MentionQueryState {
|
||||
pub current: Option<MentionToken>,
|
||||
}
|
||||
|
||||
impl MentionQueryState {
|
||||
/// Returns true when the stored token changed.
|
||||
pub fn update_from(&mut self, token: Option<String>) -> bool {
|
||||
let next = token.map(MentionToken::from_query);
|
||||
if next != self.current {
|
||||
self.current = next;
|
||||
return true;
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents an `@` mention currently under the user's cursor.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct MentionToken {
|
||||
/// Query string without the leading `@`.
|
||||
pub query: String,
|
||||
/// Raw token including the `@` prefix.
|
||||
pub raw: String,
|
||||
}
|
||||
|
||||
impl MentionToken {
|
||||
pub(crate) fn from_query(query: String) -> Self {
|
||||
let raw = format!("@{query}");
|
||||
Self { query, raw }
|
||||
}
|
||||
}
|
||||
|
||||
/// A suggested file (or range within a file) that matches the active `@` token.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct MentionSuggestion {
|
||||
pub label: String,
|
||||
pub path: String,
|
||||
pub fs_path: Option<String>,
|
||||
pub start_line: Option<u32>,
|
||||
pub end_line: Option<u32>,
|
||||
}
|
||||
|
||||
impl MentionSuggestion {
|
||||
pub fn new(label: impl Into<String>, path: impl Into<String>) -> Self {
|
||||
Self {
|
||||
label: label.into(),
|
||||
path: path.into(),
|
||||
fs_path: None,
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Tracks suggestion list + selection for the mention picker overlay.
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||
pub struct MentionPickerState {
|
||||
suggestions: Vec<MentionSuggestion>,
|
||||
selected: usize,
|
||||
}
|
||||
|
||||
impl MentionPickerState {
|
||||
pub fn clear(&mut self) -> bool {
|
||||
if self.suggestions.is_empty() {
|
||||
return false;
|
||||
}
|
||||
self.suggestions.clear();
|
||||
self.selected = 0;
|
||||
true
|
||||
}
|
||||
|
||||
pub fn move_selection(&mut self, delta: isize) {
|
||||
if self.suggestions.is_empty() {
|
||||
return;
|
||||
}
|
||||
let len = self.suggestions.len() as isize;
|
||||
let mut idx = self.selected as isize + delta;
|
||||
if idx < 0 {
|
||||
idx = len - 1;
|
||||
}
|
||||
if idx >= len {
|
||||
idx = 0;
|
||||
}
|
||||
self.selected = idx as usize;
|
||||
}
|
||||
|
||||
pub fn selected_index(&self) -> usize {
|
||||
self.selected.min(self.suggestions.len().saturating_sub(1))
|
||||
}
|
||||
|
||||
pub fn current(&self) -> Option<&MentionSuggestion> {
|
||||
self.suggestions.get(self.selected_index())
|
||||
}
|
||||
|
||||
pub fn render_height(&self) -> u16 {
|
||||
let rows = self.suggestions.len().clamp(1, MAX_SUGGESTIONS) as u16;
|
||||
// Add borders + padding space.
|
||||
rows.saturating_add(2)
|
||||
}
|
||||
|
||||
pub fn items(&self) -> &[MentionSuggestion] {
|
||||
&self.suggestions
|
||||
}
|
||||
|
||||
pub fn set_suggestions(&mut self, suggestions: Vec<MentionSuggestion>) -> bool {
|
||||
let mut trimmed = suggestions;
|
||||
if trimmed.len() > MAX_SUGGESTIONS {
|
||||
trimmed.truncate(MAX_SUGGESTIONS);
|
||||
}
|
||||
if trimmed == self.suggestions {
|
||||
return false;
|
||||
}
|
||||
self.suggestions = trimmed;
|
||||
self.selected = 0;
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::AttachmentUploadState;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn compose_attachment_from_suggestion_copies_fields() {
|
||||
let mut suggestion = MentionSuggestion::new("src/main.rs", "src/main.rs");
|
||||
suggestion.fs_path = Some("/repo/src/main.rs".to_string());
|
||||
suggestion.start_line = Some(10);
|
||||
suggestion.end_line = Some(20);
|
||||
let att = ComposerAttachment::from_suggestion(AttachmentId::new(42), &suggestion);
|
||||
assert_eq!(att.label, "src/main.rs");
|
||||
assert_eq!(att.path, "src/main.rs");
|
||||
assert_eq!(att.fs_path.as_deref(), Some("/repo/src/main.rs"));
|
||||
assert_eq!(att.start_line, Some(10));
|
||||
assert_eq!(att.end_line, Some(20));
|
||||
assert!(matches!(att.upload, AttachmentUploadState::NotStarted));
|
||||
assert_eq!(att.id.raw(), 42);
|
||||
}
|
||||
#[test]
|
||||
fn move_selection_wraps() {
|
||||
let _token = MentionToken::from_query("foo".to_string());
|
||||
let mut picker = MentionPickerState::default();
|
||||
assert!(picker.set_suggestions(vec![
|
||||
MentionSuggestion::new("src/foo.rs", "src/foo.rs"),
|
||||
MentionSuggestion::new("src/main.rs", "src/main.rs"),
|
||||
]));
|
||||
picker.move_selection(1);
|
||||
assert_eq!(
|
||||
picker.selected_index(),
|
||||
1.min(picker.items().len().saturating_sub(1))
|
||||
);
|
||||
picker.move_selection(-1);
|
||||
assert_eq!(picker.selected_index(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn refresh_none_clears_suggestions() {
|
||||
let _token = MentionToken::from_query("bar".to_string());
|
||||
let mut picker = MentionPickerState::default();
|
||||
assert!(
|
||||
picker.set_suggestions(vec![MentionSuggestion::new("docs/bar.md", "docs/bar.md",)])
|
||||
);
|
||||
assert!(picker.clear());
|
||||
assert!(picker.items().is_empty());
|
||||
}
|
||||
}
|
||||
605
codex-rs/cloud-tasks/src/attachments/upload.rs
Normal file
605
codex-rs/cloud-tasks/src/attachments/upload.rs
Normal file
@@ -0,0 +1,605 @@
|
||||
use std::collections::HashMap;
|
||||
use std::fmt;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
use crate::util::append_error_log;
|
||||
use chrono::Local;
|
||||
use mime_guess::MimeGuess;
|
||||
use reqwest::Client;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::mpsc::UnboundedReceiver;
|
||||
use tokio::sync::mpsc::UnboundedSender;
|
||||
use tracing::debug;
|
||||
use tracing::warn;
|
||||
use url::Url;
|
||||
|
||||
const UPLOAD_USE_CASE: &str = "codex";
|
||||
|
||||
/// Stable identifier assigned to each staged attachment.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Serialize, Deserialize)]
|
||||
#[serde(transparent)]
|
||||
pub struct AttachmentId(pub u64);
|
||||
|
||||
impl AttachmentId {
|
||||
pub const fn new(raw: u64) -> Self {
|
||||
Self(raw)
|
||||
}
|
||||
|
||||
pub const fn raw(self) -> u64 {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the lifecycle of an attachment upload initiated after an `@` mention.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum AttachmentUploadState {
|
||||
NotStarted,
|
||||
Uploading(AttachmentUploadProgress),
|
||||
Uploaded(AttachmentUploadSuccess),
|
||||
Failed(AttachmentUploadError),
|
||||
}
|
||||
|
||||
impl Default for AttachmentUploadState {
|
||||
fn default() -> Self {
|
||||
Self::NotStarted
|
||||
}
|
||||
}
|
||||
|
||||
impl AttachmentUploadState {
|
||||
pub fn is_pending(&self) -> bool {
|
||||
matches!(self, Self::NotStarted | Self::Uploading(_))
|
||||
}
|
||||
|
||||
pub fn is_uploaded(&self) -> bool {
|
||||
matches!(self, Self::Uploaded(_))
|
||||
}
|
||||
|
||||
pub fn is_failed(&self) -> bool {
|
||||
matches!(self, Self::Failed(_))
|
||||
}
|
||||
}
|
||||
|
||||
/// Progress for uploads where the total size is known.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct AttachmentUploadProgress {
|
||||
pub uploaded_bytes: u64,
|
||||
pub total_bytes: Option<u64>,
|
||||
}
|
||||
|
||||
impl AttachmentUploadProgress {
|
||||
pub fn new(uploaded_bytes: u64, total_bytes: Option<u64>) -> Self {
|
||||
Self {
|
||||
uploaded_bytes,
|
||||
total_bytes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Successful upload metadata containing the remote pointer.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct AttachmentUploadSuccess {
|
||||
pub asset_pointer: AttachmentAssetPointer,
|
||||
pub display_name: String,
|
||||
}
|
||||
|
||||
impl AttachmentUploadSuccess {
|
||||
pub fn new(asset_pointer: AttachmentAssetPointer, display_name: impl Into<String>) -> Self {
|
||||
Self {
|
||||
asset_pointer,
|
||||
display_name: display_name.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Describes the remote asset pointer returned by the file service.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct AttachmentAssetPointer {
|
||||
pub kind: AttachmentPointerKind,
|
||||
pub value: String,
|
||||
}
|
||||
|
||||
impl AttachmentAssetPointer {
|
||||
pub fn new(kind: AttachmentPointerKind, value: impl Into<String>) -> Self {
|
||||
Self {
|
||||
kind,
|
||||
value: value.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// High-level pointer type so we can support both single file and container uploads.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum AttachmentPointerKind {
|
||||
File,
|
||||
Image,
|
||||
#[allow(dead_code)]
|
||||
Container,
|
||||
}
|
||||
|
||||
impl fmt::Display for AttachmentPointerKind {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::File => write!(f, "file"),
|
||||
Self::Image => write!(f, "image"),
|
||||
Self::Container => write!(f, "container"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Captures a user-visible error when uploading an attachment fails.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct AttachmentUploadError {
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
impl AttachmentUploadError {
|
||||
pub fn new(message: impl Into<String>) -> Self {
|
||||
Self {
|
||||
message: message.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for AttachmentUploadError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.message)
|
||||
}
|
||||
}
|
||||
|
||||
/// Internal update emitted by the background uploader task.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum AttachmentUploadUpdate {
|
||||
Started {
|
||||
id: AttachmentId,
|
||||
total_bytes: Option<u64>,
|
||||
},
|
||||
Finished {
|
||||
id: AttachmentId,
|
||||
result: Result<AttachmentUploadSuccess, AttachmentUploadError>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Configuration for attachment uploads.
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum AttachmentUploadMode {
|
||||
Disabled,
|
||||
#[cfg_attr(not(test), allow(dead_code))]
|
||||
ImmediateSuccess,
|
||||
Http(HttpConfig),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct HttpConfig {
|
||||
pub base_url: String,
|
||||
pub bearer_token: Option<String>,
|
||||
pub chatgpt_account_id: Option<String>,
|
||||
pub user_agent: Option<String>,
|
||||
}
|
||||
|
||||
impl HttpConfig {
|
||||
fn trimmed_base(&self) -> String {
|
||||
self.base_url.trim_end_matches('/').to_string()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum AttachmentUploadBackend {
|
||||
Disabled,
|
||||
ImmediateSuccess,
|
||||
Http(Arc<AttachmentUploadHttp>),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct AttachmentUploadHttp {
|
||||
client: Client,
|
||||
base_url: String,
|
||||
bearer_token: Option<String>,
|
||||
chatgpt_account_id: Option<String>,
|
||||
user_agent: Option<String>,
|
||||
}
|
||||
|
||||
impl AttachmentUploadHttp {
|
||||
fn apply_default_headers(&self, builder: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
|
||||
let mut b = builder;
|
||||
if let Some(token) = &self.bearer_token {
|
||||
b = b.bearer_auth(token);
|
||||
}
|
||||
if let Some(acc) = &self.chatgpt_account_id {
|
||||
b = b.header("ChatGPT-Account-Id", acc);
|
||||
}
|
||||
if let Some(ua) = &self.user_agent {
|
||||
b = b.header(reqwest::header::USER_AGENT, ua.clone());
|
||||
}
|
||||
b
|
||||
}
|
||||
}
|
||||
|
||||
/// Bookkeeping for in-flight attachment uploads, providing polling APIs for the UI thread.
|
||||
pub struct AttachmentUploader {
|
||||
update_tx: UnboundedSender<AttachmentUploadUpdate>,
|
||||
update_rx: UnboundedReceiver<AttachmentUploadUpdate>,
|
||||
inflight: HashMap<AttachmentId, Arc<AtomicBool>>,
|
||||
backend: AttachmentUploadBackend,
|
||||
}
|
||||
|
||||
impl AttachmentUploader {
|
||||
pub fn new(mode: AttachmentUploadMode) -> Self {
|
||||
let (tx, rx) = mpsc::unbounded_channel();
|
||||
let backend = match mode {
|
||||
AttachmentUploadMode::Disabled => AttachmentUploadBackend::Disabled,
|
||||
AttachmentUploadMode::ImmediateSuccess => AttachmentUploadBackend::ImmediateSuccess,
|
||||
AttachmentUploadMode::Http(cfg) => match Client::builder().build() {
|
||||
Ok(client) => AttachmentUploadBackend::Http(Arc::new(AttachmentUploadHttp {
|
||||
client,
|
||||
base_url: cfg.trimmed_base(),
|
||||
bearer_token: cfg.bearer_token,
|
||||
chatgpt_account_id: cfg.chatgpt_account_id,
|
||||
user_agent: cfg.user_agent,
|
||||
})),
|
||||
Err(err) => {
|
||||
warn!("attachment_upload.http_client_init_failed: {err}");
|
||||
AttachmentUploadBackend::Disabled
|
||||
}
|
||||
},
|
||||
};
|
||||
Self {
|
||||
update_tx: tx,
|
||||
update_rx: rx,
|
||||
inflight: HashMap::new(),
|
||||
backend,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start_upload(
|
||||
&mut self,
|
||||
id: AttachmentId,
|
||||
display_name: impl Into<String>,
|
||||
fs_path: PathBuf,
|
||||
) -> Result<(), AttachmentUploadError> {
|
||||
if self.inflight.contains_key(&id) {
|
||||
return Err(AttachmentUploadError::new("upload already queued"));
|
||||
}
|
||||
if let AttachmentUploadBackend::Disabled = &self.backend {
|
||||
return Err(AttachmentUploadError::new(
|
||||
"file uploads are not available in this environment",
|
||||
));
|
||||
}
|
||||
|
||||
if !is_supported_image(&fs_path) {
|
||||
return Err(AttachmentUploadError::new(
|
||||
"only image files can be uploaded",
|
||||
));
|
||||
}
|
||||
|
||||
let cancel_token = Arc::new(AtomicBool::new(false));
|
||||
self.inflight.insert(id, cancel_token.clone());
|
||||
let tx = self.update_tx.clone();
|
||||
let backend = self.backend.clone();
|
||||
let path_clone = fs_path.clone();
|
||||
let label = display_name.into();
|
||||
tokio::spawn(async move {
|
||||
let metadata = tokio::fs::metadata(&fs_path).await.ok();
|
||||
let total_bytes = metadata.as_ref().map(std::fs::Metadata::len);
|
||||
let _ = tx.send(AttachmentUploadUpdate::Started { id, total_bytes });
|
||||
|
||||
if cancel_token.load(Ordering::Relaxed) {
|
||||
let _ = tx.send(AttachmentUploadUpdate::Finished {
|
||||
id,
|
||||
result: Err(AttachmentUploadError::new("upload canceled")),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
let result = match backend {
|
||||
AttachmentUploadBackend::Disabled => Err(AttachmentUploadError::new(
|
||||
"file uploads are not available in this environment",
|
||||
)),
|
||||
AttachmentUploadBackend::ImmediateSuccess => {
|
||||
let pointer = AttachmentAssetPointer::new(
|
||||
AttachmentPointerKind::File,
|
||||
format!("file-service://mock-{}", id.raw()),
|
||||
);
|
||||
Ok(AttachmentUploadSuccess::new(pointer, label.clone()))
|
||||
}
|
||||
AttachmentUploadBackend::Http(http) => {
|
||||
perform_http_upload(
|
||||
http,
|
||||
&path_clone,
|
||||
&label,
|
||||
total_bytes,
|
||||
cancel_token.clone(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
};
|
||||
|
||||
let _ = tx.send(AttachmentUploadUpdate::Finished { id, result });
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(not(test), allow(dead_code))]
|
||||
pub fn cancel_all(&mut self) {
|
||||
for cancel in self.inflight.values() {
|
||||
cancel.store(true, Ordering::Relaxed);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn poll(&mut self) -> Vec<AttachmentUploadUpdate> {
|
||||
let mut out = Vec::new();
|
||||
while let Ok(update) = self.update_rx.try_recv() {
|
||||
if let AttachmentUploadUpdate::Finished { id, .. } = &update {
|
||||
self.inflight.remove(id);
|
||||
}
|
||||
out.push(update);
|
||||
}
|
||||
out
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AttachmentUploader {
|
||||
fn default() -> Self {
|
||||
Self::new(AttachmentUploadMode::Disabled)
|
||||
}
|
||||
}
|
||||
|
||||
/// Upload the file at `fs_path` to the backend file service and return the
/// resulting asset pointer.
///
/// The flow mirrors the ChatGPT web client:
/// 1. `POST {base_url}/files` to create a file record and obtain an upload URL.
/// 2. `PUT` the raw bytes to that (Azure blob) URL.
/// 3. `POST files/process_upload_stream` to finalize so the pointer is usable.
///
/// `cancel_token` is checked between each step; a set flag aborts with an
/// "upload canceled" error. Only image files are accepted (MIME type is
/// inferred from the path). `total_bytes`, when provided, overrides the size
/// reported to the create endpoint — assumes callers pass the real file size;
/// TODO confirm why it may differ from the bytes actually read.
async fn perform_http_upload(
    http: Arc<AttachmentUploadHttp>,
    fs_path: &Path,
    display_label: &str,
    total_bytes: Option<u64>,
    cancel_token: Arc<AtomicBool>,
) -> Result<AttachmentUploadSuccess, AttachmentUploadError> {
    // Read the whole file up front; the blob PUT below takes an owned body.
    let file_bytes = tokio::fs::read(fs_path)
        .await
        .map_err(|e| AttachmentUploadError::new(format!("failed to read file: {e}")))?;

    if cancel_token.load(Ordering::Relaxed) {
        return Err(AttachmentUploadError::new("upload canceled"));
    }

    // Prefer the on-disk file name; fall back to the UI label when the path
    // has no UTF-8 file name component.
    let file_name = fs_path
        .file_name()
        .and_then(|s| s.to_str())
        .map(std::string::ToString::to_string)
        .unwrap_or_else(|| display_label.to_string());

    // Step 1: create the file record.
    let create_url = format!("{}/files", http.base_url);
    let body = CreateFileRequest {
        file_name: &file_name,
        file_size: total_bytes.unwrap_or(file_bytes.len() as u64),
        use_case: UPLOAD_USE_CASE,
        // utc_minus_local() is in seconds; the API wants minutes.
        timezone_offset_min: (Local::now().offset().utc_minus_local() / 60),
        reset_rate_limits: false,
    };

    let create_resp = http
        .apply_default_headers(http.client.post(&create_url))
        .json(&body)
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("file create failed: {e}")))?;
    if !create_resp.status().is_success() {
        let status = create_resp.status();
        let text = create_resp.text().await.unwrap_or_default();
        return Err(AttachmentUploadError::new(format!(
            "file create request failed status={status} body={text}"
        )));
    }
    let created: CreateFileResponse = create_resp
        .json()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("decode file create response: {e}")))?;

    if cancel_token.load(Ordering::Relaxed) {
        return Err(AttachmentUploadError::new("upload canceled"));
    }

    // The create response may proxy the real destination; unwrap it.
    let upload_url = resolve_upload_url(&created.upload_url)
        .ok_or_else(|| AttachmentUploadError::new("invalid upload url"))?;

    // Reject non-image files before sending any bytes.
    let mime = infer_image_mime(fs_path)
        .ok_or_else(|| AttachmentUploadError::new("only image files can be uploaded"))?;
    // Step 2: raw blob PUT. The x-ms-* headers are required by Azure Blob
    // Storage block-blob uploads.
    let mut azure_req = http.client.put(&upload_url);
    azure_req = azure_req
        .header("x-ms-blob-type", "BlockBlob")
        .header("x-ms-version", "2020-04-08");

    azure_req = azure_req
        .header(reqwest::header::CONTENT_TYPE, mime.as_str())
        .header("x-ms-blob-content-type", mime.as_str());

    let azure_resp = azure_req
        .body(file_bytes)
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("blob upload failed: {e}")))?;

    if !(200..300).contains(&azure_resp.status().as_u16()) {
        let status = azure_resp.status();
        let text = azure_resp.text().await.unwrap_or_default();
        return Err(AttachmentUploadError::new(format!(
            "blob upload failed status={status} body={text}"
        )));
    }

    if cancel_token.load(Ordering::Relaxed) {
        return Err(AttachmentUploadError::new("upload canceled"));
    }

    // Finalization must succeed so the pointer can be used; surface any failure
    // to the caller after logging for easier debugging.
    if let Err(err) = finalize_upload(http.clone(), &created.file_id, &file_name).await {
        let reason = err.message.clone();
        warn!(
            "mention.attachment.upload.finalize_failed file_id={} reason={reason}",
            created.file_id
        );
        append_error_log(format!(
            "mention.attachment.upload.finalize_failed file_id={} reason={reason}",
            created.file_id
        ));
        return Err(err);
    }

    let pointer = asset_pointer_from_id(&created.file_id);
    debug!(
        "mention.attachment.upload.success file_id={} pointer={}",
        created.file_id, pointer
    );
    // Only images reach this point (see the MIME check above).
    let pointer_kind = AttachmentPointerKind::Image;

    Ok(AttachmentUploadSuccess::new(
        AttachmentAssetPointer::new(pointer_kind, pointer),
        display_label,
    ))
}
|
||||
|
||||
/// Build the asset-pointer URI for an uploaded file id.
///
/// Ids with the `file_` prefix use the `sediment://` scheme; everything
/// else uses the legacy `file-service://` scheme.
fn asset_pointer_from_id(file_id: &str) -> String {
    let scheme = if file_id.starts_with("file_") {
        "sediment"
    } else {
        "file-service"
    };
    format!("{scheme}://{file_id}")
}
|
||||
|
||||
/// Extract the raw file id from an asset-pointer value.
///
/// Strips a `file-service://` or `sediment://` scheme when present;
/// otherwise returns the pointer itself, or `None` when it is empty.
pub fn pointer_id_from_value(pointer: &str) -> Option<String> {
    for scheme in ["file-service://", "sediment://"] {
        if let Some(id) = pointer.strip_prefix(scheme) {
            return Some(id.to_string());
        }
    }
    if pointer.is_empty() {
        None
    } else {
        Some(pointer.to_string())
    }
}
|
||||
|
||||
async fn finalize_upload(
|
||||
http: Arc<AttachmentUploadHttp>,
|
||||
file_id: &str,
|
||||
file_name: &str,
|
||||
) -> Result<(), AttachmentUploadError> {
|
||||
let finalize_url = format!("{}/files/process_upload_stream", http.base_url);
|
||||
let body = FinalizeUploadRequest {
|
||||
file_id,
|
||||
use_case: UPLOAD_USE_CASE,
|
||||
index_for_retrieval: false,
|
||||
file_name,
|
||||
};
|
||||
let finalize_resp = http
|
||||
.apply_default_headers(http.client.post(&finalize_url))
|
||||
.json(&body)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| AttachmentUploadError::new(format!("finalize upload failed: {e}")))?;
|
||||
if !finalize_resp.status().is_success() {
|
||||
let status = finalize_resp.status();
|
||||
let text = finalize_resp.text().await.unwrap_or_default();
|
||||
return Err(AttachmentUploadError::new(format!(
|
||||
"finalize upload failed status={status} body={text}"
|
||||
)));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn resolve_upload_url(url: &str) -> Option<String> {
|
||||
let parsed = Url::parse(url).ok()?;
|
||||
if !parsed.as_str().to_lowercase().contains("estuary") {
|
||||
return Some(parsed.into());
|
||||
}
|
||||
parsed
|
||||
.query_pairs()
|
||||
.find(|(k, _)| k == "upload_url")
|
||||
.map(|(_, v)| v.into_owned())
|
||||
}
|
||||
|
||||
/// Request body for `POST {base_url}/files` (create a file record).
#[derive(Serialize)]
struct CreateFileRequest<'a> {
    file_name: &'a str,
    // Size in bytes reported to the backend.
    file_size: u64,
    use_case: &'a str,
    // Local timezone offset in minutes (UTC minus local time).
    timezone_offset_min: i32,
    reset_rate_limits: bool,
}
|
||||
|
||||
/// Request body for `POST files/process_upload_stream` (finalize an upload).
#[derive(Serialize)]
struct FinalizeUploadRequest<'a> {
    file_id: &'a str,
    // Must match the use case supplied at creation time.
    use_case: &'a str,
    index_for_retrieval: bool,
    file_name: &'a str,
}
|
||||
|
||||
/// Subset of the create-file response this module consumes.
#[derive(Deserialize)]
struct CreateFileResponse {
    // Backend-assigned id, later turned into an asset pointer.
    file_id: String,
    // Destination (or proxy) URL for the blob PUT.
    upload_url: String,
}
|
||||
|
||||
fn is_supported_image(path: &Path) -> bool {
|
||||
infer_image_mime(path).is_some()
|
||||
}
|
||||
|
||||
fn infer_image_mime(path: &Path) -> Option<String> {
|
||||
let guess = MimeGuess::from_path(path)
|
||||
.first_raw()
|
||||
.map(std::string::ToString::to_string);
|
||||
if let Some(m) = guess {
|
||||
if m.starts_with("image/") {
|
||||
return Some(m);
|
||||
}
|
||||
}
|
||||
|
||||
let ext = path
|
||||
.extension()
|
||||
.and_then(|ext| ext.to_str())
|
||||
.map(|ext| ext.trim().to_ascii_lowercase())?;
|
||||
|
||||
let mime = match ext.as_str() {
|
||||
"png" => "image/png",
|
||||
"jpg" | "jpeg" => "image/jpeg",
|
||||
"gif" => "image/gif",
|
||||
"webp" => "image/webp",
|
||||
"bmp" => "image/bmp",
|
||||
"svg" => "image/svg+xml",
|
||||
"heic" => "image/heic",
|
||||
"heif" => "image/heif",
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(mime.to_string())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;

    /// Every common image extension maps to its MIME type, regardless of
    /// filename case.
    #[test]
    fn infer_image_mime_accepts_common_extensions() {
        let expectations: &[(&str, &str)] = &[
            ("foo.png", "image/png"),
            ("bar.JPG", "image/jpeg"),
            ("baz.jpeg", "image/jpeg"),
            ("img.gif", "image/gif"),
            ("slide.WEBP", "image/webp"),
            ("art.bmp", "image/bmp"),
            ("vector.svg", "image/svg+xml"),
            ("photo.heic", "image/heic"),
            ("photo.heif", "image/heif"),
        ];

        for (path, expected) in expectations {
            assert_eq!(
                infer_image_mime(Path::new(path)).as_deref(),
                Some(*expected),
                "case {path}"
            );
        }
    }

    /// Non-image extensions yield no MIME type.
    #[test]
    fn infer_image_mime_rejects_unknown_extension() {
        assert_eq!(infer_image_mime(Path::new("doc.txt")), None);
    }
}
|
||||
596
codex-rs/cloud-tasks/src/bin/upload_debug.rs
Normal file
596
codex-rs/cloud-tasks/src/bin/upload_debug.rs
Normal file
@@ -0,0 +1,596 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::anyhow;
|
||||
use clap::Parser;
|
||||
use codex_cloud_tasks::AttachmentAssetPointer;
|
||||
use codex_cloud_tasks::AttachmentId;
|
||||
use codex_cloud_tasks::AttachmentUploadHttpConfig;
|
||||
use codex_cloud_tasks::AttachmentUploadMode;
|
||||
use codex_cloud_tasks::AttachmentUploadUpdate;
|
||||
use codex_cloud_tasks::AttachmentUploader;
|
||||
use codex_cloud_tasks::env_detect;
|
||||
use codex_cloud_tasks::pointer_id_from_value;
|
||||
use codex_cloud_tasks::util::append_error_log;
|
||||
use codex_cloud_tasks::util::extract_chatgpt_account_id;
|
||||
use codex_cloud_tasks::util::normalize_base_url;
|
||||
use codex_cloud_tasks::util::set_user_agent_suffix;
|
||||
use codex_core::config::find_codex_home;
|
||||
use codex_core::default_client::get_codex_user_agent;
|
||||
use codex_login::AuthManager;
|
||||
use image::image_dimensions;
|
||||
use reqwest::header::AUTHORIZATION;
|
||||
use reqwest::header::CONTENT_TYPE;
|
||||
use reqwest::header::HeaderMap;
|
||||
use reqwest::header::HeaderName;
|
||||
use reqwest::header::HeaderValue;
|
||||
use reqwest::header::USER_AGENT;
|
||||
use serde_json::json;
|
||||
use tokio::time::sleep;
|
||||
|
||||
/// Command-line options for the `upload-debug` binary.
///
/// The tool uploads one or more local images to the Codex cloud file
/// service and (unless `--skip-submit` is given) submits a task that
/// references them, printing every step for debugging.
#[derive(Debug, Parser)]
#[command(
    name = "upload-debug",
    version,
    about = "Debug Codex cloud image uploads"
)]
struct Args {
    /// Explicit environment id to submit to. Uses auto-detect when omitted.
    #[arg(long = "env-id")]
    environment_id: Option<String>,
    /// Optional environment label hint when auto-detecting (case-insensitive).
    #[arg(long = "env-label")]
    environment_label: Option<String>,
    /// Git ref/branch to include in the submission payload.
    #[arg(long = "ref", default_value = "main")]
    git_ref: String,
    /// Enable QA mode for task creation.
    #[arg(long = "qa-mode", default_value_t = false)]
    qa_mode: bool,
    /// Files to upload as images. Use paths relative to the current workspace for easier repros.
    #[arg(long = "image", value_name = "PATH")]
    images: Vec<String>,
    /// Optional override prompt text. Defaults to a canned debug prompt.
    #[arg(long)]
    prompt: Option<String>,
    /// Skip the final POST /wham/tasks call, only perform uploads.
    #[arg(long = "skip-submit", default_value_t = false)]
    skip_submit: bool,
}
|
||||
|
||||
/// One file queued for upload, as resolved from a `--image` argument.
struct ImageAttachment {
    // Locally-assigned id used to correlate uploader updates.
    id: AttachmentId,
    // Filesystem path the bytes are read from.
    fs_path: PathBuf,
    // Path string exactly as the user typed it, echoed in output.
    submit_path: String,
    display_name: String,
    size_bytes: u64,
}
|
||||
|
||||
/// Result of one successful upload, ready to embed in the task payload.
#[derive(Clone)]
struct UploadOutcome {
    // Asset pointer returned by the file service.
    pointer: AttachmentAssetPointer,
    // Path string the user supplied, echoed in logs/output.
    submit_path: String,
    size_bytes: u64,
    // Pixel dimensions decoded locally from the image file.
    width: u32,
    height: u32,
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> anyhow::Result<()> {
|
||||
let args = Args::parse();
|
||||
if args.images.is_empty() {
|
||||
anyhow::bail!("Provide at least one --image <PATH> to upload");
|
||||
}
|
||||
|
||||
set_user_agent_suffix("codex_cloud_tasks_upload_debug");
|
||||
append_error_log("upload-debug: starting run");
|
||||
|
||||
let base_url = normalize_base_url(
|
||||
&std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
|
||||
.unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()),
|
||||
);
|
||||
println!("base_url: {base_url}");
|
||||
|
||||
let user_agent = get_codex_user_agent();
|
||||
println!("user_agent: {user_agent}");
|
||||
|
||||
let (auth_token, account_id) = load_chatgpt_auth().await?;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
USER_AGENT,
|
||||
HeaderValue::from_str(&user_agent).unwrap_or(HeaderValue::from_static("codex-cli")),
|
||||
);
|
||||
if let Some(token) = &auth_token {
|
||||
let hv = HeaderValue::from_str(&format!("Bearer {token}"))
|
||||
.context("failed to encode bearer token header")?;
|
||||
headers.insert(AUTHORIZATION, hv);
|
||||
}
|
||||
if let Some(acc) = &account_id {
|
||||
let name = HeaderName::from_static("ChatGPT-Account-Id");
|
||||
let hv = HeaderValue::from_str(acc).context("invalid ChatGPT-Account-Id header")?;
|
||||
headers.insert(name, hv);
|
||||
}
|
||||
|
||||
let env_id = match args.environment_id.clone() {
|
||||
Some(id) => {
|
||||
println!("env_id (provided): {id}");
|
||||
id
|
||||
}
|
||||
None => {
|
||||
let selection = env_detect::autodetect_environment_id(
|
||||
&base_url,
|
||||
&headers,
|
||||
args.environment_label.clone(),
|
||||
)
|
||||
.await
|
||||
.context("failed to auto-detect environment")?;
|
||||
if let Some(label) = selection.label.as_deref() {
|
||||
println!("env_id (auto): {} — label: {}", selection.id, label);
|
||||
} else {
|
||||
println!("env_id (auto): {}", selection.id);
|
||||
}
|
||||
selection.id
|
||||
}
|
||||
};
|
||||
|
||||
let attachments = build_attachments(&args.images)?;
|
||||
println!("attachments: {}", attachments.len());
|
||||
|
||||
let auth_token = auth_token.context("ChatGPT auth token is required. Run `codex login`.")?;
|
||||
|
||||
let upload_cfg = AttachmentUploadHttpConfig {
|
||||
base_url: base_url.clone(),
|
||||
bearer_token: Some(auth_token.clone()),
|
||||
chatgpt_account_id: account_id.clone(),
|
||||
user_agent: Some(user_agent.clone()),
|
||||
};
|
||||
let mut uploader = AttachmentUploader::new(AttachmentUploadMode::Http(upload_cfg));
|
||||
|
||||
let upload_results = perform_uploads(&mut uploader, &attachments).await?;
|
||||
|
||||
println!("uploads complete:");
|
||||
for (idx, outcome) in upload_results.iter().enumerate() {
|
||||
let file_id = pointer_id_from_value(&outcome.pointer.value)
|
||||
.unwrap_or_else(|| "<unknown>".to_string());
|
||||
println!(
|
||||
" [{}] {} -> {} ({} bytes, {}x{})",
|
||||
idx + 1,
|
||||
outcome.submit_path,
|
||||
file_id,
|
||||
outcome.size_bytes,
|
||||
outcome.width,
|
||||
outcome.height
|
||||
);
|
||||
}
|
||||
|
||||
let prompt = args
|
||||
.prompt
|
||||
.clone()
|
||||
.unwrap_or_else(|| "Debug upload via codex cloud upload-debug".to_string());
|
||||
|
||||
let mut input_items = vec![json!({
|
||||
"type": "message",
|
||||
"role": "user",
|
||||
"content": [{ "content_type": "text", "text": prompt }]
|
||||
})];
|
||||
|
||||
if let Ok(diff) = std::env::var("CODEX_STARTING_DIFF")
|
||||
&& !diff.is_empty()
|
||||
{
|
||||
input_items.push(json!({
|
||||
"type": "pre_apply_patch",
|
||||
"output_diff": { "diff": diff }
|
||||
}));
|
||||
}
|
||||
|
||||
for outcome in &upload_results {
|
||||
input_items.push(json!({
|
||||
"type": "image_asset_pointer",
|
||||
"asset_pointer": outcome.pointer.value,
|
||||
"width": outcome.width,
|
||||
"height": outcome.height,
|
||||
"size_bytes": outcome.size_bytes,
|
||||
}));
|
||||
}
|
||||
|
||||
let request_body = json!({
|
||||
"new_task": {
|
||||
"environment_id": env_id,
|
||||
"branch": args.git_ref,
|
||||
"run_environment_in_qa_mode": args.qa_mode,
|
||||
},
|
||||
"input_items": input_items,
|
||||
});
|
||||
|
||||
let pretty = serde_json::to_string_pretty(&request_body)?;
|
||||
println!("request payload:\n{pretty}");
|
||||
append_error_log(format!(
|
||||
"upload-debug: request body {}",
|
||||
truncate(&pretty, 6000)
|
||||
));
|
||||
|
||||
if args.skip_submit {
|
||||
println!("--skip-submit set; skipping POST /wham/tasks");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let client = reqwest::Client::builder().build()?;
|
||||
let url = if base_url.contains("/backend-api") {
|
||||
format!("{base_url}/wham/tasks")
|
||||
} else {
|
||||
format!("{base_url}/api/codex/tasks")
|
||||
};
|
||||
|
||||
let mut req = client.post(&url).header(USER_AGENT, user_agent.clone());
|
||||
req = req.header(CONTENT_TYPE, HeaderValue::from_static("application/json"));
|
||||
req = req.bearer_auth(auth_token.clone());
|
||||
if let Some(acc) = &account_id {
|
||||
req = req.header("ChatGPT-Account-Id", acc);
|
||||
}
|
||||
let resp = req.json(&request_body).send().await?;
|
||||
|
||||
let status = resp.status();
|
||||
let ct = resp
|
||||
.headers()
|
||||
.get(CONTENT_TYPE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
|
||||
println!("status: {status}");
|
||||
println!("content-type: {ct}");
|
||||
let parsed = serde_json::from_str::<serde_json::Value>(&body);
|
||||
let parsed_value = match parsed {
|
||||
Ok(ref v) => {
|
||||
println!(
|
||||
"response (pretty JSON):\n{}",
|
||||
serde_json::to_string_pretty(v).unwrap_or(body.clone())
|
||||
);
|
||||
Some(v.clone())
|
||||
}
|
||||
Err(_) => {
|
||||
println!("response (raw):\n{body}");
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
append_error_log(format!(
|
||||
"upload-debug: POST {} status={} body={}",
|
||||
url,
|
||||
status,
|
||||
truncate(&body, 4000)
|
||||
));
|
||||
|
||||
if !status.is_success() {
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let task_id = parsed_value
|
||||
.as_ref()
|
||||
.and_then(|v| v.get("task"))
|
||||
.and_then(|task| task.get("id"))
|
||||
.and_then(|id| id.as_str())
|
||||
.map(str::to_string);
|
||||
|
||||
let task_id = match task_id {
|
||||
Some(id) => {
|
||||
println!("created task id: {id}");
|
||||
id
|
||||
}
|
||||
None => {
|
||||
eprintln!("error: response missing task.id field");
|
||||
std::process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
verify_task_creation(
|
||||
&client,
|
||||
&base_url,
|
||||
&auth_token,
|
||||
account_id.as_deref(),
|
||||
&task_id,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Poll the created task until its turn reaches a terminal status and verify
/// that an `image_asset_pointer` input item is present.
///
/// Polls GET `{base_url}/wham/tasks/{task_id}` up to `MAX_POLLS + 1` times,
/// two seconds apart. On any HTTP failure, id mismatch, timeout, failed
/// status, or missing image pointer the process exits with code 1 after
/// printing the last task detail seen.
async fn verify_task_creation(
    client: &reqwest::Client,
    base_url: &str,
    auth_token: &str,
    account_id: Option<&str>,
    task_id: &str,
) -> anyhow::Result<()> {
    // Normalize away a trailing slash before appending the path.
    let mut url = base_url.to_string();
    if url.ends_with('/') {
        url.pop();
    }
    url = format!("{url}/wham/tasks/{task_id}");

    const MAX_POLLS: usize = 30;
    const POLL_DELAY: Duration = Duration::from_secs(2);

    // Sticky: once an image pointer is seen in any poll it stays true.
    let mut saw_image_pointer = false;
    let mut final_status = String::from("unknown");
    let mut final_error: Option<serde_json::Value> = None;
    // Last successfully fetched task payload, kept for error reporting.
    let mut last_value: Option<serde_json::Value> = None;
    let mut last_body = String::new();

    for attempt in 0..=MAX_POLLS {
        let mut req = client.get(&url).bearer_auth(auth_token);
        if let Some(acc) = account_id {
            req = req.header("ChatGPT-Account-Id", acc);
        }

        let resp = req.send().await?;
        let status = resp.status();
        let body = resp.text().await.unwrap_or_default();
        if !status.is_success() {
            eprintln!(
                "error: GET {} returned {} body={}",
                url,
                status,
                truncate(&body, 2000)
            );
            std::process::exit(1);
        }

        let value: serde_json::Value = serde_json::from_str(&body)
            .map_err(|e| anyhow!("failed to parse task GET response: {e}"))?;
        last_body = body.clone();
        last_value = Some(value.clone());
        // Sanity-check the server echoed back the task we asked about.
        let found_id = value
            .get("task")
            .and_then(|task| task.get("id"))
            .and_then(|id| id.as_str())
            .unwrap_or("");
        if found_id != task_id {
            eprintln!("error: GET {url} returned mismatched task id {found_id}");
            std::process::exit(1);
        }

        // The user-turn field name varies by API version; try both.
        let turn_items = value
            .get("user_turn")
            .or_else(|| value.get("current_user_turn"))
            .and_then(|turn| turn.get("input_items"))
            .and_then(|items| items.as_array())
            .cloned()
            .unwrap_or_default();
        if !saw_image_pointer {
            for item in &turn_items {
                if item.get("type").and_then(|t| t.as_str()) == Some("image_asset_pointer") {
                    saw_image_pointer = true;
                    break;
                }
            }
        }

        // Likewise the assistant-turn field name; try the known variants.
        let turn = value
            .get("turn")
            .or_else(|| value.get("current_assistant_turn"))
            .or_else(|| value.get("current_turn"));
        final_status = turn
            .and_then(|t| t.get("turn_status"))
            .and_then(|s| s.as_str())
            .unwrap_or("unknown")
            .to_string();
        final_error = turn
            .and_then(|t| t.get("error"))
            .filter(|e| !e.is_null())
            .cloned();

        let finished = matches!(
            final_status.as_str(),
            "completed" | "success" | "ready" | "failed" | "error" | "cancelled"
        );

        if finished {
            break;
        }

        // Out of attempts and still not terminal: report and bail.
        if attempt == MAX_POLLS {
            eprintln!(
                "error: task {} did not complete within {} polls (last status={})",
                task_id,
                MAX_POLLS + 1,
                final_status
            );
            if let Some(val) = &last_value {
                eprintln!(
                    "task detail: {}",
                    truncate(&serde_json::to_string_pretty(val).unwrap_or_default(), 2000)
                );
            } else if !last_body.is_empty() {
                eprintln!("task detail: {}", truncate(&last_body, 2000));
            }
            std::process::exit(1);
        }

        sleep(POLL_DELAY).await;
    }

    if !saw_image_pointer {
        eprintln!("error: created task missing image_asset_pointer input item after polling");
        if let Some(val) = &last_value {
            eprintln!(
                "task detail: {}",
                truncate(&serde_json::to_string_pretty(val).unwrap_or_default(), 2000)
            );
        } else if !last_body.is_empty() {
            eprintln!("task detail: {}", truncate(&last_body, 2000));
        }
        std::process::exit(1);
    }

    if final_status.eq_ignore_ascii_case("failed")
        || final_status.eq_ignore_ascii_case("error")
        || final_error.is_some()
    {
        eprintln!(
            "error: task {task_id} completed with status={final_status} error={final_error:?}"
        );
        if let Some(val) = &last_value {
            eprintln!(
                "task detail: {}",
                truncate(&serde_json::to_string_pretty(val).unwrap_or_default(), 2000)
            );
        } else if !last_body.is_empty() {
            eprintln!("task detail: {}", truncate(&last_body, 2000));
        }
        std::process::exit(1);
    }

    println!(
        "verified task {task_id} completed with status={final_status} and contains image_asset_pointer"
    );
    Ok(())
}
|
||||
|
||||
fn build_attachments(raw_paths: &[String]) -> anyhow::Result<Vec<ImageAttachment>> {
|
||||
let mut out = Vec::new();
|
||||
for (idx, raw) in raw_paths.iter().enumerate() {
|
||||
let fs_path = PathBuf::from(raw);
|
||||
if !fs_path.exists() {
|
||||
anyhow::bail!("Attachment {} does not exist: {}", idx + 1, raw);
|
||||
}
|
||||
let submit_path = raw.clone();
|
||||
let display_name = fs_path
|
||||
.file_name()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or(raw)
|
||||
.to_string();
|
||||
let size_bytes = std::fs::metadata(&fs_path)
|
||||
.with_context(|| format!("metadata failed for {raw}"))?
|
||||
.len();
|
||||
out.push(ImageAttachment {
|
||||
id: AttachmentId::new(idx as u64 + 1),
|
||||
fs_path,
|
||||
submit_path,
|
||||
display_name,
|
||||
size_bytes,
|
||||
});
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
/// Queue every attachment on the uploader and wait for all of them to
/// finish, returning outcomes in the same order as `attachments`.
///
/// Polls the uploader every 50 ms; the first failed upload aborts the whole
/// batch with an error. Image dimensions are decoded locally (from the
/// file, not the server response) once an upload succeeds.
async fn perform_uploads(
    uploader: &mut AttachmentUploader,
    attachments: &[ImageAttachment],
) -> anyhow::Result<Vec<UploadOutcome>> {
    // Map uploader-assigned ids back to positions in `attachments` so
    // results can be stored in input order.
    let mut id_to_index = HashMap::new();
    for (idx, att) in attachments.iter().enumerate() {
        uploader
            .start_upload(att.id, att.display_name.clone(), att.fs_path.clone())
            .map_err(|err| {
                anyhow!(
                    "failed to queue upload for {}: {}",
                    att.submit_path,
                    err.message
                )
            })?;
        id_to_index.insert(att.id, idx);
    }

    // One slot per attachment; `None` means still in flight.
    let mut results: Vec<Option<UploadOutcome>> = vec![None; attachments.len()];

    while results.iter().any(std::option::Option::is_none) {
        let updates = uploader.poll();
        if updates.is_empty() {
            sleep(Duration::from_millis(50)).await;
            continue;
        }
        for update in updates {
            match update {
                AttachmentUploadUpdate::Started { id, total_bytes } => {
                    append_error_log(format!(
                        "upload-debug: id={} started total_bytes={:?}",
                        id.raw(),
                        total_bytes
                    ));
                }
                AttachmentUploadUpdate::Finished { id, result } => match result {
                    Ok(success) => {
                        let idx = *id_to_index
                            .get(&id)
                            .ok_or_else(|| anyhow!("unknown attachment id"))?;
                        let att = &attachments[idx];
                        let (width, height) =
                            image_dimensions(&att.fs_path).with_context(|| {
                                format!("failed to decode image dimensions for {}", att.submit_path)
                            })?;
                        append_error_log(format!(
                            "upload-debug: id={} completed pointer={}",
                            id.raw(),
                            success.asset_pointer.value
                        ));
                        results[idx] = Some(UploadOutcome {
                            pointer: success.asset_pointer,
                            submit_path: att.submit_path.clone(),
                            size_bytes: att.size_bytes,
                            width,
                            height,
                        });
                    }
                    Err(err) => {
                        // Fail fast: one bad upload aborts the batch.
                        append_error_log(format!(
                            "upload-debug: id={} failed: {}",
                            id.raw(),
                            err.message
                        ));
                        return Err(anyhow!("upload {} failed: {}", id.raw(), err.message));
                    }
                },
            }
        }
    }

    // All slots filled by the loop condition; `None` here means a logic bug.
    results
        .into_iter()
        .collect::<Option<Vec<UploadOutcome>>>()
        .ok_or_else(|| anyhow!("upload result missing"))
}
|
||||
|
||||
/// Load the ChatGPT bearer token and account id from the local Codex home.
///
/// Returns `(token, account_id)`; both are `None` when auth is missing,
/// empty, or fails to load. This function never errors — failures are
/// printed (and logged) and degrade to `(None, None)` so callers can decide
/// whether auth is mandatory.
async fn load_chatgpt_auth() -> anyhow::Result<(Option<String>, Option<String>)> {
    if let Ok(home) = find_codex_home() {
        let authm = AuthManager::new(home);
        if let Some(auth) = authm.auth() {
            match auth.get_token().await {
                Ok(token) if !token.is_empty() => {
                    // The account id may come from auth state or be embedded
                    // in the token itself.
                    let account_id = auth
                        .get_account_id()
                        .or_else(|| extract_chatgpt_account_id(&token));
                    println!("auth: ChatGPT token loaded ({} chars)", token.len());
                    if let Some(acc) = &account_id {
                        println!("auth: ChatGPT-Account-Id={acc}");
                    }
                    return Ok((Some(token), account_id));
                }
                Ok(_) => {
                    println!("auth: ChatGPT token empty");
                }
                Err(e) => {
                    println!("auth: failed to load token: {e}");
                    append_error_log(format!("upload-debug: auth token load failed: {e}"));
                }
            }
        } else {
            println!("auth: no ChatGPT auth.json");
        }
    } else {
        println!("auth: could not resolve CODEX_HOME");
    }
    Ok((None, None))
}
|
||||
|
||||
/// Truncate `text` to at most `max` bytes, appending `…` when trimmed.
///
/// The cut point is moved back to the nearest UTF-8 character boundary:
/// the previous `&text[..max]` slice panicked whenever `max` landed inside
/// a multi-byte character (easy to hit since this is applied to arbitrary
/// JSON response bodies).
fn truncate(text: &str, max: usize) -> String {
    if text.len() <= max {
        return text.to_string();
    }
    // Walk back from `max` until we land on a char boundary; index 0 is
    // always a boundary, so this terminates.
    let mut cut = max;
    while !text.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}…", &text[..cut])
}
|
||||
@@ -13,12 +13,79 @@ struct CodeEnvironment {
|
||||
is_pinned: Option<bool>,
|
||||
#[serde(default)]
|
||||
task_count: Option<i64>,
|
||||
#[serde(default)]
|
||||
repo_map: Option<HashMap<String, GitRepository>>,
|
||||
}
|
||||
|
||||
/// Repository entry inside an environment's `repo_map`, as returned by the
/// backend. All fields are optional in the wire format.
#[derive(Debug, Clone, serde::Deserialize)]
struct GitRepository {
    // e.g. "owner/repo"; used to match repo hints.
    #[serde(default)]
    repository_full_name: Option<String>,
    #[serde(default)]
    default_branch: Option<String>,
}
|
||||
|
||||
/// Result of environment auto-detection: the chosen environment plus
/// optional metadata for display and branch defaulting.
#[derive(Debug, Clone)]
pub struct AutodetectSelection {
    pub id: String,
    // Human-readable environment label, when the backend provided one.
    pub label: Option<String>,
    // Default branch derived from the environment's repo map, if any.
    pub default_branch: Option<String>,
}
|
||||
|
||||
/// Trim a branch name, dropping it entirely when absent or blank.
fn clean_branch(branch: Option<&str>) -> Option<String> {
    let trimmed = branch?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
|
||||
|
||||
fn default_branch_from_env(env: &CodeEnvironment, repo_hint: Option<&str>) -> Option<String> {
|
||||
let repo_map = env.repo_map.as_ref()?;
|
||||
if let Some(hint) = repo_hint {
|
||||
if let Some(repo) = repo_map
|
||||
.values()
|
||||
.find(|repo| repo.repository_full_name.as_deref() == Some(hint))
|
||||
&& let Some(branch) = clean_branch(repo.default_branch.as_deref())
|
||||
{
|
||||
return Some(branch);
|
||||
}
|
||||
if let Some(repo) = repo_map.get(hint)
|
||||
&& let Some(branch) = clean_branch(repo.default_branch.as_deref())
|
||||
{
|
||||
return Some(branch);
|
||||
}
|
||||
}
|
||||
repo_map
|
||||
.values()
|
||||
.find_map(|repo| clean_branch(repo.default_branch.as_deref()))
|
||||
}
|
||||
|
||||
/// Insert or merge one backend environment into the accumulated row map.
///
/// New ids insert a fresh row; existing rows are merged: a missing label or
/// repo hint is filled in, the pinned flag is OR-accumulated, and a newly
/// discovered default branch overwrites the old one.
fn merge_environment_row(
    map: &mut HashMap<String, crate::app::EnvironmentRow>,
    env: &CodeEnvironment,
    repo_hint: Option<&str>,
) {
    let default_branch = default_branch_from_env(env, repo_hint);
    let repo_hint_owned = repo_hint.map(str::to_string);
    let entry = map
        .entry(env.id.clone())
        .or_insert_with(|| crate::app::EnvironmentRow {
            id: env.id.clone(),
            label: env.label.clone(),
            is_pinned: env.is_pinned.unwrap_or(false),
            repo_hints: repo_hint_owned.clone(),
            default_branch: default_branch.clone(),
        });
    // Merge semantics for pre-existing rows: fill gaps, never clear data.
    if entry.label.is_none() {
        entry.label = env.label.clone();
    }
    entry.is_pinned = entry.is_pinned || env.is_pinned.unwrap_or(false);
    if entry.repo_hints.is_none() {
        entry.repo_hints = repo_hint_owned;
    }
    // A branch found in this row wins over whatever was recorded before.
    if let Some(branch) = default_branch {
        entry.default_branch = Some(branch);
    }
}
|
||||
|
||||
pub async fn autodetect_environment_id(
|
||||
@@ -62,6 +129,7 @@ pub async fn autodetect_environment_id(
|
||||
return Ok(AutodetectSelection {
|
||||
id: env.id.clone(),
|
||||
label: env.label.as_deref().map(str::to_owned),
|
||||
default_branch: default_branch_from_env(&env, None),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -101,6 +169,7 @@ pub async fn autodetect_environment_id(
|
||||
return Ok(AutodetectSelection {
|
||||
id: env.id.clone(),
|
||||
label: env.label.as_deref().map(str::to_owned),
|
||||
default_branch: default_branch_from_env(&env, None),
|
||||
});
|
||||
}
|
||||
anyhow::bail!("no environments available")
|
||||
@@ -276,23 +345,9 @@ pub async fn list_environments(
|
||||
match get_json::<Vec<CodeEnvironment>>(&url, headers).await {
|
||||
Ok(list) => {
|
||||
info!("env_tui: by-repo {}:{} -> {} envs", owner, repo, list.len());
|
||||
for e in list {
|
||||
let entry =
|
||||
map.entry(e.id.clone())
|
||||
.or_insert_with(|| crate::app::EnvironmentRow {
|
||||
id: e.id.clone(),
|
||||
label: e.label.clone(),
|
||||
is_pinned: e.is_pinned.unwrap_or(false),
|
||||
repo_hints: Some(format!("{owner}/{repo}")),
|
||||
});
|
||||
// Merge: keep label if present, or use new; accumulate pinned flag
|
||||
if entry.label.is_none() {
|
||||
entry.label = e.label.clone();
|
||||
}
|
||||
entry.is_pinned = entry.is_pinned || e.is_pinned.unwrap_or(false);
|
||||
if entry.repo_hints.is_none() {
|
||||
entry.repo_hints = Some(format!("{owner}/{repo}"));
|
||||
}
|
||||
for env in list {
|
||||
let repo_hint = format!("{owner}/{repo}");
|
||||
merge_environment_row(&mut map, &env, Some(repo_hint.as_str()));
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
@@ -314,19 +369,8 @@ pub async fn list_environments(
|
||||
match get_json::<Vec<CodeEnvironment>>(&list_url, headers).await {
|
||||
Ok(list) => {
|
||||
info!("env_tui: global list -> {} envs", list.len());
|
||||
for e in list {
|
||||
let entry = map
|
||||
.entry(e.id.clone())
|
||||
.or_insert_with(|| crate::app::EnvironmentRow {
|
||||
id: e.id.clone(),
|
||||
label: e.label.clone(),
|
||||
is_pinned: e.is_pinned.unwrap_or(false),
|
||||
repo_hints: None,
|
||||
});
|
||||
if entry.label.is_none() {
|
||||
entry.label = e.label.clone();
|
||||
}
|
||||
entry.is_pinned = entry.is_pinned || e.is_pinned.unwrap_or(false);
|
||||
for env in list {
|
||||
merge_environment_row(&mut map, &env, None);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
|
||||
@@ -1,20 +1,167 @@
|
||||
mod app;
|
||||
mod attachments;
|
||||
mod cli;
|
||||
pub mod env_detect;
|
||||
mod new_task;
|
||||
pub mod scrollable_diff;
|
||||
mod ui;
|
||||
pub mod util;
|
||||
pub use attachments::AttachmentAssetPointer;
|
||||
pub use attachments::AttachmentId;
|
||||
pub use attachments::AttachmentKind;
|
||||
pub use attachments::AttachmentUploadHttpConfig;
|
||||
pub use attachments::AttachmentUploadMode;
|
||||
pub use attachments::AttachmentUploadUpdate;
|
||||
pub use attachments::AttachmentUploader;
|
||||
pub use attachments::pointer_id_from_value;
|
||||
pub use cli::Cli;
|
||||
|
||||
use crate::new_task::AttachmentSubmission;
|
||||
use crate::new_task::SubmissionAction;
|
||||
use crate::new_task::SubmitPhase;
|
||||
use image::image_dimensions;
|
||||
use std::fs;
|
||||
use std::fs::File;
|
||||
use std::io::IsTerminal;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tracing::info;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
use util::append_error_log;
|
||||
use util::normalize_base_url;
|
||||
use util::set_user_agent_suffix;
|
||||
|
||||
fn attachment_upload_mode_for(
|
||||
backend: &dyn codex_cloud_tasks_client::CloudBackend,
|
||||
) -> AttachmentUploadMode {
|
||||
backend
|
||||
.file_service_config()
|
||||
.map(|cfg| {
|
||||
if cfg.bearer_token.is_none() {
|
||||
return AttachmentUploadMode::Disabled;
|
||||
}
|
||||
AttachmentUploadMode::Http(AttachmentUploadHttpConfig {
|
||||
base_url: cfg.base_url,
|
||||
bearer_token: cfg.bearer_token,
|
||||
chatgpt_account_id: cfg.chatgpt_account_id,
|
||||
user_agent: cfg.user_agent,
|
||||
})
|
||||
})
|
||||
.unwrap_or(AttachmentUploadMode::Disabled)
|
||||
}
|
||||
|
||||
fn spawn_task_submission(
|
||||
backend: Arc<dyn codex_cloud_tasks_client::CloudBackend>,
|
||||
env: String,
|
||||
prompt: String,
|
||||
attachments: Vec<AttachmentSubmission>,
|
||||
tx: tokio::sync::mpsc::UnboundedSender<app::AppEvent>,
|
||||
) {
|
||||
tokio::spawn(async move {
|
||||
let attachments_payload: Result<Vec<_>, _> = attachments
|
||||
.into_iter()
|
||||
.map(map_attachment_submission)
|
||||
.collect();
|
||||
|
||||
let payload = match attachments_payload {
|
||||
Ok(p) => p,
|
||||
Err(err) => {
|
||||
append_error_log(format!("new-task: submission aborted: {err}"));
|
||||
let _ = tx.send(app::AppEvent::NewTaskSubmitted(Err(err)));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let result = codex_cloud_tasks_client::CloudBackend::create_task(
|
||||
&*backend,
|
||||
&env,
|
||||
&prompt,
|
||||
"main",
|
||||
false,
|
||||
payload.as_slice(),
|
||||
)
|
||||
.await;
|
||||
|
||||
let evt = match result {
|
||||
Ok(ok) => app::AppEvent::NewTaskSubmitted(Ok(ok)),
|
||||
Err(e) => app::AppEvent::NewTaskSubmitted(Err(format!("{e}"))),
|
||||
};
|
||||
let _ = tx.send(evt);
|
||||
});
|
||||
}
|
||||
|
||||
fn map_attachment_submission(
|
||||
att: AttachmentSubmission,
|
||||
) -> Result<codex_cloud_tasks_client::AttachmentReference, String> {
|
||||
let pointer_value = att.pointer.value.clone();
|
||||
let pointer_id = pointer_id_from_value(&pointer_value)
|
||||
.ok_or_else(|| format!("Attachment missing upload pointer for {}", att.label))?;
|
||||
|
||||
let mut size_bytes = att
|
||||
.fs_path
|
||||
.as_deref()
|
||||
.and_then(|path| fs::metadata(path).ok().map(|m| m.len()));
|
||||
|
||||
let mut width = None;
|
||||
let mut height = None;
|
||||
let mut kind = match att.kind {
|
||||
AttachmentKind::Image => codex_cloud_tasks_client::AttachmentKind::Image,
|
||||
AttachmentKind::File => codex_cloud_tasks_client::AttachmentKind::File,
|
||||
};
|
||||
|
||||
if matches!(att.kind, AttachmentKind::Image) {
|
||||
if let Some(ref path) = att.fs_path {
|
||||
if let Some((w, h)) = detect_image_dimensions(path) {
|
||||
width = Some(w);
|
||||
height = Some(h);
|
||||
}
|
||||
if size_bytes.is_none() {
|
||||
size_bytes = fs::metadata(path).ok().map(|m| m.len());
|
||||
}
|
||||
}
|
||||
if width.is_none() || height.is_none() || size_bytes.is_none() {
|
||||
kind = codex_cloud_tasks_client::AttachmentKind::File;
|
||||
width = None;
|
||||
height = None;
|
||||
}
|
||||
}
|
||||
|
||||
append_error_log(format!(
|
||||
"attachment.map label={} pointer={} kind={:?} size={:?} width={:?} height={:?}",
|
||||
att.label, pointer_value, kind, size_bytes, width, height
|
||||
));
|
||||
|
||||
Ok(codex_cloud_tasks_client::AttachmentReference {
|
||||
sediment_id: pointer_id,
|
||||
asset_pointer: pointer_value,
|
||||
path: Some(att.path),
|
||||
display_name: Some(att.display_name),
|
||||
kind,
|
||||
size_bytes,
|
||||
width,
|
||||
height,
|
||||
})
|
||||
}
|
||||
|
||||
fn detect_image_dimensions(path: &str) -> Option<(u32, u32)> {
|
||||
if let Ok((w, h)) = image_dimensions(path) {
|
||||
return Some((w, h));
|
||||
}
|
||||
|
||||
let mut file = File::open(path).ok()?;
|
||||
let mut header = [0u8; 24];
|
||||
file.read_exact(&mut header).ok()?;
|
||||
let png_signature = [137, 80, 78, 71, 13, 10, 26, 10];
|
||||
if header[..8] == png_signature {
|
||||
let width = u32::from_be_bytes([header[16], header[17], header[18], header[19]]);
|
||||
let height = u32::from_be_bytes([header[20], header[21], header[22], header[23]]);
|
||||
return Some((width, height));
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
// logging helper lives in util module
|
||||
|
||||
// (no standalone patch summarizer needed – UI displays raw diffs)
|
||||
@@ -47,8 +194,9 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
Arc::new(codex_cloud_tasks_client::MockClient)
|
||||
} else {
|
||||
// Build an HTTP client against the configured (or default) base URL.
|
||||
let base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
|
||||
let raw_base = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
|
||||
.unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
|
||||
let base_url = normalize_base_url(&raw_base);
|
||||
let ua = codex_core::default_client::get_codex_user_agent();
|
||||
let mut http =
|
||||
codex_cloud_tasks_client::HttpClient::new(base_url.clone())?.with_user_agent(ua);
|
||||
@@ -272,6 +420,18 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
if page.composer.is_in_paste_burst() {
|
||||
let _ = frame_tx.send(Instant::now() + codex_tui::ComposerInput::recommended_flush_delay());
|
||||
}
|
||||
let refresh = page.refresh_mention_state();
|
||||
if refresh.state_changed && !needs_redraw {
|
||||
needs_redraw = true;
|
||||
}
|
||||
if refresh.search_requested {
|
||||
let _ = frame_tx.send(Instant::now() + Duration::from_millis(75));
|
||||
}
|
||||
if page.is_submitting() || page.pending_upload_count() > 0 {
|
||||
page.submit_throbber_mut().calc_next();
|
||||
needs_redraw = true;
|
||||
let _ = frame_tx.send(Instant::now() + Duration::from_millis(100));
|
||||
}
|
||||
}
|
||||
// Advance throbber only while loading.
|
||||
if app.refresh_inflight
|
||||
@@ -341,7 +501,9 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
}
|
||||
Err(msg) => {
|
||||
append_error_log(format!("new-task: submit failed: {msg}"));
|
||||
if let Some(page) = app.new_task.as_mut() { page.submitting = false; }
|
||||
if let Some(page) = app.new_task.as_mut() {
|
||||
page.reset_submission_state();
|
||||
}
|
||||
app.status = format!("Submit failed: {msg}. See error.log for details.");
|
||||
needs_redraw = true;
|
||||
let _ = frame_tx.send(Instant::now());
|
||||
@@ -389,14 +551,49 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
sel.label.clone().unwrap_or_else(|| "<none>".to_string())
|
||||
));
|
||||
// Preseed environments with detected label so header can show it even before list arrives
|
||||
if let Some(lbl) = sel.label.clone() {
|
||||
let present = app.environments.iter().any(|r| r.id == sel.id);
|
||||
if !present {
|
||||
app.environments.push(app::EnvironmentRow { id: sel.id.clone(), label: Some(lbl), is_pinned: false, repo_hints: None });
|
||||
match app
|
||||
.environments
|
||||
.iter_mut()
|
||||
.find(|row| row.id == sel.id)
|
||||
{
|
||||
Some(row) => {
|
||||
if row.label.is_none() {
|
||||
row.label = sel.label.clone();
|
||||
}
|
||||
if sel.default_branch.is_some() {
|
||||
row.default_branch = sel.default_branch.clone();
|
||||
}
|
||||
}
|
||||
None => {
|
||||
app.environments.push(app::EnvironmentRow {
|
||||
id: sel.id.clone(),
|
||||
label: sel.label.clone(),
|
||||
is_pinned: false,
|
||||
repo_hints: None,
|
||||
default_branch: sel.default_branch.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
let mut status_override: Option<String> = None;
|
||||
if let Some(branch) = sel.default_branch.as_deref() {
|
||||
match crate::util::switch_to_branch(branch) {
|
||||
Ok(()) => {
|
||||
status_override =
|
||||
Some(format!("Loading tasks… (on {branch})"));
|
||||
}
|
||||
Err(err) => {
|
||||
append_error_log(format!(
|
||||
"env.select: failed to switch to {branch}: {err}"
|
||||
));
|
||||
status_override = Some(format!(
|
||||
"Loading tasks… (failed to switch to {branch})"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
app.env_filter = Some(sel.id);
|
||||
app.status = "Loading tasks…".to_string();
|
||||
app.status = status_override
|
||||
.unwrap_or_else(|| "Loading tasks…".to_string());
|
||||
app.refresh_inflight = true;
|
||||
app.list_generation = app.list_generation.saturating_add(1);
|
||||
app.in_flight.clear();
|
||||
@@ -663,14 +860,21 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
}
|
||||
}
|
||||
needs_redraw = true;
|
||||
} else if let Some(page) = app.new_task.as_mut() {
|
||||
if !page.submitting {
|
||||
} else if let Some(page) = app.new_task.as_mut()
|
||||
&& !page.is_submitting() {
|
||||
if page.composer.handle_paste(pasted) {
|
||||
needs_redraw = true;
|
||||
}
|
||||
page.prune_unreferenced_attachments();
|
||||
let refresh = page.refresh_mention_state();
|
||||
if refresh.state_changed && !needs_redraw {
|
||||
needs_redraw = true;
|
||||
}
|
||||
if refresh.search_requested {
|
||||
let _ = frame_tx.send(Instant::now() + Duration::from_millis(75));
|
||||
}
|
||||
let _ = frame_tx.send(Instant::now());
|
||||
}
|
||||
}
|
||||
}
|
||||
Some(Ok(Event::Key(key))) if matches!(key.kind, KeyEventKind::Press | KeyEventKind::Repeat) => {
|
||||
// Treat Ctrl-C like pressing 'q' in the current context.
|
||||
@@ -733,57 +937,138 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
}
|
||||
|
||||
// New Task page has priority when active, unless an env modal is open.
|
||||
let mention_consumed = if app.env_modal.is_none() {
|
||||
if let Some(page) = app.new_task.as_mut() {
|
||||
if page.mention_active() {
|
||||
match key.code {
|
||||
KeyCode::Up => {
|
||||
page.move_mention_selection(-1);
|
||||
needs_redraw = true;
|
||||
true
|
||||
}
|
||||
KeyCode::Down => {
|
||||
page.move_mention_selection(1);
|
||||
needs_redraw = true;
|
||||
true
|
||||
}
|
||||
KeyCode::Tab if key.modifiers.is_empty() => {
|
||||
if page.accept_current_mention() {
|
||||
needs_redraw = true;
|
||||
let _ = frame_tx.send(Instant::now());
|
||||
}
|
||||
true
|
||||
}
|
||||
KeyCode::Enter if key.modifiers.is_empty() => {
|
||||
if page.accept_current_mention() {
|
||||
needs_redraw = true;
|
||||
let _ = frame_tx.send(Instant::now());
|
||||
}
|
||||
true
|
||||
}
|
||||
KeyCode::Esc => {
|
||||
page.cancel_mention();
|
||||
needs_redraw = true;
|
||||
true
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} else {
|
||||
false
|
||||
};
|
||||
if mention_consumed {
|
||||
render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?;
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(page) = app.new_task.as_mut() {
|
||||
if app.env_modal.is_some() {
|
||||
// Defer handling to env-modal branch below.
|
||||
} else {
|
||||
match key.code {
|
||||
KeyCode::Esc => {
|
||||
app.new_task = None;
|
||||
app.status = "Canceled new task".to_string();
|
||||
needs_redraw = true;
|
||||
}
|
||||
_ => {
|
||||
if page.submitting {
|
||||
// Ignore input while submitting
|
||||
} else if let codex_tui::ComposerAction::Submitted(text) = page.composer.input(key) {
|
||||
// Submit only if we have an env id
|
||||
if let Some(env) = page.env_id.clone() {
|
||||
append_error_log(format!(
|
||||
"new-task: submit env={} size={}",
|
||||
env,
|
||||
text.chars().count()
|
||||
));
|
||||
page.submitting = true;
|
||||
app.status = "Submitting new task…".to_string();
|
||||
let tx2 = tx.clone();
|
||||
let backend2 = backend.clone();
|
||||
tokio::spawn(async move {
|
||||
let result = codex_cloud_tasks_client::CloudBackend::create_task(&*backend2, &env, &text, "main", false).await;
|
||||
let evt = match result {
|
||||
Ok(ok) => app::AppEvent::NewTaskSubmitted(Ok(ok)),
|
||||
Err(e) => app::AppEvent::NewTaskSubmitted(Err(format!("{e}"))),
|
||||
};
|
||||
let _ = tx2.send(evt);
|
||||
});
|
||||
} else {
|
||||
app.status = "No environment selected (press 'e' to choose)".to_string();
|
||||
match key.code {
|
||||
KeyCode::Esc => {
|
||||
app.new_task = None;
|
||||
app.status = "Canceled new task".to_string();
|
||||
needs_redraw = true;
|
||||
}
|
||||
_ => {
|
||||
if page.is_submitting() {
|
||||
// Ignore input while submitting
|
||||
} else {
|
||||
needs_redraw = true;
|
||||
let action = page.composer.input(key);
|
||||
if !matches!(action, codex_tui::ComposerAction::Submitted(_)) {
|
||||
page.prune_unreferenced_attachments();
|
||||
}
|
||||
if let codex_tui::ComposerAction::Submitted(text) = action {
|
||||
if let Some(env) = page.env_id.clone() {
|
||||
match page.prepare_submission(text.clone()) {
|
||||
SubmissionAction::Blocked(msg) => {
|
||||
app.status = msg;
|
||||
page.reset_submission_state();
|
||||
}
|
||||
SubmissionAction::WaitForUploads => {
|
||||
let pending = page.pending_upload_count();
|
||||
app.status = if pending == 0 {
|
||||
"Waiting for attachments to finish uploading…".to_string()
|
||||
} else {
|
||||
format!(
|
||||
"Waiting for {pending} attachment upload{}…",
|
||||
if pending == 1 { "" } else { "s" }
|
||||
)
|
||||
};
|
||||
append_error_log("new-task: waiting for attachment uploads");
|
||||
let _ = frame_tx.send(Instant::now() + Duration::from_millis(100));
|
||||
needs_redraw = true;
|
||||
}
|
||||
SubmissionAction::StartSending { prompt, attachments } => {
|
||||
app.status = "Submitting new task…".to_string();
|
||||
append_error_log(format!(
|
||||
"new-task: submit env={} size={} attachments={}",
|
||||
env,
|
||||
prompt.chars().count(),
|
||||
attachments.len()
|
||||
));
|
||||
spawn_task_submission(
|
||||
backend.clone(),
|
||||
env,
|
||||
prompt,
|
||||
attachments,
|
||||
tx.clone(),
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
app.status = "No environment selected (press 'e' to choose)".to_string();
|
||||
}
|
||||
}
|
||||
let refresh = page.refresh_mention_state();
|
||||
if refresh.state_changed && !needs_redraw {
|
||||
needs_redraw = true;
|
||||
}
|
||||
if refresh.search_requested {
|
||||
let _ = frame_tx.send(Instant::now() + Duration::from_millis(75));
|
||||
}
|
||||
}
|
||||
// If paste‑burst is active, schedule a micro‑flush frame.
|
||||
if page.composer.is_in_paste_burst() {
|
||||
let _ = frame_tx.send(Instant::now() + codex_tui::ComposerInput::recommended_flush_delay());
|
||||
}
|
||||
// Always schedule an immediate redraw for key edits in the composer.
|
||||
let _ = frame_tx.send(Instant::now());
|
||||
// Draw now so non-char edits (e.g., Option+Delete) reflect instantly.
|
||||
render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?;
|
||||
}
|
||||
needs_redraw = true;
|
||||
// If paste‑burst is active, schedule a micro‑flush frame.
|
||||
if page.composer.is_in_paste_burst() {
|
||||
let _ = frame_tx.send(Instant::now() + codex_tui::ComposerInput::recommended_flush_delay());
|
||||
}
|
||||
// Always schedule an immediate redraw for key edits in the composer.
|
||||
let _ = frame_tx.send(Instant::now());
|
||||
// Draw now so non-char edits (e.g., Option+Delete) reflect instantly.
|
||||
render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?;
|
||||
}
|
||||
}
|
||||
continue;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// If a diff overlay is open, handle its keys first.
|
||||
if app.apply_modal.is_some() {
|
||||
// Simple apply confirmation modal: y apply, p preflight, n/Esc cancel
|
||||
@@ -1045,10 +1330,17 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
KeyCode::PageDown | KeyCode::Char(' ') => { if let Some(m) = app.env_modal.as_mut() { let step = 10usize; m.selected = m.selected.saturating_add(step); } needs_redraw = true; }
|
||||
KeyCode::PageUp => { if let Some(m) = app.env_modal.as_mut() { let step = 10usize; m.selected = m.selected.saturating_sub(step); } needs_redraw = true; }
|
||||
KeyCode::Char('n') => {
|
||||
let upload_mode = attachment_upload_mode_for(&*backend);
|
||||
if app.env_filter.is_none() {
|
||||
app.new_task = Some(crate::new_task::NewTaskPage::new(None));
|
||||
app.new_task = Some(crate::new_task::NewTaskPage::new(
|
||||
None,
|
||||
upload_mode.clone(),
|
||||
));
|
||||
} else {
|
||||
app.new_task = Some(crate::new_task::NewTaskPage::new(app.env_filter.clone()));
|
||||
app.new_task = Some(crate::new_task::NewTaskPage::new(
|
||||
app.env_filter.clone(),
|
||||
upload_mode,
|
||||
));
|
||||
}
|
||||
app.status = "New Task: Enter to submit; Esc to cancel".to_string();
|
||||
needs_redraw = true;
|
||||
@@ -1067,8 +1359,11 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
}).collect();
|
||||
// Keep original order (already sorted) — no need to re-sort
|
||||
let idx = state.selected;
|
||||
if idx == 0 { app.env_filter = None; append_error_log("env.select: All"); }
|
||||
else {
|
||||
let mut status_override: Option<String> = None;
|
||||
if idx == 0 {
|
||||
app.env_filter = None;
|
||||
append_error_log("env.select: All");
|
||||
} else {
|
||||
let env_idx = idx.saturating_sub(1);
|
||||
if let Some(row) = filtered.get(env_idx) {
|
||||
append_error_log(format!(
|
||||
@@ -1077,6 +1372,23 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
row.label.clone().unwrap_or_else(|| "<none>".to_string())
|
||||
));
|
||||
app.env_filter = Some(row.id.clone());
|
||||
if let Some(branch) = row.default_branch.as_deref() {
|
||||
match crate::util::switch_to_branch(branch) {
|
||||
Ok(()) => {
|
||||
status_override = Some(format!(
|
||||
"Loading tasks… (on {branch})"
|
||||
));
|
||||
}
|
||||
Err(err) => {
|
||||
append_error_log(format!(
|
||||
"env.select: failed to switch to {branch}: {err}"
|
||||
));
|
||||
status_override = Some(format!(
|
||||
"Loading tasks… (failed to switch to {branch})"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// If New Task page is open, reflect the new selection in its header immediately.
|
||||
@@ -1084,7 +1396,8 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
page.env_id = app.env_filter.clone();
|
||||
}
|
||||
// Trigger tasks refresh with the selected filter
|
||||
app.status = "Loading tasks…".to_string();
|
||||
app.status = status_override
|
||||
.unwrap_or_else(|| "Loading tasks…".to_string());
|
||||
app.refresh_inflight = true;
|
||||
app.list_generation = app.list_generation.saturating_add(1);
|
||||
app.in_flight.clear();
|
||||
@@ -1155,7 +1468,11 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
}
|
||||
KeyCode::Char('n') => {
|
||||
let env_opt = app.env_filter.clone();
|
||||
app.new_task = Some(crate::new_task::NewTaskPage::new(env_opt));
|
||||
let upload_mode = attachment_upload_mode_for(&*backend);
|
||||
app.new_task = Some(crate::new_task::NewTaskPage::new(
|
||||
env_opt,
|
||||
upload_mode,
|
||||
));
|
||||
app.status = "New Task: Enter to submit; Esc to cancel".to_string();
|
||||
needs_redraw = true;
|
||||
}
|
||||
@@ -1332,6 +1649,57 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
|
||||
render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(page) = app.new_task.as_mut() {
|
||||
let upload_poll = page.poll_attachment_uploads();
|
||||
if upload_poll.state_changed {
|
||||
needs_redraw = true;
|
||||
}
|
||||
if let Some(err) = upload_poll.failed {
|
||||
app.status = err;
|
||||
needs_redraw = true;
|
||||
}
|
||||
let mut schedule_upload_poll = false;
|
||||
|
||||
if matches!(page.submit_phase(), SubmitPhase::WaitingForUploads) {
|
||||
if let Some(payload) = page.take_ready_submission() {
|
||||
if let Some(env) = page.env_id.clone() {
|
||||
app.status = "Submitting new task…".to_string();
|
||||
append_error_log(format!(
|
||||
"new-task: submit env={} size={} attachments={}",
|
||||
env,
|
||||
payload.prompt.chars().count(),
|
||||
payload.attachments.len()
|
||||
));
|
||||
spawn_task_submission(
|
||||
backend.clone(),
|
||||
env,
|
||||
payload.prompt,
|
||||
payload.attachments,
|
||||
tx.clone(),
|
||||
);
|
||||
needs_redraw = true;
|
||||
} else {
|
||||
page.reset_submission_state();
|
||||
app.status = "No environment selected (press 'e' to choose)".to_string();
|
||||
needs_redraw = true;
|
||||
}
|
||||
} else if page.has_pending_referenced_uploads() {
|
||||
schedule_upload_poll = true;
|
||||
}
|
||||
}
|
||||
|
||||
if upload_poll.has_pending {
|
||||
schedule_upload_poll = true;
|
||||
}
|
||||
|
||||
if schedule_upload_poll {
|
||||
let _ = frame_tx.send(Instant::now() + Duration::from_millis(100));
|
||||
}
|
||||
if page.poll_mention_search() {
|
||||
needs_redraw = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Restore terminal
|
||||
@@ -1456,3 +1824,62 @@ fn pretty_lines_from_error(raw: &str) -> Vec<String> {
|
||||
}
|
||||
lines
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::attachments::AttachmentAssetPointer;
|
||||
use crate::attachments::upload::AttachmentPointerKind;
|
||||
use base64::Engine as _;
|
||||
use tempfile::NamedTempFile;
|
||||
|
||||
fn make_attachment_submission(
|
||||
pointer_kind: AttachmentPointerKind,
|
||||
fs_path: Option<String>,
|
||||
) -> AttachmentSubmission {
|
||||
let kind = match pointer_kind {
|
||||
AttachmentPointerKind::Image => AttachmentKind::Image,
|
||||
_ => AttachmentKind::File,
|
||||
};
|
||||
AttachmentSubmission {
|
||||
id: AttachmentId::new(1),
|
||||
label: "image.png".to_string(),
|
||||
path: "image.png".to_string(),
|
||||
fs_path,
|
||||
pointer: AttachmentAssetPointer::new(pointer_kind, "file-service://file_123"),
|
||||
display_name: "image.png".to_string(),
|
||||
kind,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn map_attachment_submission_image_with_metadata() {
|
||||
let png_bytes = base64::engine::general_purpose::STANDARD
|
||||
.decode("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8/5+hHgAFgwJ/lkEc1QAAAABJRU5ErkJggg==")
|
||||
.expect("decode");
|
||||
let tmp = NamedTempFile::new().expect("tmp");
|
||||
std::fs::write(tmp.path(), png_bytes).expect("write png");
|
||||
|
||||
let submission = make_attachment_submission(
|
||||
AttachmentPointerKind::Image,
|
||||
Some(tmp.path().display().to_string()),
|
||||
);
|
||||
|
||||
let mapped = map_attachment_submission(submission).expect("map ok");
|
||||
assert_eq!(mapped.kind, codex_cloud_tasks_client::AttachmentKind::Image);
|
||||
assert_eq!(mapped.width, Some(1));
|
||||
assert_eq!(mapped.height, Some(1));
|
||||
assert!(mapped.size_bytes.unwrap_or(0) > 0);
|
||||
assert_eq!(mapped.asset_pointer, "file-service://file_123".to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn map_attachment_submission_image_without_metadata_falls_back() {
|
||||
let submission = make_attachment_submission(AttachmentPointerKind::Image, None);
|
||||
let mapped = map_attachment_submission(submission).expect("map ok");
|
||||
assert_eq!(mapped.kind, codex_cloud_tasks_client::AttachmentKind::File);
|
||||
assert_eq!(mapped.width, None);
|
||||
assert_eq!(mapped.height, None);
|
||||
assert_eq!(mapped.size_bytes, None);
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -15,10 +15,13 @@ use ratatui::widgets::ListItem;
|
||||
use ratatui::widgets::ListState;
|
||||
use ratatui::widgets::Padding;
|
||||
use ratatui::widgets::Paragraph;
|
||||
use ratatui::widgets::Wrap;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use crate::app::App;
|
||||
use crate::app::AttemptView;
|
||||
use crate::new_task::AttachmentUploadDisplay;
|
||||
use crate::new_task::SubmitPhase;
|
||||
use chrono::Local;
|
||||
use chrono::Utc;
|
||||
use codex_cloud_tasks_client::AttemptStatus;
|
||||
@@ -138,24 +141,201 @@ pub fn draw_new_task_page(frame: &mut Frame, area: Rect, app: &mut App) {
|
||||
.unwrap_or(3)
|
||||
.clamp(3, max_allowed);
|
||||
|
||||
// Anchor the composer to the bottom-left by allocating a flexible spacer
|
||||
// above it and a fixed `desired`-height area for the composer.
|
||||
let rows = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints([Constraint::Min(1), Constraint::Length(desired)])
|
||||
.split(content);
|
||||
let composer_area = rows[1];
|
||||
let (mention_area, composer_area) = if let Some(page) = app.new_task.as_ref() {
|
||||
compute_new_task_areas(content, desired, page)
|
||||
} else {
|
||||
let rows = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints([Constraint::Min(1), Constraint::Length(desired)])
|
||||
.split(content);
|
||||
(None, rows[1])
|
||||
};
|
||||
|
||||
if let Some(page) = app.new_task.as_ref() {
|
||||
page.composer.render_ref(composer_area, frame.buffer_mut());
|
||||
// Composer renders its own footer hints; no extra row here.
|
||||
let submitting = app
|
||||
.new_task
|
||||
.as_ref()
|
||||
.map(|p| p.submit_phase() != SubmitPhase::Idle)
|
||||
.unwrap_or(false);
|
||||
|
||||
if let Some(area) = mention_area
|
||||
&& !submitting
|
||||
&& let Some(page) = app.new_task.as_ref()
|
||||
{
|
||||
draw_mention_picker(frame, area, page);
|
||||
}
|
||||
|
||||
// Place cursor where composer wants it
|
||||
if let Some(page) = app.new_task.as_ref()
|
||||
&& let Some((x, y)) = page.composer.cursor_pos(composer_area)
|
||||
{
|
||||
frame.set_cursor_position((x, y));
|
||||
if submitting {
|
||||
if let Some(page) = app.new_task.as_mut() {
|
||||
draw_submission_status(frame, composer_area, page);
|
||||
}
|
||||
} else if let Some(page) = app.new_task.as_ref() {
|
||||
page.composer.render_ref(composer_area, frame.buffer_mut());
|
||||
if let Some((x, y)) = page.composer.cursor_pos(composer_area) {
|
||||
frame.set_cursor_position((x, y));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn compute_new_task_areas(
|
||||
content: Rect,
|
||||
desired: u16,
|
||||
page: &crate::new_task::NewTaskPage,
|
||||
) -> (Option<Rect>, Rect) {
|
||||
let available_for_mention = content.height.saturating_sub(desired);
|
||||
let mention_height = if page.mention_state.current.is_some() && available_for_mention >= 3 {
|
||||
page.mention_picker
|
||||
.render_height()
|
||||
.min(available_for_mention)
|
||||
.max(3)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
if mention_height > 0 {
|
||||
let rows = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints([
|
||||
Constraint::Min(1),
|
||||
Constraint::Length(mention_height),
|
||||
Constraint::Length(desired),
|
||||
])
|
||||
.split(content);
|
||||
(Some(rows[1]), rows[2])
|
||||
} else {
|
||||
let rows = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints([Constraint::Min(1), Constraint::Length(desired)])
|
||||
.split(content);
|
||||
(None, rows[1])
|
||||
}
|
||||
}
|
||||
|
||||
fn draw_mention_picker(frame: &mut Frame, area: Rect, page: &crate::new_task::NewTaskPage) {
|
||||
use ratatui::widgets::ListState;
|
||||
|
||||
let mut state = ListState::default().with_selected(Some(page.mention_picker.selected_index()));
|
||||
let block = Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.title("Files".magenta().bold());
|
||||
frame.render_widget(block.clone(), area);
|
||||
let inner = block.inner(area);
|
||||
|
||||
if page.mention_picker.items().is_empty() {
|
||||
let message = if page.mention_search_pending {
|
||||
"Searching…"
|
||||
} else if page
|
||||
.mention_state
|
||||
.current
|
||||
.as_ref()
|
||||
.is_some_and(|tok| tok.query.is_empty())
|
||||
{
|
||||
"Type to search"
|
||||
} else {
|
||||
"No matches"
|
||||
};
|
||||
frame.render_widget(
|
||||
Paragraph::new(Line::from(message.dim())).wrap(Wrap { trim: true }),
|
||||
inner,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let items: Vec<ListItem> = page
|
||||
.mention_picker
|
||||
.items()
|
||||
.iter()
|
||||
.map(|s| {
|
||||
let mut spans: Vec<ratatui::text::Span> = vec![s.label.clone().into()];
|
||||
if s.path != s.label {
|
||||
spans.push(" ".into());
|
||||
spans.push(s.path.clone().dim());
|
||||
}
|
||||
ListItem::new(Line::from(spans))
|
||||
})
|
||||
.collect();
|
||||
frame.render_stateful_widget(
|
||||
List::new(items)
|
||||
.highlight_style(Style::default().add_modifier(Modifier::BOLD))
|
||||
.block(block),
|
||||
area,
|
||||
&mut state,
|
||||
);
|
||||
}
|
||||
|
||||
fn draw_submission_status(frame: &mut Frame, area: Rect, page: &mut crate::new_task::NewTaskPage) {
|
||||
use ratatui::text::Span;
|
||||
use ratatui::widgets::Paragraph;
|
||||
|
||||
let attachments = page.attachment_display_items();
|
||||
let mut constraints: Vec<Constraint> = Vec::new();
|
||||
constraints.push(Constraint::Length(1));
|
||||
for _ in 0..attachments.len() {
|
||||
constraints.push(Constraint::Length(1));
|
||||
}
|
||||
constraints.push(Constraint::Min(0));
|
||||
|
||||
let rows = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints(constraints)
|
||||
.split(area);
|
||||
|
||||
let head_label = match page.submit_phase() {
|
||||
SubmitPhase::WaitingForUploads => "Waiting for uploads…",
|
||||
SubmitPhase::Sending | SubmitPhase::Idle => "Submitting…",
|
||||
};
|
||||
draw_inline_spinner(frame, rows[0], page.submit_throbber_mut(), head_label);
|
||||
|
||||
for (idx, (label, state)) in attachments.iter().enumerate() {
|
||||
let row = rows[idx + 1];
|
||||
match state {
|
||||
AttachmentUploadDisplay::Pending => {
|
||||
draw_inline_spinner(frame, row, page.submit_throbber_mut(), label);
|
||||
}
|
||||
AttachmentUploadDisplay::Uploaded => {
|
||||
let line = Line::from(vec!["✔".green(), " ".into(), Span::from(label.clone())]);
|
||||
frame.render_widget(Paragraph::new(line), row);
|
||||
}
|
||||
AttachmentUploadDisplay::Failed(msg) => {
|
||||
let line = Line::from(vec![
|
||||
"✖".red(),
|
||||
" ".into(),
|
||||
Span::from(label.clone()).red(),
|
||||
": ".into(),
|
||||
Span::from(msg.clone()).red(),
|
||||
]);
|
||||
frame.render_widget(Paragraph::new(line), row);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::attachments::AttachmentUploadMode;
|
||||
use crate::new_task::NewTaskPage;
|
||||
|
||||
#[test]
|
||||
fn mention_area_allocated_when_token_active() {
|
||||
let mut page = NewTaskPage::new(None, AttachmentUploadMode::Disabled);
|
||||
page.mention_state.update_from(Some("@foo".to_string()));
|
||||
page.mention_search_pending = true;
|
||||
let content = Rect::new(0, 0, 80, 8);
|
||||
let desired = page.composer.desired_height(content.width);
|
||||
let (mention, composer) = compute_new_task_areas(content, desired, &page);
|
||||
assert!(mention.is_some());
|
||||
assert!(composer.height > 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mention_area_not_allocated_when_no_space() {
|
||||
let mut page = NewTaskPage::new(None, AttachmentUploadMode::Disabled);
|
||||
page.mention_state.update_from(Some("@foo".to_string()));
|
||||
page.mention_search_pending = true;
|
||||
let content = Rect::new(0, 0, 80, 3);
|
||||
let desired = page.composer.desired_height(content.width);
|
||||
let (mention, _composer) = compute_new_task_areas(content, desired, &page);
|
||||
assert!(mention.is_none());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -244,10 +424,10 @@ fn draw_footer(frame: &mut Frame, area: Rect, app: &mut App) {
|
||||
help.push("a".dim());
|
||||
help.push(": Apply ".dim());
|
||||
}
|
||||
help.push("o : Set Env ".dim());
|
||||
if app.new_task.is_some() {
|
||||
help.push("(editing new task) ".dim());
|
||||
help.push("o : Set Env ".dim());
|
||||
} else {
|
||||
help.push("o : Set Env ".dim());
|
||||
help.push("n : New Task ".dim());
|
||||
}
|
||||
help.extend(vec!["q".dim(), ": Quit ".dim()]);
|
||||
@@ -639,11 +819,11 @@ fn conversation_header_line(
|
||||
ConversationSpeaker::Assistant => {
|
||||
spans.push("Assistant".magenta().bold());
|
||||
spans.push(" response".dim());
|
||||
if let Some(attempt) = attempt {
|
||||
if let Some(status_span) = attempt_status_span(attempt.status) {
|
||||
spans.push(" • ".dim());
|
||||
spans.push(status_span);
|
||||
}
|
||||
if let Some(attempt) = attempt
|
||||
&& let Some(status_span) = attempt_status_span(attempt.status)
|
||||
{
|
||||
spans.push(" • ".dim());
|
||||
spans.push(status_span);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -710,8 +890,8 @@ fn attempt_status_span(status: AttemptStatus) -> Option<ratatui::text::Span<'sta
|
||||
AttemptStatus::Completed => Some("Completed".green()),
|
||||
AttemptStatus::Failed => Some("Failed".red().bold()),
|
||||
AttemptStatus::InProgress => Some("In progress".magenta()),
|
||||
AttemptStatus::Pending => Some("Pending".yellow()),
|
||||
AttemptStatus::Cancelled => Some("Cancelled".yellow().dim()),
|
||||
AttemptStatus::Pending => Some("Pending".cyan()),
|
||||
AttemptStatus::Cancelled => Some("Cancelled".red().dim()),
|
||||
AttemptStatus::Unknown => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
use base64::Engine as _;
|
||||
use chrono::Utc;
|
||||
use reqwest::header::HeaderMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
pub fn set_user_agent_suffix(suffix: &str) {
|
||||
if let Ok(mut guard) = codex_core::default_client::USER_AGENT_SUFFIX.lock() {
|
||||
@@ -9,15 +12,17 @@ pub fn set_user_agent_suffix(suffix: &str) {
|
||||
}
|
||||
|
||||
pub fn append_error_log(message: impl AsRef<str>) {
|
||||
let ts = Utc::now().to_rfc3339();
|
||||
if let Ok(mut f) = std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open("error.log")
|
||||
let message = message.as_ref();
|
||||
let timestamp = Utc::now().to_rfc3339();
|
||||
|
||||
if let Some(path) = log_file_path()
|
||||
&& write_log_line(&path, ×tamp, message)
|
||||
{
|
||||
use std::io::Write as _;
|
||||
let _ = writeln!(f, "[{ts}] {}", message.as_ref());
|
||||
return;
|
||||
}
|
||||
|
||||
let fallback = Path::new("error.log");
|
||||
let _ = write_log_line(fallback, ×tamp, message);
|
||||
}
|
||||
|
||||
/// Normalize the configured base URL to a canonical form used by the backend client.
|
||||
@@ -37,6 +42,31 @@ pub fn normalize_base_url(input: &str) -> String {
|
||||
base_url
|
||||
}
|
||||
|
||||
fn log_file_path() -> Option<PathBuf> {
|
||||
let mut log_dir = codex_core::config::find_codex_home().ok()?;
|
||||
log_dir.push("log");
|
||||
std::fs::create_dir_all(&log_dir).ok()?;
|
||||
Some(log_dir.join("codex-cloud-tasks.log"))
|
||||
}
|
||||
|
||||
fn write_log_line(path: &Path, timestamp: &str, message: &str) -> bool {
|
||||
let mut opts = std::fs::OpenOptions::new();
|
||||
opts.create(true).append(true);
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::fs::OpenOptionsExt;
|
||||
opts.mode(0o600);
|
||||
}
|
||||
|
||||
match opts.open(path) {
|
||||
Ok(mut file) => {
|
||||
use std::io::Write as _;
|
||||
writeln!(file, "[{timestamp}] {message}").is_ok()
|
||||
}
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the ChatGPT account id from a JWT token, when present.
|
||||
pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
|
||||
let mut parts = token.split('.');
|
||||
@@ -54,6 +84,90 @@ pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
pub fn switch_to_branch(branch: &str) -> Result<(), String> {
|
||||
let branch = branch.trim();
|
||||
if branch.is_empty() {
|
||||
return Err("default branch name is empty".to_string());
|
||||
}
|
||||
|
||||
if let Ok(current) = current_branch()
|
||||
&& current == branch
|
||||
{
|
||||
append_error_log(format!("git.switch: already on branch {branch}"));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
append_error_log(format!("git.switch: switching to branch {branch}"));
|
||||
match ensure_success(&["checkout", branch]) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(err) => {
|
||||
append_error_log(format!("git.switch: checkout {branch} failed: {err}"));
|
||||
if ensure_success(&["rev-parse", "--verify", branch]).is_ok() {
|
||||
return Err(err);
|
||||
}
|
||||
if let Err(fetch_err) = ensure_success(&["fetch", "origin", branch]) {
|
||||
append_error_log(format!(
|
||||
"git.switch: fetch origin/{branch} failed: {fetch_err}"
|
||||
));
|
||||
return Err(err);
|
||||
}
|
||||
let tracking = format!("origin/{branch}");
|
||||
ensure_success(&["checkout", "-b", branch, &tracking]).map_err(|create_err| {
|
||||
append_error_log(format!(
|
||||
"git.switch: checkout -b {branch} {tracking} failed: {create_err}"
|
||||
));
|
||||
create_err
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn current_branch() -> Result<String, String> {
|
||||
let output = run_git(&["rev-parse", "--abbrev-ref", "HEAD"])?;
|
||||
if !output.status.success() {
|
||||
return Err(format!(
|
||||
"git rev-parse --abbrev-ref failed: {}",
|
||||
format_command_failure(output, &["rev-parse", "--abbrev-ref", "HEAD"])
|
||||
));
|
||||
}
|
||||
Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
|
||||
}
|
||||
|
||||
fn ensure_success(args: &[&str]) -> Result<(), String> {
|
||||
let output = run_git(args)?;
|
||||
if output.status.success() {
|
||||
return Ok(());
|
||||
}
|
||||
Err(format_command_failure(output, args))
|
||||
}
|
||||
|
||||
fn run_git(args: &[&str]) -> Result<std::process::Output, String> {
|
||||
Command::new("git")
|
||||
.args(args)
|
||||
.output()
|
||||
.map_err(|e| format!("failed to launch git {}: {e}", join_args(args)))
|
||||
}
|
||||
|
||||
fn format_command_failure(output: std::process::Output, args: &[&str]) -> String {
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
format!(
|
||||
"git {} exited with status {}. stdout: {} stderr: {}",
|
||||
join_args(args),
|
||||
output
|
||||
.status
|
||||
.code()
|
||||
.map(|c| c.to_string())
|
||||
.unwrap_or_else(|| "<signal>".to_string()),
|
||||
stdout.trim(),
|
||||
stderr.trim()
|
||||
)
|
||||
}
|
||||
|
||||
fn join_args(args: &[&str]) -> String {
|
||||
args.join(" ")
|
||||
}
|
||||
|
||||
/// Build headers for ChatGPT-backed requests: `User-Agent`, optional `Authorization`,
|
||||
/// and optional `ChatGPT-Account-Id`.
|
||||
pub async fn build_chatgpt_headers() -> HeaderMap {
|
||||
|
||||
@@ -5,23 +5,30 @@ use crate::session_log;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct AppEventSender {
|
||||
pub app_event_tx: UnboundedSender<AppEvent>,
|
||||
app_event_tx: Option<UnboundedSender<AppEvent>>,
|
||||
}
|
||||
|
||||
impl AppEventSender {
|
||||
pub(crate) fn new(app_event_tx: UnboundedSender<AppEvent>) -> Self {
|
||||
Self { app_event_tx }
|
||||
Self {
|
||||
app_event_tx: Some(app_event_tx),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn noop() -> Self {
|
||||
Self { app_event_tx: None }
|
||||
}
|
||||
|
||||
/// Send an event to the app event channel. If it fails, we swallow the
|
||||
/// error and log it.
|
||||
pub(crate) fn send(&self, event: AppEvent) {
|
||||
let Some(tx) = &self.app_event_tx else { return };
|
||||
// Record inbound events for high-fidelity session replay.
|
||||
// Avoid double-logging Ops; those are logged at the point of submission.
|
||||
if !matches!(event, AppEvent::CodexOp(_)) {
|
||||
session_log::log_inbound_app_event(&event);
|
||||
}
|
||||
if let Err(e) = self.app_event_tx.send(event) {
|
||||
if let Err(e) = tx.send(event) {
|
||||
tracing::error!("failed to send event: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -90,6 +90,7 @@ pub(crate) struct ChatComposer {
|
||||
custom_prompts: Vec<CustomPrompt>,
|
||||
// Optional override for footer hint items.
|
||||
footer_hint_override: Option<Vec<(String, String)>>,
|
||||
inline_file_search_enabled: bool,
|
||||
}
|
||||
|
||||
/// Popup state – at most one can be visible at any time.
|
||||
@@ -134,12 +135,24 @@ impl ChatComposer {
|
||||
disable_paste_burst: false,
|
||||
custom_prompts: Vec::new(),
|
||||
footer_hint_override: None,
|
||||
inline_file_search_enabled: true,
|
||||
};
|
||||
// Apply configuration via the setter to keep side-effects centralized.
|
||||
this.set_disable_paste_burst(disable_paste_burst);
|
||||
this
|
||||
}
|
||||
|
||||
pub(crate) fn set_inline_file_search_enabled(&mut self, enabled: bool) {
|
||||
self.inline_file_search_enabled = enabled;
|
||||
if !enabled {
|
||||
if matches!(self.active_popup, ActivePopup::File(_)) {
|
||||
self.active_popup = ActivePopup::None;
|
||||
}
|
||||
self.current_file_query = None;
|
||||
self.dismissed_file_popup_token = None;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn desired_height(&self, width: u16) -> u16 {
|
||||
// Leave 1 column for the left border and 1 column for left padding
|
||||
self.textarea
|
||||
@@ -276,6 +289,10 @@ impl ChatComposer {
|
||||
self.textarea.text().to_string()
|
||||
}
|
||||
|
||||
pub(crate) fn current_mention_token(&self) -> Option<String> {
|
||||
Self::current_at_token(&self.textarea)
|
||||
}
|
||||
|
||||
/// Attempt to start a burst by retro-capturing recent chars before the cursor.
|
||||
pub fn attach_image(&mut self, path: PathBuf, width: u32, height: u32, format_label: &str) {
|
||||
let placeholder = format!("[image {width}x{height} {format_label}]");
|
||||
@@ -529,7 +546,7 @@ impl ChatComposer {
|
||||
// Determine dimensions; if that fails fall back to normal path insertion.
|
||||
let path_buf = PathBuf::from(&sel_path);
|
||||
if let Ok((w, h)) = image::image_dimensions(&path_buf) {
|
||||
// Remove the current @token (mirror logic from insert_selected_path without inserting text)
|
||||
// Remove the current @token (mirror logic from replace_current_token without inserting text)
|
||||
// using the flat text and byte-offset cursor API.
|
||||
let cursor_offset = self.textarea.cursor();
|
||||
let text = self.textarea.text();
|
||||
@@ -568,11 +585,11 @@ impl ChatComposer {
|
||||
self.textarea.insert_str(" ");
|
||||
} else {
|
||||
// Fallback to plain path insertion if metadata read fails.
|
||||
self.insert_selected_path(&sel_path);
|
||||
self.replace_current_token(&sel_path);
|
||||
}
|
||||
} else {
|
||||
// Non-image: inserting file path.
|
||||
self.insert_selected_path(&sel_path);
|
||||
self.replace_current_token(&sel_path);
|
||||
}
|
||||
// No selection: treat Enter as closing the popup/session.
|
||||
self.active_popup = ActivePopup::None;
|
||||
@@ -598,7 +615,7 @@ impl ChatComposer {
|
||||
/// - If the token under the cursor starts with `@`, that token is
|
||||
/// returned without the leading `@`. This includes the case where the
|
||||
/// token is just "@" (empty query), which is used to trigger a UI hint
|
||||
fn current_at_token(textarea: &TextArea) -> Option<String> {
|
||||
pub(crate) fn current_at_token(textarea: &TextArea) -> Option<String> {
|
||||
let cursor_offset = textarea.cursor();
|
||||
let text = textarea.text();
|
||||
|
||||
@@ -694,7 +711,7 @@ impl ChatComposer {
|
||||
/// The algorithm mirrors `current_at_token` so replacement works no matter
|
||||
/// where the cursor is within the token and regardless of how many
|
||||
/// `@tokens` exist in the line.
|
||||
fn insert_selected_path(&mut self, path: &str) {
|
||||
pub(crate) fn replace_current_token(&mut self, path: &str) {
|
||||
let cursor_offset = self.textarea.cursor();
|
||||
let text = self.textarea.text();
|
||||
// Clamp to a valid char boundary to avoid panics when slicing.
|
||||
@@ -946,9 +963,13 @@ impl ChatComposer {
|
||||
code: KeyCode::Backspace,
|
||||
..
|
||||
} = input
|
||||
&& self.try_remove_any_placeholder_at_cursor()
|
||||
{
|
||||
return (InputResult::None, true);
|
||||
if self.try_remove_any_placeholder_at_cursor() {
|
||||
return (InputResult::None, true);
|
||||
}
|
||||
if self.try_remove_bracket_reference_before_cursor() {
|
||||
return (InputResult::None, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Normal input handling
|
||||
@@ -1148,6 +1169,56 @@ impl ChatComposer {
|
||||
false
|
||||
}
|
||||
|
||||
fn try_remove_bracket_reference_before_cursor(&mut self) -> bool {
|
||||
let cursor = self.textarea.cursor();
|
||||
if cursor == 0 {
|
||||
return false;
|
||||
}
|
||||
let text = self.textarea.text();
|
||||
let safe_cursor = Self::clamp_to_char_boundary(text, cursor);
|
||||
if safe_cursor == 0 {
|
||||
return false;
|
||||
}
|
||||
let before_cursor = &text[..safe_cursor];
|
||||
if !before_cursor.ends_with(']') {
|
||||
return false;
|
||||
}
|
||||
let Some(start_idx) = before_cursor.rfind('[') else {
|
||||
return false;
|
||||
};
|
||||
if before_cursor[start_idx..before_cursor.len().saturating_sub(1)].contains(']') {
|
||||
return false;
|
||||
}
|
||||
if start_idx > 0 {
|
||||
if let Some(prev) = before_cursor[..start_idx].chars().rev().next() {
|
||||
if !prev.is_whitespace()
|
||||
&& !matches!(prev, '(' | '{' | '[' | '<' | ',' | ';' | ':' | '!')
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(next_char) = text[safe_cursor..].chars().next() {
|
||||
if !next_char.is_whitespace()
|
||||
&& !matches!(next_char, ')' | ']' | '}' | '>' | ',' | '.' | ';' | ':')
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
let mut remove_end = safe_cursor;
|
||||
if let Some(next_char) = text[safe_cursor..].chars().next() {
|
||||
if next_char.is_whitespace() {
|
||||
remove_end += next_char.len_utf8();
|
||||
}
|
||||
}
|
||||
self.textarea.replace_range(start_idx..remove_end, "");
|
||||
self.textarea.set_cursor(start_idx);
|
||||
let text_after = self.textarea.text();
|
||||
self.pending_pastes
|
||||
.retain(|(placeholder, _)| text_after.contains(placeholder));
|
||||
true
|
||||
}
|
||||
|
||||
/// Synchronize `self.command_popup` with the current text in the
|
||||
/// textarea. This must be called after every modification that can change
|
||||
/// the text so the popup is shown/updated/hidden as appropriate.
|
||||
@@ -1182,6 +1253,13 @@ impl ChatComposer {
|
||||
/// Synchronize `self.file_search_popup` with the current text in the textarea.
|
||||
/// Note this is only called when self.active_popup is NOT Command.
|
||||
fn sync_file_search_popup(&mut self) {
|
||||
if !self.inline_file_search_enabled {
|
||||
self.active_popup = ActivePopup::None;
|
||||
self.current_file_query = None;
|
||||
self.dismissed_file_popup_token = None;
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine if there is an @token underneath the cursor.
|
||||
let query = match Self::current_at_token(&self.textarea) {
|
||||
Some(token) => token,
|
||||
|
||||
@@ -10,7 +10,6 @@ use ratatui::layout::Rect;
|
||||
use ratatui::widgets::WidgetRef;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::app_event::AppEvent;
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
use crate::bottom_pane::ChatComposer;
|
||||
use crate::bottom_pane::InputResult;
|
||||
@@ -27,17 +26,17 @@ pub enum ComposerAction {
|
||||
/// reusable text input field with submit semantics.
|
||||
pub struct ComposerInput {
|
||||
inner: ChatComposer,
|
||||
_tx: tokio::sync::mpsc::UnboundedSender<AppEvent>,
|
||||
}
|
||||
|
||||
impl ComposerInput {
|
||||
/// Create a new composer input with a neutral placeholder.
|
||||
pub fn new() -> Self {
|
||||
let (tx, _rx) = tokio::sync::mpsc::unbounded_channel();
|
||||
let sender = AppEventSender::new(tx.clone());
|
||||
let sender = AppEventSender::noop();
|
||||
// `enhanced_keys_supported=true` enables Shift+Enter newline hint/behavior.
|
||||
let inner = ChatComposer::new(true, sender, true, "Compose new task".to_string(), false);
|
||||
Self { inner, _tx: tx }
|
||||
let mut inner =
|
||||
ChatComposer::new(true, sender, true, "Compose new task".to_string(), false);
|
||||
inner.set_inline_file_search_enabled(false);
|
||||
Self { inner }
|
||||
}
|
||||
|
||||
/// Returns true if the input is empty.
|
||||
@@ -58,10 +57,28 @@ impl ComposerInput {
|
||||
}
|
||||
}
|
||||
|
||||
/// Current `@` token under the cursor, without the leading `@`.
|
||||
pub fn mention_token(&self) -> Option<String> {
|
||||
self.inner.current_mention_token()
|
||||
}
|
||||
|
||||
pub fn replace_current_token(&mut self, replacement: &str) {
|
||||
self.inner.replace_current_token(replacement);
|
||||
}
|
||||
|
||||
pub fn handle_paste(&mut self, pasted: String) -> bool {
|
||||
self.inner.handle_paste(pasted)
|
||||
}
|
||||
|
||||
pub fn set_text_content(&mut self, text: String) {
|
||||
self.inner.set_text_content(text);
|
||||
}
|
||||
|
||||
/// Read the current composer text.
|
||||
pub fn text_content(&self) -> String {
|
||||
self.inner.current_text()
|
||||
}
|
||||
|
||||
/// Override the footer hint items displayed under the composer.
|
||||
/// Each tuple is rendered as "<key> <label>", with keys styled.
|
||||
pub fn set_hint_items(&mut self, items: Vec<(impl Into<String>, impl Into<String>)>) {
|
||||
|
||||
Reference in New Issue
Block a user