This commit is contained in:
easong-openai
2025-07-28 18:03:04 -07:00
parent d276932354
commit 4d36208931
7 changed files with 78 additions and 72 deletions

View File

@@ -64,3 +64,21 @@ pub fn is_inside_git_repo(config: &Config) -> bool {
false
}
/// Interpret `val` either as a file path or as a literal string.
///
/// - When `val` names a readable file, its contents are read, surrounding
///   whitespace is trimmed, and `Ok(Some(contents))` is returned — or
///   `Ok(None)` when nothing remains after trimming.
/// - When `val` is not a file path, it is passed through unchanged as
///   `Ok(Some(val.to_string()))` so callers can treat it as a literal.
pub fn maybe_read_file(val: &str) -> std::io::Result<Option<String>> {
    let path = std::path::Path::new(val);
    if !path.is_file() {
        // Not a file on disk: hand the raw value back to the caller verbatim.
        return Ok(Some(val.to_string()));
    }
    let trimmed = std::fs::read_to_string(path)?.trim().to_string();
    // An all-whitespace file counts as empty.
    Ok(if trimmed.is_empty() { None } else { Some(trimmed) })
}

View File

@@ -22,14 +22,14 @@ pub(crate) trait EventProcessor {
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum ExperimentalInstructionsOrigin {
pub(crate) enum PromptOrigin {
File(PathBuf),
Literal,
}
pub(crate) fn create_config_summary_entries(
config: &Config,
experimental_origin: Option<&ExperimentalInstructionsOrigin>,
prompt_origin: Option<&PromptOrigin>,
) -> Vec<(&'static str, String)> {
let mut entries = vec![
("workdir", config.cwd.display().to_string()),
@@ -38,15 +38,15 @@ pub(crate) fn create_config_summary_entries(
("approval", config.approval_policy.to_string()),
("sandbox", summarize_sandbox_policy(&config.sandbox_policy)),
];
if let Some(origin) = experimental_origin {
if let Some(origin) = prompt_origin {
let prompt_val = match origin {
ExperimentalInstructionsOrigin::Literal => "experimental".to_string(),
ExperimentalInstructionsOrigin::File(path) => path
PromptOrigin::Literal => "experimental".to_string(),
PromptOrigin::File(path) => path
.file_name()
.map(|s| s.to_string_lossy().to_string())
.unwrap_or_else(|| path.display().to_string()),
};
entries.push(("prompt", prompt_val));
entries.push(("prompt_origin", prompt_val));
}
if config.model_provider.wire_api == WireApi::Responses
&& model_supports_reasoning_summaries(config)
@@ -125,27 +125,26 @@ mod tests {
}
#[test]
fn entries_include_prompt_experimental_for_literal_origin() {
fn entries_include_prompt_origin_experimental_for_literal_origin() {
let mut cfg = minimal_config();
cfg.base_instructions = Some("hello".to_string());
let entries =
create_config_summary_entries(&cfg, Some(&ExperimentalInstructionsOrigin::Literal));
let entries = create_config_summary_entries(&cfg, Some(&PromptOrigin::Literal));
let map: HashMap<_, _> = entries.into_iter().collect();
assert_eq!(map.get("prompt").cloned(), Some("experimental".to_string()));
assert_eq!(
map.get("prompt_origin").cloned(),
Some("experimental".to_string())
);
}
#[test]
fn entries_include_prompt_filename_for_file_origin() {
fn entries_include_prompt_origin_filename_for_file_origin() {
let mut cfg = minimal_config();
cfg.base_instructions = Some("hello".to_string());
let path = PathBuf::from("/tmp/custom_instructions.txt");
let entries = create_config_summary_entries(
&cfg,
Some(&ExperimentalInstructionsOrigin::File(path.clone())),
);
let entries = create_config_summary_entries(&cfg, Some(&PromptOrigin::File(path.clone())));
let map: HashMap<_, _> = entries.into_iter().collect();
assert_eq!(
map.get("prompt").cloned(),
map.get("prompt_origin").cloned(),
Some("custom_instructions.txt".to_string())
);
}

View File

@@ -27,7 +27,7 @@ use std::time::Instant;
use crate::event_processor::CodexStatus;
use crate::event_processor::EventProcessor;
use crate::event_processor::ExperimentalInstructionsOrigin;
use crate::event_processor::PromptOrigin;
use crate::event_processor::create_config_summary_entries;
use crate::event_processor::handle_last_message;
@@ -60,7 +60,7 @@ pub(crate) struct EventProcessorWithHumanOutput {
answer_started: bool,
reasoning_started: bool,
last_message_path: Option<PathBuf>,
experimental_origin: Option<ExperimentalInstructionsOrigin>,
prompt_origin: Option<PromptOrigin>,
}
impl EventProcessorWithHumanOutput {
@@ -68,7 +68,7 @@ impl EventProcessorWithHumanOutput {
with_ansi: bool,
config: &Config,
last_message_path: Option<PathBuf>,
experimental_origin: Option<ExperimentalInstructionsOrigin>,
prompt_origin: Option<PromptOrigin>,
) -> Self {
let call_id_to_command = HashMap::new();
let call_id_to_patch = HashMap::new();
@@ -90,7 +90,7 @@ impl EventProcessorWithHumanOutput {
answer_started: false,
reasoning_started: false,
last_message_path,
experimental_origin,
prompt_origin,
}
} else {
Self {
@@ -108,7 +108,7 @@ impl EventProcessorWithHumanOutput {
answer_started: false,
reasoning_started: false,
last_message_path,
experimental_origin,
prompt_origin,
}
}
}
@@ -155,7 +155,7 @@ impl EventProcessor for EventProcessorWithHumanOutput {
VERSION
);
let entries = create_config_summary_entries(config, self.experimental_origin.as_ref());
let entries = create_config_summary_entries(config, self.prompt_origin.as_ref());
for (key, value) in entries {
println!("{} {}", format!("{key}:").style(self.bold), value);

View File

@@ -9,30 +9,27 @@ use serde_json::json;
use crate::event_processor::CodexStatus;
use crate::event_processor::EventProcessor;
use crate::event_processor::ExperimentalInstructionsOrigin;
use crate::event_processor::PromptOrigin;
use crate::event_processor::create_config_summary_entries;
use crate::event_processor::handle_last_message;
pub(crate) struct EventProcessorWithJsonOutput {
last_message_path: Option<PathBuf>,
experimental_origin: Option<ExperimentalInstructionsOrigin>,
prompt_origin: Option<PromptOrigin>,
}
impl EventProcessorWithJsonOutput {
pub fn new(
last_message_path: Option<PathBuf>,
experimental_origin: Option<ExperimentalInstructionsOrigin>,
) -> Self {
pub fn new(last_message_path: Option<PathBuf>, prompt_origin: Option<PromptOrigin>) -> Self {
Self {
last_message_path,
experimental_origin,
prompt_origin,
}
}
}
impl EventProcessor for EventProcessorWithJsonOutput {
fn print_config_summary(&mut self, config: &Config, prompt: &str) {
let entries = create_config_summary_entries(config, self.experimental_origin.as_ref())
let entries = create_config_summary_entries(config, self.prompt_origin.as_ref())
.into_iter()
.map(|(key, value)| (key.to_string(), value))
.collect::<HashMap<String, String>>();

View File

@@ -5,11 +5,10 @@ mod event_processor_with_json_output;
use std::io::IsTerminal;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use crate::event_processor::ExperimentalInstructionsOrigin;
use crate::event_processor::PromptOrigin;
pub use cli::Cli;
use codex_core::codex_wrapper::CodexConversation;
use codex_core::codex_wrapper::{self};
@@ -23,6 +22,7 @@ use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use codex_core::protocol::TaskCompleteEvent;
use codex_core::util::is_inside_git_repo;
use codex_core::util::maybe_read_file;
use event_processor_with_human_output::EventProcessorWithHumanOutput;
use event_processor_with_json_output::EventProcessorWithJsonOutput;
use tracing::debug;
@@ -54,13 +54,13 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
// Determine how to describe experimental instructions in the summary and
// prepare the effective base instructions. If the flag points at a file,
// read its contents; otherwise use the value verbatim.
let mut experimental_origin = match experimental_instructions.as_deref() {
let mut prompt_origin = match experimental_instructions.as_deref() {
Some(val) => {
let p = std::path::Path::new(val);
if p.is_file() {
Some(ExperimentalInstructionsOrigin::File(p.to_path_buf()))
Some(PromptOrigin::File(p.to_path_buf()))
} else {
Some(ExperimentalInstructionsOrigin::Literal)
Some(PromptOrigin::Literal)
}
}
None => None,
@@ -155,20 +155,20 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
let config = Config::load_with_cli_overrides(cli_kv_overrides, overrides)?;
if !has_experimental {
experimental_origin = None;
prompt_origin = None;
}
let mut event_processor: Box<dyn EventProcessor> = if json_mode {
Box::new(EventProcessorWithJsonOutput::new(
last_message_file.clone(),
experimental_origin.clone(),
prompt_origin.clone(),
))
} else {
Box::new(EventProcessorWithHumanOutput::create_with_ansi(
stdout_with_ansi,
&config,
last_message_file.clone(),
experimental_origin,
prompt_origin,
))
};
@@ -285,22 +285,9 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
Ok(())
}
/// If `val` is a path to a readable file, return its trimmed contents.
///
/// Returns `Ok(None)` when the file's contents are empty after trimming, and
/// `Ok(Some(val))` verbatim when `val` is not a file path, so callers can
/// treat the value as a literal string.
fn maybe_read_file(val: &str) -> std::io::Result<Option<String>> {
    let p = Path::new(val);
    if p.is_file() {
        let s = std::fs::read_to_string(p)?;
        // Trim surrounding whitespace; an all-whitespace file counts as empty.
        let s = s.trim().to_string();
        if s.is_empty() { Ok(None) } else { Ok(Some(s)) }
    } else {
        // Not a file on disk: pass the raw value through as a literal.
        Ok(Some(val.to_string()))
    }
}
#[cfg(test)]
mod tests {
use super::maybe_read_file;
use codex_core::util::maybe_read_file;
use std::fs;
use tempfile::NamedTempFile;

View File

@@ -33,8 +33,6 @@ use std::path::PathBuf;
use std::time::Duration;
use std::time::Instant;
use tracing::error;
#[cfg(test)]
use uuid::Uuid;
pub(crate) struct CommandOutput {
pub(crate) exit_code: i32,
@@ -593,6 +591,7 @@ mod tests {
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use uuid::Uuid;
use tempfile::TempDir;

View File

@@ -11,6 +11,7 @@ use codex_core::openai_api_key::get_openai_api_key;
use codex_core::openai_api_key::set_openai_api_key;
use codex_core::protocol::AskForApproval;
use codex_core::util::is_inside_git_repo;
use codex_core::util::maybe_read_file;
use codex_login::try_read_openai_api_key;
use log_layer::TuiLogLayer;
use std::fs::OpenOptions;
@@ -69,16 +70,28 @@ pub fn run_main(
)
};
// Capture any read error for experimental instructions so we can log it
// after the tracing subscriber has been initialized.
let mut experimental_read_error: Option<String> = None;
let (config, experimental_prompt_label) = {
// Load configuration and support CLI overrides.
// If the experimental instructions flag points at a file, read its
// contents; otherwise use the value verbatim. Avoid printing to stdout
// or stderr in this library crate; fall back to the raw string on
// errors.
let base_instructions = cli
.experimental_instructions
.as_deref()
.and_then(|s| maybe_read_file(s).unwrap_or(Some(s.to_string())));
let base_instructions =
cli.experimental_instructions
.as_deref()
.and_then(|s| match maybe_read_file(s) {
Ok(v) => v,
Err(e) => {
experimental_read_error = Some(format!(
"Failed to read experimental instructions from '{s}': {e}"
));
Some(s.to_string())
}
});
// Derive a label shown in the welcome banner describing the origin of
// the experimental instructions: filename for file paths and
@@ -173,6 +186,12 @@ pub fn run_main(
.with(tui_layer)
.try_init();
if let Some(msg) = experimental_read_error {
// Now that logging is initialized, record a warning so the user
// can see that Codex fell back to using the literal string.
tracing::warn!("{msg}");
}
let show_login_screen = should_show_login_screen(&config);
// Determine whether we need to display the "not a git repo" warning
@@ -192,19 +211,6 @@ pub fn run_main(
.map_err(|err| std::io::Error::other(err.to_string()))
}
/// If `val` is a path to a readable file, return its trimmed contents.
///
/// Returns `Ok(None)` when the file's contents are empty after trimming, and
/// `Ok(Some(val))` verbatim when `val` is not a file path, so callers can
/// treat the value as a literal string.
fn maybe_read_file(val: &str) -> std::io::Result<Option<String>> {
    let p = Path::new(val);
    if p.is_file() {
        let s = std::fs::read_to_string(p)?;
        // Trim surrounding whitespace; an all-whitespace file counts as empty.
        let s = s.trim().to_string();
        if s.is_empty() { Ok(None) } else { Ok(Some(s)) }
    } else {
        // Not a file on disk: pass the raw value through as a literal.
        Ok(Some(val.to_string()))
    }
}
fn run_ratatui_app(
cli: Cli,
config: Config,
@@ -300,7 +306,7 @@ fn is_in_need_of_openai_api_key(config: &Config) -> bool {
#[cfg(test)]
mod tests {
use super::maybe_read_file;
use codex_core::util::maybe_read_file;
use std::fs;
use std::path::PathBuf;
use uuid::Uuid;