mirror of https://github.com/openai/codex.git
synced 2026-02-08 18:03:37 +00:00

Compare commits: 19 commits, gpeal/pars ... codex/fix-
| Author | SHA1 | Date |
| --- | --- | --- |
|  | abe596976b |  |
|  | 6967b51065 |  |
|  | 697f7b1300 |  |
|  | 14a3bb51b3 |  |
|  | 4f590ebf44 |  |
|  | ddabd42236 |  |
|  | b3d47cfa11 |  |
|  | 307d9957fa |  |
|  | 431c9299d4 |  |
|  | 52e12f2b6c |  |
|  | 2b7139859e |  |
|  | fa0051190b |  |
|  | cd06b28d84 |  |
|  | 295abf3e51 |  |
|  | b991c04f86 |  |
|  | 02c9c2ecad |  |
|  | db76f32888 |  |
|  | 548466df09 |  |
|  | 7d67159587 |  |
README.md (22 changes)
@@ -17,6 +17,7 @@

- [Quickstart](#quickstart)
- [Installing and running Codex CLI](#installing-and-running-codex-cli)
- [Updating](#updating)
- [Using Codex with your ChatGPT plan](#using-codex-with-your-chatgpt-plan)
- [Usage-based billing alternative: Use an OpenAI API key](#usage-based-billing-alternative-use-an-openai-api-key)
- [Choosing Codex's level of autonomy](#choosing-codexs-level-of-autonomy)

@@ -76,6 +77,16 @@ Then simply run `codex` to get started:

codex
```

### Updating

Upgrade an existing installation to the latest release:

```shell
codex update
```

The command checks for a newer version and will attempt to upgrade automatically if the CLI was installed via npm or Homebrew.

<details>
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>
@@ -340,11 +351,12 @@ Help us improve by filing issues or submitting PRs (see the section below for ho

## CLI reference

| Command | Purpose | Example |
| ------------------ | ---------------------------------- | ------------------------------- |
| `codex` | Interactive TUI | `codex` |
| `codex "..."` | Initial prompt for interactive TUI | `codex "fix lint errors"` |
| `codex exec "..."` | Non-interactive "automation mode" | `codex exec "explain utils.ts"` |

| Command | Purpose | Example |
| ------------------ | ------------------------------------- | ------------------------------- |
| `codex` | Interactive TUI | `codex` |
| `codex "..."` | Initial prompt for interactive TUI | `codex "fix lint errors"` |
| `codex exec "..."` | Non-interactive "automation mode" | `codex exec "explain utils.ts"` |
| `codex update` | Check for updates and upgrade the CLI | `codex update` |

Key flags: `--model/-m`, `--ask-for-approval/-a`.
codex-rs/Cargo.lock (generated, 7 changes)
@@ -658,10 +658,16 @@ dependencies = [
name = "codex-common"
version = "0.0.0"
dependencies = [
"anyhow",
"chrono",
"clap",
"codex-core",
"reqwest",
"serde",
"serde_json",
"tokio",
"toml 0.9.4",
"tracing",
]

[[package]]

@@ -888,7 +894,6 @@ dependencies = [
"ratatui",
"ratatui-image",
"regex-lite",
"reqwest",
"serde",
"serde_json",
"shlex",
@@ -1,3 +1,4 @@
use codex_login::CodexAuth;
use std::path::Path;
use std::sync::LazyLock;
use std::sync::RwLock;

@@ -18,7 +19,7 @@ pub fn set_chatgpt_token_data(value: TokenData) {

/// Initialize the ChatGPT token from auth.json file
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
let auth = codex_login::load_auth(codex_home, true)?;
let auth = CodexAuth::from_codex_home(codex_home)?;
if let Some(auth) = auth {
let token_data = auth.get_token_data().await?;
set_chatgpt_token_data(token_data);
@@ -20,7 +20,7 @@ clap = { version = "4", features = ["derive"] }
clap_complete = "4"
codex-arg0 = { path = "../arg0" }
codex-chatgpt = { path = "../chatgpt" }
codex-common = { path = "../common", features = ["cli"] }
codex-common = { path = "../common", features = ["cli", "updates"] }
codex-core = { path = "../core" }
codex-exec = { path = "../exec" }
codex-login = { path = "../login" }
@@ -1,53 +0,0 @@
//! Print the Fibonacci sequence.
//!
//! Usage:
//! cargo run -p codex-cli --example fibonacci -- [COUNT]
//!
//! If COUNT is omitted, the first 10 numbers are printed.

use std::env;
use std::process;

fn fibonacci(count: usize) -> Vec<u128> {
    let mut seq = Vec::with_capacity(count);
    if count == 0 {
        return seq;
    }
    // Start with 0, 1
    let mut a: u128 = 0;
    let mut b: u128 = 1;
    for _ in 0..count {
        seq.push(a);
        let next = a.saturating_add(b);
        a = b;
        b = next;
    }
    seq
}

fn parse_count_arg() -> Result<usize, String> {
    let mut args = env::args().skip(1);
    match args.next() {
        None => Ok(10), // default
        Some(s) => s
            .parse::<usize>()
            .map_err(|_| format!("Invalid COUNT: '{}' (expected a non-negative integer)", s)),
    }
}

fn main() {
    let count = match parse_count_arg() {
        Ok(n) => n,
        Err(e) => {
            eprintln!(
                "{}\nUsage: cargo run -p codex-cli --example fibonacci -- [COUNT]",
                e
            );
            process::exit(2);
        }
    };

    for n in fibonacci(count) {
        println!("{}", n);
    }
}
@@ -4,8 +4,8 @@ use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_login::AuthMode;
use codex_login::CodexAuth;
use codex_login::OPENAI_API_KEY_ENV_VAR;
use codex_login::load_auth;
use codex_login::login_with_api_key;
use codex_login::login_with_chatgpt;
use codex_login::logout;

@@ -47,11 +47,11 @@ pub async fn run_login_with_api_key(
pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
let config = load_config_or_exit(cli_config_overrides);

match load_auth(&config.codex_home, true) {
match CodexAuth::from_codex_home(&config.codex_home) {
Ok(Some(auth)) => match auth.mode {
AuthMode::ApiKey => {
if let Some(api_key) = auth.api_key.as_deref() {
eprintln!("Logged in using an API key - {}", safe_format_key(api_key));
AuthMode::ApiKey => match auth.get_token().await {
Ok(api_key) => {
eprintln!("Logged in using an API key - {}", safe_format_key(&api_key));

if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR) {
if env_api_key == api_key {

@@ -60,11 +60,13 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
);
}
}
} else {
eprintln!("Logged in using an API key");
std::process::exit(0);
}
std::process::exit(0);
}
Err(e) => {
eprintln!("Unexpected error retrieving API key: {e}");
std::process::exit(1);
}
},
AuthMode::ChatGPT => {
eprintln!("Logged in using ChatGPT");
std::process::exit(0);
@@ -13,6 +13,12 @@ use codex_cli::login::run_login_with_chatgpt;
use codex_cli::login::run_logout;
use codex_cli::proto;
use codex_common::CliConfigOverrides;
use codex_common::updates::check_for_update;
use codex_common::updates::get_upgrade_version;
#[cfg(not(debug_assertions))]
use codex_core::config::Config;
#[cfg(not(debug_assertions))]
use codex_core::config::ConfigOverrides;
use codex_exec::Cli as ExecCli;
use codex_tui::Cli as TuiCli;
use std::path::PathBuf;

@@ -68,6 +74,9 @@ enum Subcommand {
    /// Apply the latest diff produced by Codex agent as a `git apply` to your local working tree.
    #[clap(visible_alias = "a")]
    Apply(ApplyCommand),

    /// Check for a newer Codex release and upgrade automatically when possible.
    Update,
}

#[derive(Debug, Parser)]

@@ -190,6 +199,9 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
            prepend_config_flags(&mut apply_cli.config_overrides, cli.config_overrides);
            run_apply_command(apply_cli, None).await?;
        }
        Some(Subcommand::Update) => {
            run_update().await?;
        }
    }

    Ok(())

@@ -211,3 +223,88 @@ fn print_completion(cmd: CompletionCommand) {
    let name = "codex";
    generate(cmd.shell, &mut app, name, &mut std::io::stdout());
}

#[cfg(not(debug_assertions))]
async fn run_update() -> anyhow::Result<()> {
    let overrides = ConfigOverrides {
        model: None,
        cwd: None,
        approval_policy: None,
        sandbox_mode: None,
        model_provider: None,
        config_profile: None,
        codex_linux_sandbox_exe: None,
        base_instructions: None,
        include_plan_tool: None,
        disable_response_storage: None,
        show_raw_agent_reasoning: None,
    };

    let config = Config::load_with_cli_overrides(Vec::new(), overrides)?;
    let version_file = config.codex_home.join("version.json");

    if let Err(e) = check_for_update(&version_file).await {
        #[allow(clippy::print_stderr)]
        eprintln!("Failed to check for updates: {e}");
    }

    let current_version = env!("CARGO_PKG_VERSION");
    if let Some(latest_version) = get_upgrade_version(&config) {
        println!("Current version: {current_version}");
        println!("Latest version: {latest_version}");
        let exe = std::env::current_exe()?;
        let managed_by_npm = std::env::var_os("CODEX_MANAGED_BY_NPM").is_some();
        if managed_by_npm {
            println!("Updating via npm...");
            match Command::new("npm")
                .args(["install", "-g", "@openai/codex@latest"])
                .status()
            {
                Ok(status) if status.success() => {
                    println!("Codex updated successfully.");
                }
                Ok(status) => {
                    println!(
                        "`npm install` exited with status {status}. Run `npm install -g @openai/codex@latest` manually if needed."
                    );
                }
                Err(err) => {
                    println!(
                        "Failed to run npm: {err}. Run `npm install -g @openai/codex@latest` manually."
                    );
                }
            }
        } else if cfg!(target_os = "macos")
            && (exe.starts_with("/opt/homebrew") || exe.starts_with("/usr/local"))
        {
            println!("Updating via Homebrew...");
            match Command::new("brew").args(["upgrade", "codex"]).status() {
                Ok(status) if status.success() => {
                    println!("Codex updated successfully.");
                }
                Ok(status) => {
                    println!(
                        "`brew upgrade` exited with status {status}. Run `brew upgrade codex` manually if needed."
                    );
                }
                Err(err) => {
                    println!("Failed to run Homebrew: {err}. Run `brew upgrade codex` manually.");
                }
            }
        } else {
            println!(
                "See https://github.com/openai/codex/releases/latest for the latest releases and installation options."
            );
        }
    } else {
        println!("Codex {current_version} is up to date.");
    }

    Ok(())
}

#[cfg(debug_assertions)]
async fn run_update() -> anyhow::Result<()> {
    println!("Update checking is disabled in debug builds.");
    Ok(())
}
@@ -9,7 +9,7 @@ use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::protocol::Submission;
use codex_core::util::notify_on_sigint;
use codex_login::load_auth;
use codex_login::CodexAuth;
use tokio::io::AsyncBufReadExt;
use tokio::io::BufReader;
use tracing::error;

@@ -36,7 +36,7 @@ pub async fn run_main(opts: ProtoCli) -> anyhow::Result<()> {
.map_err(anyhow::Error::msg)?;

let config = Config::load_with_cli_overrides(overrides_vec, ConfigOverrides::default())?;
let auth = load_auth(&config.codex_home, true)?;
let auth = CodexAuth::from_codex_home(&config.codex_home)?;
let ctrl_c = notify_on_sigint();
let CodexSpawnOk { codex, .. } = Codex::spawn(config, auth, ctrl_c.clone()).await?;
let codex = Arc::new(codex);
@@ -7,13 +7,20 @@ version = { workspace = true }
workspace = true

[dependencies]
anyhow = { version = "1", optional = true }
chrono = { version = "0.4", features = ["serde"], optional = true }
clap = { version = "4", features = ["derive", "wrap_help"], optional = true }
codex-core = { path = "../core" }
serde = { version = "1", optional = true }
reqwest = { version = "0.12", features = ["json"], optional = true }
serde = { version = "1", features = ["derive"], optional = true }
serde_json = { version = "1", optional = true }
tokio = { version = "1", features = ["fs"], optional = true }
toml = { version = "0.9", optional = true }
tracing = "0.1.41"

[features]
# Separate feature so that `clap` is not a mandatory dependency.
cli = ["clap", "serde", "toml"]
elapsed = []
sandbox_summary = []
updates = ["anyhow", "chrono", "reqwest", "serde", "serde_json", "tokio"]
@@ -29,3 +29,6 @@ mod config_summary;
pub use config_summary::create_config_summary_entries;

// Shared fuzzy matcher (used by TUI selection popups and other UI filtering)
pub mod fuzzy_match;

#[cfg(any(test, feature = "updates"))]
pub mod updates;
@@ -1,15 +1,15 @@
#![cfg(any(not(debug_assertions), test))]

use chrono::DateTime;
use chrono::Duration;
use chrono::Utc;
use codex_core::config::Config;
use serde::Deserialize;
use serde::Serialize;
use std::path::Path;
use std::path::PathBuf;
use tracing::error;

use codex_core::config::Config;

/// Returns the latest available version string if it is newer than the current
/// one, otherwise `None`.
pub fn get_upgrade_version(config: &Config) -> Option<String> {
let version_file = version_filepath(config);
let info = read_version_info(&version_file).ok();

@@ -18,13 +18,11 @@ pub fn get_upgrade_version(config: &Config) -> Option<String> {
None => true,
Some(info) => info.last_checked_at < Utc::now() - Duration::hours(20),
} {
// Refresh the cached latest version in the background so TUI startup
// isn’t blocked by a network call. The UI reads the previously cached
// value (if any) for this run; the next run shows the banner if needed.
// Refresh in the background; callers can use the cached value for this run.
tokio::spawn(async move {
check_for_update(&version_file)
.await
.inspect_err(|e| tracing::error!("Failed to update version: {e}"))
.inspect_err(|e| error!("Failed to update version: {e}"))
});
}

@@ -62,7 +60,8 @@ fn read_version_info(version_file: &Path) -> anyhow::Result<VersionInfo> {
Ok(serde_json::from_str(&contents)?)
}

async fn check_for_update(version_file: &Path) -> anyhow::Result<()> {
/// Fetches the latest release info and updates the on-disk cache file.
pub async fn check_for_update(version_file: &Path) -> anyhow::Result<()> {
let ReleaseInfo {
tag_name: latest_tag_name,
} = reqwest::Client::new()
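For context, a minimal sketch (not part of this changeset) of how a caller such as the TUI startup path might consume the now-public helpers: `get_upgrade_version` only reads the cached `version.json` and refreshes it on a background task, so nothing here blocks on the network. The helper name and banner wording below are illustrative.

```rust
use codex_common::updates::get_upgrade_version;
use codex_core::config::Config;

// Illustrative helper: surface the cached upgrade hint, if any. Assumes it is
// called from inside a Tokio runtime, since `get_upgrade_version` spawns the
// background refresh with `tokio::spawn`.
fn maybe_print_upgrade_banner(config: &Config) {
    if let Some(latest) = get_upgrade_version(config) {
        let current = env!("CARGO_PKG_VERSION");
        println!("Codex {latest} is available (current: {current}). Run `codex update` to upgrade.");
    }
}
```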
@@ -31,6 +31,7 @@ use crate::config_types::ReasoningEffort as ReasoningEffortConfig;
|
||||
use crate::config_types::ReasoningSummary as ReasoningSummaryConfig;
|
||||
use crate::error::CodexErr;
|
||||
use crate::error::Result;
|
||||
use crate::error::UsageLimitReachedError;
|
||||
use crate::flags::CODEX_RS_SSE_FIXTURE;
|
||||
use crate::model_provider_info::ModelProviderInfo;
|
||||
use crate::model_provider_info::WireApi;
|
||||
@@ -195,7 +196,7 @@ impl ModelClient {
|
||||
|
||||
if let Some(auth) = auth.as_ref()
|
||||
&& auth.mode == AuthMode::ChatGPT
|
||||
&& let Some(account_id) = auth.get_account_id().await
|
||||
&& let Some(account_id) = auth.get_account_id()
|
||||
{
|
||||
req_builder = req_builder.header("chatgpt-account-id", account_id);
|
||||
}
|
||||
@@ -263,7 +264,9 @@ impl ModelClient {
|
||||
}) = body
|
||||
{
|
||||
if r#type == "usage_limit_reached" {
|
||||
return Err(CodexErr::UsageLimitReached);
|
||||
return Err(CodexErr::UsageLimitReached(UsageLimitReachedError {
|
||||
plan_type: auth.and_then(|a| a.get_plan_type()),
|
||||
}));
|
||||
} else if r#type == "usage_not_included" {
|
||||
return Err(CodexErr::UsageNotIncluded);
|
||||
}
|
||||
|
||||
@@ -65,7 +65,6 @@ use crate::models::ResponseItem;
|
||||
use crate::models::ShellToolCallParams;
|
||||
use crate::openai_tools::ToolsConfig;
|
||||
use crate::openai_tools::get_openai_tools;
|
||||
use crate::parse_command::parse_command;
|
||||
use crate::plan_tool::handle_update_plan;
|
||||
use crate::project_doc::get_user_instructions;
|
||||
use crate::protocol::AgentMessageDeltaEvent;
|
||||
@@ -374,7 +373,7 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn on_exec_command_begin(
|
||||
async fn on_exec_command_begin(
|
||||
&self,
|
||||
turn_diff_tracker: &mut TurnDiffTracker,
|
||||
exec_command_context: ExecCommandContext,
|
||||
@@ -403,7 +402,6 @@ impl Session {
|
||||
call_id,
|
||||
command: command_for_display.clone(),
|
||||
cwd,
|
||||
parsed_cmd: parse_command(&command_for_display),
|
||||
}),
|
||||
};
|
||||
let event = Event {
|
||||
@@ -1292,7 +1290,9 @@ async fn run_turn(
|
||||
Ok(output) => return Ok(output),
|
||||
Err(CodexErr::Interrupted) => return Err(CodexErr::Interrupted),
|
||||
Err(CodexErr::EnvVar(var)) => return Err(CodexErr::EnvVar(var)),
|
||||
Err(e @ (CodexErr::UsageLimitReached | CodexErr::UsageNotIncluded)) => return Err(e),
|
||||
Err(e @ (CodexErr::UsageLimitReached(_) | CodexErr::UsageNotIncluded)) => {
|
||||
return Err(e);
|
||||
}
|
||||
Err(e) => {
|
||||
// Use the configured provider-specific stream retry budget.
|
||||
let max_retries = sess.client.get_provider().stream_max_retries();
|
||||
|
||||
@@ -6,7 +6,7 @@ use crate::config::Config;
|
||||
use crate::protocol::Event;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::util::notify_on_sigint;
|
||||
use codex_login::load_auth;
|
||||
use codex_login::CodexAuth;
|
||||
use tokio::sync::Notify;
|
||||
use uuid::Uuid;
|
||||
|
||||
@@ -26,7 +26,7 @@ pub struct CodexConversation {
|
||||
/// that callers can surface the information to the UI.
|
||||
pub async fn init_codex(config: Config) -> anyhow::Result<CodexConversation> {
|
||||
let ctrl_c = notify_on_sigint();
|
||||
let auth = load_auth(&config.codex_home, true)?;
|
||||
let auth = CodexAuth::from_codex_home(&config.codex_home)?;
|
||||
let CodexSpawnOk {
|
||||
codex,
|
||||
init_id,
|
||||
|
||||
@@ -62,15 +62,15 @@ pub enum CodexErr {
|
||||
#[error("unexpected status {0}: {1}")]
|
||||
UnexpectedStatus(StatusCode, String),
|
||||
|
||||
#[error("Usage limit has been reached")]
|
||||
UsageLimitReached,
|
||||
|
||||
#[error("Usage not included with the plan")]
|
||||
UsageNotIncluded,
|
||||
#[error("{0}")]
|
||||
UsageLimitReached(UsageLimitReachedError),
|
||||
|
||||
#[error(
|
||||
"We’re currently experiencing high demand, which may cause temporary errors. We’re adding capacity in East and West Europe to restore normal service."
|
||||
"To use Codex with your ChatGPT plan, upgrade to Plus: https://openai.com/chatgpt/pricing."
|
||||
)]
|
||||
UsageNotIncluded,
|
||||
|
||||
#[error("We're currently experiencing high demand, which may cause temporary errors.")]
|
||||
InternalServerError,
|
||||
|
||||
/// Retry limit exceeded.
|
||||
@@ -115,6 +115,30 @@ pub enum CodexErr {
|
||||
EnvVar(EnvVarError),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct UsageLimitReachedError {
|
||||
pub plan_type: Option<String>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for UsageLimitReachedError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
if let Some(plan_type) = &self.plan_type
|
||||
&& plan_type == "plus"
|
||||
{
|
||||
write!(
|
||||
f,
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), or wait for limits to reset (every 5h and every week.)."
|
||||
)?;
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"You've hit your usage limit. Limits reset every 5h and every week."
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct EnvVarError {
|
||||
/// Name of the environment variable that is missing.
|
||||
@@ -150,3 +174,39 @@ pub fn get_error_message_ui(e: &CodexErr) -> String {
|
||||
_ => e.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn usage_limit_reached_error_formats_plus_plan() {
|
||||
let err = UsageLimitReachedError {
|
||||
plan_type: Some("plus".to_string()),
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), or wait for limits to reset (every 5h and every week.)."
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn usage_limit_reached_error_formats_default_when_none() {
|
||||
let err = UsageLimitReachedError { plan_type: None };
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Limits reset every 5h and every week."
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn usage_limit_reached_error_formats_default_for_other_plans() {
|
||||
let err = UsageLimitReachedError {
|
||||
plan_type: Some("pro".to_string()),
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Limits reset every 5h and every week."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,7 +28,6 @@ mod mcp_connection_manager;
|
||||
mod mcp_tool_call;
|
||||
mod message_history;
|
||||
mod model_provider_info;
|
||||
pub mod parse_command;
|
||||
pub use model_provider_info::BUILT_IN_OSS_MODEL_PROVIDER_ID;
|
||||
pub use model_provider_info::ModelProviderInfo;
|
||||
pub use model_provider_info::WireApi;
|
||||
|
||||
@@ -15,7 +15,7 @@ use std::time::Duration;
|
||||
|
||||
use crate::error::EnvVarError;
|
||||
const DEFAULT_STREAM_IDLE_TIMEOUT_MS: u64 = 300_000;
|
||||
const DEFAULT_STREAM_MAX_RETRIES: u64 = 10;
|
||||
const DEFAULT_STREAM_MAX_RETRIES: u64 = 5;
|
||||
const DEFAULT_REQUEST_MAX_RETRIES: u64 = 4;
|
||||
|
||||
/// Wire protocol that the provider speaks. Most third-party services only
|
||||
@@ -96,7 +96,7 @@ impl ModelProviderInfo {
|
||||
auth: &Option<CodexAuth>,
|
||||
) -> crate::error::Result<reqwest::RequestBuilder> {
|
||||
let effective_auth = match self.api_key() {
|
||||
Ok(Some(key)) => Some(CodexAuth::from_api_key(key)),
|
||||
Ok(Some(key)) => Some(CodexAuth::from_api_key(&key)),
|
||||
Ok(None) => auth.clone(),
|
||||
Err(err) => {
|
||||
if auth.is_some() {
|
||||
|
||||
@@ -1,648 +0,0 @@
|
||||
use crate::bash::try_parse_bash;
|
||||
use crate::bash::try_parse_word_only_commands_sequence;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use shlex::split as shlex_split;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
|
||||
pub enum ParsedCommand {
|
||||
Read {
|
||||
cmd: Vec<String>,
|
||||
name: String,
|
||||
},
|
||||
Python {
|
||||
cmd: Vec<String>,
|
||||
},
|
||||
GitStatus {
|
||||
cmd: Vec<String>,
|
||||
},
|
||||
GitLog {
|
||||
cmd: Vec<String>,
|
||||
},
|
||||
GitDiff {
|
||||
cmd: Vec<String>,
|
||||
},
|
||||
Ls {
|
||||
cmd: Vec<String>,
|
||||
path: Option<String>,
|
||||
},
|
||||
Rg {
|
||||
cmd: Vec<String>,
|
||||
query: Option<String>,
|
||||
path: Option<String>,
|
||||
files_only: bool,
|
||||
},
|
||||
Shell {
|
||||
cmd: Vec<String>,
|
||||
display: String,
|
||||
},
|
||||
Pnpm {
|
||||
cmd: Vec<String>,
|
||||
pnpm_cmd: String,
|
||||
},
|
||||
Unknown {
|
||||
cmd: Vec<String>,
|
||||
},
|
||||
}
|
||||
|
||||
pub fn parse_command(command: &[String]) -> Vec<ParsedCommand> {
|
||||
let main_cmd = extract_main_cmd_tokens(command);
|
||||
|
||||
// 1) Try the "bash -lc <script>" path: leverage the existing parser so we
|
||||
// can get each sub-command (words-only) precisely.
|
||||
if let [bash, flag, script] = command {
|
||||
if bash == "bash" && flag == "-lc" {
|
||||
if let Some(tree) = try_parse_bash(script) {
|
||||
if let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script) {
|
||||
if !all_commands.is_empty() {
|
||||
// Tokenize the entire script once; used to preserve full context for certain summaries.
|
||||
let script_tokens = shlex_split(script).unwrap_or_else(|| {
|
||||
vec!["bash".to_string(), flag.clone(), script.clone()]
|
||||
});
|
||||
let commands: Vec<ParsedCommand> = all_commands
|
||||
.into_iter()
|
||||
.map(|tokens| {
|
||||
match summarize_main_tokens(&tokens) {
|
||||
// For ls within a bash -lc script, preserve the full script tokens for display.
|
||||
ParsedCommand::Ls { path, .. } => ParsedCommand::Ls {
|
||||
cmd: script_tokens.clone(),
|
||||
path,
|
||||
},
|
||||
other => other,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
return commands;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we couldn't parse with the bash parser, conservatively treat the
|
||||
// whole thing as one opaque shell command and mark unsafe.
|
||||
let display = script.clone();
|
||||
let commands = vec![ParsedCommand::Shell {
|
||||
cmd: main_cmd.clone(),
|
||||
display,
|
||||
}];
|
||||
return commands;
|
||||
}
|
||||
}
|
||||
|
||||
// 2) Not a "bash -lc" form. If there are connectors, split locally.
|
||||
let has_connectors = main_cmd
|
||||
.iter()
|
||||
.any(|t| t == "&&" || t == "||" || t == "|" || t == ";");
|
||||
|
||||
let split_subcommands = |tokens: &[String]| -> Vec<Vec<String>> {
|
||||
let mut out: Vec<Vec<String>> = Vec::new();
|
||||
let mut cur: Vec<String> = Vec::new();
|
||||
for t in tokens {
|
||||
if t == "&&" || t == "||" || t == "|" || t == ";" {
|
||||
if !cur.is_empty() {
|
||||
out.push(std::mem::take(&mut cur));
|
||||
}
|
||||
} else {
|
||||
cur.push(t.clone());
|
||||
}
|
||||
}
|
||||
if !cur.is_empty() {
|
||||
out.push(cur);
|
||||
}
|
||||
out
|
||||
};
|
||||
|
||||
let commands_tokens: Vec<Vec<String>> = if has_connectors {
|
||||
split_subcommands(&main_cmd)
|
||||
} else {
|
||||
vec![main_cmd.clone()]
|
||||
};
|
||||
|
||||
// 3) Summarize each sub-command.
|
||||
let commands: Vec<ParsedCommand> = commands_tokens
|
||||
.into_iter()
|
||||
.map(|tokens| summarize_main_tokens(&tokens))
|
||||
.collect();
|
||||
|
||||
commands
|
||||
}
|
||||
|
||||
/// Returns true if `arg` matches /^(\d+,)?\d+p$/
|
||||
fn is_valid_sed_n_arg(arg: Option<&str>) -> bool {
|
||||
let s = match arg {
|
||||
Some(s) => s,
|
||||
None => return false,
|
||||
};
|
||||
let core = match s.strip_suffix('p') {
|
||||
Some(rest) => rest,
|
||||
None => return false,
|
||||
};
|
||||
let parts: Vec<&str> = core.split(',').collect();
|
||||
match parts.as_slice() {
|
||||
[num] => !num.is_empty() && num.chars().all(|c| c.is_ascii_digit()),
|
||||
[a, b] => {
|
||||
!a.is_empty()
|
||||
&& !b.is_empty()
|
||||
&& a.chars().all(|c| c.is_ascii_digit())
|
||||
&& b.chars().all(|c| c.is_ascii_digit())
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_main_cmd_tokens(cmd: &[String]) -> Vec<String> {
|
||||
match cmd {
|
||||
[first, pipe, rest @ ..] if (first == "yes" || first == "y") && pipe == "|" => {
|
||||
let s = rest.join(" ");
|
||||
shlex_split(&s).unwrap_or_else(|| rest.to_vec())
|
||||
}
|
||||
[first, pipe, rest @ ..] if (first == "no" || first == "n") && pipe == "|" => {
|
||||
let s = rest.join(" ");
|
||||
shlex_split(&s).unwrap_or_else(|| rest.to_vec())
|
||||
}
|
||||
[bash, flag, script] if bash == "bash" && (flag == "-c" || flag == "-lc") => {
|
||||
shlex_split(script)
|
||||
.unwrap_or_else(|| vec!["bash".to_string(), flag.clone(), script.clone()])
|
||||
}
|
||||
_ => cmd.to_vec(),
|
||||
}
|
||||
}
|
||||
|
||||
fn summarize_main_tokens(main_cmd: &[String]) -> ParsedCommand {
|
||||
let cut_at_connector = |tokens: &[String]| -> Vec<String> {
|
||||
let idx = tokens
|
||||
.iter()
|
||||
.position(|t| t == "|" || t == "&&" || t == "||")
|
||||
.unwrap_or(tokens.len());
|
||||
tokens[..idx].to_vec()
|
||||
};
|
||||
|
||||
let truncate_file_path_for_display = |path: &str| -> String {
|
||||
let mut parts = path.split('/').rev().filter(|p| {
|
||||
!p.is_empty() && *p != "build" && *p != "dist" && *p != "node_modules" && *p != "src"
|
||||
});
|
||||
parts
|
||||
.next()
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| path.to_string())
|
||||
};
|
||||
|
||||
match main_cmd.split_first() {
|
||||
Some((head, tail)) if head == "ls" => {
|
||||
let path = tail
|
||||
.iter()
|
||||
.find(|p| !p.starts_with('-'))
|
||||
.map(|p| truncate_file_path_for_display(p));
|
||||
ParsedCommand::Ls {
|
||||
cmd: main_cmd.to_vec(),
|
||||
path,
|
||||
}
|
||||
}
|
||||
Some((head, tail)) if head == "rg" => {
|
||||
let args_no_connector = cut_at_connector(tail);
|
||||
let files_only = args_no_connector.iter().any(|a| a == "--files");
|
||||
let non_flags: Vec<&String> = args_no_connector
|
||||
.iter()
|
||||
.filter(|p| !p.starts_with('-'))
|
||||
.collect();
|
||||
let (query, path) = if files_only {
|
||||
let p = non_flags.first().map(|s| truncate_file_path_for_display(s));
|
||||
(None, p)
|
||||
} else {
|
||||
let q = non_flags.first().map(|s| truncate_file_path_for_display(s));
|
||||
let p = non_flags.get(1).map(|s| truncate_file_path_for_display(s));
|
||||
(q, p)
|
||||
};
|
||||
ParsedCommand::Rg {
|
||||
cmd: main_cmd.to_vec(),
|
||||
query,
|
||||
path,
|
||||
files_only,
|
||||
}
|
||||
}
|
||||
Some((head, tail)) if head == "grep" => {
|
||||
let args_no_connector = cut_at_connector(tail);
|
||||
let non_flags: Vec<&String> = args_no_connector
|
||||
.iter()
|
||||
.filter(|p| !p.starts_with('-'))
|
||||
.collect();
|
||||
let query = non_flags.first().map(|s| truncate_file_path_for_display(s));
|
||||
let path = non_flags.get(1).map(|s| truncate_file_path_for_display(s));
|
||||
ParsedCommand::Rg {
|
||||
cmd: main_cmd.to_vec(),
|
||||
query,
|
||||
path,
|
||||
files_only: false,
|
||||
}
|
||||
}
|
||||
Some((head, tail)) if head == "cat" && tail.len() == 1 => {
|
||||
let name = truncate_file_path_for_display(&tail[0]);
|
||||
ParsedCommand::Read {
|
||||
cmd: main_cmd.to_vec(),
|
||||
name,
|
||||
}
|
||||
}
|
||||
Some((head, tail))
|
||||
if head == "head"
|
||||
&& tail.len() >= 3
|
||||
&& tail[0] == "-n"
|
||||
&& tail[1].chars().all(|c| c.is_ascii_digit()) =>
|
||||
{
|
||||
let name = truncate_file_path_for_display(&tail[2]);
|
||||
ParsedCommand::Read {
|
||||
cmd: main_cmd.to_vec(),
|
||||
name,
|
||||
}
|
||||
}
|
||||
Some((head, tail))
|
||||
if head == "tail" && tail.len() >= 3 && tail[0] == "-n" && {
|
||||
let n = &tail[1];
|
||||
let s = n.strip_prefix('+').unwrap_or(n);
|
||||
!s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
|
||||
} =>
|
||||
{
|
||||
let name = truncate_file_path_for_display(&tail[2]);
|
||||
ParsedCommand::Read {
|
||||
cmd: main_cmd.to_vec(),
|
||||
name,
|
||||
}
|
||||
}
|
||||
Some((head, tail))
|
||||
if head == "sed"
|
||||
&& tail.len() >= 3
|
||||
&& tail[0] == "-n"
|
||||
&& is_valid_sed_n_arg(tail.get(1).map(|s| s.as_str())) =>
|
||||
{
|
||||
if let Some(path) = tail.get(2) {
|
||||
let name = truncate_file_path_for_display(path);
|
||||
ParsedCommand::Read {
|
||||
cmd: main_cmd.to_vec(),
|
||||
name,
|
||||
}
|
||||
} else {
|
||||
ParsedCommand::Unknown {
|
||||
cmd: main_cmd.to_vec(),
|
||||
}
|
||||
}
|
||||
}
|
||||
Some((head, _tail)) if head == "python" => ParsedCommand::Python {
|
||||
cmd: main_cmd.to_vec(),
|
||||
},
|
||||
Some((first, rest)) if first == "git" => match rest.first().map(|s| s.as_str()) {
|
||||
Some("status") => ParsedCommand::GitStatus {
|
||||
cmd: main_cmd.to_vec(),
|
||||
},
|
||||
Some("log") => ParsedCommand::GitLog {
|
||||
cmd: main_cmd.to_vec(),
|
||||
},
|
||||
Some("diff") => ParsedCommand::GitDiff {
|
||||
cmd: main_cmd.to_vec(),
|
||||
},
|
||||
_ => ParsedCommand::Unknown {
|
||||
cmd: main_cmd.to_vec(),
|
||||
},
|
||||
},
|
||||
Some((tool, rest)) if (tool == "pnpm" || tool == "npm") => {
|
||||
let mut r = rest;
|
||||
let mut has_r = false;
|
||||
if let Some(flag) = r.first() {
|
||||
if flag == "-r" {
|
||||
has_r = true;
|
||||
r = &r[1..];
|
||||
}
|
||||
}
|
||||
if r.first().map(|s| s.as_str()) == Some("run") {
|
||||
let args = r[1..].to_vec();
|
||||
// For display, only include the script name before any "--" forwarded args.
|
||||
let script_name = args.first().cloned().unwrap_or_default();
|
||||
let pnpm_cmd = script_name;
|
||||
let mut full = vec![tool.clone()];
|
||||
if has_r {
|
||||
full.push("-r".to_string());
|
||||
}
|
||||
full.push("run".to_string());
|
||||
full.extend(args.clone());
|
||||
ParsedCommand::Pnpm {
|
||||
cmd: full,
|
||||
pnpm_cmd,
|
||||
}
|
||||
} else {
|
||||
ParsedCommand::Unknown {
|
||||
cmd: main_cmd.to_vec(),
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => ParsedCommand::Unknown {
|
||||
cmd: main_cmd.to_vec(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#![allow(clippy::unwrap_used)]
|
||||
use super::*;
|
||||
|
||||
fn vec_str(args: &[&str]) -> Vec<String> {
|
||||
args.iter().map(|s| s.to_string()).collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn git_status_summary() {
|
||||
let out = parse_command(&vec_str(&["git", "status"]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::GitStatus {
|
||||
cmd: vec_str(&["git", "status"]),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn handles_complex_bash_command() {
|
||||
let inner =
|
||||
"rg --version && node -v && pnpm -v && rg --files | wc -l && rg --files | head -n 40";
|
||||
let out = parse_command(&vec_str(&["bash", "-lc", inner]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![
|
||||
ParsedCommand::Unknown {
|
||||
cmd: vec_str(&["head", "-n", "40"])
|
||||
},
|
||||
ParsedCommand::Rg {
|
||||
cmd: vec_str(&["rg", "--files"]),
|
||||
query: None,
|
||||
path: None,
|
||||
files_only: true,
|
||||
},
|
||||
ParsedCommand::Unknown {
|
||||
cmd: vec_str(&["wc", "-l"])
|
||||
},
|
||||
ParsedCommand::Rg {
|
||||
cmd: vec_str(&["rg", "--files"]),
|
||||
query: None,
|
||||
path: None,
|
||||
files_only: true,
|
||||
},
|
||||
ParsedCommand::Unknown {
|
||||
cmd: vec_str(&["pnpm", "-v"])
|
||||
},
|
||||
ParsedCommand::Unknown {
|
||||
cmd: vec_str(&["node", "-v"])
|
||||
},
|
||||
ParsedCommand::Rg {
|
||||
cmd: vec_str(&["rg", "--version"]),
|
||||
query: None,
|
||||
path: None,
|
||||
files_only: false,
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_searching_for_navigate_to_route() {
|
||||
let inner = "rg -n \"navigate-to-route\" -S";
|
||||
let out = parse_command(&vec_str(&["bash", "-lc", inner]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Rg {
|
||||
cmd: shlex_split(inner).unwrap(),
|
||||
query: Some("navigate-to-route".to_string()),
|
||||
path: None,
|
||||
files_only: false,
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_rg_files_with_path_and_pipe() {
|
||||
let inner = "rg --files webview/src | sed -n";
|
||||
let out = parse_command(&vec_str(&["bash", "-lc", inner]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![
|
||||
ParsedCommand::Unknown {
|
||||
cmd: vec_str(&["sed", "-n"])
|
||||
},
|
||||
ParsedCommand::Rg {
|
||||
cmd: vec_str(&["rg", "--files", "webview/src"]),
|
||||
query: None,
|
||||
path: Some("webview".to_string()),
|
||||
files_only: true,
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_rg_files_then_head() {
|
||||
let inner = "rg --files | head -n 50";
|
||||
let out = parse_command(&vec_str(&["bash", "-lc", inner]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![
|
||||
ParsedCommand::Unknown {
|
||||
cmd: vec_str(&["head", "-n", "50"])
|
||||
},
|
||||
ParsedCommand::Rg {
|
||||
cmd: vec_str(&["rg", "--files"]),
|
||||
query: None,
|
||||
path: None,
|
||||
files_only: true,
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_cat() {
|
||||
let inner = "cat webview/README.md";
|
||||
let out = parse_command(&vec_str(&["bash", "-lc", inner]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Read {
|
||||
cmd: shlex_split(inner).unwrap(),
|
||||
name: "README.md".to_string(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_ls_with_pipe() {
|
||||
let inner = "ls -la | sed -n '1,120p'";
|
||||
let out = parse_command(&vec_str(&["bash", "-lc", inner]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![
|
||||
ParsedCommand::Unknown {
|
||||
cmd: vec_str(&["sed", "-n", "1,120p"])
|
||||
},
|
||||
ParsedCommand::Ls {
|
||||
cmd: shlex_split(inner).unwrap(),
|
||||
path: None,
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_head_n() {
|
||||
let inner = "head -n 50 Cargo.toml";
|
||||
let out = parse_command(&vec_str(&["bash", "-lc", inner]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Read {
|
||||
cmd: shlex_split(inner).unwrap(),
|
||||
name: "Cargo.toml".to_string(),
|
||||
},]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_tail_n_plus() {
|
||||
let inner = "tail -n +522 README.md";
|
||||
let out = parse_command(&vec_str(&["bash", "-lc", inner]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Read {
|
||||
cmd: shlex_split(inner).unwrap(),
|
||||
name: "README.md".to_string(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_tail_n_last_lines() {
|
||||
let inner = "tail -n 30 README.md";
|
||||
let out = parse_command(&vec_str(&["bash", "-lc", inner]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Read {
|
||||
cmd: shlex_split(inner).unwrap(),
|
||||
name: "README.md".to_string(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_npm_run_build() {
|
||||
let out = parse_command(&vec_str(&["npm", "run", "build"]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Pnpm {
|
||||
cmd: vec_str(&["npm", "run", "build"]),
|
||||
pnpm_cmd: "build".to_string(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_npm_run_with_forwarded_args() {
|
||||
let out = parse_command(&vec_str(&[
|
||||
"npm",
|
||||
"run",
|
||||
"lint",
|
||||
"--",
|
||||
"--max-warnings",
|
||||
"0",
|
||||
"--format",
|
||||
"json",
|
||||
]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Pnpm {
|
||||
cmd: vec_str(&[
|
||||
"npm",
|
||||
"run",
|
||||
"lint",
|
||||
"--",
|
||||
"--max-warnings",
|
||||
"0",
|
||||
"--format",
|
||||
"json",
|
||||
]),
|
||||
pnpm_cmd: "lint".to_string(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_grep_recursive_current_dir() {
|
||||
let out = parse_command(&vec_str(&[
|
||||
"grep",
|
||||
"-R",
|
||||
"CODEX_SANDBOX_ENV_VAR",
|
||||
"-n",
|
||||
".",
|
||||
]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Rg {
|
||||
cmd: vec_str(&["grep", "-R", "CODEX_SANDBOX_ENV_VAR", "-n", "."]),
|
||||
query: Some("CODEX_SANDBOX_ENV_VAR".to_string()),
|
||||
path: Some(".".to_string()),
|
||||
files_only: false,
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_grep_recursive_specific_file() {
|
||||
let out = parse_command(&vec_str(&[
|
||||
"grep",
|
||||
"-R",
|
||||
"CODEX_SANDBOX_ENV_VAR",
|
||||
"-n",
|
||||
"core/src/spawn.rs",
|
||||
]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Rg {
|
||||
cmd: vec_str(&[
|
||||
"grep",
|
||||
"-R",
|
||||
"CODEX_SANDBOX_ENV_VAR",
|
||||
"-n",
|
||||
"core/src/spawn.rs",
|
||||
]),
|
||||
query: Some("CODEX_SANDBOX_ENV_VAR".to_string()),
|
||||
path: Some("spawn.rs".to_string()),
|
||||
files_only: false,
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_grep_weird_backtick_in_query() {
|
||||
let out = parse_command(&vec_str(&["grep", "-R", "COD`EX_SANDBOX", "-n"]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![ParsedCommand::Rg {
|
||||
cmd: vec_str(&["grep", "-R", "COD`EX_SANDBOX", "-n"]),
|
||||
query: Some("COD`EX_SANDBOX".to_string()),
|
||||
path: None,
|
||||
files_only: false,
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn supports_cd_and_rg_files() {
|
||||
let out = parse_command(&vec_str(&["cd", "codex-rs", "&&", "rg", "--files"]));
|
||||
assert_eq!(
|
||||
out,
|
||||
vec![
|
||||
ParsedCommand::Unknown {
|
||||
cmd: vec_str(&["cd", "codex-rs"]),
|
||||
},
|
||||
ParsedCommand::Rg {
|
||||
cmd: vec_str(&["rg", "--files"]),
|
||||
query: None,
|
||||
path: None,
|
||||
files_only: true,
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -21,7 +21,6 @@ use crate::config_types::ReasoningEffort as ReasoningEffortConfig;
|
||||
use crate::config_types::ReasoningSummary as ReasoningSummaryConfig;
|
||||
use crate::message_history::HistoryEntry;
|
||||
use crate::model_provider_info::ModelProviderInfo;
|
||||
use crate::parse_command::ParsedCommand;
|
||||
use crate::plan_tool::UpdatePlanArgs;
|
||||
|
||||
/// Submission Queue Entry - requests from user
|
||||
@@ -580,7 +579,6 @@ pub struct ExecCommandBeginEvent {
|
||||
pub command: Vec<String>,
|
||||
/// The command's working directory if not the default cwd for the agent.
|
||||
pub cwd: PathBuf,
|
||||
pub parsed_cmd: Vec<ParsedCommand>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
|
||||
@@ -7,7 +7,7 @@ use tokio::sync::Notify;
|
||||
use tracing::debug;
|
||||
|
||||
const INITIAL_DELAY_MS: u64 = 200;
|
||||
const BACKOFF_FACTOR: f64 = 1.3;
|
||||
const BACKOFF_FACTOR: f64 = 2.0;
|
||||
|
||||
/// Make a CancellationToken that is fulfilled when SIGINT occurs.
|
||||
pub fn notify_on_sigint() -> Arc<Notify> {
|
||||
|
||||
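For scale, and assuming the conventional exponential schedule `delay = INITIAL_DELAY_MS * BACKOFF_FACTOR^attempt` (the backoff helper itself is not shown in this hunk), raising the factor from 1.3 to 2.0 doubles the wait on each retry (200 ms, 400 ms, 800 ms, ...) instead of growing it by roughly 30%:

```rust
// Sketch of the assumed schedule; the real backoff helper is not part of this diff.
const INITIAL_DELAY_MS: u64 = 200;
const BACKOFF_FACTOR: f64 = 2.0;

fn retry_delay_ms(attempt: u32) -> u64 {
    (INITIAL_DELAY_MS as f64 * BACKOFF_FACTOR.powi(attempt as i32)) as u64
}
```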
@@ -1,8 +1,5 @@
|
||||
#![allow(clippy::expect_used)]
|
||||
#![allow(clippy::unwrap_used)]
|
||||
use std::path::PathBuf;
|
||||
#![allow(clippy::expect_used, clippy::unwrap_used)]
|
||||
|
||||
use chrono::Utc;
|
||||
use codex_core::Codex;
|
||||
use codex_core::CodexSpawnOk;
|
||||
use codex_core::ModelProviderInfo;
|
||||
@@ -13,10 +10,7 @@ use codex_core::protocol::InputItem;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::SessionConfiguredEvent;
|
||||
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_login::AuthDotJson;
|
||||
use codex_login::AuthMode;
|
||||
use codex_login::CodexAuth;
|
||||
use codex_login::TokenData;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::load_sse_fixture_with_id;
|
||||
use core_test_support::wait_for_event;
|
||||
@@ -99,7 +93,7 @@ async fn includes_session_id_and_model_headers_in_request() {
|
||||
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(
|
||||
config,
|
||||
Some(CodexAuth::from_api_key("Test API Key".to_string())),
|
||||
Some(CodexAuth::from_api_key("Test API Key")),
|
||||
ctrl_c.clone(),
|
||||
)
|
||||
.await
|
||||
@@ -173,7 +167,7 @@ async fn includes_base_instructions_override_in_request() {
|
||||
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(
|
||||
config,
|
||||
Some(CodexAuth::from_api_key("Test API Key".to_string())),
|
||||
Some(CodexAuth::from_api_key("Test API Key")),
|
||||
ctrl_c.clone(),
|
||||
)
|
||||
.await
|
||||
@@ -232,7 +226,7 @@ async fn originator_config_override_is_used() {
|
||||
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(
|
||||
config,
|
||||
Some(CodexAuth::from_api_key("Test API Key".to_string())),
|
||||
Some(CodexAuth::from_api_key("Test API Key")),
|
||||
ctrl_c.clone(),
|
||||
)
|
||||
.await
|
||||
@@ -370,7 +364,7 @@ async fn includes_user_instructions_message_in_request() {
|
||||
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(
|
||||
config,
|
||||
Some(CodexAuth::from_api_key("Test API Key".to_string())),
|
||||
Some(CodexAuth::from_api_key("Test API Key")),
|
||||
ctrl_c.clone(),
|
||||
)
|
||||
.await
|
||||
@@ -556,19 +550,5 @@ async fn env_var_overrides_loaded_auth() {
|
||||
}
|
||||
|
||||
fn create_dummy_codex_auth() -> CodexAuth {
|
||||
CodexAuth::new(
|
||||
None,
|
||||
AuthMode::ChatGPT,
|
||||
PathBuf::new(),
|
||||
Some(AuthDotJson {
|
||||
openai_api_key: None,
|
||||
tokens: Some(TokenData {
|
||||
id_token: Default::default(),
|
||||
access_token: "Access Token".to_string(),
|
||||
refresh_token: "test".to_string(),
|
||||
account_id: Some("account_id".to_string()),
|
||||
}),
|
||||
last_refresh: Some(Utc::now()),
|
||||
}),
|
||||
)
|
||||
CodexAuth::create_dummy_chatgpt_auth_for_testing()
|
||||
}
|
||||
|
||||
@@ -145,7 +145,7 @@ async fn summarize_context_three_requests_and_instructions() {
|
||||
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(
|
||||
config,
|
||||
Some(CodexAuth::from_api_key("dummy".to_string())),
|
||||
Some(CodexAuth::from_api_key("dummy")),
|
||||
ctrl_c.clone(),
|
||||
)
|
||||
.await
|
||||
|
||||
@@ -99,7 +99,7 @@ async fn retries_on_early_close() {
|
||||
config.model_provider = model_provider;
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(
|
||||
config,
|
||||
Some(CodexAuth::from_api_key("Test API Key".to_string())),
|
||||
Some(CodexAuth::from_api_key("Test API Key")),
|
||||
ctrl_c,
|
||||
)
|
||||
.await
|
||||
|
||||
@@ -255,7 +255,6 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
call_id,
|
||||
command,
|
||||
cwd,
|
||||
parsed_cmd: _,
|
||||
}) => {
|
||||
self.call_id_to_command.insert(
|
||||
call_id.clone(),
|
||||
|
||||
@@ -38,8 +38,9 @@ pub enum AuthMode {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CodexAuth {
|
||||
pub api_key: Option<String>,
|
||||
pub mode: AuthMode,
|
||||
|
||||
api_key: Option<String>,
|
||||
auth_dot_json: Arc<Mutex<Option<AuthDotJson>>>,
|
||||
auth_file: PathBuf,
|
||||
}
|
||||
@@ -51,33 +52,23 @@ impl PartialEq for CodexAuth {
|
||||
}
|
||||
|
||||
impl CodexAuth {
|
||||
pub fn new(
|
||||
api_key: Option<String>,
|
||||
mode: AuthMode,
|
||||
auth_file: PathBuf,
|
||||
auth_dot_json: Option<AuthDotJson>,
|
||||
) -> Self {
|
||||
let auth_dot_json = Arc::new(Mutex::new(auth_dot_json));
|
||||
pub fn from_api_key(api_key: &str) -> Self {
|
||||
Self {
|
||||
api_key,
|
||||
mode,
|
||||
auth_file,
|
||||
auth_dot_json,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_api_key(api_key: String) -> Self {
|
||||
Self {
|
||||
api_key: Some(api_key),
|
||||
api_key: Some(api_key.to_owned()),
|
||||
mode: AuthMode::ApiKey,
|
||||
auth_file: PathBuf::new(),
|
||||
auth_dot_json: Arc::new(Mutex::new(None)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Loads the available auth information from the auth.json or
|
||||
/// OPENAI_API_KEY environment variable.
|
||||
pub fn from_codex_home(codex_home: &Path) -> std::io::Result<Option<CodexAuth>> {
|
||||
load_auth(codex_home, true)
|
||||
}
|
||||
|
||||
pub async fn get_token_data(&self) -> Result<TokenData, std::io::Error> {
|
||||
#[expect(clippy::unwrap_used)]
|
||||
let auth_dot_json = self.auth_dot_json.lock().unwrap().clone();
|
||||
let auth_dot_json: Option<AuthDotJson> = self.get_current_auth_json();
|
||||
match auth_dot_json {
|
||||
Some(AuthDotJson {
|
||||
tokens: Some(mut tokens),
|
||||
@@ -132,61 +123,120 @@ impl CodexAuth {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_account_id(&self) -> Option<String> {
|
||||
match self.mode {
|
||||
AuthMode::ApiKey => None,
|
||||
AuthMode::ChatGPT => {
|
||||
let token_data = self.get_token_data().await.ok()?;
|
||||
pub fn get_account_id(&self) -> Option<String> {
|
||||
self.get_current_token_data()
|
||||
.and_then(|t| t.account_id.clone())
|
||||
}
|
||||
|
||||
token_data.account_id.clone()
|
||||
}
|
||||
pub fn get_plan_type(&self) -> Option<String> {
|
||||
self.get_current_token_data()
|
||||
.and_then(|t| t.id_token.chatgpt_plan_type.as_ref().map(|p| p.as_string()))
|
||||
}
|
||||
|
||||
fn get_current_auth_json(&self) -> Option<AuthDotJson> {
|
||||
#[expect(clippy::unwrap_used)]
|
||||
self.auth_dot_json.lock().unwrap().clone()
|
||||
}
|
||||
|
||||
fn get_current_token_data(&self) -> Option<TokenData> {
|
||||
self.get_current_auth_json().and_then(|t| t.tokens.clone())
|
||||
}
|
||||
|
||||
/// Consider this private to integration tests.
|
||||
pub fn create_dummy_chatgpt_auth_for_testing() -> Self {
|
||||
let auth_dot_json = AuthDotJson {
|
||||
openai_api_key: None,
|
||||
tokens: Some(TokenData {
|
||||
id_token: Default::default(),
|
||||
access_token: "Access Token".to_string(),
|
||||
refresh_token: "test".to_string(),
|
||||
account_id: Some("account_id".to_string()),
|
||||
}),
|
||||
last_refresh: Some(Utc::now()),
|
||||
};
|
||||
|
||||
let auth_dot_json = Arc::new(Mutex::new(Some(auth_dot_json)));
|
||||
Self {
|
||||
api_key: None,
|
||||
mode: AuthMode::ChatGPT,
|
||||
auth_file: PathBuf::new(),
|
||||
auth_dot_json,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Loads the available auth information from the auth.json or OPENAI_API_KEY environment variable.
|
||||
pub fn load_auth(codex_home: &Path, include_env_var: bool) -> std::io::Result<Option<CodexAuth>> {
|
||||
fn load_auth(codex_home: &Path, include_env_var: bool) -> std::io::Result<Option<CodexAuth>> {
|
||||
// First, check to see if there is a valid auth.json file. If not, we fall
|
||||
// back to AuthMode::ApiKey using the OPENAI_API_KEY environment variable
|
||||
// (if it is set).
|
||||
let auth_file = get_auth_file(codex_home);
|
||||
|
||||
let auth_dot_json = try_read_auth_json(&auth_file).ok();
|
||||
|
||||
let auth_json_api_key = auth_dot_json
|
||||
.as_ref()
|
||||
.and_then(|a| a.openai_api_key.clone())
|
||||
.filter(|s| !s.is_empty());
|
||||
|
||||
let openai_api_key = if include_env_var {
|
||||
env::var(OPENAI_API_KEY_ENV_VAR)
|
||||
.ok()
|
||||
.filter(|s| !s.is_empty())
|
||||
.or(auth_json_api_key)
|
||||
} else {
|
||||
auth_json_api_key
|
||||
let auth_dot_json = match try_read_auth_json(&auth_file) {
|
||||
Ok(auth) => auth,
|
||||
// If auth.json does not exist, try to read the OPENAI_API_KEY from the
|
||||
// environment variable.
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound && include_env_var => {
|
||||
return match read_openai_api_key_from_env() {
|
||||
Some(api_key) => Ok(Some(CodexAuth::from_api_key(&api_key))),
|
||||
None => Ok(None),
|
||||
};
|
||||
}
|
||||
// Though if auth.json exists but is malformed, do not fall back to the
|
||||
// env var because the user may be expecting to use AuthMode::ChatGPT.
|
||||
Err(e) => {
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
let has_tokens = auth_dot_json
|
||||
.as_ref()
|
||||
.and_then(|a| a.tokens.as_ref())
|
||||
.is_some();
|
||||
let AuthDotJson {
|
||||
openai_api_key: auth_json_api_key,
|
||||
tokens,
|
||||
last_refresh,
|
||||
} = auth_dot_json;
|
||||
|
||||
if openai_api_key.is_none() && !has_tokens {
|
||||
return Ok(None);
|
||||
// If the auth.json has an API key AND does not appear to be on a plan that
|
||||
// should prefer AuthMode::ChatGPT, use AuthMode::ApiKey.
|
||||
if let Some(api_key) = &auth_json_api_key {
|
||||
// Should any of these be AuthMode::ChatGPT with the api_key set?
|
||||
// Does AuthMode::ChatGPT indicate that there is an auth.json that is
|
||||
// "refreshable" even if we are using the API key for auth?
|
||||
match &tokens {
|
||||
Some(tokens) => {
|
||||
if tokens.is_plan_that_should_use_api_key() {
|
||||
return Ok(Some(CodexAuth::from_api_key(api_key)));
|
||||
} else {
|
||||
// Ignore the API key and fall through to ChatGPT auth.
|
||||
}
|
||||
}
|
||||
None => {
|
||||
// We have an API key but no tokens in the auth.json file.
|
||||
// Perhaps the user ran `codex login --api-key <KEY>` or updated
|
||||
// auth.json by hand. Either way, let's assume they are trying
|
||||
// to use their API key.
|
||||
return Ok(Some(CodexAuth::from_api_key(api_key)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mode = if openai_api_key.is_some() {
|
||||
AuthMode::ApiKey
|
||||
} else {
|
||||
AuthMode::ChatGPT
|
||||
};
|
||||
|
||||
// For the AuthMode::ChatGPT variant, perhaps neither api_key nor
|
||||
// openai_api_key should exist?
|
||||
Ok(Some(CodexAuth {
|
||||
api_key: openai_api_key,
|
||||
mode,
|
||||
api_key: None,
|
||||
mode: AuthMode::ChatGPT,
|
||||
auth_file,
|
||||
auth_dot_json: Arc::new(Mutex::new(auth_dot_json)),
|
||||
        auth_dot_json: Arc::new(Mutex::new(Some(AuthDotJson {
            openai_api_key: None,
            tokens,
            last_refresh,
        }))),
    }))
}

fn read_openai_api_key_from_env() -> Option<String> {
    env::var(OPENAI_API_KEY_ENV_VAR)
        .ok()
        .filter(|s| !s.is_empty())
}

pub fn get_auth_file(codex_home: &Path) -> PathBuf {
    codex_home.join("auth.json")
}

@@ -410,14 +460,19 @@ pub struct AuthDotJson {

#[cfg(test)]
mod tests {
    #![expect(clippy::expect_used, clippy::unwrap_used)]
    use super::*;
    use crate::token_data::IdTokenInfo;
    use crate::token_data::KnownPlan;
    use crate::token_data::PlanType;
    use base64::Engine;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use tempfile::tempdir;

    const LAST_REFRESH: &str = "2025-08-06T20:41:36.232376Z";

    #[test]
    #[expect(clippy::unwrap_used)]
    fn writes_api_key_and_loads_auth() {
        let dir = tempdir().unwrap();
        login_with_api_key(dir.path(), "sk-test-key").unwrap();
@@ -427,7 +482,6 @@ mod tests {
    }

    #[test]
    #[expect(clippy::unwrap_used)]
    fn loads_from_env_var_if_env_var_exists() {
        let dir = tempdir().unwrap();

@@ -441,10 +495,132 @@ mod tests {
    }

    #[tokio::test]
    #[expect(clippy::expect_used, clippy::unwrap_used)]
    async fn loads_token_data_from_auth_json() {
        let dir = tempdir().unwrap();
        let auth_file = dir.path().join("auth.json");
    async fn pro_account_with_no_api_key_uses_chatgpt_auth() {
        let codex_home = tempdir().unwrap();
        write_auth_file(
            AuthFileParams {
                openai_api_key: None,
                chatgpt_plan_type: "pro".to_string(),
            },
            codex_home.path(),
        )
        .expect("failed to write auth file");

        let CodexAuth {
            api_key,
            mode,
            auth_dot_json,
            auth_file: _,
        } = load_auth(codex_home.path(), false).unwrap().unwrap();
        assert_eq!(None, api_key);
        assert_eq!(AuthMode::ChatGPT, mode);

        let guard = auth_dot_json.lock().unwrap();
        let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");
        assert_eq!(
            &AuthDotJson {
                openai_api_key: None,
                tokens: Some(TokenData {
                    id_token: IdTokenInfo {
                        email: Some("user@example.com".to_string()),
                        chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
                    },
                    access_token: "test-access-token".to_string(),
                    refresh_token: "test-refresh-token".to_string(),
                    account_id: None,
                }),
                last_refresh: Some(
                    DateTime::parse_from_rfc3339(LAST_REFRESH)
                        .unwrap()
                        .with_timezone(&Utc)
                ),
            },
            auth_dot_json
        )
    }

    /// Even if the OPENAI_API_KEY is set in auth.json, if the plan is not in
    /// [`TokenData::is_plan_that_should_use_api_key`], it should use
    /// [`AuthMode::ChatGPT`].
    #[tokio::test]
    async fn pro_account_with_api_key_still_uses_chatgpt_auth() {
        let codex_home = tempdir().unwrap();
        write_auth_file(
            AuthFileParams {
                openai_api_key: Some("sk-test-key".to_string()),
                chatgpt_plan_type: "pro".to_string(),
            },
            codex_home.path(),
        )
        .expect("failed to write auth file");

        let CodexAuth {
            api_key,
            mode,
            auth_dot_json,
            auth_file: _,
        } = load_auth(codex_home.path(), false).unwrap().unwrap();
        assert_eq!(None, api_key);
        assert_eq!(AuthMode::ChatGPT, mode);

        let guard = auth_dot_json.lock().unwrap();
        let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");
        assert_eq!(
            &AuthDotJson {
                openai_api_key: None,
                tokens: Some(TokenData {
                    id_token: IdTokenInfo {
                        email: Some("user@example.com".to_string()),
                        chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
                    },
                    access_token: "test-access-token".to_string(),
                    refresh_token: "test-refresh-token".to_string(),
                    account_id: None,
                }),
                last_refresh: Some(
                    DateTime::parse_from_rfc3339(LAST_REFRESH)
                        .unwrap()
                        .with_timezone(&Utc)
                ),
            },
            auth_dot_json
        )
    }

    /// If the OPENAI_API_KEY is set in auth.json and it is an enterprise
    /// account, then it should use [`AuthMode::ApiKey`].
    #[tokio::test]
    async fn enterprise_account_with_api_key_uses_chatgpt_auth() {
        let codex_home = tempdir().unwrap();
        write_auth_file(
            AuthFileParams {
                openai_api_key: Some("sk-test-key".to_string()),
                chatgpt_plan_type: "enterprise".to_string(),
            },
            codex_home.path(),
        )
        .expect("failed to write auth file");

        let CodexAuth {
            api_key,
            mode,
            auth_dot_json,
            auth_file: _,
        } = load_auth(codex_home.path(), false).unwrap().unwrap();
        assert_eq!(Some("sk-test-key".to_string()), api_key);
        assert_eq!(AuthMode::ApiKey, mode);

        let guard = auth_dot_json.lock().expect("should unwrap");
        assert!(guard.is_none(), "auth_dot_json should be None");
    }

    struct AuthFileParams {
        openai_api_key: Option<String>,
        chatgpt_plan_type: String,
    }

    fn write_auth_file(params: AuthFileParams, codex_home: &Path) -> std::io::Result<()> {
        let auth_file = get_auth_file(codex_home);
        // Create a minimal valid JWT for the id_token field.
        #[derive(Serialize)]
        struct Header {
@@ -460,71 +636,31 @@ mod tests {
            "email_verified": true,
            "https://api.openai.com/auth": {
                "chatgpt_account_id": "bc3618e3-489d-4d49-9362-1561dc53ba53",
                "chatgpt_plan_type": "pro",
                "chatgpt_plan_type": params.chatgpt_plan_type,
                "chatgpt_user_id": "user-12345",
                "user_id": "user-12345",
            }
        });
        let b64 = |b: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(b);
        let header_b64 = b64(&serde_json::to_vec(&header).unwrap());
        let payload_b64 = b64(&serde_json::to_vec(&payload).unwrap());
        let header_b64 = b64(&serde_json::to_vec(&header)?);
        let payload_b64 = b64(&serde_json::to_vec(&payload)?);
        let signature_b64 = b64(b"sig");
        let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");
        std::fs::write(
            auth_file,
            format!(
                r#"
                {{
                    "OPENAI_API_KEY": null,
                    "tokens": {{
                        "id_token": "{fake_jwt}",

        let auth_json_data = json!({
            "OPENAI_API_KEY": params.openai_api_key,
            "tokens": {
                "id_token": fake_jwt,
                "access_token": "test-access-token",
                "refresh_token": "test-refresh-token"
            }},
            "last_refresh": "2025-08-06T20:41:36.232376Z"
        }}
        "#,
            ),
        )
        .unwrap();

        let CodexAuth {
            api_key,
            mode,
            auth_dot_json,
            auth_file,
        } = load_auth(dir.path(), false).unwrap().unwrap();
        assert_eq!(None, api_key);
        assert_eq!(AuthMode::ChatGPT, mode);
        assert_eq!(dir.path().join("auth.json"), auth_file);

        let guard = auth_dot_json.lock().unwrap();
        let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");

        assert_eq!(
            &AuthDotJson {
                openai_api_key: None,
                tokens: Some(TokenData {
                    id_token: IdTokenInfo {
                        email: Some("user@example.com".to_string()),
                        chatgpt_plan_type: Some("pro".to_string()),
                    },
                    access_token: "test-access-token".to_string(),
                    refresh_token: "test-refresh-token".to_string(),
                    account_id: None,
                }),
                last_refresh: Some(
                    DateTime::parse_from_rfc3339("2025-08-06T20:41:36.232376Z")
                        .unwrap()
                        .with_timezone(&Utc)
                ),
            },
            auth_dot_json
        )
                "last_refresh": LAST_REFRESH,
        });
        let auth_json = serde_json::to_string_pretty(&auth_json_data)?;
        std::fs::write(auth_file, auth_json)
    }

    #[test]
    #[expect(clippy::expect_used, clippy::unwrap_used)]
    fn id_token_info_handles_missing_fields() {
        // Payload without email or plan should yield None values.
        let header = serde_json::json!({"alg": "none", "typ": "JWT"});
@@ -542,7 +678,6 @@ mod tests {
    }

    #[tokio::test]
    #[expect(clippy::unwrap_used)]
    async fn loads_api_key_from_auth_json() {
        let dir = tempdir().unwrap();
        let auth_file = dir.path().join("auth.json");
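The tests above pin down how the login mode is derived from `auth.json`: a Plus/Pro-style plan keeps ChatGPT auth even when an API key is stored, while an enterprise plan with a key falls back to metered API-key billing. Below is a minimal, self-contained sketch of that selection rule for illustration only; `AuthMode` and `choose_auth_mode` here are local stand-ins, not the crate's actual API.

```rust
// Local stand-ins, for illustration only; the real AuthMode lives in codex-login.
#[derive(Debug, PartialEq)]
enum AuthMode {
    ChatGPT,
    ApiKey,
}

// Sketch of the rule the tests exercise: only a plan that bills through the API
// (e.g. enterprise) combined with a stored API key selects AuthMode::ApiKey.
fn choose_auth_mode(api_key: Option<&str>, plan_uses_api_key_billing: bool) -> AuthMode {
    match api_key {
        Some(_) if plan_uses_api_key_billing => AuthMode::ApiKey,
        _ => AuthMode::ChatGPT,
    }
}

fn main() {
    // Pro plan, no API key: ChatGPT auth.
    assert_eq!(choose_auth_mode(None, false), AuthMode::ChatGPT);
    // Pro plan with an API key in auth.json: still ChatGPT auth.
    assert_eq!(choose_auth_mode(Some("sk-test-key"), false), AuthMode::ChatGPT);
    // Enterprise plan with an API key: metered API-key billing.
    assert_eq!(choose_auth_mode(Some("sk-test-key"), true), AuthMode::ApiKey);
}
```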
@@ -17,6 +17,17 @@ pub struct TokenData {
    pub account_id: Option<String>,
}

impl TokenData {
    /// Returns true if this is a plan that should use the traditional
    /// "metered" billing via an API key.
    pub(crate) fn is_plan_that_should_use_api_key(&self) -> bool {
        self.id_token
            .chatgpt_plan_type
            .as_ref()
            .is_none_or(|plan| plan.is_plan_that_should_use_api_key())
    }
}

/// Flat subset of useful claims in id_token from auth.json.
#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize)]
pub struct IdTokenInfo {
@@ -24,7 +35,57 @@ pub struct IdTokenInfo {
    /// The ChatGPT subscription plan type
    /// (e.g., "free", "plus", "pro", "business", "enterprise", "edu").
    /// (Note: ae has not verified that those are the exact values.)
    pub chatgpt_plan_type: Option<String>,
    pub(crate) chatgpt_plan_type: Option<PlanType>,
}

impl IdTokenInfo {
    pub fn get_chatgpt_plan_type(&self) -> Option<String> {
        self.chatgpt_plan_type.as_ref().map(|t| match t {
            PlanType::Known(plan) => format!("{plan:?}"),
            PlanType::Unknown(s) => s.clone(),
        })
    }
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(untagged)]
pub(crate) enum PlanType {
    Known(KnownPlan),
    Unknown(String),
}

impl PlanType {
    fn is_plan_that_should_use_api_key(&self) -> bool {
        match self {
            Self::Known(known) => {
                use KnownPlan::*;
                !matches!(known, Free | Plus | Pro | Team)
            }
            Self::Unknown(_) => {
                // Unknown plans should use the API key.
                true
            }
        }
    }

    pub fn as_string(&self) -> String {
        match self {
            Self::Known(known) => format!("{known:?}").to_lowercase(),
            Self::Unknown(s) => s.clone(),
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum KnownPlan {
    Free,
    Plus,
    Pro,
    Team,
    Business,
    Enterprise,
    Edu,
}

#[derive(Deserialize)]
@@ -38,7 +99,7 @@ struct IdClaims {
#[derive(Deserialize)]
struct AuthClaims {
    #[serde(default)]
    chatgpt_plan_type: Option<String>,
    chatgpt_plan_type: Option<PlanType>,
}

#[derive(Debug, Error)]
@@ -112,6 +173,9 @@ mod tests {

        let info = parse_id_token(&fake_jwt).expect("should parse");
        assert_eq!(info.email.as_deref(), Some("user@example.com"));
        assert_eq!(info.chatgpt_plan_type.as_deref(), Some("pro"));
        assert_eq!(
            info.chatgpt_plan_type,
            Some(PlanType::Known(KnownPlan::Pro))
        );
    }
}
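The `#[serde(untagged)]` pairing of `PlanType` and `KnownPlan` above means a recognized plan string deserializes into a `KnownPlan` variant, while any unrecognized value is preserved as `Unknown` instead of failing. A small standalone sketch of that behavior, assuming `serde` (with the `derive` feature) and `serde_json` as dependencies; the types here are local copies for illustration, not imports from the crate:

```rust
use serde::Deserialize;

// Local copies of the shapes from the diff above, trimmed to what the example needs.
#[derive(Debug, PartialEq, Deserialize)]
#[serde(untagged)]
enum PlanType {
    Known(KnownPlan),
    Unknown(String),
}

#[derive(Debug, PartialEq, Deserialize)]
#[serde(rename_all = "lowercase")]
enum KnownPlan {
    Free,
    Plus,
    Pro,
    Team,
    Business,
    Enterprise,
    Edu,
}

fn main() {
    // A recognized plan string matches the Known variant first.
    let pro: PlanType = serde_json::from_str("\"pro\"").unwrap();
    assert_eq!(pro, PlanType::Known(KnownPlan::Pro));

    // Anything else falls through to Unknown rather than being a hard error.
    let other: PlanType = serde_json::from_str("\"some-future-plan\"").unwrap();
    assert_eq!(other, PlanType::Unknown("some-future-plan".to_string()));
}
```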
@@ -936,7 +936,6 @@ mod tests {
                call_id: "c1".into(),
                command: vec!["bash".into(), "-lc".into(), "echo hi".into()],
                cwd: std::path::PathBuf::from("/work"),
                parsed_cmd: vec![],
            }),
        };

@@ -948,8 +947,7 @@ mod tests {
                "type": "exec_command_begin",
                "call_id": "c1",
                "command": ["bash", "-lc", "echo hi"],
                "cwd": "/work",
                "parsed_cmd": []
                "cwd": "/work"
            }
        }
    });
@@ -29,6 +29,7 @@ codex-common = { path = "../common", features = [
    "cli",
    "elapsed",
    "sandbox_summary",
    "updates",
] }
codex-core = { path = "../core" }
codex-file-search = { path = "../file-search" }
@@ -48,7 +49,6 @@ ratatui = { version = "0.29.0", features = [
] }
ratatui-image = "8.0.0"
regex-lite = "0.1"
reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1", features = ["derive"] }
serde_json = { version = "1", features = ["preserve_order"] }
shlex = "1.3.0"
@@ -5,7 +5,6 @@ use std::sync::Arc;
use codex_core::codex_wrapper::CodexConversation;
use codex_core::codex_wrapper::init_codex;
use codex_core::config::Config;
use codex_core::parse_command::ParsedCommand;
use codex_core::protocol::AgentMessageDeltaEvent;
use codex_core::protocol::AgentMessageEvent;
use codex_core::protocol::AgentReasoningDeltaEvent;
@@ -58,7 +57,6 @@ struct RunningCommand {
    command: Vec<String>,
    #[allow(dead_code)]
    cwd: PathBuf,
    parsed_cmd: Vec<ParsedCommand>,
}

pub(crate) struct ChatWidget<'a> {
@@ -444,7 +442,6 @@ impl ChatWidget<'_> {
                call_id,
                command,
                cwd,
                parsed_cmd,
            }) => {
                self.finalize_active_stream();
                // Ensure the status indicator is visible while the command runs.
@@ -455,7 +452,6 @@ impl ChatWidget<'_> {
                    RunningCommand {
                        command: command.clone(),
                        cwd: cwd.clone(),
                        parsed_cmd: parsed_cmd.clone(),
                    },
                );
                self.active_history_cell = Some(HistoryCell::new_active_exec_command(command));
@@ -486,15 +482,10 @@ impl ChatWidget<'_> {
                stderr,
            }) => {
                // Compute summary before moving stdout into the history cell.
                let removed = self.running_commands.remove(&call_id);
                let (command, parsed_cmd) = match removed {
                    Some(rc) => (rc.command, rc.parsed_cmd),
                    None => (vec![call_id.clone()], vec![]),
                };
                let cmd = self.running_commands.remove(&call_id);
                self.active_history_cell = None;
                self.add_to_history(HistoryCell::new_completed_exec_command(
                    command,
                    parsed_cmd,
                    cmd.map(|cmd| cmd.command).unwrap_or_else(|| vec![call_id]),
                    CommandOutput {
                        exit_code,
                        stdout,
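The rewritten `ExecCommandEnd` arm drops the `parsed_cmd` bookkeeping and keeps only the command lookup, falling back to the `call_id` when the command was never registered. A tiny self-contained sketch of that fallback pattern; the helper name is hypothetical and exists only for illustration:

```rust
use std::collections::HashMap;

struct RunningCommand {
    command: Vec<String>,
}

// Hypothetical helper mirroring the fallback used when an ExecCommandEnd event
// arrives: recover the original command, or show the call_id itself if unknown.
fn command_for_history(
    running: &mut HashMap<String, RunningCommand>,
    call_id: String,
) -> Vec<String> {
    running
        .remove(&call_id)
        .map(|cmd| cmd.command)
        .unwrap_or_else(|| vec![call_id])
}

fn main() {
    let mut running = HashMap::new();
    running.insert(
        "c1".to_string(),
        RunningCommand {
            command: vec!["bash".to_string(), "-lc".to_string(), "echo hi".to_string()],
        },
    );
    // Known call_id: the original command is recovered.
    assert_eq!(
        command_for_history(&mut running, "c1".to_string()),
        vec!["bash", "-lc", "echo hi"]
    );
    // Unknown call_id: fall back to the call_id itself.
    assert_eq!(command_for_history(&mut running, "c2".to_string()), vec!["c2"]);
}
```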
@@ -1,4 +1,3 @@
use crate::colors::LIGHT_BLUE;
use crate::exec_command::relativize_to_home;
use crate::exec_command::strip_bash_lc_and_escape;
use crate::slash_command::SlashCommand;
@@ -9,7 +8,6 @@ use codex_ansi_escape::ansi_escape_line;
use codex_common::create_config_summary_entries;
use codex_common::elapsed::format_duration;
use codex_core::config::Config;
use codex_core::parse_command::ParsedCommand;
use codex_core::plan_tool::PlanItemArg;
use codex_core::plan_tool::StepStatus;
use codex_core::plan_tool::UpdatePlanArgs;
@@ -34,7 +32,6 @@ use ratatui::widgets::Paragraph;
use ratatui::widgets::WidgetRef;
use ratatui::widgets::Wrap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::io::Cursor;
use std::path::PathBuf;
use std::time::Duration;
@@ -281,121 +278,13 @@ impl HistoryCell {
        }
    }

    pub(crate) fn new_completed_exec_command(
        command: Vec<String>,
        parsed: Vec<ParsedCommand>,
        output: CommandOutput,
    ) -> Self {
        let is_read_command = parsed
            .iter()
            .all(|c| matches!(c, ParsedCommand::Read { .. }));

        let is_list_command = parsed.iter().all(|c| matches!(c, ParsedCommand::Ls { .. }));
        let is_search_command = parsed.iter().all(|c| matches!(c, ParsedCommand::Rg { .. }));

        if is_read_command {
            return HistoryCell::new_read_command(parsed);
        } else if is_list_command {
            return HistoryCell::new_list_command(parsed);
        } else if is_search_command {
            return HistoryCell::new_search_command(parsed);
        }
        HistoryCell::new_completed_exec_command_generic(command, output)
    }

    fn new_read_command(read_commands: Vec<ParsedCommand>) -> Self {
        let file_names: HashSet<&String> = read_commands
            .iter()
            .flat_map(|c| match c {
                ParsedCommand::Read { name, .. } => Some(name),
                _ => None,
            })
            .collect();

        let count = file_names.len();
        let mut lines: Vec<Line> = vec![match count {
            0 => Line::from("📖 Reading files"),
            1 => Line::from("📖 Reading 1 file"),
            _ => Line::from(format!("📖 Reading {count} files")),
        }];

        for name in file_names {
            lines.push(Line::from(vec![
                Span::styled(" L ", Style::default().fg(Color::Gray)),
                Span::styled(name.clone(), Style::default().fg(LIGHT_BLUE)),
            ]));
        }
        lines.push(Line::from(""));

        HistoryCell::CompletedExecCommand {
            view: TextBlock::new(lines),
        }
    }

    fn new_list_command(list_commands: Vec<ParsedCommand>) -> Self {
        let paths: HashSet<&String> = list_commands
            .iter()
            .flat_map(|c| match c {
                ParsedCommand::Ls { path, .. } => path.as_ref(),
                _ => None,
            })
            .collect();

        let count = paths.len();
        let mut lines: Vec<Line> = vec![match count {
            0 => Line::from("📖 Exploring files"),
            1 => Line::from("📖 Exploring 1 folder"),
            _ => Line::from(format!("📖 Exploring {count} folders")),
        }];

        for name in paths {
            lines.push(Line::from(vec![
                Span::styled(" L ", Style::default().fg(Color::Gray)),
                Span::styled(name.clone(), Style::default().fg(LIGHT_BLUE)),
            ]));
        }
        lines.push(Line::from(""));

        HistoryCell::CompletedExecCommand {
            view: TextBlock::new(lines),
        }
    }

    fn new_search_command(search_commands: Vec<ParsedCommand>) -> Self {
        let file_names: HashSet<&String> = search_commands
            .iter()
            .flat_map(|c| match c {
                ParsedCommand::Read { name, .. } => Some(name),
                _ => None,
            })
            .collect();

        let count = file_names.len();
        let mut lines: Vec<Line> = vec![match count {
            0 => Line::from("🔎 Searching files"),
            1 => Line::from("🔎 Searching 1 file"),
            _ => Line::from(format!("🔎 Searching {count} files")),
        }];

        for name in file_names {
            lines.push(Line::from(vec![
                Span::styled(" L ", Style::default().fg(Color::Gray)),
                Span::styled(name.clone(), Style::default().fg(LIGHT_BLUE)),
            ]));
        }
        lines.push(Line::from(""));

        HistoryCell::CompletedExecCommand {
            view: TextBlock::new(lines),
        }
    }

    fn new_completed_exec_command_generic(command: Vec<String>, output: CommandOutput) -> Self {
    pub(crate) fn new_completed_exec_command(command: Vec<String>, output: CommandOutput) -> Self {
        let CommandOutput {
            exit_code,
            stdout,
            stderr,
        } = output;

        let mut lines: Vec<Line<'static>> = Vec::new();
        let command_escaped = strip_bash_lc_and_escape(&command);
        lines.push(Line::from(vec![
@@ -648,8 +537,8 @@ impl HistoryCell {
        lines.push(Line::from(" • Signed in with ChatGPT"));

        let info = tokens.id_token;
        if let Some(email) = info.email {
            lines.push(Line::from(vec![" • Login: ".into(), email.into()]));
        if let Some(email) = &info.email {
            lines.push(Line::from(vec![" • Login: ".into(), email.clone().into()]));
        }

        match auth.openai_api_key.as_deref() {
@@ -660,9 +549,8 @@ impl HistoryCell {
            }
            _ => {
                let plan_text = info
                    .chatgpt_plan_type
                    .as_deref()
                    .map(title_case)
                    .get_chatgpt_plan_type()
                    .map(|s| title_case(&s))
                    .unwrap_or_else(|| "Unknown".to_string());
                lines.push(Line::from(vec![" • Plan: ".into(), plan_text.into()]));
            }
@@ -12,7 +12,7 @@ use codex_core::config::load_config_as_toml_with_cli_overrides;
use codex_core::config_types::SandboxMode;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::SandboxPolicy;
use codex_login::load_auth;
use codex_login::CodexAuth;
use codex_ollama::DEFAULT_OSS_MODEL;
use log_layer::TuiLogLayer;
use std::fs::OpenOptions;
@@ -48,8 +48,6 @@ mod text_formatting;
mod tui;
mod user_approval_widget;

#[cfg(not(debug_assertions))]
mod updates;
#[cfg(not(debug_assertions))]
use color_eyre::owo_colors::OwoColorize;

@@ -211,7 +209,7 @@ pub async fn run_main(

    #[allow(clippy::print_stderr)]
    #[cfg(not(debug_assertions))]
    if let Some(latest_version) = updates::get_upgrade_version(&config) {
    if let Some(latest_version) = codex_common::updates::get_upgrade_version(&config) {
        let current_version = env!("CARGO_PKG_VERSION");
        let exe = std::env::current_exe()?;
        let managed_by_npm = std::env::var_os("CODEX_MANAGED_BY_NPM").is_some();
@@ -304,7 +302,7 @@ fn should_show_login_screen(config: &Config) -> bool {
    // Reading the OpenAI API key is an async operation because it may need
    // to refresh the token. Block on it.
    let codex_home = config.codex_home.clone();
    match load_auth(&codex_home, true) {
    match CodexAuth::from_codex_home(&codex_home) {
        Ok(Some(_)) => false,
        Ok(None) => true,
        Err(err) => {
@@ -247,7 +247,7 @@ impl UserApprovalWidget<'_> {
        match decision {
            ReviewDecision::Approved => {
                lines.push(Line::from(vec![
                    "✔ ".fg(Color::Green),
                    "✓ ".fg(Color::Green),
                    "You ".into(),
                    "approved".bold(),
                    " codex to run ".into(),
@@ -258,7 +258,7 @@ impl UserApprovalWidget<'_> {
            }
            ReviewDecision::ApprovedForSession => {
                lines.push(Line::from(vec![
                    "✔ ".fg(Color::Green),
                    "✓ ".fg(Color::Green),
                    "You ".into(),
                    "approved".bold(),
                    " codex to run ".into(),