mirror of https://github.com/openai/codex.git
synced 2026-02-02 15:03:38 +00:00

Compare commits: gpeal/pars...codex/fix- (19 commits)
| SHA1 |
| --- |
| abe596976b |
| 6967b51065 |
| 697f7b1300 |
| 14a3bb51b3 |
| 4f590ebf44 |
| ddabd42236 |
| b3d47cfa11 |
| 307d9957fa |
| 431c9299d4 |
| 52e12f2b6c |
| 2b7139859e |
| fa0051190b |
| cd06b28d84 |
| 295abf3e51 |
| b991c04f86 |
| 02c9c2ecad |
| db76f32888 |
| 548466df09 |
| 7d67159587 |
README.md (22 changed lines)
@@ -17,6 +17,7 @@

- [Quickstart](#quickstart)
- [Installing and running Codex CLI](#installing-and-running-codex-cli)
- [Updating](#updating)
- [Using Codex with your ChatGPT plan](#using-codex-with-your-chatgpt-plan)
- [Usage-based billing alternative: Use an OpenAI API key](#usage-based-billing-alternative-use-an-openai-api-key)
- [Choosing Codex's level of autonomy](#choosing-codexs-level-of-autonomy)

@@ -76,6 +77,16 @@ Then simply run `codex` to get started:
codex
```

### Updating

Upgrade an existing installation to the latest release:

```shell
codex update
```

The command checks for a newer version and will attempt to upgrade automatically if the CLI was installed via npm or Homebrew.

<details>
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>

@@ -340,11 +351,12 @@ Help us improve by filing issues or submitting PRs (see the section below for ho

## CLI reference

| Command | Purpose | Example |
| ------------------ | ---------------------------------- | ------------------------------- |
| `codex` | Interactive TUI | `codex` |
| `codex "..."` | Initial prompt for interactive TUI | `codex "fix lint errors"` |
| `codex exec "..."` | Non-interactive "automation mode" | `codex exec "explain utils.ts"` |
| Command | Purpose | Example |
| ------------------ | ------------------------------------- | ------------------------------- |
| `codex` | Interactive TUI | `codex` |
| `codex "..."` | Initial prompt for interactive TUI | `codex "fix lint errors"` |
| `codex exec "..."` | Non-interactive "automation mode" | `codex exec "explain utils.ts"` |
| `codex update` | Check for updates and upgrade the CLI | `codex update` |

Key flags: `--model/-m`, `--ask-for-approval/-a`.
codex-rs/Cargo.lock (7 changed lines, generated)
@@ -658,10 +658,16 @@ dependencies = [
name = "codex-common"
version = "0.0.0"
dependencies = [
"anyhow",
"chrono",
"clap",
"codex-core",
"reqwest",
"serde",
"serde_json",
"tokio",
"toml 0.9.4",
"tracing",
]

[[package]]

@@ -888,7 +894,6 @@ dependencies = [
"ratatui",
"ratatui-image",
"regex-lite",
"reqwest",
"serde",
"serde_json",
"shlex",

@@ -1,3 +1,4 @@
use codex_login::CodexAuth;
use std::path::Path;
use std::sync::LazyLock;
use std::sync::RwLock;

@@ -18,7 +19,7 @@ pub fn set_chatgpt_token_data(value: TokenData) {

/// Initialize the ChatGPT token from auth.json file
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
let auth = codex_login::load_auth(codex_home, true)?;
let auth = CodexAuth::from_codex_home(codex_home)?;
if let Some(auth) = auth {
let token_data = auth.get_token_data().await?;
set_chatgpt_token_data(token_data);

@@ -20,7 +20,7 @@ clap = { version = "4", features = ["derive"] }
clap_complete = "4"
codex-arg0 = { path = "../arg0" }
codex-chatgpt = { path = "../chatgpt" }
codex-common = { path = "../common", features = ["cli"] }
codex-common = { path = "../common", features = ["cli", "updates"] }
codex-core = { path = "../core" }
codex-exec = { path = "../exec" }
codex-login = { path = "../login" }

@@ -4,8 +4,8 @@ use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_login::AuthMode;
use codex_login::CodexAuth;
use codex_login::OPENAI_API_KEY_ENV_VAR;
use codex_login::load_auth;
use codex_login::login_with_api_key;
use codex_login::login_with_chatgpt;
use codex_login::logout;

@@ -47,11 +47,11 @@ pub async fn run_login_with_api_key(
pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
let config = load_config_or_exit(cli_config_overrides);

match load_auth(&config.codex_home, true) {
match CodexAuth::from_codex_home(&config.codex_home) {
Ok(Some(auth)) => match auth.mode {
AuthMode::ApiKey => {
if let Some(api_key) = auth.api_key.as_deref() {
eprintln!("Logged in using an API key - {}", safe_format_key(api_key));
AuthMode::ApiKey => match auth.get_token().await {
Ok(api_key) => {
eprintln!("Logged in using an API key - {}", safe_format_key(&api_key));

if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR) {
if env_api_key == api_key {

@@ -60,11 +60,13 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
);
}
}
} else {
eprintln!("Logged in using an API key");
std::process::exit(0);
}
std::process::exit(0);
}
Err(e) => {
eprintln!("Unexpected error retrieving API key: {e}");
std::process::exit(1);
}
},
AuthMode::ChatGPT => {
eprintln!("Logged in using ChatGPT");
std::process::exit(0);

@@ -13,6 +13,12 @@ use codex_cli::login::run_login_with_chatgpt;
use codex_cli::login::run_logout;
use codex_cli::proto;
use codex_common::CliConfigOverrides;
use codex_common::updates::check_for_update;
use codex_common::updates::get_upgrade_version;
#[cfg(not(debug_assertions))]
use codex_core::config::Config;
#[cfg(not(debug_assertions))]
use codex_core::config::ConfigOverrides;
use codex_exec::Cli as ExecCli;
use codex_tui::Cli as TuiCli;
use std::path::PathBuf;

@@ -68,6 +74,9 @@ enum Subcommand {
/// Apply the latest diff produced by Codex agent as a `git apply` to your local working tree.
#[clap(visible_alias = "a")]
Apply(ApplyCommand),

/// Check for a newer Codex release and upgrade automatically when possible.
Update,
}

#[derive(Debug, Parser)]

@@ -190,6 +199,9 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
prepend_config_flags(&mut apply_cli.config_overrides, cli.config_overrides);
run_apply_command(apply_cli, None).await?;
}
Some(Subcommand::Update) => {
run_update().await?;
}
}

Ok(())

@@ -211,3 +223,88 @@ fn print_completion(cmd: CompletionCommand) {
let name = "codex";
generate(cmd.shell, &mut app, name, &mut std::io::stdout());
}

#[cfg(not(debug_assertions))]
async fn run_update() -> anyhow::Result<()> {
let overrides = ConfigOverrides {
model: None,
cwd: None,
approval_policy: None,
sandbox_mode: None,
model_provider: None,
config_profile: None,
codex_linux_sandbox_exe: None,
base_instructions: None,
include_plan_tool: None,
disable_response_storage: None,
show_raw_agent_reasoning: None,
};

let config = Config::load_with_cli_overrides(Vec::new(), overrides)?;
let version_file = config.codex_home.join("version.json");

if let Err(e) = check_for_update(&version_file).await {
#[allow(clippy::print_stderr)]
eprintln!("Failed to check for updates: {e}");
}

let current_version = env!("CARGO_PKG_VERSION");
if let Some(latest_version) = get_upgrade_version(&config) {
println!("Current version: {current_version}");
println!("Latest version: {latest_version}");
let exe = std::env::current_exe()?;
let managed_by_npm = std::env::var_os("CODEX_MANAGED_BY_NPM").is_some();
if managed_by_npm {
println!("Updating via npm...");
match Command::new("npm")
.args(["install", "-g", "@openai/codex@latest"])
.status()
{
Ok(status) if status.success() => {
println!("Codex updated successfully.");
}
Ok(status) => {
println!(
"`npm install` exited with status {status}. Run `npm install -g @openai/codex@latest` manually if needed."
);
}
Err(err) => {
println!(
"Failed to run npm: {err}. Run `npm install -g @openai/codex@latest` manually."
);
}
}
} else if cfg!(target_os = "macos")
&& (exe.starts_with("/opt/homebrew") || exe.starts_with("/usr/local"))
{
println!("Updating via Homebrew...");
match Command::new("brew").args(["upgrade", "codex"]).status() {
Ok(status) if status.success() => {
println!("Codex updated successfully.");
}
Ok(status) => {
println!(
"`brew upgrade` exited with status {status}. Run `brew upgrade codex` manually if needed."
);
}
Err(err) => {
println!("Failed to run Homebrew: {err}. Run `brew upgrade codex` manually.");
}
}
} else {
println!(
"See https://github.com/openai/codex/releases/latest for the latest releases and installation options."
);
}
} else {
println!("Codex {current_version} is up to date.");
}

Ok(())
}

#[cfg(debug_assertions)]
async fn run_update() -> anyhow::Result<()> {
println!("Update checking is disabled in debug builds.");
Ok(())
}
@@ -9,7 +9,7 @@ use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::protocol::Submission;
use codex_core::util::notify_on_sigint;
use codex_login::load_auth;
use codex_login::CodexAuth;
use tokio::io::AsyncBufReadExt;
use tokio::io::BufReader;
use tracing::error;

@@ -36,7 +36,7 @@ pub async fn run_main(opts: ProtoCli) -> anyhow::Result<()> {
.map_err(anyhow::Error::msg)?;

let config = Config::load_with_cli_overrides(overrides_vec, ConfigOverrides::default())?;
let auth = load_auth(&config.codex_home, true)?;
let auth = CodexAuth::from_codex_home(&config.codex_home)?;
let ctrl_c = notify_on_sigint();
let CodexSpawnOk { codex, .. } = Codex::spawn(config, auth, ctrl_c.clone()).await?;
let codex = Arc::new(codex);

@@ -7,13 +7,20 @@ version = { workspace = true }
workspace = true

[dependencies]
anyhow = { version = "1", optional = true }
chrono = { version = "0.4", features = ["serde"], optional = true }
clap = { version = "4", features = ["derive", "wrap_help"], optional = true }
codex-core = { path = "../core" }
serde = { version = "1", optional = true }
reqwest = { version = "0.12", features = ["json"], optional = true }
serde = { version = "1", features = ["derive"], optional = true }
serde_json = { version = "1", optional = true }
tokio = { version = "1", features = ["fs"], optional = true }
toml = { version = "0.9", optional = true }
tracing = "0.1.41"

[features]
# Separate feature so that `clap` is not a mandatory dependency.
cli = ["clap", "serde", "toml"]
elapsed = []
sandbox_summary = []
updates = ["anyhow", "chrono", "reqwest", "serde", "serde_json", "tokio"]

@@ -29,3 +29,6 @@ mod config_summary;
pub use config_summary::create_config_summary_entries;
// Shared fuzzy matcher (used by TUI selection popups and other UI filtering)
pub mod fuzzy_match;

#[cfg(any(test, feature = "updates"))]
pub mod updates;

@@ -1,15 +1,15 @@
#![cfg(any(not(debug_assertions), test))]

use chrono::DateTime;
use chrono::Duration;
use chrono::Utc;
use codex_core::config::Config;
use serde::Deserialize;
use serde::Serialize;
use std::path::Path;
use std::path::PathBuf;
use tracing::error;

use codex_core::config::Config;

/// Returns the latest available version string if it is newer than the current
/// one, otherwise `None`.
pub fn get_upgrade_version(config: &Config) -> Option<String> {
let version_file = version_filepath(config);
let info = read_version_info(&version_file).ok();

@@ -18,13 +18,11 @@ pub fn get_upgrade_version(config: &Config) -> Option<String> {
None => true,
Some(info) => info.last_checked_at < Utc::now() - Duration::hours(20),
} {
// Refresh the cached latest version in the background so TUI startup
// isn’t blocked by a network call. The UI reads the previously cached
// value (if any) for this run; the next run shows the banner if needed.
// Refresh in the background; callers can use the cached value for this run.
tokio::spawn(async move {
check_for_update(&version_file)
.await
.inspect_err(|e| tracing::error!("Failed to update version: {e}"))
.inspect_err(|e| error!("Failed to update version: {e}"))
});
}
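For orientation, callers treat `get_upgrade_version` as a cheap, cached lookup; the network refresh happens in the spawned task above. A minimal caller sketch, assuming a loaded `Config`, the `updates` feature, and a Tokio runtime for the background task (the banner wording is illustrative):

```rust
use codex_common::updates::get_upgrade_version;
use codex_core::config::Config;

/// Print an upgrade hint based on the previously cached version info.
/// The cache itself is refreshed in the background by `get_upgrade_version`.
fn maybe_print_upgrade_banner(config: &Config) {
    if let Some(latest_version) = get_upgrade_version(config) {
        let current_version = env!("CARGO_PKG_VERSION");
        println!("Codex {current_version} -> {latest_version} is available; run `codex update`.");
    }
}
```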
@@ -62,7 +60,8 @@ fn read_version_info(version_file: &Path) -> anyhow::Result<VersionInfo> {
Ok(serde_json::from_str(&contents)?)
}

async fn check_for_update(version_file: &Path) -> anyhow::Result<()> {
/// Fetches the latest release info and updates the on-disk cache file.
pub async fn check_for_update(version_file: &Path) -> anyhow::Result<()> {
let ReleaseInfo {
tag_name: latest_tag_name,
} = reqwest::Client::new()
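The cache shared by `read_version_info` and `check_for_update` is the `version.json` file under `CODEX_HOME` (see `run_update` above). Its full schema is not visible in this diff; a plausible sketch, where only `last_checked_at` is confirmed by the hunk above and the other field name is an assumption:

```rust
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

/// Assumed shape of the cached version info. `last_checked_at` appears in the
/// 20-hour staleness check above; the name of the cached-tag field is a guess,
/// presumably filled from `ReleaseInfo::tag_name`.
#[derive(Serialize, Deserialize)]
struct VersionInfo {
    latest_version: String,
    last_checked_at: DateTime<Utc>,
}
```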
@@ -31,6 +31,7 @@ use crate::config_types::ReasoningEffort as ReasoningEffortConfig;
use crate::config_types::ReasoningSummary as ReasoningSummaryConfig;
use crate::error::CodexErr;
use crate::error::Result;
use crate::error::UsageLimitReachedError;
use crate::flags::CODEX_RS_SSE_FIXTURE;
use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::WireApi;

@@ -195,7 +196,7 @@ impl ModelClient {

if let Some(auth) = auth.as_ref()
&& auth.mode == AuthMode::ChatGPT
&& let Some(account_id) = auth.get_account_id().await
&& let Some(account_id) = auth.get_account_id()
{
req_builder = req_builder.header("chatgpt-account-id", account_id);
}

@@ -263,7 +264,9 @@ impl ModelClient {
}) = body
{
if r#type == "usage_limit_reached" {
return Err(CodexErr::UsageLimitReached);
return Err(CodexErr::UsageLimitReached(UsageLimitReachedError {
plan_type: auth.and_then(|a| a.get_plan_type()),
}));
} else if r#type == "usage_not_included" {
return Err(CodexErr::UsageNotIncluded);
}

@@ -1290,7 +1290,9 @@ async fn run_turn(
Ok(output) => return Ok(output),
Err(CodexErr::Interrupted) => return Err(CodexErr::Interrupted),
Err(CodexErr::EnvVar(var)) => return Err(CodexErr::EnvVar(var)),
Err(e @ (CodexErr::UsageLimitReached | CodexErr::UsageNotIncluded)) => return Err(e),
Err(e @ (CodexErr::UsageLimitReached(_) | CodexErr::UsageNotIncluded)) => {
return Err(e);
}
Err(e) => {
// Use the configured provider-specific stream retry budget.
let max_retries = sess.client.get_provider().stream_max_retries();

@@ -6,7 +6,7 @@ use crate::config::Config;
use crate::protocol::Event;
use crate::protocol::EventMsg;
use crate::util::notify_on_sigint;
use codex_login::load_auth;
use codex_login::CodexAuth;
use tokio::sync::Notify;
use uuid::Uuid;

@@ -26,7 +26,7 @@ pub struct CodexConversation {
/// that callers can surface the information to the UI.
pub async fn init_codex(config: Config) -> anyhow::Result<CodexConversation> {
let ctrl_c = notify_on_sigint();
let auth = load_auth(&config.codex_home, true)?;
let auth = CodexAuth::from_codex_home(&config.codex_home)?;
let CodexSpawnOk {
codex,
init_id,

@@ -62,15 +62,15 @@ pub enum CodexErr {
#[error("unexpected status {0}: {1}")]
UnexpectedStatus(StatusCode, String),

#[error("Usage limit has been reached")]
UsageLimitReached,

#[error("Usage not included with the plan")]
UsageNotIncluded,
#[error("{0}")]
UsageLimitReached(UsageLimitReachedError),

#[error(
"We’re currently experiencing high demand, which may cause temporary errors. We’re adding capacity in East and West Europe to restore normal service."
"To use Codex with your ChatGPT plan, upgrade to Plus: https://openai.com/chatgpt/pricing."
)]
UsageNotIncluded,

#[error("We're currently experiencing high demand, which may cause temporary errors.")]
InternalServerError,

/// Retry limit exceeded.

@@ -115,6 +115,30 @@ pub enum CodexErr {
EnvVar(EnvVarError),
}

#[derive(Debug)]
pub struct UsageLimitReachedError {
pub plan_type: Option<String>,
}

impl std::fmt::Display for UsageLimitReachedError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if let Some(plan_type) = &self.plan_type
&& plan_type == "plus"
{
write!(
f,
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), or wait for limits to reset (every 5h and every week.)."
)?;
} else {
write!(
f,
"You've hit your usage limit. Limits reset every 5h and every week."
)?;
}
Ok(())
}
}

#[derive(Debug)]
pub struct EnvVarError {
/// Name of the environment variable that is missing.

@@ -150,3 +174,39 @@ pub fn get_error_message_ui(e: &CodexErr) -> String {
_ => e.to_string(),
}
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn usage_limit_reached_error_formats_plus_plan() {
let err = UsageLimitReachedError {
plan_type: Some("plus".to_string()),
};
assert_eq!(
err.to_string(),
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), or wait for limits to reset (every 5h and every week.)."
);
}

#[test]
fn usage_limit_reached_error_formats_default_when_none() {
let err = UsageLimitReachedError { plan_type: None };
assert_eq!(
err.to_string(),
"You've hit your usage limit. Limits reset every 5h and every week."
);
}

#[test]
fn usage_limit_reached_error_formats_default_for_other_plans() {
let err = UsageLimitReachedError {
plan_type: Some("pro".to_string()),
};
assert_eq!(
err.to_string(),
"You've hit your usage limit. Limits reset every 5h and every week."
);
}
}
@@ -15,7 +15,7 @@ use std::time::Duration;

use crate::error::EnvVarError;
const DEFAULT_STREAM_IDLE_TIMEOUT_MS: u64 = 300_000;
const DEFAULT_STREAM_MAX_RETRIES: u64 = 10;
const DEFAULT_STREAM_MAX_RETRIES: u64 = 5;
const DEFAULT_REQUEST_MAX_RETRIES: u64 = 4;

/// Wire protocol that the provider speaks. Most third-party services only

@@ -96,7 +96,7 @@ impl ModelProviderInfo {
auth: &Option<CodexAuth>,
) -> crate::error::Result<reqwest::RequestBuilder> {
let effective_auth = match self.api_key() {
Ok(Some(key)) => Some(CodexAuth::from_api_key(key)),
Ok(Some(key)) => Some(CodexAuth::from_api_key(&key)),
Ok(None) => auth.clone(),
Err(err) => {
if auth.is_some() {

@@ -7,7 +7,7 @@ use tokio::sync::Notify;
use tracing::debug;

const INITIAL_DELAY_MS: u64 = 200;
const BACKOFF_FACTOR: f64 = 1.3;
const BACKOFF_FACTOR: f64 = 2.0;

/// Make a CancellationToken that is fulfilled when SIGINT occurs.
pub fn notify_on_sigint() -> Arc<Notify> {
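Raising the factor from 1.3 to 2.0 makes the retry delay double per attempt instead of growing by about 30%. A minimal sketch of the resulting schedule, assuming the nominal delay for attempt `n` is `INITIAL_DELAY_MS * BACKOFF_FACTOR^(n - 1)`; any jitter or cap applied by the real helper is not shown in this hunk:

```rust
const INITIAL_DELAY_MS: u64 = 200;
const BACKOFF_FACTOR: f64 = 2.0;

/// Nominal delay before retry attempt `n` (1-based), ignoring jitter and caps.
fn nominal_backoff_ms(attempt: u32) -> u64 {
    (INITIAL_DELAY_MS as f64 * BACKOFF_FACTOR.powi(attempt.saturating_sub(1) as i32)) as u64
}

fn main() {
    // Attempts 1..=5 -> 200, 400, 800, 1600, 3200 ms (previously ~200, 260, 338, 439, 571 ms).
    for attempt in 1..=5 {
        println!("attempt {attempt}: {} ms", nominal_backoff_ms(attempt));
    }
}
```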
@@ -1,8 +1,5 @@
#![allow(clippy::expect_used)]
#![allow(clippy::unwrap_used)]
use std::path::PathBuf;
#![allow(clippy::expect_used, clippy::unwrap_used)]

use chrono::Utc;
use codex_core::Codex;
use codex_core::CodexSpawnOk;
use codex_core::ModelProviderInfo;

@@ -13,10 +10,7 @@ use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use codex_core::protocol::SessionConfiguredEvent;
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use codex_login::AuthDotJson;
use codex_login::AuthMode;
use codex_login::CodexAuth;
use codex_login::TokenData;
use core_test_support::load_default_config_for_test;
use core_test_support::load_sse_fixture_with_id;
use core_test_support::wait_for_event;

@@ -99,7 +93,7 @@ async fn includes_session_id_and_model_headers_in_request() {
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
let CodexSpawnOk { codex, .. } = Codex::spawn(
config,
Some(CodexAuth::from_api_key("Test API Key".to_string())),
Some(CodexAuth::from_api_key("Test API Key")),
ctrl_c.clone(),
)
.await

@@ -173,7 +167,7 @@ async fn includes_base_instructions_override_in_request() {
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
let CodexSpawnOk { codex, .. } = Codex::spawn(
config,
Some(CodexAuth::from_api_key("Test API Key".to_string())),
Some(CodexAuth::from_api_key("Test API Key")),
ctrl_c.clone(),
)
.await

@@ -232,7 +226,7 @@ async fn originator_config_override_is_used() {
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
let CodexSpawnOk { codex, .. } = Codex::spawn(
config,
Some(CodexAuth::from_api_key("Test API Key".to_string())),
Some(CodexAuth::from_api_key("Test API Key")),
ctrl_c.clone(),
)
.await

@@ -370,7 +364,7 @@ async fn includes_user_instructions_message_in_request() {
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
let CodexSpawnOk { codex, .. } = Codex::spawn(
config,
Some(CodexAuth::from_api_key("Test API Key".to_string())),
Some(CodexAuth::from_api_key("Test API Key")),
ctrl_c.clone(),
)
.await

@@ -556,19 +550,5 @@ async fn env_var_overrides_loaded_auth() {
}

fn create_dummy_codex_auth() -> CodexAuth {
CodexAuth::new(
None,
AuthMode::ChatGPT,
PathBuf::new(),
Some(AuthDotJson {
openai_api_key: None,
tokens: Some(TokenData {
id_token: Default::default(),
access_token: "Access Token".to_string(),
refresh_token: "test".to_string(),
account_id: Some("account_id".to_string()),
}),
last_refresh: Some(Utc::now()),
}),
)
CodexAuth::create_dummy_chatgpt_auth_for_testing()
}

@@ -145,7 +145,7 @@ async fn summarize_context_three_requests_and_instructions() {
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
let CodexSpawnOk { codex, .. } = Codex::spawn(
config,
Some(CodexAuth::from_api_key("dummy".to_string())),
Some(CodexAuth::from_api_key("dummy")),
ctrl_c.clone(),
)
.await

@@ -99,7 +99,7 @@ async fn retries_on_early_close() {
config.model_provider = model_provider;
let CodexSpawnOk { codex, .. } = Codex::spawn(
config,
Some(CodexAuth::from_api_key("Test API Key".to_string())),
Some(CodexAuth::from_api_key("Test API Key")),
ctrl_c,
)
.await

@@ -38,8 +38,9 @@ pub enum AuthMode {

#[derive(Debug, Clone)]
pub struct CodexAuth {
pub api_key: Option<String>,
pub mode: AuthMode,

api_key: Option<String>,
auth_dot_json: Arc<Mutex<Option<AuthDotJson>>>,
auth_file: PathBuf,
}

@@ -51,33 +52,23 @@ impl PartialEq for CodexAuth {
}

impl CodexAuth {
pub fn new(
api_key: Option<String>,
mode: AuthMode,
auth_file: PathBuf,
auth_dot_json: Option<AuthDotJson>,
) -> Self {
let auth_dot_json = Arc::new(Mutex::new(auth_dot_json));
pub fn from_api_key(api_key: &str) -> Self {
Self {
api_key,
mode,
auth_file,
auth_dot_json,
}
}

pub fn from_api_key(api_key: String) -> Self {
Self {
api_key: Some(api_key),
api_key: Some(api_key.to_owned()),
mode: AuthMode::ApiKey,
auth_file: PathBuf::new(),
auth_dot_json: Arc::new(Mutex::new(None)),
}
}

/// Loads the available auth information from the auth.json or
/// OPENAI_API_KEY environment variable.
pub fn from_codex_home(codex_home: &Path) -> std::io::Result<Option<CodexAuth>> {
load_auth(codex_home, true)
}

pub async fn get_token_data(&self) -> Result<TokenData, std::io::Error> {
#[expect(clippy::unwrap_used)]
let auth_dot_json = self.auth_dot_json.lock().unwrap().clone();
let auth_dot_json: Option<AuthDotJson> = self.get_current_auth_json();
match auth_dot_json {
Some(AuthDotJson {
tokens: Some(mut tokens),

@@ -132,61 +123,120 @@ impl CodexAuth {
}
}

pub async fn get_account_id(&self) -> Option<String> {
match self.mode {
AuthMode::ApiKey => None,
AuthMode::ChatGPT => {
let token_data = self.get_token_data().await.ok()?;
pub fn get_account_id(&self) -> Option<String> {
self.get_current_token_data()
.and_then(|t| t.account_id.clone())
}

token_data.account_id.clone()
}
pub fn get_plan_type(&self) -> Option<String> {
self.get_current_token_data()
.and_then(|t| t.id_token.chatgpt_plan_type.as_ref().map(|p| p.as_string()))
}

fn get_current_auth_json(&self) -> Option<AuthDotJson> {
#[expect(clippy::unwrap_used)]
self.auth_dot_json.lock().unwrap().clone()
}

fn get_current_token_data(&self) -> Option<TokenData> {
self.get_current_auth_json().and_then(|t| t.tokens.clone())
}

/// Consider this private to integration tests.
pub fn create_dummy_chatgpt_auth_for_testing() -> Self {
let auth_dot_json = AuthDotJson {
openai_api_key: None,
tokens: Some(TokenData {
id_token: Default::default(),
access_token: "Access Token".to_string(),
refresh_token: "test".to_string(),
account_id: Some("account_id".to_string()),
}),
last_refresh: Some(Utc::now()),
};

let auth_dot_json = Arc::new(Mutex::new(Some(auth_dot_json)));
Self {
api_key: None,
mode: AuthMode::ChatGPT,
auth_file: PathBuf::new(),
auth_dot_json,
}
}
}

// Loads the available auth information from the auth.json or OPENAI_API_KEY environment variable.
pub fn load_auth(codex_home: &Path, include_env_var: bool) -> std::io::Result<Option<CodexAuth>> {
fn load_auth(codex_home: &Path, include_env_var: bool) -> std::io::Result<Option<CodexAuth>> {
// First, check to see if there is a valid auth.json file. If not, we fall
// back to AuthMode::ApiKey using the OPENAI_API_KEY environment variable
// (if it is set).
let auth_file = get_auth_file(codex_home);

let auth_dot_json = try_read_auth_json(&auth_file).ok();

let auth_json_api_key = auth_dot_json
.as_ref()
.and_then(|a| a.openai_api_key.clone())
.filter(|s| !s.is_empty());

let openai_api_key = if include_env_var {
env::var(OPENAI_API_KEY_ENV_VAR)
.ok()
.filter(|s| !s.is_empty())
.or(auth_json_api_key)
} else {
auth_json_api_key
let auth_dot_json = match try_read_auth_json(&auth_file) {
Ok(auth) => auth,
// If auth.json does not exist, try to read the OPENAI_API_KEY from the
// environment variable.
Err(e) if e.kind() == std::io::ErrorKind::NotFound && include_env_var => {
return match read_openai_api_key_from_env() {
Some(api_key) => Ok(Some(CodexAuth::from_api_key(&api_key))),
None => Ok(None),
};
}
// Though if auth.json exists but is malformed, do not fall back to the
// env var because the user may be expecting to use AuthMode::ChatGPT.
Err(e) => {
return Err(e);
}
};

let has_tokens = auth_dot_json
.as_ref()
.and_then(|a| a.tokens.as_ref())
.is_some();
let AuthDotJson {
openai_api_key: auth_json_api_key,
tokens,
last_refresh,
} = auth_dot_json;

if openai_api_key.is_none() && !has_tokens {
return Ok(None);
// If the auth.json has an API key AND does not appear to be on a plan that
// should prefer AuthMode::ChatGPT, use AuthMode::ApiKey.
if let Some(api_key) = &auth_json_api_key {
// Should any of these be AuthMode::ChatGPT with the api_key set?
// Does AuthMode::ChatGPT indicate that there is an auth.json that is
// "refreshable" even if we are using the API key for auth?
match &tokens {
Some(tokens) => {
if tokens.is_plan_that_should_use_api_key() {
return Ok(Some(CodexAuth::from_api_key(api_key)));
} else {
// Ignore the API key and fall through to ChatGPT auth.
}
}
None => {
// We have an API key but no tokens in the auth.json file.
// Perhaps the user ran `codex login --api-key <KEY>` or updated
// auth.json by hand. Either way, let's assume they are trying
// to use their API key.
return Ok(Some(CodexAuth::from_api_key(api_key)));
}
}
}

let mode = if openai_api_key.is_some() {
AuthMode::ApiKey
} else {
AuthMode::ChatGPT
};

// For the AuthMode::ChatGPT variant, perhaps neither api_key nor
// openai_api_key should exist?
Ok(Some(CodexAuth {
api_key: openai_api_key,
mode,
api_key: None,
mode: AuthMode::ChatGPT,
auth_file,
auth_dot_json: Arc::new(Mutex::new(auth_dot_json)),
auth_dot_json: Arc::new(Mutex::new(Some(AuthDotJson {
openai_api_key: None,
tokens,
last_refresh,
}))),
}))
}

fn read_openai_api_key_from_env() -> Option<String> {
env::var(OPENAI_API_KEY_ENV_VAR)
.ok()
.filter(|s| !s.is_empty())
}

pub fn get_auth_file(codex_home: &Path) -> PathBuf {
codex_home.join("auth.json")
}
@@ -410,14 +460,19 @@ pub struct AuthDotJson {

#[cfg(test)]
mod tests {
#![expect(clippy::expect_used, clippy::unwrap_used)]
use super::*;
use crate::token_data::IdTokenInfo;
use crate::token_data::KnownPlan;
use crate::token_data::PlanType;
use base64::Engine;
use pretty_assertions::assert_eq;
use serde_json::json;
use tempfile::tempdir;

const LAST_REFRESH: &str = "2025-08-06T20:41:36.232376Z";

#[test]
#[expect(clippy::unwrap_used)]
fn writes_api_key_and_loads_auth() {
let dir = tempdir().unwrap();
login_with_api_key(dir.path(), "sk-test-key").unwrap();

@@ -427,7 +482,6 @@ mod tests {
}

#[test]
#[expect(clippy::unwrap_used)]
fn loads_from_env_var_if_env_var_exists() {
let dir = tempdir().unwrap();

@@ -441,10 +495,132 @@ mod tests {
}

#[tokio::test]
#[expect(clippy::expect_used, clippy::unwrap_used)]
async fn loads_token_data_from_auth_json() {
let dir = tempdir().unwrap();
let auth_file = dir.path().join("auth.json");
async fn pro_account_with_no_api_key_uses_chatgpt_auth() {
let codex_home = tempdir().unwrap();
write_auth_file(
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
},
codex_home.path(),
)
.expect("failed to write auth file");

let CodexAuth {
api_key,
mode,
auth_dot_json,
auth_file: _,
} = load_auth(codex_home.path(), false).unwrap().unwrap();
assert_eq!(None, api_key);
assert_eq!(AuthMode::ChatGPT, mode);

let guard = auth_dot_json.lock().unwrap();
let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");
assert_eq!(
&AuthDotJson {
openai_api_key: None,
tokens: Some(TokenData {
id_token: IdTokenInfo {
email: Some("user@example.com".to_string()),
chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
},
access_token: "test-access-token".to_string(),
refresh_token: "test-refresh-token".to_string(),
account_id: None,
}),
last_refresh: Some(
DateTime::parse_from_rfc3339(LAST_REFRESH)
.unwrap()
.with_timezone(&Utc)
),
},
auth_dot_json
)
}

/// Even if the OPENAI_API_KEY is set in auth.json, if the plan is not in
/// [`TokenData::is_plan_that_should_use_api_key`], it should use
/// [`AuthMode::ChatGPT`].
#[tokio::test]
async fn pro_account_with_api_key_still_uses_chatgpt_auth() {
let codex_home = tempdir().unwrap();
write_auth_file(
AuthFileParams {
openai_api_key: Some("sk-test-key".to_string()),
chatgpt_plan_type: "pro".to_string(),
},
codex_home.path(),
)
.expect("failed to write auth file");

let CodexAuth {
api_key,
mode,
auth_dot_json,
auth_file: _,
} = load_auth(codex_home.path(), false).unwrap().unwrap();
assert_eq!(None, api_key);
assert_eq!(AuthMode::ChatGPT, mode);

let guard = auth_dot_json.lock().unwrap();
let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");
assert_eq!(
&AuthDotJson {
openai_api_key: None,
tokens: Some(TokenData {
id_token: IdTokenInfo {
email: Some("user@example.com".to_string()),
chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
},
access_token: "test-access-token".to_string(),
refresh_token: "test-refresh-token".to_string(),
account_id: None,
}),
last_refresh: Some(
DateTime::parse_from_rfc3339(LAST_REFRESH)
.unwrap()
.with_timezone(&Utc)
),
},
auth_dot_json
)
}

/// If the OPENAI_API_KEY is set in auth.json and it is an enterprise
/// account, then it should use [`AuthMode::ApiKey`].
#[tokio::test]
async fn enterprise_account_with_api_key_uses_chatgpt_auth() {
let codex_home = tempdir().unwrap();
write_auth_file(
AuthFileParams {
openai_api_key: Some("sk-test-key".to_string()),
chatgpt_plan_type: "enterprise".to_string(),
},
codex_home.path(),
)
.expect("failed to write auth file");

let CodexAuth {
api_key,
mode,
auth_dot_json,
auth_file: _,
} = load_auth(codex_home.path(), false).unwrap().unwrap();
assert_eq!(Some("sk-test-key".to_string()), api_key);
assert_eq!(AuthMode::ApiKey, mode);

let guard = auth_dot_json.lock().expect("should unwrap");
assert!(guard.is_none(), "auth_dot_json should be None");
}

struct AuthFileParams {
openai_api_key: Option<String>,
chatgpt_plan_type: String,
}

fn write_auth_file(params: AuthFileParams, codex_home: &Path) -> std::io::Result<()> {
let auth_file = get_auth_file(codex_home);
// Create a minimal valid JWT for the id_token field.
#[derive(Serialize)]
struct Header {

@@ -460,71 +636,31 @@ mod tests {
"email_verified": true,
"https://api.openai.com/auth": {
"chatgpt_account_id": "bc3618e3-489d-4d49-9362-1561dc53ba53",
"chatgpt_plan_type": "pro",
"chatgpt_plan_type": params.chatgpt_plan_type,
"chatgpt_user_id": "user-12345",
"user_id": "user-12345",
}
});
let b64 = |b: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(b);
let header_b64 = b64(&serde_json::to_vec(&header).unwrap());
let payload_b64 = b64(&serde_json::to_vec(&payload).unwrap());
let header_b64 = b64(&serde_json::to_vec(&header)?);
let payload_b64 = b64(&serde_json::to_vec(&payload)?);
let signature_b64 = b64(b"sig");
let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");
std::fs::write(
auth_file,
format!(
r#"
{{
"OPENAI_API_KEY": null,
"tokens": {{
"id_token": "{fake_jwt}",

let auth_json_data = json!({
"OPENAI_API_KEY": params.openai_api_key,
"tokens": {
"id_token": fake_jwt,
"access_token": "test-access-token",
"refresh_token": "test-refresh-token"
}},
"last_refresh": "2025-08-06T20:41:36.232376Z"
}}
"#,
),
)
.unwrap();

let CodexAuth {
api_key,
mode,
auth_dot_json,
auth_file,
} = load_auth(dir.path(), false).unwrap().unwrap();
assert_eq!(None, api_key);
assert_eq!(AuthMode::ChatGPT, mode);
assert_eq!(dir.path().join("auth.json"), auth_file);

let guard = auth_dot_json.lock().unwrap();
let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");

assert_eq!(
&AuthDotJson {
openai_api_key: None,
tokens: Some(TokenData {
id_token: IdTokenInfo {
email: Some("user@example.com".to_string()),
chatgpt_plan_type: Some("pro".to_string()),
},
access_token: "test-access-token".to_string(),
refresh_token: "test-refresh-token".to_string(),
account_id: None,
}),
last_refresh: Some(
DateTime::parse_from_rfc3339("2025-08-06T20:41:36.232376Z")
.unwrap()
.with_timezone(&Utc)
),
},
auth_dot_json
)
"last_refresh": LAST_REFRESH,
});
let auth_json = serde_json::to_string_pretty(&auth_json_data)?;
std::fs::write(auth_file, auth_json)
}

#[test]
#[expect(clippy::expect_used, clippy::unwrap_used)]
fn id_token_info_handles_missing_fields() {
// Payload without email or plan should yield None values.
let header = serde_json::json!({"alg": "none", "typ": "JWT"});

@@ -542,7 +678,6 @@ mod tests {
}

#[tokio::test]
#[expect(clippy::unwrap_used)]
async fn loads_api_key_from_auth_json() {
let dir = tempdir().unwrap();
let auth_file = dir.path().join("auth.json");

@@ -17,6 +17,17 @@ pub struct TokenData {
pub account_id: Option<String>,
}

impl TokenData {
/// Returns true if this is a plan that should use the traditional
/// "metered" billing via an API key.
pub(crate) fn is_plan_that_should_use_api_key(&self) -> bool {
self.id_token
.chatgpt_plan_type
.as_ref()
.is_none_or(|plan| plan.is_plan_that_should_use_api_key())
}
}

/// Flat subset of useful claims in id_token from auth.json.
#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize)]
pub struct IdTokenInfo {

@@ -24,7 +35,57 @@ pub struct IdTokenInfo {
/// The ChatGPT subscription plan type
/// (e.g., "free", "plus", "pro", "business", "enterprise", "edu").
/// (Note: ae has not verified that those are the exact values.)
pub chatgpt_plan_type: Option<String>,
pub(crate) chatgpt_plan_type: Option<PlanType>,
}

impl IdTokenInfo {
pub fn get_chatgpt_plan_type(&self) -> Option<String> {
self.chatgpt_plan_type.as_ref().map(|t| match t {
PlanType::Known(plan) => format!("{plan:?}"),
PlanType::Unknown(s) => s.clone(),
})
}
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(untagged)]
pub(crate) enum PlanType {
Known(KnownPlan),
Unknown(String),
}

impl PlanType {
fn is_plan_that_should_use_api_key(&self) -> bool {
match self {
Self::Known(known) => {
use KnownPlan::*;
!matches!(known, Free | Plus | Pro | Team)
}
Self::Unknown(_) => {
// Unknown plans should use the API key.
true
}
}
}

pub fn as_string(&self) -> String {
match self {
Self::Known(known) => format!("{known:?}").to_lowercase(),
Self::Unknown(s) => s.clone(),
}
}
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum KnownPlan {
Free,
Plus,
Pro,
Team,
Business,
Enterprise,
Edu,
}

#[derive(Deserialize)]

@@ -38,7 +99,7 @@ struct IdClaims {
#[derive(Deserialize)]
struct AuthClaims {
#[serde(default)]
chatgpt_plan_type: Option<String>,
chatgpt_plan_type: Option<PlanType>,
}

#[derive(Debug, Error)]

@@ -112,6 +173,9 @@ mod tests {

let info = parse_id_token(&fake_jwt).expect("should parse");
assert_eq!(info.email.as_deref(), Some("user@example.com"));
assert_eq!(info.chatgpt_plan_type.as_deref(), Some("pro"));
assert_eq!(
info.chatgpt_plan_type,
Some(PlanType::Known(KnownPlan::Pro))
);
}
}
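Putting the plan handling together: Free, Plus, Pro, and Team plans keep ChatGPT auth, while every other known plan and any unrecognized string falls back to the metered API key. A small test-style sketch of that mapping, written against the crate-private helpers above (so it would have to live in this module's tests, alongside `use super::*`):

```rust
#[test]
fn plan_type_api_key_preference() {
    // Plans bundled with ChatGPT usage stay on ChatGPT auth...
    assert!(!PlanType::Known(KnownPlan::Plus).is_plan_that_should_use_api_key());
    assert!(!PlanType::Known(KnownPlan::Pro).is_plan_that_should_use_api_key());
    // ...while enterprise-style and unknown plans use the API key.
    assert!(PlanType::Known(KnownPlan::Enterprise).is_plan_that_should_use_api_key());
    assert!(PlanType::Unknown("some-new-plan".to_string()).is_plan_that_should_use_api_key());
    // `as_string` lowercases known variants for display (e.g. in `get_plan_type`).
    assert_eq!(PlanType::Known(KnownPlan::Edu).as_string(), "edu");
}
```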
@@ -29,6 +29,7 @@ codex-common = { path = "../common", features = [
"cli",
"elapsed",
"sandbox_summary",
"updates",
] }
codex-core = { path = "../core" }
codex-file-search = { path = "../file-search" }

@@ -48,7 +49,6 @@ ratatui = { version = "0.29.0", features = [
] }
ratatui-image = "8.0.0"
regex-lite = "0.1"
reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1", features = ["derive"] }
serde_json = { version = "1", features = ["preserve_order"] }
shlex = "1.3.0"

@@ -537,8 +537,8 @@ impl HistoryCell {
lines.push(Line::from(" • Signed in with ChatGPT"));

let info = tokens.id_token;
if let Some(email) = info.email {
lines.push(Line::from(vec![" • Login: ".into(), email.into()]));
if let Some(email) = &info.email {
lines.push(Line::from(vec![" • Login: ".into(), email.clone().into()]));
}

match auth.openai_api_key.as_deref() {

@@ -549,9 +549,8 @@ impl HistoryCell {
}
_ => {
let plan_text = info
.chatgpt_plan_type
.as_deref()
.map(title_case)
.get_chatgpt_plan_type()
.map(|s| title_case(&s))
.unwrap_or_else(|| "Unknown".to_string());
lines.push(Line::from(vec![" • Plan: ".into(), plan_text.into()]));
}

@@ -12,7 +12,7 @@ use codex_core::config::load_config_as_toml_with_cli_overrides;
use codex_core::config_types::SandboxMode;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::SandboxPolicy;
use codex_login::load_auth;
use codex_login::CodexAuth;
use codex_ollama::DEFAULT_OSS_MODEL;
use log_layer::TuiLogLayer;
use std::fs::OpenOptions;

@@ -48,8 +48,6 @@ mod text_formatting;
mod tui;
mod user_approval_widget;

#[cfg(not(debug_assertions))]
mod updates;
#[cfg(not(debug_assertions))]
use color_eyre::owo_colors::OwoColorize;

@@ -211,7 +209,7 @@ pub async fn run_main(

#[allow(clippy::print_stderr)]
#[cfg(not(debug_assertions))]
if let Some(latest_version) = updates::get_upgrade_version(&config) {
if let Some(latest_version) = codex_common::updates::get_upgrade_version(&config) {
let current_version = env!("CARGO_PKG_VERSION");
let exe = std::env::current_exe()?;
let managed_by_npm = std::env::var_os("CODEX_MANAGED_BY_NPM").is_some();

@@ -304,7 +302,7 @@ fn should_show_login_screen(config: &Config) -> bool {
// Reading the OpenAI API key is an async operation because it may need
// to refresh the token. Block on it.
let codex_home = config.codex_home.clone();
match load_auth(&codex_home, true) {
match CodexAuth::from_codex_home(&codex_home) {
Ok(Some(_)) => false,
Ok(None) => true,
Err(err) => {