Compare commits

...

1 Commit

Author: Michael Bolin
SHA1: 88340071dd
Message: feat: codex app-server --default-chatgpt-proxy-auth
Date: 2026-01-30 15:08:59 -08:00
19 changed files with 457 additions and 72 deletions

codex-rs/Cargo.lock (generated)
View File

@@ -1084,6 +1084,7 @@ dependencies = [
"axum",
"base64",
"chrono",
"clap",
"codex-app-server-protocol",
"codex-arg0",
"codex-backend-client",

View File

@@ -915,7 +915,7 @@ mod tests {
#[test]
fn serialize_get_account() -> Result<()> {
let request = ClientRequest::GetAccount {
request_id: RequestId::Integer(6),
request_id: RequestId::Integer(7),
params: v2::GetAccountParams {
refresh_token: false,
},
@@ -923,7 +923,7 @@ mod tests {
assert_eq!(
json!({
"method": "account/read",
"id": 6,
"id": 7,
"params": {
"refreshToken": false
}

View File

@@ -32,6 +32,7 @@ codex-rmcp-client = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-json-to-toml = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
mcp-types = { workspace = true }

View File

@@ -131,6 +131,7 @@ use codex_app_server_protocol::UserInput as V2UserInput;
use codex_app_server_protocol::UserSavedConfig;
use codex_app_server_protocol::build_turns_from_event_msgs;
use codex_backend_client::Client as BackendClient;
use codex_backend_client::UsageMetadata;
use codex_chatgpt::connectors;
use codex_core::AuthManager;
use codex_core::CodexAuth;
@@ -239,6 +240,8 @@ const THREAD_LIST_MAX_LIMIT: usize = 100;
// Duration before a ChatGPT login attempt is abandoned.
const LOGIN_CHATGPT_TIMEOUT: Duration = Duration::from_secs(10 * 60);
// Timeout for best-effort plan type lookups via the usage endpoint.
const PLAN_TYPE_FETCH_TIMEOUT: Duration = Duration::from_secs(5);
struct ActiveLogin {
shutdown_handle: ShutdownHandle,
login_id: Uuid,
@@ -1198,19 +1201,16 @@ impl CodexMessageProcessor {
match self.auth_manager.auth().await {
Some(auth) => {
let auth_mode = auth.api_auth_mode();
let (reported_auth_method, token_opt) = match auth.get_token() {
Ok(token) if !token.is_empty() => {
let tok = if include_token { Some(token) } else { None };
(Some(auth_mode), tok)
}
Ok(_) => (None, None),
let token_opt = match auth.bearer_token() {
Ok(Some(token)) if include_token && !token.is_empty() => Some(token),
Ok(_) => None,
Err(err) => {
tracing::warn!("failed to get token for auth status: {err}");
(None, None)
tracing::warn!("failed to get bearer token for auth status: {err}");
None
}
};
GetAuthStatusResponse {
auth_method: reported_auth_method,
auth_method: Some(auth_mode),
auth_token: token_opt,
requires_openai_auth: Some(true),
}
@@ -1265,6 +1265,26 @@ impl CodexMessageProcessor {
}
}
}
CodexAuth::ChatgptProxy(_) => {
let needs_usage_metadata = auth.account_plan_type().is_none()
|| auth.get_account_email().is_none()
|| auth.get_account_id().is_none();
let usage_metadata = if needs_usage_metadata {
self.fetch_usage_metadata_from_usage().await
} else {
None
};
let email = auth
.get_account_email()
.or_else(|| usage_metadata.as_ref().and_then(|meta| meta.email.clone()))
.unwrap_or_else(|| "unknown".to_string());
let plan_type = auth
.account_plan_type()
.or_else(|| usage_metadata.as_ref().map(|meta| meta.plan_type))
.unwrap_or(codex_protocol::account::PlanType::Unknown);
Account::Chatgpt { email, plan_type }
}
}),
None => None,
};
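// Illustrative sketch, not part of this change: the field resolution used in
// the ChatgptProxy branch above, shown in isolation. Stored account metadata
// wins, then the value recovered from the usage endpoint, then a fixed
// fallback such as "unknown". `resolve_proxy_field` is a hypothetical helper.
fn resolve_proxy_field(
    stored: Option<String>,
    from_usage: Option<String>,
    fallback: &str,
) -> String {
    stored.or(from_usage).unwrap_or_else(|| fallback.to_string())
}
// resolve_proxy_field(None, Some("user@example.com".into()), "unknown") == "user@example.com"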
@@ -1297,6 +1317,13 @@ impl CodexMessageProcessor {
}
async fn fetch_account_rate_limits(&self) -> Result<CoreRateLimitSnapshot, JSONRPCErrorError> {
let (snapshot, _) = self.fetch_rate_limits_with_metadata().await?;
Ok(snapshot)
}
async fn fetch_rate_limits_with_metadata(
&self,
) -> Result<(CoreRateLimitSnapshot, UsageMetadata), JSONRPCErrorError> {
let Some(auth) = self.auth_manager.auth().await else {
return Err(JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
@@ -1321,7 +1348,7 @@ impl CodexMessageProcessor {
})?;
client
.get_rate_limits()
.get_rate_limits_with_metadata()
.await
.map_err(|err| JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
@@ -1330,6 +1357,35 @@ impl CodexMessageProcessor {
})
}
async fn fetch_usage_metadata_from_usage(&self) -> Option<UsageMetadata> {
match tokio::time::timeout(
PLAN_TYPE_FETCH_TIMEOUT,
self.fetch_rate_limits_with_metadata(),
)
.await
{
Ok(Ok((_snapshot, metadata))) => {
let _changed = self.auth_manager.update_chatgpt_proxy_account_metadata(
metadata.user_id.clone(),
metadata.account_id.clone(),
metadata.email.clone(),
Some(metadata.plan_type),
);
Some(metadata)
}
Ok(Err(err)) => {
let message = err.message;
warn!("failed to fetch usage metadata from usage endpoint: {message}");
None
}
Err(_) => {
let secs = PLAN_TYPE_FETCH_TIMEOUT.as_secs();
warn!("fetching usage metadata from usage endpoint timed out after {secs}s");
None
}
}
}
async fn get_user_saved_config(&self, request_id: RequestId) {
let service = ConfigService::new_with_defaults(self.config.codex_home.clone());
let user_saved_config: UserSavedConfig = match service.load_user_saved_config().await {

View File

@@ -10,6 +10,7 @@ use std::io::Result as IoResult;
use std::path::PathBuf;
use crate::message_processor::MessageProcessor;
use crate::message_processor::MessageProcessorArgs;
use crate::outgoing_message::OutgoingMessage;
use crate::outgoing_message::OutgoingMessageSender;
use codex_app_server_protocol::ConfigLayerSource;
@@ -168,6 +169,7 @@ pub async fn run_main(
cli_config_overrides: CliConfigOverrides,
loader_overrides: LoaderOverrides,
default_analytics_enabled: bool,
default_chatgpt_proxy_auth: bool,
) -> IoResult<()> {
// Set up channels.
let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
@@ -290,15 +292,16 @@ pub async fn run_main(
let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
let cli_overrides: Vec<(String, TomlValue)> = cli_kv_overrides.clone();
let loader_overrides = loader_overrides_for_config_api;
let mut processor = MessageProcessor::new(
outgoing_message_sender,
let mut processor = MessageProcessor::new(MessageProcessorArgs {
outgoing: outgoing_message_sender,
codex_linux_sandbox_exe,
std::sync::Arc::new(config),
config: std::sync::Arc::new(config),
cli_overrides,
loader_overrides,
feedback.clone(),
default_chatgpt_proxy_auth,
feedback: feedback.clone(),
config_warnings,
);
});
let mut thread_created_rx = processor.thread_created_receiver();
async move {
let mut listen_for_threads = true;

View File

@@ -1,3 +1,4 @@
use clap::Parser;
use codex_app_server::run_main;
use codex_arg0::arg0_dispatch_or_else;
use codex_common::CliConfigOverrides;
@@ -8,8 +9,20 @@ use std::path::PathBuf;
// managed config file without writing to /etc.
const MANAGED_CONFIG_PATH_ENV_VAR: &str = "CODEX_APP_SERVER_MANAGED_CONFIG_PATH";
#[derive(Debug, Parser, Default, Clone)]
#[command(bin_name = "codex-app-server")]
struct AppServerCli {
#[clap(flatten)]
config_overrides: CliConfigOverrides,
/// Seed ChatGPT proxy auth (tokenless) on startup when no auth is present.
#[arg(long = "default-chatgpt-proxy-auth")]
default_chatgpt_proxy_auth: bool,
}
fn main() -> anyhow::Result<()> {
arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
let cli = AppServerCli::parse();
let managed_config_path = managed_config_path_from_debug_env();
let loader_overrides = LoaderOverrides {
managed_config_path,
@@ -18,9 +31,10 @@ fn main() -> anyhow::Result<()> {
run_main(
codex_linux_sandbox_exe,
CliConfigOverrides::default(),
cli.config_overrides,
loader_overrides,
false,
cli.default_chatgpt_proxy_auth,
)
.await?;
Ok(())
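// Illustrative sketch, not part of this change: a unit test for the new flag
// on the standalone binary, mirroring the multitool tests added further
// below. It assumes it lives in this module so the private AppServerCli type
// is in scope.
#[cfg(test)]
mod cli_flag_tests {
    use super::AppServerCli;
    use clap::Parser;

    #[test]
    fn default_chatgpt_proxy_auth_is_opt_in() {
        let off = AppServerCli::try_parse_from(["codex-app-server"]).expect("parse");
        assert!(!off.default_chatgpt_proxy_auth);

        let on =
            AppServerCli::try_parse_from(["codex-app-server", "--default-chatgpt-proxy-auth"])
                .expect("parse");
        assert!(on.default_chatgpt_proxy_auth);
    }
}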

View File

@@ -26,10 +26,12 @@ use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestPayload;
use codex_core::AuthManager;
use codex_core::ThreadManager;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::ExternalAuthRefreshContext;
use codex_core::auth::ExternalAuthRefreshReason;
use codex_core::auth::ExternalAuthRefresher;
use codex_core::auth::ExternalAuthTokens;
use codex_core::auth::login_with_chatgpt_proxy;
use codex_core::config::Config;
use codex_core::config_loader::LoaderOverrides;
use codex_core::default_client::SetOriginatorError;
@@ -38,14 +40,27 @@ use codex_core::default_client::get_codex_user_agent;
use codex_core::default_client::set_default_originator;
use codex_feedback::CodexFeedback;
use codex_protocol::ThreadId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::protocol::SessionSource;
use tokio::sync::broadcast;
use tokio::time::Duration;
use tokio::time::timeout;
use toml::Value as TomlValue;
use tracing::warn;
const EXTERNAL_AUTH_REFRESH_TIMEOUT: Duration = Duration::from_secs(10);
pub(crate) struct MessageProcessorArgs {
pub(crate) outgoing: OutgoingMessageSender,
pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
pub(crate) config: Arc<Config>,
pub(crate) cli_overrides: Vec<(String, TomlValue)>,
pub(crate) loader_overrides: LoaderOverrides,
pub(crate) default_chatgpt_proxy_auth: bool,
pub(crate) feedback: CodexFeedback,
pub(crate) config_warnings: Vec<ConfigWarningNotification>,
}
#[derive(Clone)]
struct ExternalAuthRefreshBridge {
outgoing: Arc<OutgoingMessageSender>,
@@ -109,21 +124,40 @@ pub(crate) struct MessageProcessor {
impl MessageProcessor {
/// Create a new `MessageProcessor`, retaining a handle to the outgoing
/// `Sender` so handlers can enqueue messages to be written to stdout.
pub(crate) fn new(
outgoing: OutgoingMessageSender,
codex_linux_sandbox_exe: Option<PathBuf>,
config: Arc<Config>,
cli_overrides: Vec<(String, TomlValue)>,
loader_overrides: LoaderOverrides,
feedback: CodexFeedback,
config_warnings: Vec<ConfigWarningNotification>,
) -> Self {
pub(crate) fn new(args: MessageProcessorArgs) -> Self {
let MessageProcessorArgs {
outgoing,
codex_linux_sandbox_exe,
config,
cli_overrides,
loader_overrides,
default_chatgpt_proxy_auth,
feedback,
config_warnings,
} = args;
let outgoing = Arc::new(outgoing);
let auth_manager = AuthManager::shared(
config.codex_home.clone(),
false,
config.cli_auth_credentials_store_mode,
);
if default_chatgpt_proxy_auth
&& auth_manager.auth_cached().is_none()
&& !matches!(config.forced_login_method, Some(ForcedLoginMethod::Api))
{
let account_id = config.forced_chatgpt_workspace_id.as_deref();
if let Err(err) = login_with_chatgpt_proxy(
&config.codex_home,
account_id,
None,
None,
AuthCredentialsStoreMode::Ephemeral,
) {
warn!("failed to seed default ChatGPT proxy auth: {err}");
} else {
auth_manager.reload();
}
}
auth_manager.set_forced_chatgpt_workspace_id(config.forced_chatgpt_workspace_id.clone());
auth_manager.set_external_auth_refresher(Arc::new(ExternalAuthRefreshBridge {
outgoing: outgoing.clone(),

View File

@@ -163,6 +163,7 @@ pub fn write_chatgpt_auth(
openai_api_key: None,
tokens: Some(tokens),
last_refresh,
chatgpt_proxy: None,
};
save_auth(codex_home, &auth, cli_auth_credentials_store_mode).context("write auth.json")

View File

@@ -38,6 +38,14 @@ impl PathStyle {
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct UsageMetadata {
pub user_id: Option<String>,
pub account_id: Option<String>,
pub email: Option<String>,
pub plan_type: AccountPlanType,
}
#[derive(Clone, Debug)]
pub struct Client {
base_url: String,
@@ -75,10 +83,10 @@ impl Client {
}
pub fn from_auth(base_url: impl Into<String>, auth: &CodexAuth) -> Result<Self> {
let token = auth.get_token().map_err(anyhow::Error::from)?;
let mut client = Self::new(base_url)?
.with_user_agent(get_codex_user_agent())
.with_bearer_token(token);
let mut client = Self::new(base_url)?.with_user_agent(get_codex_user_agent());
if let Some(token) = auth.bearer_token().map_err(anyhow::Error::from)? {
client = client.with_bearer_token(token);
}
if let Some(account_id) = auth.get_account_id() {
client = client.with_chatgpt_account_id(account_id);
}
@@ -159,17 +167,34 @@ impl Client {
}
}
pub async fn get_rate_limits(&self) -> Result<RateLimitSnapshot> {
let url = match self.path_style {
fn usage_url(&self) -> String {
match self.path_style {
PathStyle::CodexApi => format!("{}/api/codex/usage", self.base_url),
PathStyle::ChatGptApi => format!("{}/wham/usage", self.base_url),
};
}
}
async fn fetch_usage_payload(&self) -> Result<RateLimitStatusPayload> {
let url = self.usage_url();
let req = self.http.get(&url).headers(self.headers());
let (body, ct) = self.exec_request(req, "GET", &url).await?;
let payload: RateLimitStatusPayload = self.decode_json(&url, &ct, &body)?;
self.decode_json(&url, &ct, &body)
}
pub async fn get_rate_limits(&self) -> Result<RateLimitSnapshot> {
let payload = self.fetch_usage_payload().await?;
Ok(Self::rate_limit_snapshot_from_payload(payload))
}
pub async fn get_rate_limits_with_metadata(
&self,
) -> Result<(RateLimitSnapshot, UsageMetadata)> {
let payload = self.fetch_usage_payload().await?;
let metadata = Self::usage_metadata_from_payload(&payload);
let snapshot = Self::rate_limit_snapshot_from_payload(payload);
Ok((snapshot, metadata))
}
pub async fn list_tasks(
&self,
limit: Option<i32>,
@@ -317,6 +342,15 @@ impl Client {
}
}
fn usage_metadata_from_payload(payload: &RateLimitStatusPayload) -> UsageMetadata {
UsageMetadata {
user_id: payload.user_id.clone(),
account_id: payload.account_id.clone(),
email: payload.email.clone(),
plan_type: Self::map_plan_type(payload.plan_type),
}
}
fn map_rate_limit_window(
window: Option<Option<Box<RateLimitWindowSnapshot>>>,
) -> Option<RateLimitWindow> {
@@ -374,3 +408,31 @@ impl Client {
Some((seconds_i64 + 59) / 60)
}
}
#[cfg(test)]
mod tests {
use super::Client;
use super::UsageMetadata;
use crate::types::PlanType;
use crate::types::RateLimitStatusPayload;
use pretty_assertions::assert_eq;
#[test]
fn usage_metadata_maps_optional_fields() {
let payload = RateLimitStatusPayload {
plan_type: PlanType::Plus,
user_id: Some("user-123".to_string()),
account_id: Some("acc-456".to_string()),
email: Some("user@example.com".to_string()),
rate_limit: None,
credits: None,
};
let metadata: UsageMetadata = Client::usage_metadata_from_payload(&payload);
assert_eq!(metadata.user_id, Some("user-123".to_string()));
assert_eq!(metadata.account_id, Some("acc-456".to_string()));
assert_eq!(metadata.email, Some("user@example.com".to_string()));
assert_eq!(metadata.plan_type, codex_protocol::account::PlanType::Plus);
}
}
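// Illustrative sketch, not part of this change: how a caller might use the
// metadata-aware variant. The base URL and token are placeholders, and the
// anyhow return type assumes this crate's errors convert into anyhow::Error
// (as the surrounding code already does).
async fn print_plan(base_url: &str, token: String) -> anyhow::Result<()> {
    let client = Client::new(base_url)?.with_bearer_token(token);
    let (snapshot, metadata) = client.get_rate_limits_with_metadata().await?;
    // `snapshot` carries the same data as `get_rate_limits()`; `metadata`
    // adds the optional user/account/email fields plus the mapped plan type.
    println!("plan: {:?}, email: {:?}", metadata.plan_type, metadata.email);
    drop(snapshot);
    Ok(())
}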

View File

@@ -2,6 +2,7 @@ mod client;
pub mod types;
pub use client::Client;
pub use client::UsageMetadata;
pub use types::CodeTaskDetailsResponse;
pub use types::CodeTaskDetailsResponseExt;
pub use types::ConfigFileResponse;

View File

@@ -283,6 +283,10 @@ struct AppServerCommand {
/// See https://developers.openai.com/codex/config-advanced/#metrics for more details.
#[arg(long = "analytics-default-enabled")]
analytics_default_enabled: bool,
/// Seed ChatGPT proxy auth (tokenless) on startup when no auth is present.
#[arg(long = "default-chatgpt-proxy-auth")]
default_chatgpt_proxy_auth: bool,
}
#[derive(Debug, clap::Subcommand)]
@@ -535,6 +539,7 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
root_config_overrides,
codex_core::config_loader::LoaderOverrides::default(),
app_server_cli.analytics_default_enabled,
app_server_cli.default_chatgpt_proxy_auth,
)
.await?;
}
@@ -1263,6 +1268,19 @@ mod tests {
assert!(app_server.analytics_default_enabled);
}
#[test]
fn app_server_default_chatgpt_proxy_auth_disabled_without_flag() {
let app_server = app_server_from_args(["codex", "app-server"].as_ref());
assert!(!app_server.default_chatgpt_proxy_auth);
}
#[test]
fn app_server_default_chatgpt_proxy_auth_enabled_with_flag() {
let app_server =
app_server_from_args(["codex", "app-server", "--default-chatgpt-proxy-auth"].as_ref());
assert!(app_server.default_chatgpt_proxy_auth);
}
#[test]
fn features_enable_parses_feature_name() {
let cli = MultitoolCli::try_parse_from(["codex", "features", "enable", "unified_exec"])

View File

@@ -16,6 +16,16 @@ use serde::Serialize;
pub struct RateLimitStatusPayload {
#[serde(rename = "plan_type")]
pub plan_type: PlanType,
#[serde(default, skip_serializing_if = "Option::is_none", rename = "user_id")]
pub user_id: Option<String>,
#[serde(
default,
skip_serializing_if = "Option::is_none",
rename = "account_id"
)]
pub account_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none", rename = "email")]
pub email: Option<String>,
#[serde(
rename = "rate_limit",
default,
@@ -36,6 +46,9 @@ impl RateLimitStatusPayload {
pub fn new(plan_type: PlanType) -> RateLimitStatusPayload {
RateLimitStatusPayload {
plan_type,
user_id: None,
account_id: None,
email: None,
rate_limit: None,
credits: None,
}

View File

@@ -177,9 +177,9 @@ pub(crate) fn auth_provider_from_auth(
}
if let Some(auth) = auth {
let token = auth.get_token()?;
let token = auth.bearer_token()?;
Ok(CoreAuthProvider {
token: Some(token),
token,
account_id: auth.get_account_id(),
})
} else {

View File

@@ -21,6 +21,7 @@ use codex_protocol::config_types::ForcedLoginMethod;
pub use crate::auth::storage::AuthCredentialsStoreMode;
pub use crate::auth::storage::AuthDotJson;
use crate::auth::storage::AuthStorageBackend;
use crate::auth::storage::ChatGptProxyAccount;
use crate::auth::storage::create_auth_storage;
use crate::config::Config;
use crate::error::RefreshTokenFailedError;
@@ -53,6 +54,7 @@ pub enum CodexAuth {
ApiKey(ApiKeyAuth),
Chatgpt(ChatgptAuth),
ChatgptAuthTokens(ChatgptAuthTokens),
ChatgptProxy(ChatgptProxy),
}
#[derive(Debug, Clone)]
@@ -71,6 +73,11 @@ pub struct ChatgptAuthTokens {
state: ChatgptAuthState,
}
#[derive(Debug, Clone)]
pub struct ChatgptProxy {
account: ChatGptProxyAccount,
}
#[derive(Debug, Clone)]
struct ChatgptAuthState {
auth_dot_json: Arc<Mutex<Option<AuthDotJson>>>,
@@ -160,6 +167,12 @@ impl CodexAuth {
return Ok(CodexAuth::from_api_key_with_client(api_key, client));
}
if let Some(proxy_account) = auth_dot_json.chatgpt_proxy.clone() {
return Ok(Self::ChatgptProxy(ChatgptProxy {
account: proxy_account,
}));
}
let storage_mode = auth_dot_json.storage_mode(auth_credentials_store_mode);
let state = ChatgptAuthState {
auth_dot_json: Arc::new(Mutex::new(Some(auth_dot_json))),
@@ -189,14 +202,16 @@ impl CodexAuth {
pub fn internal_auth_mode(&self) -> AuthMode {
match self {
Self::ApiKey(_) => AuthMode::ApiKey,
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => AuthMode::Chatgpt,
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) | Self::ChatgptProxy(_) => {
AuthMode::Chatgpt
}
}
}
pub fn api_auth_mode(&self) -> ApiAuthMode {
match self {
Self::ApiKey(_) => ApiAuthMode::ApiKey,
Self::Chatgpt(_) => ApiAuthMode::Chatgpt,
Self::Chatgpt(_) | Self::ChatgptProxy(_) => ApiAuthMode::Chatgpt,
Self::ChatgptAuthTokens(_) => ApiAuthMode::ChatgptAuthTokens,
}
}
@@ -213,7 +228,7 @@ impl CodexAuth {
pub fn api_key(&self) -> Option<&str> {
match self {
Self::ApiKey(auth) => Some(auth.api_key.as_str()),
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => None,
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) | Self::ChatgptProxy(_) => None,
}
}
@@ -230,25 +245,40 @@ impl CodexAuth {
}
}
/// Returns the token string used for bearer authentication.
pub fn get_token(&self) -> Result<String, std::io::Error> {
/// Returns the token string used for bearer authentication, if available.
pub fn bearer_token(&self) -> Result<Option<String>, std::io::Error> {
match self {
Self::ApiKey(auth) => Ok(auth.api_key.clone()),
Self::ApiKey(auth) => Ok(Some(auth.api_key.clone())),
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => {
let access_token = self.get_token_data()?.access_token;
Ok(access_token)
Ok(Some(access_token))
}
Self::ChatgptProxy(_) => Ok(None),
}
}
/// Returns the token string used for bearer authentication.
pub fn get_token(&self) -> Result<String, std::io::Error> {
let Some(token) = self.bearer_token()? else {
return Err(std::io::Error::other("Bearer token is not available."));
};
Ok(token)
}
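// Illustrative sketch, not part of this change: the expected caller-side
// migration. `bearer_token()` is the non-failing probe and returns Ok(None)
// for tokenless proxy auth; `get_token()` keeps the old "token or error"
// contract. `authorization_header` is a hypothetical helper.
fn authorization_header(auth: &CodexAuth) -> std::io::Result<Option<String>> {
    Ok(auth.bearer_token()?.map(|token| format!("Bearer {token}")))
}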
/// Returns `None` if `is_chatgpt_auth()` is false.
pub fn get_account_id(&self) -> Option<String> {
self.get_current_token_data().and_then(|t| t.account_id)
match self {
Self::ChatgptProxy(proxy) => proxy.account.account_id.clone(),
_ => self.get_current_token_data().and_then(|t| t.account_id),
}
}
/// Returns `None` if `is_chatgpt_auth()` is false.
pub fn get_account_email(&self) -> Option<String> {
self.get_current_token_data().and_then(|t| t.id_token.email)
match self {
Self::ChatgptProxy(proxy) => proxy.account.email.clone(),
_ => self.get_current_token_data().and_then(|t| t.id_token.email),
}
}
/// Account-facing plan classification derived from the current token.
@@ -256,6 +286,9 @@ impl CodexAuth {
/// mapped from the ID token's internal plan value. Prefer this when you
/// need to make UI or product decisions based on the user's subscription.
pub fn account_plan_type(&self) -> Option<AccountPlanType> {
if let Self::ChatgptProxy(proxy) = self {
return proxy.account.plan_type;
}
let map_known = |kp: &InternalKnownPlan| match kp {
InternalKnownPlan::Free => AccountPlanType::Free,
InternalKnownPlan::Go => AccountPlanType::Go,
@@ -275,12 +308,22 @@ impl CodexAuth {
})
}
/// Returns the ChatGPT workspace/account identifier when available.
pub fn chatgpt_workspace_id(&self) -> Option<String> {
match self {
Self::ChatgptProxy(proxy) => proxy.account.account_id.clone(),
_ => self
.get_current_token_data()
.and_then(|t| t.id_token.chatgpt_account_id.or(t.account_id)),
}
}
/// Returns `None` if `is_chatgpt_auth()` is false.
fn get_current_auth_json(&self) -> Option<AuthDotJson> {
let state = match self {
Self::Chatgpt(auth) => &auth.state,
Self::ChatgptAuthTokens(auth) => &auth.state,
Self::ApiKey(_) => return None,
Self::ApiKey(_) | Self::ChatgptProxy(_) => return None,
};
#[expect(clippy::unwrap_used)]
state.auth_dot_json.lock().unwrap().clone()
@@ -303,6 +346,7 @@ impl CodexAuth {
account_id: Some("account_id".to_string()),
}),
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
let client = crate::default_client::create_client();
@@ -382,6 +426,7 @@ pub fn login_with_api_key(
openai_api_key: Some(api_key.to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
save_auth(codex_home, &auth_dot_json, auth_credentials_store_mode)
}
@@ -400,6 +445,29 @@ pub fn login_with_chatgpt_auth_tokens(
)
}
/// Writes a tokenless ChatGPT proxy auth payload.
pub fn login_with_chatgpt_proxy(
codex_home: &Path,
account_id: Option<&str>,
email: Option<&str>,
plan_type: Option<AccountPlanType>,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
let auth_dot_json = AuthDotJson {
auth_mode: Some(ApiAuthMode::Chatgpt),
openai_api_key: None,
tokens: None,
last_refresh: None,
chatgpt_proxy: Some(ChatGptProxyAccount {
user_id: None,
account_id: account_id.map(str::to_string),
email: email.map(str::to_string),
plan_type,
}),
};
save_auth(codex_home, &auth_dot_json, auth_credentials_store_mode)
}
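// Illustrative sketch, not part of this change: a test in the style of those
// at the bottom of this file, showing what the seeded proxy auth looks like
// to callers. It assumes the ephemeral store round-trips within the same
// process, which the seeding path in MessageProcessor::new also relies on.
#[test]
fn chatgpt_proxy_auth_is_tokenless() -> std::io::Result<()> {
    let dir = tempdir()?;
    login_with_chatgpt_proxy(
        dir.path(),
        Some("workspace-123"),
        Some("proxy@example.com"),
        Some(AccountPlanType::Plus),
        AuthCredentialsStoreMode::Ephemeral,
    )?;
    let auth = load_auth(dir.path(), false, AuthCredentialsStoreMode::Ephemeral)?
        .expect("proxy auth should load");
    assert!(matches!(auth, CodexAuth::ChatgptProxy(_)));
    assert_eq!(auth.bearer_token()?, None);
    assert!(auth.get_token().is_err());
    assert_eq!(auth.get_account_id(), Some("workspace-123".to_string()));
    assert_eq!(auth.account_plan_type(), Some(AccountPlanType::Plus));
    Ok(())
}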
/// Persist the provided auth payload using the specified backend.
pub fn save_auth(
codex_home: &Path,
@@ -461,23 +529,10 @@ pub fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
return Ok(());
}
let token_data = match auth.get_token_data() {
Ok(data) => data,
Err(err) => {
return logout_with_message(
&config.codex_home,
format!(
"Failed to load ChatGPT credentials while enforcing workspace restrictions: {err}. Logging out."
),
config.cli_auth_credentials_store_mode,
);
}
};
// workspace is the external identifier for account id.
let chatgpt_account_id = token_data.id_token.chatgpt_account_id.as_deref();
if chatgpt_account_id != Some(expected_account_id) {
let message = match chatgpt_account_id {
// Workspace is the external identifier for account id.
let chatgpt_account_id = auth.chatgpt_workspace_id();
if chatgpt_account_id.as_deref() != Some(expected_account_id) {
let message = match chatgpt_account_id.as_deref() {
Some(actual) => format!(
"Login is restricted to workspace {expected_account_id}, but current credentials belong to {actual}. Logging out."
),
@@ -548,21 +603,29 @@ fn load_auth(
codex_home.to_path_buf(),
AuthCredentialsStoreMode::Ephemeral,
);
if let Some(auth_dot_json) = ephemeral_storage.load()? {
let auth = build_auth(auth_dot_json, AuthCredentialsStoreMode::Ephemeral)?;
return Ok(Some(auth));
let ephemeral_auth = match ephemeral_storage.load()? {
Some(auth_dot_json) => Some(build_auth(
auth_dot_json,
AuthCredentialsStoreMode::Ephemeral,
)?),
None => None,
};
if let Some(auth) = ephemeral_auth.as_ref()
&& !matches!(auth, CodexAuth::ChatgptProxy(_))
{
return Ok(ephemeral_auth);
}
// If the caller explicitly requested ephemeral auth, there is no persisted fallback.
if auth_credentials_store_mode == AuthCredentialsStoreMode::Ephemeral {
return Ok(None);
return Ok(ephemeral_auth);
}
// Fall back to the configured persistent store (file/keyring/auto) for managed auth.
let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
let auth_dot_json = match storage.load()? {
Some(auth) => auth,
None => return Ok(None),
None => return Ok(ephemeral_auth),
};
let auth = build_auth(auth_dot_json, auth_credentials_store_mode)?;
@@ -731,6 +794,7 @@ impl AuthDotJson {
openai_api_key: None,
tokens: Some(tokens),
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
}
}
@@ -1085,6 +1149,59 @@ impl AuthManager {
.and_then(|guard| guard.clone())
}
/// Best-effort in-memory update of ChatGPT proxy account metadata.
///
/// This does not persist to disk and only fills in missing fields (or
/// upgrades an `Unknown` plan type when a known one is available).
pub fn update_chatgpt_proxy_account_metadata(
&self,
user_id: Option<String>,
account_id: Option<String>,
email: Option<String>,
plan_type: Option<AccountPlanType>,
) -> bool {
let Ok(mut guard) = self.inner.write() else {
return false;
};
let Some(CodexAuth::ChatgptProxy(proxy)) = guard.auth.as_mut() else {
return false;
};
let mut changed = false;
if proxy.account.user_id.is_none()
&& let Some(user_id) = user_id
{
proxy.account.user_id = Some(user_id);
changed = true;
}
if proxy.account.account_id.is_none()
&& let Some(account_id) = account_id
{
proxy.account.account_id = Some(account_id);
changed = true;
}
if proxy.account.email.is_none()
&& let Some(email) = email
{
proxy.account.email = Some(email);
changed = true;
}
if let Some(plan_type) = plan_type
&& (proxy.account.plan_type.is_none()
|| proxy.account.plan_type == Some(AccountPlanType::Unknown))
&& plan_type != AccountPlanType::Unknown
{
proxy.account.plan_type = Some(plan_type);
changed = true;
}
changed
}
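// Illustrative sketch, not part of this change: the fill-only-missing rule
// applied to a single field, isolated from AuthManager. An existing value is
// never overwritten; only a missing one is filled in. `fill_missing` is a
// hypothetical helper.
fn fill_missing(current: &mut Option<String>, incoming: Option<String>) -> bool {
    match (current.is_none(), incoming) {
        (true, Some(value)) => {
            *current = Some(value);
            true
        }
        _ => false,
    }
}
// fill_missing(&mut Some("a@b".into()), Some("c@d".into())) returns false and keeps "a@b".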
pub fn has_external_auth_refresher(&self) -> bool {
self.inner
.read()
@@ -1143,7 +1260,7 @@ impl AuthManager {
self.reload();
Ok(())
}
CodexAuth::ApiKey(_) => Ok(()),
CodexAuth::ApiKey(_) | CodexAuth::ChatgptProxy(_) => Ok(()),
}
}
@@ -1399,6 +1516,7 @@ mod tests {
account_id: None,
}),
last_refresh: Some(last_refresh),
chatgpt_proxy: None,
},
auth_dot_json
);
@@ -1424,6 +1542,31 @@ mod tests {
assert!(auth.get_token_data().is_err());
}
#[test]
fn ephemeral_proxy_auth_is_fallback_to_persisted_auth() -> std::io::Result<()> {
let dir = tempdir()?;
// Seed tokenless proxy auth in the ephemeral store.
login_with_chatgpt_proxy(
dir.path(),
Some("workspace-123"),
Some("proxy@example.com"),
Some(AccountPlanType::Plus),
AuthCredentialsStoreMode::Ephemeral,
)?;
// Persist a real API key in the managed store.
login_with_api_key(dir.path(), "sk-test-key", AuthCredentialsStoreMode::File)?;
// Managed auth should override ephemeral proxy auth.
let auth = load_auth(dir.path(), false, AuthCredentialsStoreMode::File)?
.expect("auth should be present");
assert_eq!(auth.internal_auth_mode(), AuthMode::ApiKey);
assert_eq!(auth.api_key(), Some("sk-test-key"));
Ok(())
}
#[test]
fn logout_removes_auth_file() -> Result<(), std::io::Error> {
let dir = tempdir()?;
@@ -1432,6 +1575,7 @@ mod tests {
openai_api_key: Some("sk-test-key".to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
super::save_auth(dir.path(), &auth_dot_json, AuthCredentialsStoreMode::File)?;
let auth_file = get_auth_file(dir.path());

View File

@@ -23,6 +23,7 @@ use crate::token_data::TokenData;
use codex_app_server_protocol::AuthMode;
use codex_keyring_store::DefaultKeyringStore;
use codex_keyring_store::KeyringStore;
use codex_protocol::account::PlanType as AccountPlanType;
use once_cell::sync::Lazy;
/// Determine where Codex should store CLI auth credentials.
@@ -54,6 +55,24 @@ pub struct AuthDotJson {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_refresh: Option<DateTime<Utc>>,
/// ChatGPT account metadata supplied by a trusted proxy.
#[serde(
default,
skip_serializing_if = "Option::is_none",
rename = "chatgptProxy"
)]
pub chatgpt_proxy: Option<ChatGptProxyAccount>,
}
/// Account metadata for the tokenless ChatGPT proxy auth mode.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct ChatGptProxyAccount {
pub user_id: Option<String>,
pub account_id: Option<String>,
pub email: Option<String>,
pub plan_type: Option<AccountPlanType>,
}
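// Illustrative sketch, not part of this change: the on-disk key names
// produced by the camelCase rename above. plan_type is left as None so the
// sketch does not have to pin down the PlanType enum's serialization, which
// is defined elsewhere; serde_json is assumed to be available here.
#[test]
fn chatgpt_proxy_account_uses_camel_case_keys() {
    let account = ChatGptProxyAccount {
        user_id: Some("user-123".to_string()),
        account_id: Some("acc-456".to_string()),
        email: Some("user@example.com".to_string()),
        plan_type: None,
    };
    let value = serde_json::to_value(&account).expect("serialize");
    assert_eq!(value["userId"], "user-123");
    assert_eq!(value["accountId"], "acc-456");
    assert_eq!(value["email"], "user@example.com");
    assert!(value["planType"].is_null());
}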
pub(super) fn get_auth_file(codex_home: &Path) -> PathBuf {
@@ -353,6 +372,7 @@ mod tests {
openai_api_key: Some("test-key".to_string()),
tokens: None,
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
storage
@@ -373,6 +393,7 @@ mod tests {
openai_api_key: Some("test-key".to_string()),
tokens: None,
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
let file = get_auth_file(codex_home.path());
@@ -395,6 +416,7 @@ mod tests {
openai_api_key: Some("sk-test-key".to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
let storage = create_auth_storage(dir.path().to_path_buf(), AuthCredentialsStoreMode::File);
storage.save(&auth_dot_json)?;
@@ -418,6 +440,7 @@ mod tests {
openai_api_key: Some("sk-ephemeral".to_string()),
tokens: None,
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
storage.save(&auth_dot_json)?;
@@ -516,6 +539,7 @@ mod tests {
account_id: Some(format!("{prefix}-account-id")),
}),
last_refresh: None,
chatgpt_proxy: None,
}
}
@@ -532,6 +556,7 @@ mod tests {
openai_api_key: Some("sk-test".to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
seed_keyring_with_auth(
&mock_keyring,
@@ -574,6 +599,7 @@ mod tests {
account_id: Some("account".to_string()),
}),
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
storage.save(&auth)?;

View File

@@ -39,7 +39,7 @@ fn codex_apps_mcp_bearer_token_env_var() -> Option<String> {
}
fn codex_apps_mcp_bearer_token(auth: Option<&CodexAuth>) -> Option<String> {
let token = auth.and_then(|auth| auth.get_token().ok())?;
let token = auth.and_then(|auth| auth.bearer_token().ok()).flatten()?;
let token = token.trim();
if token.is_empty() {
None

View File

@@ -55,6 +55,7 @@ async fn refresh_token_succeeds_updates_storage() -> Result<()> {
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -117,6 +118,7 @@ async fn returns_fresh_tokens_as_is() -> Result<()> {
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -163,6 +165,7 @@ async fn refreshes_token_when_last_refresh_is_stale() -> Result<()> {
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(stale_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -222,6 +225,7 @@ async fn refresh_token_returns_permanent_error_for_expired_refresh_token() -> Re
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -272,6 +276,7 @@ async fn refresh_token_returns_transient_error_on_server_failure() -> Result<()>
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -324,6 +329,7 @@ async fn unauthorized_recovery_reloads_then_refreshes_tokens() -> Result<()> {
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -333,6 +339,7 @@ async fn unauthorized_recovery_reloads_then_refreshes_tokens() -> Result<()> {
openai_api_key: None,
tokens: Some(disk_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
save_auth(
ctx.codex_home.path(),
@@ -416,6 +423,7 @@ async fn unauthorized_recovery_skips_reload_on_account_mismatch() -> Result<()>
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -431,6 +439,7 @@ async fn unauthorized_recovery_skips_reload_on_account_mismatch() -> Result<()>
openai_api_key: None,
tokens: Some(disk_tokens),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
save_auth(
ctx.codex_home.path(),
@@ -495,6 +504,7 @@ async fn unauthorized_recovery_requires_chatgpt_auth() -> Result<()> {
openai_api_key: Some("sk-test".to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
ctx.write_auth(&auth)?;

View File

@@ -564,6 +564,7 @@ pub(crate) async fn persist_tokens_async(
openai_api_key: api_key,
tokens: Some(tokens),
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
save_auth(&codex_home, &auth, auth_credentials_store_mode)
})

View File

@@ -91,7 +91,7 @@ pub(crate) fn compose_account_display(
let auth = auth_manager.auth_cached()?;
match auth {
CodexAuth::Chatgpt(_) | CodexAuth::ChatgptAuthTokens(_) => {
CodexAuth::Chatgpt(_) | CodexAuth::ChatgptAuthTokens(_) | CodexAuth::ChatgptProxy(_) => {
let email = auth.get_account_email();
let plan = plan
.map(|plan_type| title_case(format!("{plan_type:?}").as_str()))