Compare commits

..

8 Commits

Author SHA1 Message Date
gt-oai
761a37d5c1 blocking debug 2026-01-31 01:42:50 +00:00
gt-oai
e04cfc3601 Make cloud requirements load fail-closed 2026-01-31 00:28:03 +00:00
gt-oai
149f3aa27a Add enforce_residency to requirements (#10263)
Add `enforce_residency` to requirements.toml and thread it through to a
header on `default_client`.
2026-01-31 00:26:25 +00:00
gt-oai
a046481ad9 Wire up cloud reqs in exec, app-server (#10241)
We're fetching cloud requirements in TUI in
https://github.com/openai/codex/pull/10167.

This adds the same fetching in exec and app-server binaries also.
2026-01-30 23:53:41 +00:00
Michael Bolin
10ea117ee1 chore: implement Mul for TruncationPolicy (#10272)
Codex thought this was a good idea while working on
https://github.com/openai/codex/pull/10192.
2026-01-30 15:50:20 -08:00
Eric Traut
8d142fd63d Validate CODEX_HOME before resolving (#10249)
Summary
- require `CODEX_HOME` to point to an existing directory before
canonicalizing and surface clear errors otherwise
- share the same helper logic in both `core` and `rmcp-client` and add
unit tests that cover missing, non-directory, valid, and default paths

This addresses #9222
2026-01-30 15:46:33 -08:00
Yuvraj Angad Singh
13e85b1549 fix: update file search directory when session CWD changes (#9279)
## Summary

Fixes #9041

- Adds update_search_dir() method to FileSearchManager to allow updating
the search directory after initialization
- Calls this method when the session CWD changes: new session, resume,
or fork

## Problem

The FileSearchManager was created once with the initial search_dir and
never updated. When a user:

1. Starts Codex in a non-git directory (e.g., /tmp/random)
2. Resumes or forks a session from a different workspace
3. The @filename lookup still searched the original directory

This caused no matches to be returned even when files existed in the
current workspace.

## Solution

Update FileSearchManager.search_dir whenever the session working
directory changes:
- AppEvent::NewSession: Use current config CWD
- SessionSelection::Resume: Use resumed session CWD
- SessionSelection::Fork: Use forked session CWD

## Test plan

- [ ] Start Codex in /tmp/test-dir (non-git)
- [ ] Resume a session from a project with actual files
- [ ] Verify @filename returns matches from the resumed session
directory

---------

Co-authored-by: Eric Traut <etraut@openai.com>
2026-01-30 14:59:20 -08:00
sayan-oai
31d1e49340 fix: dont auto-enable web_search for azure (#10266)
seeing issues with azure after default-enabling web search: #10071,
#10257.

need to work with azure to fix api-side, for now turning off
default-enable of web_search for azure.

diff is big because i moved logic to reuse
2026-01-30 22:52:37 +00:00
56 changed files with 995 additions and 817 deletions

18
codex-rs/Cargo.lock generated
View File

@@ -1084,11 +1084,11 @@ dependencies = [
"axum",
"base64",
"chrono",
"clap",
"codex-app-server-protocol",
"codex-arg0",
"codex-backend-client",
"codex-chatgpt",
"codex-cloud-requirements",
"codex-common",
"codex-core",
"codex-execpolicy",
@@ -1297,6 +1297,7 @@ dependencies = [
name = "codex-cloud-requirements"
version = "0.0.0"
dependencies = [
"anyhow",
"async-trait",
"base64",
"codex-backend-client",
@@ -1306,6 +1307,7 @@ dependencies = [
"pretty_assertions",
"serde_json",
"tempfile",
"thiserror 2.0.17",
"tokio",
"toml 0.9.5",
"tracing",
@@ -1401,6 +1403,7 @@ dependencies = [
"codex-state",
"codex-utils-absolute-path",
"codex-utils-cargo-bin",
"codex-utils-home-dir",
"codex-utils-pty",
"codex-utils-readiness",
"codex-utils-string",
@@ -1408,7 +1411,6 @@ dependencies = [
"core-foundation 0.9.4",
"core_test_support",
"ctor 0.6.3",
"dirs",
"dunce",
"encoding_rs",
"env-flags",
@@ -1488,6 +1490,7 @@ dependencies = [
"assert_cmd",
"clap",
"codex-arg0",
"codex-cloud-requirements",
"codex-common",
"codex-core",
"codex-protocol",
@@ -1843,7 +1846,7 @@ dependencies = [
"codex-keyring-store",
"codex-protocol",
"codex-utils-cargo-bin",
"dirs",
"codex-utils-home-dir",
"futures",
"keyring",
"mcp-types",
@@ -2008,6 +2011,15 @@ dependencies = [
"thiserror 2.0.17",
]
[[package]]
name = "codex-utils-home-dir"
version = "0.0.0"
dependencies = [
"dirs",
"pretty_assertions",
"tempfile",
]
[[package]]
name = "codex-utils-image"
version = "0.0.0"

View File

@@ -43,6 +43,7 @@ members = [
"utils/cache",
"utils/image",
"utils/json-to-toml",
"utils/home-dir",
"utils/pty",
"utils/readiness",
"utils/string",
@@ -102,6 +103,7 @@ codex-utils-cache = { path = "utils/cache" }
codex-utils-cargo-bin = { path = "utils/cargo-bin" }
codex-utils-image = { path = "utils/image" }
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
codex-utils-home-dir = { path = "utils/home-dir" }
codex-utils-pty = { path = "utils/pty" }
codex-utils-readiness = { path = "utils/readiness" }
codex-utils-string = { path = "utils/string" }

View File

@@ -915,7 +915,7 @@ mod tests {
#[test]
fn serialize_get_account() -> Result<()> {
let request = ClientRequest::GetAccount {
request_id: RequestId::Integer(7),
request_id: RequestId::Integer(6),
params: v2::GetAccountParams {
refresh_token: false,
},
@@ -923,7 +923,7 @@ mod tests {
assert_eq!(
json!({
"method": "account/read",
"id": 7,
"id": 6,
"params": {
"refreshToken": false
}

View File

@@ -497,6 +497,14 @@ pub struct ConfigReadResponse {
pub struct ConfigRequirements {
pub allowed_approval_policies: Option<Vec<AskForApproval>>,
pub allowed_sandbox_modes: Option<Vec<SandboxMode>>,
pub enforce_residency: Option<ResidencyRequirement>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
#[ts(export_to = "v2/")]
pub enum ResidencyRequirement {
Us,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]

View File

@@ -19,6 +19,7 @@ workspace = true
anyhow = { workspace = true }
async-trait = { workspace = true }
codex-arg0 = { workspace = true }
codex-cloud-requirements = { workspace = true }
codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-backend-client = { workspace = true }
@@ -32,7 +33,6 @@ codex-rmcp-client = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-json-to-toml = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
mcp-types = { workspace = true }

View File

@@ -103,7 +103,7 @@ Example (from OpenAI's official VSCode extension):
- `config/read` — fetch the effective config on disk after resolving config layering.
- `config/value/write` — write a single config key/value to the user's config.toml on disk.
- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk.
- `configRequirements/read` — fetch the loaded requirements allow-lists from `requirements.toml` and/or MDM (or `null` if none are configured).
- `configRequirements/read` — fetch the loaded requirements allow-lists and `enforceResidency` from `requirements.toml` and/or MDM (or `null` if none are configured).
### Example: Start or resume a thread

View File

@@ -131,7 +131,6 @@ use codex_app_server_protocol::UserInput as V2UserInput;
use codex_app_server_protocol::UserSavedConfig;
use codex_app_server_protocol::build_turns_from_event_msgs;
use codex_backend_client::Client as BackendClient;
use codex_backend_client::UsageMetadata;
use codex_chatgpt::connectors;
use codex_core::AuthManager;
use codex_core::CodexAuth;
@@ -153,6 +152,7 @@ use codex_core::config::ConfigService;
use codex_core::config::edit::ConfigEdit;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::default_client::get_codex_user_agent;
use codex_core::error::CodexErr;
use codex_core::exec::ExecParams;
@@ -240,8 +240,6 @@ const THREAD_LIST_MAX_LIMIT: usize = 100;
// Duration before a ChatGPT login attempt is abandoned.
const LOGIN_CHATGPT_TIMEOUT: Duration = Duration::from_secs(10 * 60);
// Timeout for best-effort plan type lookups via the usage endpoint.
const PLAN_TYPE_FETCH_TIMEOUT: Duration = Duration::from_secs(5);
struct ActiveLogin {
shutdown_handle: ShutdownHandle,
login_id: Uuid,
@@ -266,6 +264,7 @@ pub(crate) struct CodexMessageProcessor {
codex_linux_sandbox_exe: Option<PathBuf>,
config: Arc<Config>,
cli_overrides: Vec<(String, TomlValue)>,
cloud_requirements: CloudRequirementsLoader,
conversation_listeners: HashMap<Uuid, oneshot::Sender<()>>,
listener_thread_ids_by_subscription: HashMap<Uuid, ThreadId>,
active_login: Arc<Mutex<Option<ActiveLogin>>>,
@@ -284,6 +283,17 @@ pub(crate) enum ApiVersion {
V2,
}
pub(crate) struct CodexMessageProcessorArgs {
pub(crate) auth_manager: Arc<AuthManager>,
pub(crate) thread_manager: Arc<ThreadManager>,
pub(crate) outgoing: Arc<OutgoingMessageSender>,
pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
pub(crate) config: Arc<Config>,
pub(crate) cli_overrides: Vec<(String, TomlValue)>,
pub(crate) cloud_requirements: CloudRequirementsLoader,
pub(crate) feedback: CodexFeedback,
}
impl CodexMessageProcessor {
async fn load_thread(
&self,
@@ -308,15 +318,17 @@ impl CodexMessageProcessor {
Ok((thread_id, thread))
}
pub fn new(
auth_manager: Arc<AuthManager>,
thread_manager: Arc<ThreadManager>,
outgoing: Arc<OutgoingMessageSender>,
codex_linux_sandbox_exe: Option<PathBuf>,
config: Arc<Config>,
cli_overrides: Vec<(String, TomlValue)>,
feedback: CodexFeedback,
) -> Self {
pub fn new(args: CodexMessageProcessorArgs) -> Self {
let CodexMessageProcessorArgs {
auth_manager,
thread_manager,
outgoing,
codex_linux_sandbox_exe,
config,
cli_overrides,
cloud_requirements,
feedback,
} = args;
Self {
auth_manager,
thread_manager,
@@ -324,6 +336,7 @@ impl CodexMessageProcessor {
codex_linux_sandbox_exe,
config,
cli_overrides,
cloud_requirements,
conversation_listeners: HashMap::new(),
listener_thread_ids_by_subscription: HashMap::new(),
active_login: Arc::new(Mutex::new(None)),
@@ -336,7 +349,10 @@ impl CodexMessageProcessor {
}
async fn load_latest_config(&self) -> Result<Config, JSONRPCErrorError> {
Config::load_with_cli_overrides(self.cli_overrides.clone())
codex_core::config::ConfigBuilder::default()
.cli_overrides(self.cli_overrides.clone())
.cloud_requirements(self.cloud_requirements.clone())
.build()
.await
.map_err(|err| JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
@@ -1201,16 +1217,19 @@ impl CodexMessageProcessor {
match self.auth_manager.auth().await {
Some(auth) => {
let auth_mode = auth.api_auth_mode();
let token_opt = match auth.bearer_token() {
Ok(Some(token)) if include_token && !token.is_empty() => Some(token),
Ok(_) => None,
let (reported_auth_method, token_opt) = match auth.get_token() {
Ok(token) if !token.is_empty() => {
let tok = if include_token { Some(token) } else { None };
(Some(auth_mode), tok)
}
Ok(_) => (None, None),
Err(err) => {
tracing::warn!("failed to get bearer token for auth status: {err}");
None
tracing::warn!("failed to get token for auth status: {err}");
(None, None)
}
};
GetAuthStatusResponse {
auth_method: Some(auth_mode),
auth_method: reported_auth_method,
auth_token: token_opt,
requires_openai_auth: Some(true),
}
@@ -1265,26 +1284,6 @@ impl CodexMessageProcessor {
}
}
}
CodexAuth::ChatgptProxy(_) => {
let needs_usage_metadata = auth.account_plan_type().is_none()
|| auth.get_account_email().is_none()
|| auth.get_account_id().is_none();
let usage_metadata = if needs_usage_metadata {
self.fetch_usage_metadata_from_usage().await
} else {
None
};
let email = auth
.get_account_email()
.or_else(|| usage_metadata.as_ref().and_then(|meta| meta.email.clone()))
.unwrap_or_else(|| "unknown".to_string());
let plan_type = auth
.account_plan_type()
.or_else(|| usage_metadata.as_ref().map(|meta| meta.plan_type))
.unwrap_or(codex_protocol::account::PlanType::Unknown);
Account::Chatgpt { email, plan_type }
}
}),
None => None,
};
@@ -1317,13 +1316,6 @@ impl CodexMessageProcessor {
}
async fn fetch_account_rate_limits(&self) -> Result<CoreRateLimitSnapshot, JSONRPCErrorError> {
let (snapshot, _) = self.fetch_rate_limits_with_metadata().await?;
Ok(snapshot)
}
async fn fetch_rate_limits_with_metadata(
&self,
) -> Result<(CoreRateLimitSnapshot, UsageMetadata), JSONRPCErrorError> {
let Some(auth) = self.auth_manager.auth().await else {
return Err(JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
@@ -1348,7 +1340,7 @@ impl CodexMessageProcessor {
})?;
client
.get_rate_limits_with_metadata()
.get_rate_limits()
.await
.map_err(|err| JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
@@ -1357,35 +1349,6 @@ impl CodexMessageProcessor {
})
}
async fn fetch_usage_metadata_from_usage(&self) -> Option<UsageMetadata> {
match tokio::time::timeout(
PLAN_TYPE_FETCH_TIMEOUT,
self.fetch_rate_limits_with_metadata(),
)
.await
{
Ok(Ok((_snapshot, metadata))) => {
let _changed = self.auth_manager.update_chatgpt_proxy_account_metadata(
metadata.user_id.clone(),
metadata.account_id.clone(),
metadata.email.clone(),
Some(metadata.plan_type),
);
Some(metadata)
}
Ok(Err(err)) => {
let message = err.message;
warn!("failed to fetch usage metadata from usage endpoint: {message}");
None
}
Err(_) => {
let secs = PLAN_TYPE_FETCH_TIMEOUT.as_secs();
warn!("fetching usage metadata from usage endpoint timed out after {secs}s");
None
}
}
}
async fn get_user_saved_config(&self, request_id: RequestId) {
let service = ConfigService::new_with_defaults(self.config.codex_home.clone());
let user_saved_config: UserSavedConfig = match service.load_user_saved_config().await {
@@ -1575,6 +1538,7 @@ impl CodexMessageProcessor {
&self.cli_overrides,
Some(request_overrides),
typesafe_overrides,
&self.cloud_requirements,
)
.await
{
@@ -1659,6 +1623,7 @@ impl CodexMessageProcessor {
&self.cli_overrides,
config,
typesafe_overrides,
&self.cloud_requirements,
)
.await
{
@@ -2406,6 +2371,7 @@ impl CodexMessageProcessor {
request_overrides,
typesafe_overrides,
history_cwd,
&self.cloud_requirements,
)
.await
{
@@ -2598,6 +2564,7 @@ impl CodexMessageProcessor {
request_overrides,
typesafe_overrides,
history_cwd,
&self.cloud_requirements,
)
.await
{
@@ -3392,6 +3359,7 @@ impl CodexMessageProcessor {
request_overrides,
typesafe_overrides,
history_cwd,
&self.cloud_requirements,
)
.await
{
@@ -3580,6 +3548,7 @@ impl CodexMessageProcessor {
request_overrides,
typesafe_overrides,
history_cwd,
&self.cloud_requirements,
)
.await
{
@@ -4850,6 +4819,7 @@ async fn derive_config_from_params(
cli_overrides: &[(String, TomlValue)],
request_overrides: Option<HashMap<String, serde_json::Value>>,
typesafe_overrides: ConfigOverrides,
cloud_requirements: &CloudRequirementsLoader,
) -> std::io::Result<Config> {
let merged_cli_overrides = cli_overrides
.iter()
@@ -4862,7 +4832,11 @@ async fn derive_config_from_params(
)
.collect::<Vec<_>>();
Config::load_with_cli_overrides_and_harness_overrides(merged_cli_overrides, typesafe_overrides)
codex_core::config::ConfigBuilder::default()
.cli_overrides(merged_cli_overrides)
.harness_overrides(typesafe_overrides)
.cloud_requirements(cloud_requirements.clone())
.build()
.await
}
@@ -4871,6 +4845,7 @@ async fn derive_config_for_cwd(
request_overrides: Option<HashMap<String, serde_json::Value>>,
typesafe_overrides: ConfigOverrides,
cwd: Option<PathBuf>,
cloud_requirements: &CloudRequirementsLoader,
) -> std::io::Result<Config> {
let merged_cli_overrides = cli_overrides
.iter()
@@ -4887,6 +4862,7 @@ async fn derive_config_for_cwd(
.cli_overrides(merged_cli_overrides)
.harness_overrides(typesafe_overrides)
.fallback_cwd(cwd)
.cloud_requirements(cloud_requirements.clone())
.build()
.await
}

View File

@@ -12,8 +12,10 @@ use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::SandboxMode;
use codex_core::config::ConfigService;
use codex_core::config::ConfigServiceError;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigRequirementsToml;
use codex_core::config_loader::LoaderOverrides;
use codex_core::config_loader::ResidencyRequirement as CoreResidencyRequirement;
use codex_core::config_loader::SandboxModeRequirement as CoreSandboxModeRequirement;
use serde_json::json;
use std::path::PathBuf;
@@ -29,9 +31,15 @@ impl ConfigApi {
codex_home: PathBuf,
cli_overrides: Vec<(String, TomlValue)>,
loader_overrides: LoaderOverrides,
cloud_requirements: CloudRequirementsLoader,
) -> Self {
Self {
service: ConfigService::new(codex_home, cli_overrides, loader_overrides),
service: ConfigService::new(
codex_home,
cli_overrides,
loader_overrides,
cloud_requirements,
),
}
}
@@ -84,6 +92,9 @@ fn map_requirements_toml_to_api(requirements: ConfigRequirementsToml) -> ConfigR
.filter_map(map_sandbox_mode_requirement_to_api)
.collect()
}),
enforce_residency: requirements
.enforce_residency
.map(map_residency_requirement_to_api),
}
}
@@ -96,6 +107,14 @@ fn map_sandbox_mode_requirement_to_api(mode: CoreSandboxModeRequirement) -> Opti
}
}
fn map_residency_requirement_to_api(
residency: CoreResidencyRequirement,
) -> codex_app_server_protocol::ResidencyRequirement {
match residency {
CoreResidencyRequirement::Us => codex_app_server_protocol::ResidencyRequirement::Us,
}
}
fn map_error(err: ConfigServiceError) -> JSONRPCErrorError {
if let Some(code) = err.write_error_code() {
return config_write_error(code, err.to_string());
@@ -137,6 +156,7 @@ mod tests {
]),
mcp_servers: None,
rules: None,
enforce_residency: Some(CoreResidencyRequirement::Us),
};
let mapped = map_requirements_toml_to_api(requirements);
@@ -152,5 +172,9 @@ mod tests {
mapped.allowed_sandbox_modes,
Some(vec![SandboxMode::ReadOnly]),
);
assert_eq!(
mapped.enforce_residency,
Some(codex_app_server_protocol::ResidencyRequirement::Us),
);
}
}

View File

@@ -1,8 +1,11 @@
#![deny(clippy::print_stdout, clippy::print_stderr)]
use codex_cloud_requirements::cloud_requirements_loader;
use codex_common::CliConfigOverrides;
use codex_core::AuthManager;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigLayerStackOrdering;
use codex_core::config_loader::LoaderOverrides;
use std::io::ErrorKind;
@@ -169,7 +172,6 @@ pub async fn run_main(
cli_config_overrides: CliConfigOverrides,
loader_overrides: LoaderOverrides,
default_analytics_enabled: bool,
default_chatgpt_proxy_auth: bool,
) -> IoResult<()> {
// Set up channels.
let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
@@ -206,11 +208,32 @@ pub async fn run_main(
format!("error parsing -c overrides: {e}"),
)
})?;
let cloud_requirements = match ConfigBuilder::default()
.cli_overrides(cli_kv_overrides.clone())
.loader_overrides(loader_overrides.clone())
.build()
.await
{
Ok(config) => {
let auth_manager = AuthManager::shared(
config.codex_home.clone(),
false,
config.cli_auth_credentials_store_mode,
);
cloud_requirements_loader(auth_manager, config.chatgpt_base_url)
}
Err(err) => {
warn!(error = %err, "Failed to preload config for cloud requirements");
// TODO(gt): Make cloud requirements preload failures blocking once we can fail-closed.
CloudRequirementsLoader::default()
}
};
let loader_overrides_for_config_api = loader_overrides.clone();
let mut config_warnings = Vec::new();
let config = match ConfigBuilder::default()
.cli_overrides(cli_kv_overrides.clone())
.loader_overrides(loader_overrides)
.cloud_requirements(cloud_requirements.clone())
.build()
.await
{
@@ -298,7 +321,7 @@ pub async fn run_main(
config: std::sync::Arc::new(config),
cli_overrides,
loader_overrides,
default_chatgpt_proxy_auth,
cloud_requirements: cloud_requirements.clone(),
feedback: feedback.clone(),
config_warnings,
});

View File

@@ -1,4 +1,3 @@
use clap::Parser;
use codex_app_server::run_main;
use codex_arg0::arg0_dispatch_or_else;
use codex_common::CliConfigOverrides;
@@ -9,20 +8,8 @@ use std::path::PathBuf;
// managed config file without writing to /etc.
const MANAGED_CONFIG_PATH_ENV_VAR: &str = "CODEX_APP_SERVER_MANAGED_CONFIG_PATH";
#[derive(Debug, Parser, Default, Clone)]
#[command(bin_name = "codex-app-server")]
struct AppServerCli {
#[clap(flatten)]
config_overrides: CliConfigOverrides,
/// Seed ChatGPT proxy auth (tokenless) on startup when no auth is present.
#[arg(long = "default-chatgpt-proxy-auth")]
default_chatgpt_proxy_auth: bool,
}
fn main() -> anyhow::Result<()> {
arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
let cli = AppServerCli::parse();
let managed_config_path = managed_config_path_from_debug_env();
let loader_overrides = LoaderOverrides {
managed_config_path,
@@ -31,10 +18,9 @@ fn main() -> anyhow::Result<()> {
run_main(
codex_linux_sandbox_exe,
cli.config_overrides,
CliConfigOverrides::default(),
loader_overrides,
false,
cli.default_chatgpt_proxy_auth,
)
.await?;
Ok(())

View File

@@ -2,6 +2,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use crate::codex_message_processor::CodexMessageProcessor;
use crate::codex_message_processor::CodexMessageProcessorArgs;
use crate::config_api::ConfigApi;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::OutgoingMessageSender;
@@ -26,41 +27,28 @@ use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestPayload;
use codex_core::AuthManager;
use codex_core::ThreadManager;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::ExternalAuthRefreshContext;
use codex_core::auth::ExternalAuthRefreshReason;
use codex_core::auth::ExternalAuthRefresher;
use codex_core::auth::ExternalAuthTokens;
use codex_core::auth::login_with_chatgpt_proxy;
use codex_core::config::Config;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::LoaderOverrides;
use codex_core::default_client::SetOriginatorError;
use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
use codex_core::default_client::set_default_client_residency_requirement;
use codex_core::default_client::set_default_originator;
use codex_feedback::CodexFeedback;
use codex_protocol::ThreadId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::protocol::SessionSource;
use tokio::sync::broadcast;
use tokio::time::Duration;
use tokio::time::timeout;
use toml::Value as TomlValue;
use tracing::warn;
const EXTERNAL_AUTH_REFRESH_TIMEOUT: Duration = Duration::from_secs(10);
pub(crate) struct MessageProcessorArgs {
pub(crate) outgoing: OutgoingMessageSender,
pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
pub(crate) config: Arc<Config>,
pub(crate) cli_overrides: Vec<(String, TomlValue)>,
pub(crate) loader_overrides: LoaderOverrides,
pub(crate) default_chatgpt_proxy_auth: bool,
pub(crate) feedback: CodexFeedback,
pub(crate) config_warnings: Vec<ConfigWarningNotification>,
}
#[derive(Clone)]
struct ExternalAuthRefreshBridge {
outgoing: Arc<OutgoingMessageSender>,
@@ -117,10 +105,22 @@ pub(crate) struct MessageProcessor {
outgoing: Arc<OutgoingMessageSender>,
codex_message_processor: CodexMessageProcessor,
config_api: ConfigApi,
config: Arc<Config>,
initialized: bool,
config_warnings: Vec<ConfigWarningNotification>,
}
pub(crate) struct MessageProcessorArgs {
pub(crate) outgoing: OutgoingMessageSender,
pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
pub(crate) config: Arc<Config>,
pub(crate) cli_overrides: Vec<(String, TomlValue)>,
pub(crate) loader_overrides: LoaderOverrides,
pub(crate) cloud_requirements: CloudRequirementsLoader,
pub(crate) feedback: CodexFeedback,
pub(crate) config_warnings: Vec<ConfigWarningNotification>,
}
impl MessageProcessor {
/// Create a new `MessageProcessor`, retaining a handle to the outgoing
/// `Sender` so handlers can enqueue messages to be written to stdout.
@@ -131,7 +131,7 @@ impl MessageProcessor {
config,
cli_overrides,
loader_overrides,
default_chatgpt_proxy_auth,
cloud_requirements,
feedback,
config_warnings,
} = args;
@@ -141,23 +141,6 @@ impl MessageProcessor {
false,
config.cli_auth_credentials_store_mode,
);
if default_chatgpt_proxy_auth
&& auth_manager.auth_cached().is_none()
&& !matches!(config.forced_login_method, Some(ForcedLoginMethod::Api))
{
let account_id = config.forced_chatgpt_workspace_id.as_deref();
if let Err(err) = login_with_chatgpt_proxy(
&config.codex_home,
account_id,
None,
None,
AuthCredentialsStoreMode::Ephemeral,
) {
warn!("failed to seed default ChatGPT proxy auth: {err}");
} else {
auth_manager.reload();
}
}
auth_manager.set_forced_chatgpt_workspace_id(config.forced_chatgpt_workspace_id.clone());
auth_manager.set_external_auth_refresher(Arc::new(ExternalAuthRefreshBridge {
outgoing: outgoing.clone(),
@@ -167,21 +150,28 @@ impl MessageProcessor {
auth_manager.clone(),
SessionSource::VSCode,
));
let codex_message_processor = CodexMessageProcessor::new(
let codex_message_processor = CodexMessageProcessor::new(CodexMessageProcessorArgs {
auth_manager,
thread_manager,
outgoing.clone(),
outgoing: outgoing.clone(),
codex_linux_sandbox_exe,
Arc::clone(&config),
cli_overrides.clone(),
config: Arc::clone(&config),
cli_overrides: cli_overrides.clone(),
cloud_requirements: cloud_requirements.clone(),
feedback,
});
let config_api = ConfigApi::new(
config.codex_home.clone(),
cli_overrides,
loader_overrides,
cloud_requirements,
);
let config_api = ConfigApi::new(config.codex_home.clone(), cli_overrides, loader_overrides);
Self {
outgoing,
codex_message_processor,
config_api,
config,
initialized: false,
config_warnings,
}
@@ -254,6 +244,7 @@ impl MessageProcessor {
}
}
}
set_default_client_residency_requirement(self.config.enforce_residency.value());
let user_agent_suffix = format!("{name}; {version}");
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
*suffix = Some(user_agent_suffix);

View File

@@ -163,7 +163,6 @@ pub fn write_chatgpt_auth(
openai_api_key: None,
tokens: Some(tokens),
last_refresh,
chatgpt_proxy: None,
};
save_auth(codex_home, &auth, cli_auth_credentials_store_mode).context("write auth.json")

View File

@@ -38,14 +38,6 @@ impl PathStyle {
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct UsageMetadata {
pub user_id: Option<String>,
pub account_id: Option<String>,
pub email: Option<String>,
pub plan_type: AccountPlanType,
}
#[derive(Clone, Debug)]
pub struct Client {
base_url: String,
@@ -83,10 +75,10 @@ impl Client {
}
pub fn from_auth(base_url: impl Into<String>, auth: &CodexAuth) -> Result<Self> {
let mut client = Self::new(base_url)?.with_user_agent(get_codex_user_agent());
if let Some(token) = auth.bearer_token().map_err(anyhow::Error::from)? {
client = client.with_bearer_token(token);
}
let token = auth.get_token().map_err(anyhow::Error::from)?;
let mut client = Self::new(base_url)?
.with_user_agent(get_codex_user_agent())
.with_bearer_token(token);
if let Some(account_id) = auth.get_account_id() {
client = client.with_chatgpt_account_id(account_id);
}
@@ -167,34 +159,17 @@ impl Client {
}
}
fn usage_url(&self) -> String {
match self.path_style {
pub async fn get_rate_limits(&self) -> Result<RateLimitSnapshot> {
let url = match self.path_style {
PathStyle::CodexApi => format!("{}/api/codex/usage", self.base_url),
PathStyle::ChatGptApi => format!("{}/wham/usage", self.base_url),
}
}
async fn fetch_usage_payload(&self) -> Result<RateLimitStatusPayload> {
let url = self.usage_url();
};
let req = self.http.get(&url).headers(self.headers());
let (body, ct) = self.exec_request(req, "GET", &url).await?;
self.decode_json(&url, &ct, &body)
}
pub async fn get_rate_limits(&self) -> Result<RateLimitSnapshot> {
let payload = self.fetch_usage_payload().await?;
let payload: RateLimitStatusPayload = self.decode_json(&url, &ct, &body)?;
Ok(Self::rate_limit_snapshot_from_payload(payload))
}
pub async fn get_rate_limits_with_metadata(
&self,
) -> Result<(RateLimitSnapshot, UsageMetadata)> {
let payload = self.fetch_usage_payload().await?;
let metadata = Self::usage_metadata_from_payload(&payload);
let snapshot = Self::rate_limit_snapshot_from_payload(payload);
Ok((snapshot, metadata))
}
pub async fn list_tasks(
&self,
limit: Option<i32>,
@@ -342,15 +317,6 @@ impl Client {
}
}
fn usage_metadata_from_payload(payload: &RateLimitStatusPayload) -> UsageMetadata {
UsageMetadata {
user_id: payload.user_id.clone(),
account_id: payload.account_id.clone(),
email: payload.email.clone(),
plan_type: Self::map_plan_type(payload.plan_type),
}
}
fn map_rate_limit_window(
window: Option<Option<Box<RateLimitWindowSnapshot>>>,
) -> Option<RateLimitWindow> {
@@ -408,31 +374,3 @@ impl Client {
Some((seconds_i64 + 59) / 60)
}
}
#[cfg(test)]
mod tests {
use super::Client;
use super::UsageMetadata;
use crate::types::PlanType;
use crate::types::RateLimitStatusPayload;
use pretty_assertions::assert_eq;
#[test]
fn usage_metadata_maps_optional_fields() {
let payload = RateLimitStatusPayload {
plan_type: PlanType::Plus,
user_id: Some("user-123".to_string()),
account_id: Some("acc-456".to_string()),
email: Some("user@example.com".to_string()),
rate_limit: None,
credits: None,
};
let metadata: UsageMetadata = Client::usage_metadata_from_payload(&payload);
assert_eq!(metadata.user_id, Some("user-123".to_string()));
assert_eq!(metadata.account_id, Some("acc-456".to_string()));
assert_eq!(metadata.email, Some("user@example.com".to_string()));
assert_eq!(metadata.plan_type, codex_protocol::account::PlanType::Plus);
}
}

View File

@@ -2,7 +2,6 @@ mod client;
pub mod types;
pub use client::Client;
pub use client::UsageMetadata;
pub use types::CodeTaskDetailsResponse;
pub use types::CodeTaskDetailsResponseExt;
pub use types::ConfigFileResponse;

View File

@@ -283,10 +283,6 @@ struct AppServerCommand {
/// See https://developers.openai.com/codex/config-advanced/#metrics for more details.
#[arg(long = "analytics-default-enabled")]
analytics_default_enabled: bool,
/// Seed ChatGPT proxy auth (tokenless) on startup when no auth is present.
#[arg(long = "default-chatgpt-proxy-auth")]
default_chatgpt_proxy_auth: bool,
}
#[derive(Debug, clap::Subcommand)]
@@ -539,7 +535,6 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
root_config_overrides,
codex_core::config_loader::LoaderOverrides::default(),
app_server_cli.analytics_default_enabled,
app_server_cli.default_chatgpt_proxy_auth,
)
.await?;
}
@@ -1268,19 +1263,6 @@ mod tests {
assert!(app_server.analytics_default_enabled);
}
#[test]
fn app_server_default_chatgpt_proxy_auth_disabled_without_flag() {
let app_server = app_server_from_args(["codex", "app-server"].as_ref());
assert!(!app_server.default_chatgpt_proxy_auth);
}
#[test]
fn app_server_default_chatgpt_proxy_auth_enabled_with_flag() {
let app_server =
app_server_from_args(["codex", "app-server", "--default-chatgpt-proxy-auth"].as_ref());
assert!(app_server.default_chatgpt_proxy_auth);
}
#[test]
fn features_enable_parses_feature_name() {
let cli = MultitoolCli::try_parse_from(["codex", "features", "enable", "unified_exec"])

View File

@@ -14,10 +14,12 @@ codex-core = { workspace = true }
codex-otel = { workspace = true }
codex-protocol = { workspace = true }
tokio = { workspace = true, features = ["sync", "time"] }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
[dev-dependencies]
anyhow = { workspace = true }
base64 = { workspace = true }
pretty_assertions = { workspace = true }
serde_json = { workspace = true }

View File

@@ -3,9 +3,7 @@
//! This crate fetches `requirements.toml` data from the backend as an alternative to loading it
//! from the local filesystem. It only applies to Enterprise ChatGPT customers.
//!
//! Today, fetching is best-effort: on error or timeout, Codex continues without cloud requirements.
//! We expect to tighten this so that Enterprise ChatGPT customers must successfully fetch these
//! requirements before Codex will run.
//! Enterprise ChatGPT customers must successfully fetch these requirements before Codex will run.
use async_trait::async_trait;
use codex_backend_client::Client as BackendClient;
@@ -14,21 +12,73 @@ use codex_core::auth::CodexAuth;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigRequirementsToml;
use codex_protocol::account::PlanType;
use std::io;
use std::sync::Arc;
use std::time::Duration;
use std::time::Instant;
use thiserror::Error;
use tokio::time::timeout;
/// Maximum time to wait for the cloud requirements fetch. This blocks Codex
/// startup, so it must be short.
const CLOUD_REQUIREMENTS_TIMEOUT: Duration = Duration::from_secs(5);
/// Top-level failure while loading cloud requirements, split by whether the
/// fault lies in the fetched content itself or in reaching the backend.
#[derive(Debug, Error, Clone, PartialEq, Eq)]
enum CloudRequirementsError {
    /// The requirements payload was fetched but unusable (e.g. invalid TOML).
    #[error("cloud requirements user error: {0}")]
    User(CloudRequirementsUserError),
    /// The fetch itself failed (client setup, request, missing body, timeout,
    /// or task failure).
    #[error("cloud requirements network error: {0}")]
    Network(CloudRequirementsNetworkError),
}
impl From<CloudRequirementsUserError> for CloudRequirementsError {
fn from(err: CloudRequirementsUserError) -> Self {
CloudRequirementsError::User(err)
}
}
impl From<CloudRequirementsNetworkError> for CloudRequirementsError {
fn from(err: CloudRequirementsNetworkError) -> Self {
CloudRequirementsError::Network(err)
}
}
impl From<CloudRequirementsError> for io::Error {
    /// Convert a cloud-requirements failure into an `io::Error`, picking an
    /// `ErrorKind` that preserves the failure category: content problems map
    /// to `InvalidData`, timeouts to `TimedOut`, and all other transport
    /// failures to `Other`. The original error is kept as the source.
    fn from(err: CloudRequirementsError) -> Self {
        let kind = if matches!(err, CloudRequirementsError::User(_)) {
            io::ErrorKind::InvalidData
        } else if matches!(
            err,
            CloudRequirementsError::Network(CloudRequirementsNetworkError::Timeout { .. })
        ) {
            io::ErrorKind::TimedOut
        } else {
            io::ErrorKind::Other
        };
        io::Error::new(kind, err)
    }
}
/// Errors attributable to the requirements content rather than to transport:
/// the payload was fetched successfully but could not be used.
#[derive(Debug, Error, Clone, PartialEq, Eq)]
enum CloudRequirementsUserError {
    /// The fetched payload was not valid TOML for the requirements schema.
    #[error("failed to parse requirements TOML: {message}")]
    InvalidToml { message: String },
}
/// Transport-level failures while fetching cloud requirements from the
/// backend. Messages are stored as strings so the error stays `Clone`/`Eq`.
#[derive(Debug, Error, Clone, PartialEq, Eq)]
enum CloudRequirementsNetworkError {
    /// The backend client could not be constructed from the current auth.
    #[error("backend client initialization failed: {message}")]
    BackendClient { message: String },
    /// The HTTP request to fetch the requirements file failed.
    #[error("request failed: {message}")]
    Request { message: String },
    /// The backend responded but the payload had no contents field.
    #[error("cloud requirements response missing contents")]
    MissingContents,
    /// The overall fetch exceeded the startup-blocking deadline.
    #[error("timed out after {timeout_ms}ms")]
    Timeout { timeout_ms: u64 },
    /// The spawned fetch task itself failed (e.g. join error).
    #[error("cloud requirements task failed: {message}")]
    Task { message: String },
}
#[async_trait]
trait RequirementsFetcher: Send + Sync {
/// Returns requirements as a TOML string.
///
/// TODO(gt): For now, returns an Option. But when we want to make this fail-closed, return a
/// Result.
async fn fetch_requirements(&self, auth: &CodexAuth) -> Option<String>;
async fn fetch_requirements(&self, auth: &CodexAuth) -> Result<String, CloudRequirementsError>;
}
struct BackendRequirementsFetcher {
@@ -43,7 +93,7 @@ impl BackendRequirementsFetcher {
#[async_trait]
impl RequirementsFetcher for BackendRequirementsFetcher {
async fn fetch_requirements(&self, auth: &CodexAuth) -> Option<String> {
async fn fetch_requirements(&self, auth: &CodexAuth) -> Result<String, CloudRequirementsError> {
let client = BackendClient::from_auth(self.base_url.clone(), auth)
.inspect_err(|err| {
tracing::warn!(
@@ -51,20 +101,28 @@ impl RequirementsFetcher for BackendRequirementsFetcher {
"Failed to construct backend client for cloud requirements"
);
})
.ok()?;
.map_err(|err| CloudRequirementsNetworkError::BackendClient {
message: err.to_string(),
})
.map_err(CloudRequirementsError::from)?;
let response = client
.get_config_requirements_file()
.await
.inspect_err(|err| tracing::warn!(error = %err, "Failed to fetch cloud requirements"))
.ok()?;
.map_err(|err| CloudRequirementsNetworkError::Request {
message: err.to_string(),
})
.map_err(CloudRequirementsError::from)?;
let Some(contents) = response.contents else {
tracing::warn!("Cloud requirements response missing contents");
return None;
return Err(CloudRequirementsError::from(
CloudRequirementsNetworkError::MissingContents,
));
};
Some(contents)
Ok(contents)
}
}
@@ -87,29 +145,50 @@ impl CloudRequirementsService {
}
}
async fn fetch_with_timeout(&self) -> Option<ConfigRequirementsToml> {
async fn fetch_with_timeout(
&self,
) -> Result<Option<ConfigRequirementsToml>, CloudRequirementsError> {
let _timer =
codex_otel::start_global_timer("codex.cloud_requirements.fetch.duration_ms", &[]);
let started_at = Instant::now();
let result = timeout(self.timeout, self.fetch())
.await
.inspect_err(|_| {
tracing::warn!("Timed out waiting for cloud requirements; continuing without them");
})
.ok()?;
let result = timeout(self.timeout, self.fetch()).await.map_err(|_| {
CloudRequirementsNetworkError::Timeout {
timeout_ms: self.timeout.as_millis() as u64,
}
})?;
let elapsed_ms = started_at.elapsed().as_millis();
match result.as_ref() {
Some(requirements) => {
Ok(Some(requirements)) => {
tracing::info!(
elapsed_ms = started_at.elapsed().as_millis(),
elapsed_ms,
status = "success",
requirements = ?requirements,
"Cloud requirements load completed"
);
println!(
"cloud_requirements status=success elapsed_ms={elapsed_ms} value={requirements:?}"
);
}
None => {
Ok(None) => {
tracing::info!(
elapsed_ms = started_at.elapsed().as_millis(),
"Cloud requirements load completed (none)"
elapsed_ms,
status = "none",
requirements = %"none",
"Cloud requirements load completed"
);
println!("cloud_requirements status=none elapsed_ms={elapsed_ms} value=none");
}
Err(err) => {
tracing::warn!(
elapsed_ms,
status = "error",
requirements = %"none",
error = %err,
"Cloud requirements load failed"
);
println!(
"cloud_requirements status=error elapsed_ms={elapsed_ms} value=none error={err}"
);
}
}
@@ -117,17 +196,19 @@ impl CloudRequirementsService {
result
}
async fn fetch(&self) -> Option<ConfigRequirementsToml> {
let auth = self.auth_manager.auth().await?;
async fn fetch(&self) -> Result<Option<ConfigRequirementsToml>, CloudRequirementsError> {
let auth = match self.auth_manager.auth().await {
Some(auth) => auth,
None => return Ok(None),
};
if !(auth.is_chatgpt_auth() && auth.account_plan_type() == Some(PlanType::Enterprise)) {
return None;
return Ok(None);
}
let contents = self.fetcher.fetch_requirements(&auth).await?;
parse_cloud_requirements(&contents)
.inspect_err(|err| tracing::warn!(error = %err, "Failed to parse cloud requirements"))
.ok()
.flatten()
.map_err(CloudRequirementsError::from)
}
}
@@ -143,20 +224,28 @@ pub fn cloud_requirements_loader(
let task = tokio::spawn(async move { service.fetch_with_timeout().await });
CloudRequirementsLoader::new(async move {
task.await
.map_err(|err| {
CloudRequirementsError::from(CloudRequirementsNetworkError::Task {
message: err.to_string(),
})
})
.and_then(std::convert::identity)
.map_err(io::Error::from)
.inspect_err(|err| tracing::warn!(error = %err, "Cloud requirements task failed"))
.ok()
.flatten()
})
}
fn parse_cloud_requirements(
contents: &str,
) -> Result<Option<ConfigRequirementsToml>, toml::de::Error> {
) -> Result<Option<ConfigRequirementsToml>, CloudRequirementsUserError> {
if contents.trim().is_empty() {
return Ok(None);
}
let requirements: ConfigRequirementsToml = toml::from_str(contents)?;
let requirements: ConfigRequirementsToml =
toml::from_str(contents).map_err(|err| CloudRequirementsUserError::InvalidToml {
message: err.to_string(),
})?;
if requirements.is_empty() {
Ok(None)
} else {
@@ -167,6 +256,7 @@ fn parse_cloud_requirements(
#[cfg(test)]
mod tests {
use super::*;
use anyhow::Result;
use base64::Engine;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use codex_core::auth::AuthCredentialsStoreMode;
@@ -177,28 +267,28 @@ mod tests {
use std::path::Path;
use tempfile::tempdir;
fn write_auth_json(codex_home: &Path, value: serde_json::Value) -> std::io::Result<()> {
fn write_auth_json(codex_home: &Path, value: serde_json::Value) -> Result<()> {
std::fs::write(codex_home.join("auth.json"), serde_json::to_string(&value)?)?;
Ok(())
}
fn auth_manager_with_api_key() -> Arc<AuthManager> {
let tmp = tempdir().expect("tempdir");
fn auth_manager_with_api_key() -> Result<Arc<AuthManager>> {
let tmp = tempdir()?;
let auth_json = json!({
"OPENAI_API_KEY": "sk-test-key",
"tokens": null,
"last_refresh": null,
});
write_auth_json(tmp.path(), auth_json).expect("write auth");
Arc::new(AuthManager::new(
write_auth_json(tmp.path(), auth_json)?;
Ok(Arc::new(AuthManager::new(
tmp.path().to_path_buf(),
false,
AuthCredentialsStoreMode::File,
))
)))
}
fn auth_manager_with_plan(plan_type: &str) -> Arc<AuthManager> {
let tmp = tempdir().expect("tempdir");
fn auth_manager_with_plan(plan_type: &str) -> Result<Arc<AuthManager>> {
let tmp = tempdir()?;
let header = json!({ "alg": "none", "typ": "JWT" });
let auth_payload = json!({
"chatgpt_plan_type": plan_type,
@@ -209,8 +299,8 @@ mod tests {
"email": "user@example.com",
"https://api.openai.com/auth": auth_payload,
});
let header_b64 = URL_SAFE_NO_PAD.encode(serde_json::to_vec(&header).expect("header"));
let payload_b64 = URL_SAFE_NO_PAD.encode(serde_json::to_vec(&payload).expect("payload"));
let header_b64 = URL_SAFE_NO_PAD.encode(serde_json::to_vec(&header)?);
let payload_b64 = URL_SAFE_NO_PAD.encode(serde_json::to_vec(&payload)?);
let signature_b64 = URL_SAFE_NO_PAD.encode(b"sig");
let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");
@@ -223,26 +313,31 @@ mod tests {
},
"last_refresh": null,
});
write_auth_json(tmp.path(), auth_json).expect("write auth");
Arc::new(AuthManager::new(
write_auth_json(tmp.path(), auth_json)?;
Ok(Arc::new(AuthManager::new(
tmp.path().to_path_buf(),
false,
AuthCredentialsStoreMode::File,
))
)))
}
fn parse_for_fetch(contents: Option<&str>) -> Option<ConfigRequirementsToml> {
contents.and_then(|contents| parse_cloud_requirements(contents).ok().flatten())
fn parse_for_fetch(
contents: Option<&str>,
) -> Result<Option<ConfigRequirementsToml>, CloudRequirementsUserError> {
contents.map(parse_cloud_requirements).unwrap_or(Ok(None))
}
struct StaticFetcher {
contents: Option<String>,
result: Result<String, CloudRequirementsError>,
}
#[async_trait::async_trait]
impl RequirementsFetcher for StaticFetcher {
async fn fetch_requirements(&self, _auth: &CodexAuth) -> Option<String> {
self.contents.clone()
async fn fetch_requirements(
&self,
_auth: &CodexAuth,
) -> Result<String, CloudRequirementsError> {
self.result.clone()
}
}
@@ -250,87 +345,115 @@ mod tests {
#[async_trait::async_trait]
impl RequirementsFetcher for PendingFetcher {
async fn fetch_requirements(&self, _auth: &CodexAuth) -> Option<String> {
async fn fetch_requirements(
&self,
_auth: &CodexAuth,
) -> Result<String, CloudRequirementsError> {
pending::<()>().await;
None
Ok(String::new())
}
}
#[tokio::test]
async fn fetch_cloud_requirements_skips_non_chatgpt_auth() {
let auth_manager = auth_manager_with_api_key();
async fn fetch_cloud_requirements_skips_non_chatgpt_auth() -> Result<()> {
let service = CloudRequirementsService::new(
auth_manager,
Arc::new(StaticFetcher { contents: None }),
auth_manager_with_api_key()?,
Arc::new(StaticFetcher {
result: Ok(String::new()),
}),
CLOUD_REQUIREMENTS_TIMEOUT,
);
let result = service.fetch().await;
assert!(result.is_none());
assert_eq!(service.fetch().await, Ok(None));
Ok(())
}
#[tokio::test]
async fn fetch_cloud_requirements_skips_non_enterprise_plan() {
let auth_manager = auth_manager_with_plan("pro");
async fn fetch_cloud_requirements_skips_non_enterprise_plan() -> Result<()> {
let service = CloudRequirementsService::new(
auth_manager,
Arc::new(StaticFetcher { contents: None }),
auth_manager_with_plan("pro")?,
Arc::new(StaticFetcher {
result: Ok(String::new()),
}),
CLOUD_REQUIREMENTS_TIMEOUT,
);
let result = service.fetch().await;
assert!(result.is_none());
assert_eq!(service.fetch().await, Ok(None));
Ok(())
}
#[tokio::test]
async fn fetch_cloud_requirements_handles_missing_contents() {
let result = parse_for_fetch(None);
assert!(result.is_none());
}
#[tokio::test]
async fn fetch_cloud_requirements_handles_empty_contents() {
let result = parse_for_fetch(Some(" "));
assert!(result.is_none());
}
#[tokio::test]
async fn fetch_cloud_requirements_handles_invalid_toml() {
let result = parse_for_fetch(Some("not = ["));
assert!(result.is_none());
}
#[tokio::test]
async fn fetch_cloud_requirements_ignores_empty_requirements() {
let result = parse_for_fetch(Some("# comment"));
assert!(result.is_none());
}
#[tokio::test]
async fn fetch_cloud_requirements_parses_valid_toml() {
let result = parse_for_fetch(Some("allowed_approval_policies = [\"never\"]"));
async fn fetch_cloud_requirements_returns_missing_contents_error() -> Result<()> {
let service = CloudRequirementsService::new(
auth_manager_with_plan("enterprise")?,
Arc::new(StaticFetcher {
result: Err(CloudRequirementsError::Network(
CloudRequirementsNetworkError::MissingContents,
)),
}),
CLOUD_REQUIREMENTS_TIMEOUT,
);
assert_eq!(
result,
Some(ConfigRequirementsToml {
service.fetch().await,
Err(CloudRequirementsError::Network(
CloudRequirementsNetworkError::MissingContents
))
);
Ok(())
}
#[tokio::test]
async fn fetch_cloud_requirements_handles_empty_contents() -> Result<()> {
assert_eq!(parse_for_fetch(Some(" ")), Ok(None));
Ok(())
}
#[tokio::test]
async fn fetch_cloud_requirements_handles_invalid_toml() -> Result<()> {
assert!(matches!(
parse_for_fetch(Some("not = [")),
Err(CloudRequirementsUserError::InvalidToml { .. })
));
Ok(())
}
#[tokio::test]
async fn fetch_cloud_requirements_ignores_empty_requirements() -> Result<()> {
assert_eq!(parse_for_fetch(Some("# comment")), Ok(None));
Ok(())
}
#[tokio::test]
async fn fetch_cloud_requirements_parses_valid_toml() -> Result<()> {
assert_eq!(
parse_for_fetch(Some("allowed_approval_policies = [\"never\"]")),
Ok(Some(ConfigRequirementsToml {
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
mcp_servers: None,
rules: None,
})
enforce_residency: None,
}))
);
Ok(())
}
#[tokio::test(start_paused = true)]
async fn fetch_cloud_requirements_times_out() {
let auth_manager = auth_manager_with_plan("enterprise");
async fn fetch_cloud_requirements_times_out() -> Result<()> {
let service = CloudRequirementsService::new(
auth_manager,
auth_manager_with_plan("enterprise")?,
Arc::new(PendingFetcher),
CLOUD_REQUIREMENTS_TIMEOUT,
);
let handle = tokio::spawn(async move { service.fetch_with_timeout().await });
tokio::time::advance(CLOUD_REQUIREMENTS_TIMEOUT + Duration::from_millis(1)).await;
let result = handle.await.expect("cloud requirements task");
assert!(result.is_none());
assert_eq!(
handle.await?,
Err(CloudRequirementsError::Network(
CloudRequirementsNetworkError::Timeout {
timeout_ms: CLOUD_REQUIREMENTS_TIMEOUT.as_millis() as u64,
}
))
);
Ok(())
}
}

View File

@@ -33,6 +33,7 @@ pub use crate::endpoint::responses_websocket::ResponsesWebsocketConnection;
pub use crate::error::ApiError;
pub use crate::provider::Provider;
pub use crate::provider::WireApi;
pub use crate::provider::is_azure_responses_wire_base_url;
pub use crate::requests::ChatRequest;
pub use crate::requests::ChatRequestBuilder;
pub use crate::requests::ResponsesRequest;

View File

@@ -95,16 +95,7 @@ impl Provider {
}
pub fn is_azure_responses_endpoint(&self) -> bool {
if self.wire != WireApi::Responses {
return false;
}
if self.name.eq_ignore_ascii_case("azure") {
return true;
}
self.base_url.to_ascii_lowercase().contains("openai.azure.")
|| matches_azure_responses_base_url(&self.base_url)
is_azure_responses_wire_base_url(self.wire.clone(), &self.name, Some(&self.base_url))
}
pub fn websocket_url_for_path(&self, path: &str) -> Result<Url, url::ParseError> {
@@ -121,6 +112,23 @@ impl Provider {
}
}
/// Returns true when the provider wiring should be treated as an Azure
/// Responses endpoint: the wire API must be `Responses`, and either the
/// provider is literally named "azure" (case-insensitive) or its base URL
/// carries a known Azure host marker.
pub fn is_azure_responses_wire_base_url(wire: WireApi, name: &str, base_url: Option<&str>) -> bool {
    if wire != WireApi::Responses {
        return false;
    }
    if name.eq_ignore_ascii_case("azure") {
        return true;
    }
    match base_url {
        None => false,
        Some(url) => {
            // Lowercase once so both checks are case-insensitive.
            let lowered = url.to_ascii_lowercase();
            lowered.contains("openai.azure.") || matches_azure_responses_base_url(&lowered)
        }
    }
}
fn matches_azure_responses_base_url(base_url: &str) -> bool {
const AZURE_MARKERS: [&str; 5] = [
"cognitiveservices.azure.",
@@ -129,6 +137,54 @@ fn matches_azure_responses_base_url(base_url: &str) -> bool {
"azurefd.",
"windows.net/openai",
];
let base = base_url.to_ascii_lowercase();
AZURE_MARKERS.iter().any(|marker| base.contains(marker))
AZURE_MARKERS.iter().any(|marker| base_url.contains(marker))
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Covers Azure detection: marker-bearing base URLs, the name-based
    /// override, non-Azure URLs, and the wire-API gate.
    #[test]
    fn detects_azure_responses_base_urls() {
        // Base URLs that must be classified as Azure Responses endpoints.
        for base_url in [
            "https://foo.openai.azure.com/openai",
            "https://foo.openai.azure.us/openai/deployments/bar",
            "https://foo.cognitiveservices.azure.cn/openai",
            "https://foo.aoai.azure.com/openai",
            "https://foo.openai.azure-api.net/openai",
            "https://foo.z01.azurefd.net/",
        ] {
            assert!(
                is_azure_responses_wire_base_url(WireApi::Responses, "test", Some(base_url)),
                "expected {base_url} to be detected as Azure"
            );
        }

        // A provider literally named "Azure" matches regardless of URL.
        assert!(is_azure_responses_wire_base_url(
            WireApi::Responses,
            "Azure",
            Some("https://example.com")
        ));

        // URLs without Azure markers must not match.
        for base_url in [
            "https://api.openai.com/v1",
            "https://example.com/openai",
            "https://myproxy.azurewebsites.net/openai",
        ] {
            assert!(
                !is_azure_responses_wire_base_url(WireApi::Responses, "test", Some(base_url)),
                "expected {base_url} not to be detected as Azure"
            );
        }

        // The Chat wire API never counts, even with an Azure name and URL.
        assert!(!is_azure_responses_wire_base_url(
            WireApi::Chat,
            "Azure",
            Some("https://foo.openai.azure.com/openai")
        ));
    }
}

View File

@@ -16,16 +16,6 @@ use serde::Serialize;
pub struct RateLimitStatusPayload {
#[serde(rename = "plan_type")]
pub plan_type: PlanType,
#[serde(default, skip_serializing_if = "Option::is_none", rename = "user_id")]
pub user_id: Option<String>,
#[serde(
default,
skip_serializing_if = "Option::is_none",
rename = "account_id"
)]
pub account_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none", rename = "email")]
pub email: Option<String>,
#[serde(
rename = "rate_limit",
default,
@@ -46,9 +36,6 @@ impl RateLimitStatusPayload {
pub fn new(plan_type: PlanType) -> RateLimitStatusPayload {
RateLimitStatusPayload {
plan_type,
user_id: None,
account_id: None,
email: None,
rate_limit: None,
credits: None,
}

View File

@@ -39,11 +39,11 @@ codex-protocol = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-state = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-home-dir = { workspace = true }
codex-utils-pty = { workspace = true }
codex-utils-readiness = { workspace = true }
codex-utils-string = { workspace = true }
codex-windows-sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
dirs = { workspace = true }
dunce = { workspace = true }
encoding_rs = { workspace = true }
env-flags = { workspace = true }

View File

@@ -177,9 +177,9 @@ pub(crate) fn auth_provider_from_auth(
}
if let Some(auth) = auth {
let token = auth.bearer_token()?;
let token = auth.get_token()?;
Ok(CoreAuthProvider {
token,
token: Some(token),
account_id: auth.get_account_id(),
})
} else {

View File

@@ -21,7 +21,6 @@ use codex_protocol::config_types::ForcedLoginMethod;
pub use crate::auth::storage::AuthCredentialsStoreMode;
pub use crate::auth::storage::AuthDotJson;
use crate::auth::storage::AuthStorageBackend;
use crate::auth::storage::ChatGptProxyAccount;
use crate::auth::storage::create_auth_storage;
use crate::config::Config;
use crate::error::RefreshTokenFailedError;
@@ -54,7 +53,6 @@ pub enum CodexAuth {
ApiKey(ApiKeyAuth),
Chatgpt(ChatgptAuth),
ChatgptAuthTokens(ChatgptAuthTokens),
ChatgptProxy(ChatgptProxy),
}
#[derive(Debug, Clone)]
@@ -73,11 +71,6 @@ pub struct ChatgptAuthTokens {
state: ChatgptAuthState,
}
#[derive(Debug, Clone)]
pub struct ChatgptProxy {
account: ChatGptProxyAccount,
}
#[derive(Debug, Clone)]
struct ChatgptAuthState {
auth_dot_json: Arc<Mutex<Option<AuthDotJson>>>,
@@ -167,12 +160,6 @@ impl CodexAuth {
return Ok(CodexAuth::from_api_key_with_client(api_key, client));
}
if let Some(proxy_account) = auth_dot_json.chatgpt_proxy.clone() {
return Ok(Self::ChatgptProxy(ChatgptProxy {
account: proxy_account,
}));
}
let storage_mode = auth_dot_json.storage_mode(auth_credentials_store_mode);
let state = ChatgptAuthState {
auth_dot_json: Arc::new(Mutex::new(Some(auth_dot_json))),
@@ -202,16 +189,14 @@ impl CodexAuth {
pub fn internal_auth_mode(&self) -> AuthMode {
match self {
Self::ApiKey(_) => AuthMode::ApiKey,
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) | Self::ChatgptProxy(_) => {
AuthMode::Chatgpt
}
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => AuthMode::Chatgpt,
}
}
pub fn api_auth_mode(&self) -> ApiAuthMode {
match self {
Self::ApiKey(_) => ApiAuthMode::ApiKey,
Self::Chatgpt(_) | Self::ChatgptProxy(_) => ApiAuthMode::Chatgpt,
Self::Chatgpt(_) => ApiAuthMode::Chatgpt,
Self::ChatgptAuthTokens(_) => ApiAuthMode::ChatgptAuthTokens,
}
}
@@ -228,7 +213,7 @@ impl CodexAuth {
pub fn api_key(&self) -> Option<&str> {
match self {
Self::ApiKey(auth) => Some(auth.api_key.as_str()),
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) | Self::ChatgptProxy(_) => None,
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => None,
}
}
@@ -245,40 +230,25 @@ impl CodexAuth {
}
}
/// Returns the token string used for bearer authentication, if available.
pub fn bearer_token(&self) -> Result<Option<String>, std::io::Error> {
match self {
Self::ApiKey(auth) => Ok(Some(auth.api_key.clone())),
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => {
let access_token = self.get_token_data()?.access_token;
Ok(Some(access_token))
}
Self::ChatgptProxy(_) => Ok(None),
}
}
/// Returns the token string used for bearer authentication.
pub fn get_token(&self) -> Result<String, std::io::Error> {
let Some(token) = self.bearer_token()? else {
return Err(std::io::Error::other("Bearer token is not available."));
};
Ok(token)
match self {
Self::ApiKey(auth) => Ok(auth.api_key.clone()),
Self::Chatgpt(_) | Self::ChatgptAuthTokens(_) => {
let access_token = self.get_token_data()?.access_token;
Ok(access_token)
}
}
}
/// Returns `None` if `is_chatgpt_auth()` is false.
pub fn get_account_id(&self) -> Option<String> {
match self {
Self::ChatgptProxy(proxy) => proxy.account.account_id.clone(),
_ => self.get_current_token_data().and_then(|t| t.account_id),
}
self.get_current_token_data().and_then(|t| t.account_id)
}
/// Returns `None` if `is_chatgpt_auth()` is false.
pub fn get_account_email(&self) -> Option<String> {
match self {
Self::ChatgptProxy(proxy) => proxy.account.email.clone(),
_ => self.get_current_token_data().and_then(|t| t.id_token.email),
}
self.get_current_token_data().and_then(|t| t.id_token.email)
}
/// Account-facing plan classification derived from the current token.
@@ -286,9 +256,6 @@ impl CodexAuth {
/// mapped from the ID token's internal plan value. Prefer this when you
/// need to make UI or product decisions based on the user's subscription.
pub fn account_plan_type(&self) -> Option<AccountPlanType> {
if let Self::ChatgptProxy(proxy) = self {
return proxy.account.plan_type;
}
let map_known = |kp: &InternalKnownPlan| match kp {
InternalKnownPlan::Free => AccountPlanType::Free,
InternalKnownPlan::Go => AccountPlanType::Go,
@@ -308,22 +275,12 @@ impl CodexAuth {
})
}
/// Returns the ChatGPT workspace/account identifier when available.
pub fn chatgpt_workspace_id(&self) -> Option<String> {
match self {
Self::ChatgptProxy(proxy) => proxy.account.account_id.clone(),
_ => self
.get_current_token_data()
.and_then(|t| t.id_token.chatgpt_account_id.or(t.account_id)),
}
}
/// Returns `None` if `is_chatgpt_auth()` is false.
fn get_current_auth_json(&self) -> Option<AuthDotJson> {
let state = match self {
Self::Chatgpt(auth) => &auth.state,
Self::ChatgptAuthTokens(auth) => &auth.state,
Self::ApiKey(_) | Self::ChatgptProxy(_) => return None,
Self::ApiKey(_) => return None,
};
#[expect(clippy::unwrap_used)]
state.auth_dot_json.lock().unwrap().clone()
@@ -346,7 +303,6 @@ impl CodexAuth {
account_id: Some("account_id".to_string()),
}),
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
let client = crate::default_client::create_client();
@@ -426,7 +382,6 @@ pub fn login_with_api_key(
openai_api_key: Some(api_key.to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
save_auth(codex_home, &auth_dot_json, auth_credentials_store_mode)
}
@@ -445,29 +400,6 @@ pub fn login_with_chatgpt_auth_tokens(
)
}
/// Writes a tokenless ChatGPT proxy auth payload.
pub fn login_with_chatgpt_proxy(
codex_home: &Path,
account_id: Option<&str>,
email: Option<&str>,
plan_type: Option<AccountPlanType>,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
let auth_dot_json = AuthDotJson {
auth_mode: Some(ApiAuthMode::Chatgpt),
openai_api_key: None,
tokens: None,
last_refresh: None,
chatgpt_proxy: Some(ChatGptProxyAccount {
user_id: None,
account_id: account_id.map(str::to_string),
email: email.map(str::to_string),
plan_type,
}),
};
save_auth(codex_home, &auth_dot_json, auth_credentials_store_mode)
}
/// Persist the provided auth payload using the specified backend.
pub fn save_auth(
codex_home: &Path,
@@ -529,10 +461,23 @@ pub fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
return Ok(());
}
// Workspace is the external identifier for account id.
let chatgpt_account_id = auth.chatgpt_workspace_id();
if chatgpt_account_id.as_deref() != Some(expected_account_id) {
let message = match chatgpt_account_id.as_deref() {
let token_data = match auth.get_token_data() {
Ok(data) => data,
Err(err) => {
return logout_with_message(
&config.codex_home,
format!(
"Failed to load ChatGPT credentials while enforcing workspace restrictions: {err}. Logging out."
),
config.cli_auth_credentials_store_mode,
);
}
};
// workspace is the external identifier for account id.
let chatgpt_account_id = token_data.id_token.chatgpt_account_id.as_deref();
if chatgpt_account_id != Some(expected_account_id) {
let message = match chatgpt_account_id {
Some(actual) => format!(
"Login is restricted to workspace {expected_account_id}, but current credentials belong to {actual}. Logging out."
),
@@ -603,29 +548,21 @@ fn load_auth(
codex_home.to_path_buf(),
AuthCredentialsStoreMode::Ephemeral,
);
let ephemeral_auth = match ephemeral_storage.load()? {
Some(auth_dot_json) => Some(build_auth(
auth_dot_json,
AuthCredentialsStoreMode::Ephemeral,
)?),
None => None,
};
if let Some(auth) = ephemeral_auth.as_ref()
&& !matches!(auth, CodexAuth::ChatgptProxy(_))
{
return Ok(ephemeral_auth);
if let Some(auth_dot_json) = ephemeral_storage.load()? {
let auth = build_auth(auth_dot_json, AuthCredentialsStoreMode::Ephemeral)?;
return Ok(Some(auth));
}
// If the caller explicitly requested ephemeral auth, there is no persisted fallback.
if auth_credentials_store_mode == AuthCredentialsStoreMode::Ephemeral {
return Ok(ephemeral_auth);
return Ok(None);
}
// Fall back to the configured persistent store (file/keyring/auto) for managed auth.
let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
let auth_dot_json = match storage.load()? {
Some(auth) => auth,
None => return Ok(ephemeral_auth),
None => return Ok(None),
};
let auth = build_auth(auth_dot_json, auth_credentials_store_mode)?;
@@ -794,7 +731,6 @@ impl AuthDotJson {
openai_api_key: None,
tokens: Some(tokens),
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
}
}
@@ -1149,59 +1085,6 @@ impl AuthManager {
.and_then(|guard| guard.clone())
}
/// Best-effort in-memory update of ChatGPT proxy account metadata.
///
/// This does not persist to disk and only fills in missing fields (or
/// upgrades an `Unknown` plan type when a known one is available).
pub fn update_chatgpt_proxy_account_metadata(
&self,
user_id: Option<String>,
account_id: Option<String>,
email: Option<String>,
plan_type: Option<AccountPlanType>,
) -> bool {
let Ok(mut guard) = self.inner.write() else {
return false;
};
let Some(CodexAuth::ChatgptProxy(proxy)) = guard.auth.as_mut() else {
return false;
};
let mut changed = false;
if proxy.account.user_id.is_none()
&& let Some(user_id) = user_id
{
proxy.account.user_id = Some(user_id);
changed = true;
}
if proxy.account.account_id.is_none()
&& let Some(account_id) = account_id
{
proxy.account.account_id = Some(account_id);
changed = true;
}
if proxy.account.email.is_none()
&& let Some(email) = email
{
proxy.account.email = Some(email);
changed = true;
}
if let Some(plan_type) = plan_type
&& (proxy.account.plan_type.is_none()
|| proxy.account.plan_type == Some(AccountPlanType::Unknown))
&& plan_type != AccountPlanType::Unknown
{
proxy.account.plan_type = Some(plan_type);
changed = true;
}
changed
}
pub fn has_external_auth_refresher(&self) -> bool {
self.inner
.read()
@@ -1260,7 +1143,7 @@ impl AuthManager {
self.reload();
Ok(())
}
CodexAuth::ApiKey(_) | CodexAuth::ChatgptProxy(_) => Ok(()),
CodexAuth::ApiKey(_) => Ok(()),
}
}
@@ -1516,7 +1399,6 @@ mod tests {
account_id: None,
}),
last_refresh: Some(last_refresh),
chatgpt_proxy: None,
},
auth_dot_json
);
@@ -1542,31 +1424,6 @@ mod tests {
assert!(auth.get_token_data().is_err());
}
/// A tokenless ChatGPT proxy credential in the ephemeral store must not
/// shadow a real credential in the persistent store: `load_auth` should fall
/// through to the managed (file) store and return the API-key auth.
#[test]
fn ephemeral_proxy_auth_is_fallback_to_persisted_auth() -> std::io::Result<()> {
    let dir = tempdir()?;
    // Seed tokenless proxy auth in the ephemeral store.
    login_with_chatgpt_proxy(
        dir.path(),
        Some("workspace-123"),
        Some("proxy@example.com"),
        Some(AccountPlanType::Plus),
        AuthCredentialsStoreMode::Ephemeral,
    )?;
    // Persist a real API key in the managed store.
    login_with_api_key(dir.path(), "sk-test-key", AuthCredentialsStoreMode::File)?;
    // Managed auth should override ephemeral proxy auth.
    let auth = load_auth(dir.path(), false, AuthCredentialsStoreMode::File)?
        .expect("auth should be present");
    assert_eq!(auth.internal_auth_mode(), AuthMode::ApiKey);
    assert_eq!(auth.api_key(), Some("sk-test-key"));
    Ok(())
}
#[test]
fn logout_removes_auth_file() -> Result<(), std::io::Error> {
let dir = tempdir()?;
@@ -1575,7 +1432,6 @@ mod tests {
openai_api_key: Some("sk-test-key".to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
super::save_auth(dir.path(), &auth_dot_json, AuthCredentialsStoreMode::File)?;
let auth_file = get_auth_file(dir.path());

View File

@@ -23,7 +23,6 @@ use crate::token_data::TokenData;
use codex_app_server_protocol::AuthMode;
use codex_keyring_store::DefaultKeyringStore;
use codex_keyring_store::KeyringStore;
use codex_protocol::account::PlanType as AccountPlanType;
use once_cell::sync::Lazy;
/// Determine where Codex should store CLI auth credentials.
@@ -55,24 +54,6 @@ pub struct AuthDotJson {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_refresh: Option<DateTime<Utc>>,
/// ChatGPT account metadata supplied by a trusted proxy.
#[serde(
default,
skip_serializing_if = "Option::is_none",
rename = "chatgptProxy"
)]
pub chatgpt_proxy: Option<ChatGptProxyAccount>,
}
/// Account metadata for the tokenless ChatGPT proxy auth mode.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct ChatGptProxyAccount {
pub user_id: Option<String>,
pub account_id: Option<String>,
pub email: Option<String>,
pub plan_type: Option<AccountPlanType>,
}
pub(super) fn get_auth_file(codex_home: &Path) -> PathBuf {
@@ -372,7 +353,6 @@ mod tests {
openai_api_key: Some("test-key".to_string()),
tokens: None,
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
storage
@@ -393,7 +373,6 @@ mod tests {
openai_api_key: Some("test-key".to_string()),
tokens: None,
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
let file = get_auth_file(codex_home.path());
@@ -416,7 +395,6 @@ mod tests {
openai_api_key: Some("sk-test-key".to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
let storage = create_auth_storage(dir.path().to_path_buf(), AuthCredentialsStoreMode::File);
storage.save(&auth_dot_json)?;
@@ -440,7 +418,6 @@ mod tests {
openai_api_key: Some("sk-ephemeral".to_string()),
tokens: None,
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
storage.save(&auth_dot_json)?;
@@ -539,7 +516,6 @@ mod tests {
account_id: Some(format!("{prefix}-account-id")),
}),
last_refresh: None,
chatgpt_proxy: None,
}
}
@@ -556,7 +532,6 @@ mod tests {
openai_api_key: Some("sk-test".to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
seed_keyring_with_auth(
&mock_keyring,
@@ -599,7 +574,6 @@ mod tests {
account_id: Some("account".to_string()),
}),
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
storage.save(&auth)?;

View File

@@ -635,6 +635,7 @@ impl Session {
per_turn_config.model_personality = session_configuration.personality;
per_turn_config.web_search_mode = Some(resolve_web_search_mode_for_turn(
per_turn_config.web_search_mode,
session_configuration.provider.is_azure_responses_endpoint(),
session_configuration.sandbox_policy.get(),
));
per_turn_config.features = config.features.clone();

View File

@@ -24,6 +24,7 @@ use crate::config_loader::ConfigRequirements;
use crate::config_loader::LoaderOverrides;
use crate::config_loader::McpServerIdentity;
use crate::config_loader::McpServerRequirement;
use crate::config_loader::ResidencyRequirement;
use crate::config_loader::Sourced;
use crate::config_loader::load_config_layers_state;
use crate::features::Feature;
@@ -57,7 +58,6 @@ use codex_protocol::openai_models::ReasoningEffort;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_utils_absolute_path::AbsolutePathBufGuard;
use dirs::home_dir;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
@@ -141,6 +141,11 @@ pub struct Config {
pub sandbox_policy: Constrained<SandboxPolicy>,
/// enforce_residency means web traffic cannot be routed outside of a
/// particular geography. HTTP clients should direct their requests
/// using backend-specific headers or URLs to enforce this.
pub enforce_residency: Constrained<Option<ResidencyRequirement>>,
/// True if the user passed in an override or set a value in config.toml
/// for either of approval_policy or sandbox_mode.
pub did_user_set_custom_approval_policy_or_sandbox_mode: bool,
@@ -367,7 +372,7 @@ pub struct ConfigBuilder {
cli_overrides: Option<Vec<(String, TomlValue)>>,
harness_overrides: Option<ConfigOverrides>,
loader_overrides: Option<LoaderOverrides>,
cloud_requirements: Option<CloudRequirementsLoader>,
cloud_requirements: CloudRequirementsLoader,
fallback_cwd: Option<PathBuf>,
}
@@ -393,7 +398,7 @@ impl ConfigBuilder {
}
pub fn cloud_requirements(mut self, cloud_requirements: CloudRequirementsLoader) -> Self {
self.cloud_requirements = Some(cloud_requirements);
self.cloud_requirements = cloud_requirements;
self
}
@@ -524,7 +529,7 @@ pub async fn load_config_as_toml_with_cli_overrides(
Some(cwd.clone()),
&cli_overrides,
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
@@ -628,7 +633,7 @@ pub async fn load_global_mcp_servers(
cwd,
&cli_overrides,
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
let merged_toml = config_layer_stack.effective_config();
@@ -1246,11 +1251,15 @@ fn resolve_web_search_mode(
pub(crate) fn resolve_web_search_mode_for_turn(
explicit_mode: Option<WebSearchMode>,
is_azure_responses_endpoint: bool,
sandbox_policy: &SandboxPolicy,
) -> WebSearchMode {
if let Some(mode) = explicit_mode {
return mode;
}
if is_azure_responses_endpoint {
return WebSearchMode::Disabled;
}
if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
WebSearchMode::Live
} else {
@@ -1513,6 +1522,7 @@ impl Config {
sandbox_policy: mut constrained_sandbox_policy,
mcp_servers,
exec_policy: _,
enforce_residency,
} = requirements;
constrained_approval_policy
@@ -1535,6 +1545,7 @@ impl Config {
cwd: resolved_cwd,
approval_policy: constrained_approval_policy,
sandbox_policy: constrained_sandbox_policy,
enforce_residency,
did_user_set_custom_approval_policy_or_sandbox_mode,
forced_auto_mode_downgraded_on_windows,
shell_environment_policy,
@@ -1749,27 +1760,12 @@ fn toml_uses_deprecated_instructions_file(value: &TomlValue) -> bool {
/// specified by the `CODEX_HOME` environment variable. If not set, defaults to
/// `~/.codex`.
///
/// - If `CODEX_HOME` is set, the value will be canonicalized and this
/// function will Err if the path does not exist.
/// - If `CODEX_HOME` is set, the value must exist and be a directory. The
/// value will be canonicalized and this function will Err otherwise.
/// - If `CODEX_HOME` is not set, this function does not verify that the
/// directory exists.
pub fn find_codex_home() -> std::io::Result<PathBuf> {
// Honor the `CODEX_HOME` environment variable when it is set to allow users
// (and tests) to override the default location.
if let Ok(val) = std::env::var("CODEX_HOME")
&& !val.is_empty()
{
return PathBuf::from(val).canonicalize();
}
let mut p = home_dir().ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::NotFound,
"Could not find home directory",
)
})?;
p.push(".codex");
Ok(p)
codex_utils_home_dir::find_codex_home()
}
/// Returns the path to the folder where Codex logs are stored. Does not verify
@@ -2347,14 +2343,14 @@ trust_level = "trusted"
#[test]
fn web_search_mode_for_turn_defaults_to_cached_when_unset() {
let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::ReadOnly);
let mode = resolve_web_search_mode_for_turn(None, false, &SandboxPolicy::ReadOnly);
assert_eq!(mode, WebSearchMode::Cached);
}
#[test]
fn web_search_mode_for_turn_defaults_to_live_for_danger_full_access() {
let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::DangerFullAccess);
let mode = resolve_web_search_mode_for_turn(None, false, &SandboxPolicy::DangerFullAccess);
assert_eq!(mode, WebSearchMode::Live);
}
@@ -2363,12 +2359,20 @@ trust_level = "trusted"
fn web_search_mode_for_turn_prefers_explicit_value() {
let mode = resolve_web_search_mode_for_turn(
Some(WebSearchMode::Cached),
false,
&SandboxPolicy::DangerFullAccess,
);
assert_eq!(mode, WebSearchMode::Cached);
}
#[test]
fn web_search_mode_for_turn_disables_for_azure_responses_endpoint() {
let mode = resolve_web_search_mode_for_turn(None, true, &SandboxPolicy::DangerFullAccess);
assert_eq!(mode, WebSearchMode::Disabled);
}
#[test]
fn profile_legacy_toggles_override_base() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
@@ -2631,9 +2635,14 @@ profile = "project"
};
let cwd = AbsolutePathBuf::try_from(codex_home.path())?;
let config_layer_stack =
load_config_layers_state(codex_home.path(), Some(cwd), &Vec::new(), overrides, None)
.await?;
let config_layer_stack = load_config_layers_state(
codex_home.path(),
Some(cwd),
&Vec::new(),
overrides,
CloudRequirementsLoader::default(),
)
.await?;
let cfg = deserialize_config_toml_with_base(
config_layer_stack.effective_config(),
codex_home.path(),
@@ -2760,7 +2769,7 @@ profile = "project"
Some(cwd),
&[("model".to_string(), TomlValue::String("cli".to_string()))],
overrides,
None,
CloudRequirementsLoader::default(),
)
.await?;
@@ -3770,6 +3779,7 @@ model_verbosity = "high"
model_provider: fixture.openai_provider.clone(),
approval_policy: Constrained::allow_any(AskForApproval::Never),
sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
enforce_residency: Constrained::allow_any(None),
did_user_set_custom_approval_policy_or_sandbox_mode: true,
forced_auto_mode_downgraded_on_windows: false,
shell_environment_policy: ShellEnvironmentPolicy::default(),
@@ -3854,6 +3864,7 @@ model_verbosity = "high"
model_provider: fixture.openai_chat_completions_provider.clone(),
approval_policy: Constrained::allow_any(AskForApproval::UnlessTrusted),
sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
enforce_residency: Constrained::allow_any(None),
did_user_set_custom_approval_policy_or_sandbox_mode: true,
forced_auto_mode_downgraded_on_windows: false,
shell_environment_policy: ShellEnvironmentPolicy::default(),
@@ -3953,6 +3964,7 @@ model_verbosity = "high"
model_provider: fixture.openai_provider.clone(),
approval_policy: Constrained::allow_any(AskForApproval::OnFailure),
sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
enforce_residency: Constrained::allow_any(None),
did_user_set_custom_approval_policy_or_sandbox_mode: true,
forced_auto_mode_downgraded_on_windows: false,
shell_environment_policy: ShellEnvironmentPolicy::default(),
@@ -4038,6 +4050,7 @@ model_verbosity = "high"
model_provider: fixture.openai_provider.clone(),
approval_policy: Constrained::allow_any(AskForApproval::OnFailure),
sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
enforce_residency: Constrained::allow_any(None),
did_user_set_custom_approval_policy_or_sandbox_mode: true,
forced_auto_mode_downgraded_on_windows: false,
shell_environment_policy: ShellEnvironmentPolicy::default(),

View File

@@ -2,6 +2,7 @@ use super::CONFIG_TOML_FILE;
use super::ConfigToml;
use crate::config::edit::ConfigEdit;
use crate::config::edit::ConfigEditsBuilder;
use crate::config_loader::CloudRequirementsLoader;
use crate::config_loader::ConfigLayerEntry;
use crate::config_loader::ConfigLayerStack;
use crate::config_loader::ConfigLayerStackOrdering;
@@ -109,6 +110,7 @@ pub struct ConfigService {
codex_home: PathBuf,
cli_overrides: Vec<(String, TomlValue)>,
loader_overrides: LoaderOverrides,
cloud_requirements: CloudRequirementsLoader,
}
impl ConfigService {
@@ -116,11 +118,13 @@ impl ConfigService {
codex_home: PathBuf,
cli_overrides: Vec<(String, TomlValue)>,
loader_overrides: LoaderOverrides,
cloud_requirements: CloudRequirementsLoader,
) -> Self {
Self {
codex_home,
cli_overrides,
loader_overrides,
cloud_requirements,
}
}
@@ -129,6 +133,7 @@ impl ConfigService {
codex_home,
cli_overrides: Vec::new(),
loader_overrides: LoaderOverrides::default(),
cloud_requirements: CloudRequirementsLoader::default(),
}
}
@@ -146,6 +151,7 @@ impl ConfigService {
.cli_overrides(self.cli_overrides.clone())
.loader_overrides(self.loader_overrides.clone())
.fallback_cwd(Some(cwd.to_path_buf()))
.cloud_requirements(self.cloud_requirements.clone())
.build()
.await
.map_err(|err| {
@@ -376,7 +382,7 @@ impl ConfigService {
cwd,
&self.cli_overrides,
self.loader_overrides.clone(),
None,
self.cloud_requirements.clone(),
)
.await
}
@@ -814,6 +820,7 @@ remote_compaction = true
managed_preferences_base64: None,
macos_managed_config_requirements_base64: None,
},
CloudRequirementsLoader::default(),
);
let response = service
@@ -896,6 +903,7 @@ remote_compaction = true
managed_preferences_base64: None,
macos_managed_config_requirements_base64: None,
},
CloudRequirementsLoader::default(),
);
let result = service
@@ -1000,6 +1008,7 @@ remote_compaction = true
managed_preferences_base64: None,
macos_managed_config_requirements_base64: None,
},
CloudRequirementsLoader::default(),
);
let error = service
@@ -1048,6 +1057,7 @@ remote_compaction = true
managed_preferences_base64: None,
macos_managed_config_requirements_base64: None,
},
CloudRequirementsLoader::default(),
);
let response = service
@@ -1095,6 +1105,7 @@ remote_compaction = true
managed_preferences_base64: None,
macos_managed_config_requirements_base64: None,
},
CloudRequirementsLoader::default(),
);
let result = service

View File

@@ -4,25 +4,29 @@ use futures::future::FutureExt;
use futures::future::Shared;
use std::fmt;
use std::future::Future;
use std::io;
use std::sync::Arc;
#[derive(Clone)]
pub struct CloudRequirementsLoader {
// TODO(gt): This should return a Result once we can fail-closed.
fut: Shared<BoxFuture<'static, Option<ConfigRequirementsToml>>>,
fut: Shared<BoxFuture<'static, Arc<io::Result<Option<ConfigRequirementsToml>>>>>,
}
impl CloudRequirementsLoader {
pub fn new<F>(fut: F) -> Self
where
F: Future<Output = Option<ConfigRequirementsToml>> + Send + 'static,
F: Future<Output = io::Result<Option<ConfigRequirementsToml>>> + Send + 'static,
{
Self {
fut: fut.boxed().shared(),
fut: fut.map(Arc::new).boxed().shared(),
}
}
pub async fn get(&self) -> Option<ConfigRequirementsToml> {
self.fut.clone().await
pub async fn get(&self) -> io::Result<Option<ConfigRequirementsToml>> {
match self.fut.clone().await.as_ref() {
Ok(requirements) => Ok(requirements.clone()),
Err(err) => Err(io::Error::new(err.kind(), err.to_string())),
}
}
}
@@ -32,6 +36,12 @@ impl fmt::Debug for CloudRequirementsLoader {
}
}
impl Default for CloudRequirementsLoader {
    /// A loader that resolves immediately with `Ok(None)`, i.e. no cloud
    /// requirements. Used wherever a caller has no real fetch to wire in.
    fn default() -> Self {
        Self::new(std::future::ready(Ok(None)))
    }
}
#[cfg(test)]
mod tests {
use super::*;
@@ -46,11 +56,11 @@ mod tests {
let counter_clone = Arc::clone(&counter);
let loader = CloudRequirementsLoader::new(async move {
counter_clone.fetch_add(1, Ordering::SeqCst);
Some(ConfigRequirementsToml::default())
Ok(Some(ConfigRequirementsToml::default()))
});
let (first, second) = tokio::join!(loader.get(), loader.get());
assert_eq!(first, second);
assert_eq!(first.as_ref().ok(), second.as_ref().ok());
assert_eq!(counter.load(Ordering::SeqCst), 1);
}
}

View File

@@ -52,6 +52,7 @@ pub struct ConfigRequirements {
pub sandbox_policy: Constrained<SandboxPolicy>,
pub mcp_servers: Option<Sourced<BTreeMap<String, McpServerRequirement>>>,
pub(crate) exec_policy: Option<Sourced<RequirementsExecPolicy>>,
pub enforce_residency: Constrained<Option<ResidencyRequirement>>,
}
impl Default for ConfigRequirements {
@@ -61,6 +62,7 @@ impl Default for ConfigRequirements {
sandbox_policy: Constrained::allow_any(SandboxPolicy::ReadOnly),
mcp_servers: None,
exec_policy: None,
enforce_residency: Constrained::allow_any(None),
}
}
}
@@ -84,6 +86,7 @@ pub struct ConfigRequirementsToml {
pub allowed_sandbox_modes: Option<Vec<SandboxModeRequirement>>,
pub mcp_servers: Option<BTreeMap<String, McpServerRequirement>>,
pub rules: Option<RequirementsExecPolicyToml>,
pub enforce_residency: Option<ResidencyRequirement>,
}
/// Value paired with the requirement source it came from, for better error
@@ -114,6 +117,7 @@ pub struct ConfigRequirementsWithSources {
pub allowed_sandbox_modes: Option<Sourced<Vec<SandboxModeRequirement>>>,
pub mcp_servers: Option<Sourced<BTreeMap<String, McpServerRequirement>>>,
pub rules: Option<Sourced<RequirementsExecPolicyToml>>,
pub enforce_residency: Option<Sourced<ResidencyRequirement>>,
}
impl ConfigRequirementsWithSources {
@@ -146,6 +150,7 @@ impl ConfigRequirementsWithSources {
allowed_sandbox_modes,
mcp_servers,
rules,
enforce_residency,
}
);
}
@@ -156,12 +161,14 @@ impl ConfigRequirementsWithSources {
allowed_sandbox_modes,
mcp_servers,
rules,
enforce_residency,
} = self;
ConfigRequirementsToml {
allowed_approval_policies: allowed_approval_policies.map(|sourced| sourced.value),
allowed_sandbox_modes: allowed_sandbox_modes.map(|sourced| sourced.value),
mcp_servers: mcp_servers.map(|sourced| sourced.value),
rules: rules.map(|sourced| sourced.value),
enforce_residency: enforce_residency.map(|sourced| sourced.value),
}
}
}
@@ -193,12 +200,19 @@ impl From<SandboxMode> for SandboxModeRequirement {
}
}
/// Geographic residency requirement for Codex web traffic.
///
/// Deserialized from `enforce_residency` in requirements.toml using lowercase
/// names (e.g. `"us"`); HTTP clients translate it into a residency header.
#[derive(Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum ResidencyRequirement {
// Traffic must be kept within the United States.
Us,
}
impl ConfigRequirementsToml {
    /// Returns `true` when none of the requirement fields has been set.
    pub fn is_empty(&self) -> bool {
        // Equivalent (by De Morgan) to requiring every field to be `None`.
        !(self.allowed_approval_policies.is_some()
            || self.allowed_sandbox_modes.is_some()
            || self.mcp_servers.is_some()
            || self.rules.is_some()
            || self.enforce_residency.is_some())
    }
}
@@ -211,6 +225,7 @@ impl TryFrom<ConfigRequirementsWithSources> for ConfigRequirements {
allowed_sandbox_modes,
mcp_servers,
rules,
enforce_residency,
} = toml;
let approval_policy: Constrained<AskForApproval> = match allowed_approval_policies {
@@ -298,11 +313,33 @@ impl TryFrom<ConfigRequirementsWithSources> for ConfigRequirements {
None => None,
};
let enforce_residency: Constrained<Option<ResidencyRequirement>> = match enforce_residency {
Some(Sourced {
value: residency,
source: requirement_source,
}) => {
let required = Some(residency);
Constrained::new(required, move |candidate| {
if candidate == &required {
Ok(())
} else {
Err(ConstraintError::InvalidValue {
field_name: "enforce_residency",
candidate: format!("{candidate:?}"),
allowed: format!("{required:?}"),
requirement_source: requirement_source.clone(),
})
}
})?
}
None => Constrained::allow_any(None),
};
Ok(ConfigRequirements {
approval_policy,
sandbox_policy,
mcp_servers,
exec_policy,
enforce_residency,
})
}
}
@@ -329,6 +366,7 @@ mod tests {
allowed_sandbox_modes,
mcp_servers,
rules,
enforce_residency,
} = toml;
ConfigRequirementsWithSources {
allowed_approval_policies: allowed_approval_policies
@@ -337,6 +375,8 @@ mod tests {
.map(|value| Sourced::new(value, RequirementSource::Unknown)),
mcp_servers: mcp_servers.map(|value| Sourced::new(value, RequirementSource::Unknown)),
rules: rules.map(|value| Sourced::new(value, RequirementSource::Unknown)),
enforce_residency: enforce_residency
.map(|value| Sourced::new(value, RequirementSource::Unknown)),
}
}
@@ -350,6 +390,8 @@ mod tests {
SandboxModeRequirement::WorkspaceWrite,
SandboxModeRequirement::DangerFullAccess,
];
let enforce_residency = ResidencyRequirement::Us;
let enforce_source = source.clone();
// Intentionally constructed without `..Default::default()` so adding a new field to
// `ConfigRequirementsToml` forces this test to be updated.
@@ -358,6 +400,7 @@ mod tests {
allowed_sandbox_modes: Some(allowed_sandbox_modes.clone()),
mcp_servers: None,
rules: None,
enforce_residency: Some(enforce_residency),
};
target.merge_unset_fields(source.clone(), other);
@@ -372,6 +415,7 @@ mod tests {
allowed_sandbox_modes: Some(Sourced::new(allowed_sandbox_modes, source)),
mcp_servers: None,
rules: None,
enforce_residency: Some(Sourced::new(enforce_residency, enforce_source)),
}
);
}
@@ -401,6 +445,7 @@ mod tests {
allowed_sandbox_modes: None,
mcp_servers: None,
rules: None,
enforce_residency: None,
}
);
Ok(())
@@ -438,6 +483,7 @@ mod tests {
allowed_sandbox_modes: None,
mcp_servers: None,
rules: None,
enforce_residency: None,
}
);
Ok(())

View File

@@ -37,6 +37,7 @@ pub use config_requirements::ConfigRequirementsToml;
pub use config_requirements::McpServerIdentity;
pub use config_requirements::McpServerRequirement;
pub use config_requirements::RequirementSource;
pub use config_requirements::ResidencyRequirement;
pub use config_requirements::SandboxModeRequirement;
pub use config_requirements::Sourced;
pub use diagnostics::ConfigError;
@@ -101,7 +102,7 @@ pub async fn load_config_layers_state(
cwd: Option<AbsolutePathBuf>,
cli_overrides: &[(String, TomlValue)],
overrides: LoaderOverrides,
cloud_requirements: Option<CloudRequirementsLoader>, // TODO(gt): Once exec and app-server are wired up, we can remove the option.
cloud_requirements: CloudRequirementsLoader,
) -> io::Result<ConfigLayerStack> {
let mut config_requirements_toml = ConfigRequirementsWithSources::default();
@@ -114,9 +115,7 @@ pub async fn load_config_layers_state(
)
.await?;
if let Some(loader) = cloud_requirements
&& let Some(requirements) = loader.get().await
{
if let Some(requirements) = cloud_requirements.get().await? {
config_requirements_toml
.merge_unset_fields(RequirementSource::CloudRequirements, requirements);
}

View File

@@ -69,7 +69,7 @@ async fn returns_config_error_for_invalid_user_config_toml() {
Some(cwd),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await
.expect_err("expected error");
@@ -99,7 +99,7 @@ async fn returns_config_error_for_invalid_managed_config_toml() {
Some(cwd),
&[] as &[(String, TomlValue)],
overrides,
None,
CloudRequirementsLoader::default(),
)
.await
.expect_err("expected error");
@@ -188,7 +188,7 @@ extra = true
Some(cwd),
&[] as &[(String, TomlValue)],
overrides,
None,
CloudRequirementsLoader::default(),
)
.await
.expect("load config");
@@ -225,7 +225,7 @@ async fn returns_empty_when_all_layers_missing() {
Some(cwd),
&[] as &[(String, TomlValue)],
overrides,
None,
CloudRequirementsLoader::default(),
)
.await
.expect("load layers");
@@ -323,7 +323,7 @@ flag = false
Some(cwd),
&[] as &[(String, TomlValue)],
overrides,
None,
CloudRequirementsLoader::default(),
)
.await
.expect("load config");
@@ -363,7 +363,7 @@ allowed_sandbox_modes = ["read-only"]
),
),
},
None,
CloudRequirementsLoader::default(),
)
.await?;
@@ -424,7 +424,7 @@ allowed_approval_policies = ["never"]
),
),
},
None,
CloudRequirementsLoader::default(),
)
.await?;
@@ -451,6 +451,7 @@ async fn load_requirements_toml_produces_expected_constraints() -> anyhow::Resul
&requirements_file,
r#"
allowed_approval_policies = ["never", "on-request"]
enforce_residency = "us"
"#,
)
.await?;
@@ -465,7 +466,6 @@ allowed_approval_policies = ["never", "on-request"]
.cloned(),
Some(vec![AskForApproval::Never, AskForApproval::OnRequest])
);
let config_requirements: ConfigRequirements = config_requirements_toml.try_into()?;
assert_eq!(
config_requirements.approval_policy.value(),
@@ -480,6 +480,10 @@ allowed_approval_policies = ["never", "on-request"]
.can_set(&AskForApproval::OnFailure)
.is_err()
);
assert_eq!(
config_requirements.enforce_residency.value(),
Some(crate::config_loader::ResidencyRequirement::Us)
);
Ok(())
}
@@ -503,6 +507,7 @@ allowed_approval_policies = ["on-request"]
allowed_sandbox_modes: None,
mcp_servers: None,
rules: None,
enforce_residency: None,
},
);
load_requirements_toml(&mut config_requirements_toml, &requirements_file).await?;
@@ -537,16 +542,17 @@ async fn load_config_layers_includes_cloud_requirements() -> anyhow::Result<()>
allowed_sandbox_modes: None,
mcp_servers: None,
rules: None,
enforce_residency: None,
};
let expected = requirements.clone();
let cloud_requirements = CloudRequirementsLoader::new(async move { Some(requirements) });
let cloud_requirements = CloudRequirementsLoader::new(async move { Ok(Some(requirements)) });
let layers = load_config_layers_state(
&codex_home,
Some(cwd),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
Some(cloud_requirements),
cloud_requirements,
)
.await?;
@@ -599,7 +605,7 @@ async fn project_layers_prefer_closest_cwd() -> std::io::Result<()> {
Some(cwd),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
@@ -731,7 +737,7 @@ async fn project_layer_is_added_when_dot_codex_exists_without_config_toml() -> s
Some(cwd),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
@@ -769,7 +775,7 @@ async fn codex_home_is_not_loaded_as_project_layer_from_home_dir() -> std::io::R
Some(cwd),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
@@ -819,7 +825,7 @@ async fn codex_home_within_project_tree_is_not_double_loaded() -> std::io::Resul
Some(cwd),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
@@ -888,7 +894,7 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
Some(cwd.clone()),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
let project_layers_untrusted: Vec<_> = layers_untrusted
@@ -926,7 +932,7 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
Some(cwd),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
let project_layers_unknown: Vec<_> = layers_unknown
@@ -987,7 +993,7 @@ async fn invalid_project_config_ignored_when_untrusted_or_unknown() -> std::io::
Some(cwd.clone()),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
let project_layers: Vec<_> = layers
@@ -1043,7 +1049,7 @@ async fn cli_overrides_with_relative_paths_do_not_break_trust_check() -> std::io
Some(cwd),
&cli_overrides,
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;
@@ -1085,7 +1091,7 @@ async fn project_root_markers_supports_alternate_markers() -> std::io::Result<()
Some(cwd),
&[] as &[(String, TomlValue)],
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await?;

View File

@@ -266,7 +266,7 @@ impl ContextManager {
}
fn process_item(&self, item: &ResponseItem, policy: TruncationPolicy) -> ResponseItem {
let policy_with_serialization_budget = policy.mul(1.2);
let policy_with_serialization_budget = policy * 1.2;
match item {
ResponseItem::FunctionCallOutput { call_id, output } => {
let truncated =

View File

@@ -1,6 +1,8 @@
use crate::config_loader::ResidencyRequirement;
use crate::spawn::CODEX_SANDBOX_ENV_VAR;
use codex_client::CodexHttpClient;
pub use codex_client::CodexRequestBuilder;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderValue;
use std::sync::LazyLock;
use std::sync::Mutex;
@@ -24,6 +26,7 @@ use std::sync::RwLock;
pub static USER_AGENT_SUFFIX: LazyLock<Mutex<Option<String>>> = LazyLock::new(|| Mutex::new(None));
pub const DEFAULT_ORIGINATOR: &str = "codex_cli_rs";
pub const CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR: &str = "CODEX_INTERNAL_ORIGINATOR_OVERRIDE";
pub const RESIDENCY_HEADER_NAME: &str = "x-openai-internal-codex-residency";
#[derive(Debug, Clone)]
pub struct Originator {
@@ -31,6 +34,8 @@ pub struct Originator {
pub header_value: HeaderValue,
}
static ORIGINATOR: LazyLock<RwLock<Option<Originator>>> = LazyLock::new(|| RwLock::new(None));
static REQUIREMENTS_RESIDENCY: LazyLock<RwLock<Option<ResidencyRequirement>>> =
LazyLock::new(|| RwLock::new(None));
#[derive(Debug)]
pub enum SetOriginatorError {
@@ -74,6 +79,14 @@ pub fn set_default_originator(value: String) -> Result<(), SetOriginatorError> {
Ok(())
}
/// Sets the process-wide residency requirement that `build_reqwest_client`
/// reads when attaching the residency header to new HTTP clients.
pub fn set_default_client_residency_requirement(enforce_residency: Option<ResidencyRequirement>) {
    match REQUIREMENTS_RESIDENCY.write() {
        Ok(mut guard) => *guard = enforce_residency,
        // `write()` only fails if the lock is poisoned; treat that as
        // non-fatal and keep the previously stored value.
        Err(_) => tracing::warn!("Failed to acquire requirements residency lock"),
    }
}
pub fn originator() -> Originator {
if let Ok(guard) = ORIGINATOR.read()
&& let Some(originator) = guard.as_ref()
@@ -166,10 +179,17 @@ pub fn create_client() -> CodexHttpClient {
}
pub fn build_reqwest_client() -> reqwest::Client {
use reqwest::header::HeaderMap;
let mut headers = HeaderMap::new();
headers.insert("originator", originator().header_value);
if let Ok(guard) = REQUIREMENTS_RESIDENCY.read()
&& let Some(requirement) = guard.as_ref()
&& !headers.contains_key(RESIDENCY_HEADER_NAME)
{
let value = match requirement {
ResidencyRequirement::Us => HeaderValue::from_static("us"),
};
headers.insert(RESIDENCY_HEADER_NAME, value);
}
let ua = get_codex_user_agent();
let mut builder = reqwest::Client::builder()
@@ -214,6 +234,8 @@ mod tests {
async fn test_create_client_sets_default_headers() {
skip_if_no_network!();
set_default_client_residency_requirement(Some(ResidencyRequirement::Us));
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
@@ -256,6 +278,13 @@ mod tests {
.get("user-agent")
.expect("user-agent header missing");
assert_eq!(ua_header.to_str().unwrap(), expected_ua);
let residency_header = headers
.get(RESIDENCY_HEADER_NAME)
.expect("residency header missing");
assert_eq!(residency_header.to_str().unwrap(), "us");
set_default_client_residency_requirement(None);
}
#[test]

View File

@@ -39,7 +39,7 @@ fn codex_apps_mcp_bearer_token_env_var() -> Option<String> {
}
fn codex_apps_mcp_bearer_token(auth: Option<&CodexAuth>) -> Option<String> {
let token = auth.and_then(|auth| auth.bearer_token().ok()).flatten()?;
let token = auth.and_then(|auth| auth.get_token().ok())?;
let token = token.trim();
if token.is_empty() {
None

View File

@@ -9,6 +9,7 @@ use crate::auth::AuthMode;
use crate::error::EnvVarError;
use codex_api::Provider as ApiProvider;
use codex_api::WireApi as ApiWireApi;
use codex_api::is_azure_responses_wire_base_url;
use codex_api::provider::RetryConfig as ApiRetryConfig;
use http::HeaderMap;
use http::header::HeaderName;
@@ -170,6 +171,15 @@ impl ModelProviderInfo {
})
}
/// Whether this provider targets an Azure-hosted Responses endpoint,
/// delegating the actual detection to `codex_api`.
pub(crate) fn is_azure_responses_endpoint(&self) -> bool {
    is_azure_responses_wire_base_url(
        // Translate the config-level wire API into its codex_api
        // counterpart; the match stays exhaustive on purpose.
        match self.wire_api {
            WireApi::Responses => ApiWireApi::Responses,
            WireApi::Chat => ApiWireApi::Chat,
        },
        &self.name,
        self.base_url.as_deref(),
    )
}
/// If `env_key` is Some, returns the API key for this provider if present
/// (and non-empty) in the environment. If `env_key` is required but
/// cannot be found, returns an error.
@@ -432,87 +442,4 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
assert_eq!(expected_provider, provider);
}
#[test]
fn detects_azure_responses_base_urls() {
    // Builds a Responses-wire provider; `name` and `base_url` are the only
    // inputs the Azure detection reads, so everything else is defaulted.
    // This replaces three hand-copied 14-field struct literals.
    fn provider(name: &str, base_url: &str) -> ModelProviderInfo {
        ModelProviderInfo {
            name: name.into(),
            base_url: Some(base_url.into()),
            env_key: None,
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: None,
            env_http_headers: None,
            request_max_retries: None,
            stream_max_retries: None,
            stream_idle_timeout_ms: None,
            requires_openai_auth: false,
            supports_websockets: false,
        }
    }

    // Known Azure host suffixes must be detected as Azure.
    let positive_cases = [
        "https://foo.openai.azure.com/openai",
        "https://foo.openai.azure.us/openai/deployments/bar",
        "https://foo.cognitiveservices.azure.cn/openai",
        "https://foo.aoai.azure.com/openai",
        "https://foo.openai.azure-api.net/openai",
        "https://foo.z01.azurefd.net/",
    ];
    for base_url in positive_cases {
        let api = provider("test", base_url)
            .to_api_provider(None)
            .expect("api provider");
        assert!(
            api.is_azure_responses_endpoint(),
            "expected {base_url} to be detected as Azure"
        );
    }

    // A provider named "Azure" is treated as Azure regardless of its URL.
    let named_api = provider("Azure", "https://example.com")
        .to_api_provider(None)
        .expect("api provider");
    assert!(named_api.is_azure_responses_endpoint());

    // Non-Azure hosts (including lookalikes) must not match.
    let negative_cases = [
        "https://api.openai.com/v1",
        "https://example.com/openai",
        "https://myproxy.azurewebsites.net/openai",
    ];
    for base_url in negative_cases {
        let api = provider("test", base_url)
            .to_api_provider(None)
            .expect("api provider");
        assert!(
            !api.is_azure_responses_endpoint(),
            "expected {base_url} not to be detected as Azure"
        );
    }
}
}

View File

@@ -10,6 +10,7 @@ use tracing::warn;
use crate::config::Config;
use crate::config::types::SkillsConfig;
use crate::config_loader::CloudRequirementsLoader;
use crate::config_loader::LoaderOverrides;
use crate::config_loader::load_config_layers_state;
use crate::skills::SkillLoadOutcome;
@@ -88,7 +89,7 @@ impl SkillsManager {
Some(cwd_abs),
&cli_overrides,
LoaderOverrides::default(),
None,
CloudRequirementsLoader::default(),
)
.await
{

View File

@@ -34,18 +34,6 @@ impl From<TruncationPolicyConfig> for TruncationPolicy {
}
impl TruncationPolicy {
/// Scale the underlying budget by `multiplier`, rounding up to avoid under-budgeting.
pub fn mul(self, multiplier: f64) -> Self {
match self {
TruncationPolicy::Bytes(bytes) => {
TruncationPolicy::Bytes((bytes as f64 * multiplier).ceil() as usize)
}
TruncationPolicy::Tokens(tokens) => {
TruncationPolicy::Tokens((tokens as f64 * multiplier).ceil() as usize)
}
}
}
/// Returns a token budget derived from this policy.
///
/// - For `Tokens`, this is the explicit token limit.
@@ -73,6 +61,21 @@ impl TruncationPolicy {
}
}
impl std::ops::Mul<f64> for TruncationPolicy {
    type Output = Self;

    /// Scales the underlying budget by `multiplier`, rounding up so a
    /// fractional multiplier never under-budgets.
    fn mul(self, multiplier: f64) -> Self::Output {
        let scale = |budget: usize| (budget as f64 * multiplier).ceil() as usize;
        match self {
            TruncationPolicy::Bytes(bytes) => TruncationPolicy::Bytes(scale(bytes)),
            TruncationPolicy::Tokens(tokens) => TruncationPolicy::Tokens(scale(tokens)),
        }
    }
}
pub(crate) fn formatted_truncate_text(content: &str, policy: TruncationPolicy) -> String {
if content.len() <= policy.byte_budget() {
return content.to_string();

View File

@@ -55,7 +55,6 @@ async fn refresh_token_succeeds_updates_storage() -> Result<()> {
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -118,7 +117,6 @@ async fn returns_fresh_tokens_as_is() -> Result<()> {
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -165,7 +163,6 @@ async fn refreshes_token_when_last_refresh_is_stale() -> Result<()> {
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(stale_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -225,7 +222,6 @@ async fn refresh_token_returns_permanent_error_for_expired_refresh_token() -> Re
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -276,7 +272,6 @@ async fn refresh_token_returns_transient_error_on_server_failure() -> Result<()>
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -329,7 +324,6 @@ async fn unauthorized_recovery_reloads_then_refreshes_tokens() -> Result<()> {
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -339,7 +333,6 @@ async fn unauthorized_recovery_reloads_then_refreshes_tokens() -> Result<()> {
openai_api_key: None,
tokens: Some(disk_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
save_auth(
ctx.codex_home.path(),
@@ -423,7 +416,6 @@ async fn unauthorized_recovery_skips_reload_on_account_mismatch() -> Result<()>
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
ctx.write_auth(&initial_auth)?;
@@ -439,7 +431,6 @@ async fn unauthorized_recovery_skips_reload_on_account_mismatch() -> Result<()>
openai_api_key: None,
tokens: Some(disk_tokens),
last_refresh: Some(initial_last_refresh),
chatgpt_proxy: None,
};
save_auth(
ctx.codex_home.path(),
@@ -504,7 +495,6 @@ async fn unauthorized_recovery_requires_chatgpt_auth() -> Result<()> {
openai_api_key: Some("sk-test".to_string()),
tokens: None,
last_refresh: None,
chatgpt_proxy: None,
};
ctx.write_auth(&auth)?;

View File

@@ -80,5 +80,5 @@ mod unstable_features_warning;
mod user_notification;
mod user_shell_cmd;
mod view_image;
mod web_search_cached;
mod web_search;
mod websocket_fallback;

View File

@@ -1,5 +1,7 @@
#![allow(clippy::unwrap_used)]
use codex_core::WireApi;
use codex_core::built_in_model_providers;
use codex_core::features::Feature;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::WebSearchMode;
@@ -25,6 +27,15 @@ fn find_web_search_tool(body: &Value) -> &Value {
.expect("tools should include a web_search tool")
}
#[allow(clippy::expect_used)]
fn has_web_search_tool(body: &Value) -> bool {
body["tools"]
.as_array()
.expect("request body should include tools array")
.iter()
.any(|tool| tool.get("type").and_then(Value::as_str) == Some("web_search"))
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn web_search_mode_cached_sets_external_web_access_false() {
skip_if_no_network!();
@@ -174,3 +185,45 @@ async fn web_search_mode_updates_between_turns_with_sandbox_policy() {
"danger-full-access policy should default web_search to live"
);
}
/// When the provider is Azure Responses and the config sets no explicit
/// `web_search_mode` (and the related features are disabled), the request
/// should not include a `web_search` tool.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn web_search_mode_defaults_to_disabled_for_azure_responses() {
    skip_if_no_network!();

    let server = start_mock_server().await;
    let sse = sse_completed("resp-1");
    let resp_mock = responses::mount_sse_once(&server, sse).await;

    let mut builder = test_codex()
        .with_model("gpt-5-codex")
        .with_config(|config| {
            // Reuse the mock server's base URL but relabel the provider as
            // Azure so the Azure-specific default path is exercised.
            let base_url = config.model_provider.base_url.clone();
            let mut provider = built_in_model_providers()["openai"].clone();
            provider.name = "Azure".to_string();
            provider.base_url = base_url;
            provider.wire_api = WireApi::Responses;
            config.model_provider_id = provider.name.clone();
            config.model_provider = provider;
            // No explicit mode and no web-search features: pure default path.
            config.web_search_mode = None;
            config.features.disable(Feature::WebSearchCached);
            config.features.disable(Feature::WebSearchRequest);
        });
    let test = builder
        .build(&server)
        .await
        .expect("create test Codex conversation");

    test.submit_turn_with_policy(
        "hello azure default web search",
        SandboxPolicy::DangerFullAccess,
    )
    .await
    .expect("submit turn");

    let body = resp_mock.single_request().body_json();
    // assert! on the negated predicate instead of assert_eq!(.., false)
    // (clippy::bool_assert_comparison).
    assert!(
        !has_web_search_tool(&body),
        "azure responses requests should disable web_search by default"
    );
}

View File

@@ -241,7 +241,7 @@ async fn load_exec_policy() -> anyhow::Result<Policy> {
cwd,
&cli_overrides,
overrides,
None,
codex_core::config_loader::CloudRequirementsLoader::default(),
)
.await?;

View File

@@ -19,6 +19,7 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-arg0 = { workspace = true }
codex-cloud-requirements = { workspace = true }
codex-common = { workspace = true, features = [
"cli",
"elapsed",

View File

@@ -13,6 +13,7 @@ pub mod exec_events;
pub use cli::Cli;
pub use cli::Command;
pub use cli::ReviewArgs;
use codex_cloud_requirements::cloud_requirements_loader;
use codex_common::oss::ensure_oss_provider_ready;
use codex_common::oss::get_default_model_for_oss_provider;
use codex_common::oss::ollama_chat_deprecation_notice;
@@ -24,6 +25,7 @@ use codex_core::OLLAMA_OSS_PROVIDER_ID;
use codex_core::ThreadManager;
use codex_core::auth::enforce_login_restrictions;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
use codex_core::config::find_codex_home;
use codex_core::config::load_config_as_toml_with_cli_overrides;
@@ -64,6 +66,7 @@ use uuid::Uuid;
use crate::cli::Command as ExecCommand;
use crate::event_processor::CodexStatus;
use crate::event_processor::EventProcessor;
use codex_core::default_client::set_default_client_residency_requirement;
use codex_core::default_client::set_default_originator;
use codex_core::find_thread_path_by_id_str;
use codex_core::find_thread_path_by_name_str;
@@ -159,41 +162,52 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
// we load config.toml here to determine project state.
#[allow(clippy::print_stderr)]
let config_toml = {
let codex_home = match find_codex_home() {
Ok(codex_home) => codex_home,
Err(err) => {
eprintln!("Error finding codex home: {err}");
std::process::exit(1);
}
};
match load_config_as_toml_with_cli_overrides(
&codex_home,
&config_cwd,
cli_kv_overrides.clone(),
)
.await
{
Ok(config_toml) => config_toml,
Err(err) => {
let config_error = err
.get_ref()
.and_then(|err| err.downcast_ref::<ConfigLoadError>())
.map(ConfigLoadError::config_error);
if let Some(config_error) = config_error {
eprintln!(
"Error loading config.toml:\n{}",
format_config_error_with_source(config_error)
);
} else {
eprintln!("Error loading config.toml: {err}");
}
std::process::exit(1);
}
let codex_home = match find_codex_home() {
Ok(codex_home) => codex_home,
Err(err) => {
eprintln!("Error finding codex home: {err}");
std::process::exit(1);
}
};
#[allow(clippy::print_stderr)]
let config_toml = match load_config_as_toml_with_cli_overrides(
&codex_home,
&config_cwd,
cli_kv_overrides.clone(),
)
.await
{
Ok(config_toml) => config_toml,
Err(err) => {
let config_error = err
.get_ref()
.and_then(|err| err.downcast_ref::<ConfigLoadError>())
.map(ConfigLoadError::config_error);
if let Some(config_error) = config_error {
eprintln!(
"Error loading config.toml:\n{}",
format_config_error_with_source(config_error)
);
} else {
eprintln!("Error loading config.toml: {err}");
}
std::process::exit(1);
}
};
let cloud_auth_manager = AuthManager::shared(
codex_home.clone(),
false,
config_toml.cli_auth_credentials_store.unwrap_or_default(),
);
let chatgpt_base_url = config_toml
.chatgpt_base_url
.clone()
.unwrap_or_else(|| "https://chatgpt.com/backend-api/".to_string());
// TODO(gt): Make cloud requirements failures blocking once we can fail-closed.
let cloud_requirements = cloud_requirements_loader(cloud_auth_manager, chatgpt_base_url);
let model_provider = if oss {
let resolved = resolve_oss_provider(
oss_provider.as_deref(),
@@ -246,8 +260,13 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
additional_writable_roots: add_dir,
};
let config =
Config::load_with_cli_overrides_and_harness_overrides(cli_kv_overrides, overrides).await?;
let config = ConfigBuilder::default()
.cli_overrides(cli_kv_overrides)
.harness_overrides(overrides)
.cloud_requirements(cloud_requirements)
.build()
.await?;
set_default_client_residency_requirement(config.enforce_residency.value());
if let Err(err) = enforce_login_restrictions(&config) {
eprintln!("{err}");

View File

@@ -564,7 +564,6 @@ pub(crate) async fn persist_tokens_async(
openai_api_key: api_key,
tokens: Some(tokens),
last_refresh: Some(Utc::now()),
chatgpt_proxy: None,
};
save_auth(&codex_home, &auth, auth_credentials_store_mode)
})

View File

@@ -11,6 +11,7 @@ use codex_core::config::CONFIG_TOML_FILE;
use codex_core::config::Constrained;
use codex_core::config::ConstraintError;
use codex_core::config::find_codex_home;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigLayerStack;
use codex_core::config_loader::ConfigLayerStackOrdering;
use codex_core::config_loader::LoaderOverrides;
@@ -31,10 +32,15 @@ pub(crate) async fn build_config_state() -> Result<ConfigState> {
let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
let cli_overrides = Vec::new();
let overrides = LoaderOverrides::default();
let config_layer_stack =
load_config_layers_state(&codex_home, None, &cli_overrides, overrides, None)
.await
.context("failed to load Codex config")?;
let config_layer_stack = load_config_layers_state(
&codex_home,
None,
&cli_overrides,
overrides,
CloudRequirementsLoader::default(),
)
.await
.context("failed to load Codex config")?;
let cfg_path = codex_home.join(CONFIG_TOML_FILE);

View File

@@ -15,7 +15,7 @@ axum = { workspace = true, default-features = false, features = [
] }
codex-keyring-store = { workspace = true }
codex-protocol = { workspace = true }
dirs = { workspace = true }
codex-utils-home-dir = { workspace = true }
futures = { workspace = true, default-features = false, features = ["std"] }
keyring = { workspace = true, features = ["crypto-rust"] }
mcp-types = { path = "../mcp-types" }

View File

@@ -1,33 +0,0 @@
use dirs::home_dir;
use std::path::PathBuf;
/// This was copied from codex-core but codex-core depends on this crate.
/// TODO: move this to a shared crate lower in the dependency tree.
///
///
/// Returns the path to the Codex configuration directory, which can be
/// specified by the `CODEX_HOME` environment variable. If not set, defaults to
/// `~/.codex`.
///
/// - If `CODEX_HOME` is set, the value will be canonicalized and this
/// function will Err if the path does not exist.
/// - If `CODEX_HOME` is not set, this function does not verify that the
/// directory exists.
pub(crate) fn find_codex_home() -> std::io::Result<PathBuf> {
// Honor the `CODEX_HOME` environment variable when it is set to allow users
// (and tests) to override the default location.
if let Ok(val) = std::env::var("CODEX_HOME")
&& !val.is_empty()
{
return PathBuf::from(val).canonicalize();
}
let mut p = home_dir().ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::NotFound,
"Could not find home directory",
)
})?;
p.push(".codex");
Ok(p)
}

View File

@@ -1,5 +1,4 @@
mod auth_status;
mod find_codex_home;
mod logging_client_handler;
mod oauth;
mod perform_oauth_login;

View File

@@ -48,7 +48,7 @@ use codex_keyring_store::KeyringStore;
use rmcp::transport::auth::AuthorizationManager;
use tokio::sync::Mutex;
use crate::find_codex_home::find_codex_home;
use codex_utils_home_dir::find_codex_home;
const KEYRING_SERVICE: &str = "Codex MCP Credentials";
const REFRESH_SKEW_MILLIS: u64 = 30_000;

View File

@@ -1370,10 +1370,7 @@ impl App {
self.shutdown_current_thread().await;
self.config = resume_config;
tui.set_notification_method(self.config.tui_notification_method);
self.file_search = FileSearchManager::new(
self.config.cwd.clone(),
self.app_event_tx.clone(),
);
self.file_search.update_search_dir(self.config.cwd.clone());
let init = self.chatwidget_init_for_forked_or_resumed_thread(
tui,
self.config.clone(),

View File

@@ -38,6 +38,17 @@ impl FileSearchManager {
}
}
/// Points future file searches at a new base directory.
/// Call this whenever the session's working directory changes on resume.
/// The active search session is discarded (and the pending query cleared)
/// so the next query lazily rebuilds state against the new directory.
pub fn update_search_dir(&mut self, new_dir: PathBuf) {
    self.search_dir = new_dir;
    #[expect(clippy::unwrap_used)]
    let mut state = self.state.lock().unwrap();
    state.session.take();
    state.latest_query.clear();
}
/// Call whenever the user edits the `@` token.
pub fn on_user_query(&self, query: String) {
#[expect(clippy::unwrap_used)]

View File

@@ -27,6 +27,7 @@ use codex_core::config::resolve_oss_provider;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigLoadError;
use codex_core::config_loader::format_config_error_with_source;
use codex_core::default_client::set_default_client_residency_requirement;
use codex_core::find_thread_path_by_id_str;
use codex_core::find_thread_path_by_name_str;
use codex_core::path_utils;
@@ -276,6 +277,7 @@ pub async fn run_main(
cloud_requirements.clone(),
)
.await;
set_default_client_residency_requirement(config.enforce_residency.value());
if let Some(warning) = add_dir_warning_message(&cli.add_dir, config.sandbox_policy.get()) {
#[allow(clippy::print_stderr)]

View File

@@ -91,7 +91,7 @@ pub(crate) fn compose_account_display(
let auth = auth_manager.auth_cached()?;
match auth {
CodexAuth::Chatgpt(_) | CodexAuth::ChatgptAuthTokens(_) | CodexAuth::ChatgptProxy(_) => {
CodexAuth::Chatgpt(_) | CodexAuth::ChatgptAuthTokens(_) => {
let email = auth.get_account_email();
let plan = plan
.map(|plan_type| title_case(format!("{plan_type:?}").as_str()))

View File

@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

# Small shared utility crate that resolves the Codex home directory
# (honoring `CODEX_HOME`); sits low in the dependency tree so both
# `codex-core` and `codex-rmcp-client` can depend on it.
codex_rust_crate(
    name = "home-dir",
    crate_name = "codex_utils_home_dir",
)

View File

@@ -0,0 +1,15 @@
[package]
name = "codex-utils-home-dir"
version.workspace = true
edition.workspace = true
license.workspace = true
[lints]
workspace = true
[dependencies]
dirs = { workspace = true }
[dev-dependencies]
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

View File

@@ -0,0 +1,128 @@
use dirs::home_dir;
use std::path::PathBuf;
/// Returns the path to the Codex configuration directory, which can be
/// specified by the `CODEX_HOME` environment variable. If not set, defaults to
/// `~/.codex`.
///
/// - If `CODEX_HOME` is set, the value must exist and be a directory. The
///   value will be canonicalized and this function will Err otherwise.
/// - If `CODEX_HOME` is not set, this function does not verify that the
///   directory exists.
pub fn find_codex_home() -> std::io::Result<PathBuf> {
    // Treat an empty `CODEX_HOME` the same as an unset one.
    let codex_home_env = match std::env::var("CODEX_HOME") {
        Ok(val) if !val.is_empty() => Some(val),
        _ => None,
    };
    find_codex_home_from_env(codex_home_env.as_deref())
}
/// Resolves the Codex home directory from an optional `CODEX_HOME` override.
/// With an override, the path must exist and be a directory and is returned
/// canonicalized; without one, `~/.codex` is returned unverified.
fn find_codex_home_from_env(codex_home_env: Option<&str>) -> std::io::Result<PathBuf> {
    // Honor the `CODEX_HOME` environment variable when it is set to allow users
    // (and tests) to override the default location.
    let Some(val) = codex_home_env else {
        let home = home_dir().ok_or_else(|| {
            std::io::Error::new(
                std::io::ErrorKind::NotFound,
                "Could not find home directory",
            )
        })?;
        return Ok(home.join(".codex"));
    };

    let path = PathBuf::from(val);
    // Fail closed: surface a clear error for a missing or unreadable path.
    let metadata = std::fs::metadata(&path).map_err(|err| {
        if err.kind() == std::io::ErrorKind::NotFound {
            std::io::Error::new(
                std::io::ErrorKind::NotFound,
                format!("CODEX_HOME points to {val:?}, but that path does not exist"),
            )
        } else {
            std::io::Error::new(
                err.kind(),
                format!("failed to read CODEX_HOME {val:?}: {err}"),
            )
        }
    })?;
    if !metadata.is_dir() {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidInput,
            format!("CODEX_HOME points to {val:?}, but that path is not a directory"),
        ));
    }
    path.canonicalize().map_err(|err| {
        std::io::Error::new(
            err.kind(),
            format!("failed to canonicalize CODEX_HOME {val:?}: {err}"),
        )
    })
}
#[cfg(test)]
mod tests {
    use super::find_codex_home_from_env;
    use dirs::home_dir;
    use pretty_assertions::assert_eq;
    use std::fs;
    use std::io::ErrorKind;
    use tempfile::TempDir;

    /// `CODEX_HOME` pointing at a path that does not exist is a hard error.
    #[test]
    fn find_codex_home_env_missing_path_is_fatal() {
        let tmp = TempDir::new().expect("temp home");
        let missing_path = tmp.path().join("missing-codex-home");
        let missing_path_str = missing_path
            .to_str()
            .expect("missing codex home path should be valid utf-8");

        let err = find_codex_home_from_env(Some(missing_path_str)).expect_err("missing CODEX_HOME");

        assert_eq!(err.kind(), ErrorKind::NotFound);
        assert!(
            err.to_string().contains("CODEX_HOME"),
            "unexpected error: {err}"
        );
    }

    /// `CODEX_HOME` pointing at a regular file (not a directory) is a hard error.
    #[test]
    fn find_codex_home_env_file_path_is_fatal() {
        let tmp = TempDir::new().expect("temp home");
        let file = tmp.path().join("codex-home.txt");
        fs::write(&file, "not a directory").expect("write temp file");
        let file_str = file
            .to_str()
            .expect("file codex home path should be valid utf-8");

        let err = find_codex_home_from_env(Some(file_str)).expect_err("file CODEX_HOME");

        assert_eq!(err.kind(), ErrorKind::InvalidInput);
        assert!(
            err.to_string().contains("not a directory"),
            "unexpected error: {err}"
        );
    }

    /// A valid directory override resolves to its canonical form.
    #[test]
    fn find_codex_home_env_valid_directory_canonicalizes() {
        let tmp = TempDir::new().expect("temp home");
        let tmp_str = tmp
            .path()
            .to_str()
            .expect("temp codex home path should be valid utf-8");

        let resolved = find_codex_home_from_env(Some(tmp_str)).expect("valid CODEX_HOME");

        let canonical = tmp.path().canonicalize().expect("canonicalize temp home");
        assert_eq!(resolved, canonical);
    }

    /// Without `CODEX_HOME`, the default is `~/.codex` (existence unverified).
    #[test]
    fn find_codex_home_without_env_uses_default_home_dir() {
        let resolved = find_codex_home_from_env(None).expect("default CODEX_HOME");
        let expected = home_dir().expect("home dir").join(".codex");
        assert_eq!(resolved, expected);
    }
}