Add openai_base_url config override for built-in provider (#12031)

We regularly get bug reports from users who mistakenly have the
`OPENAI_BASE_URL` environment variable set. This PR deprecates this
environment variable in favor of a top-level config key
`openai_base_url` that is used for the same purpose. By making it a
config key, it will be more visible to users. It will also participate
in all of the infrastructure we've added for layered and managed
configs.

Summary
- introduce the `openai_base_url` top-level config key, update the
schema/tests, and route the built-in openai provider through it
- fall back to the deprecated `OPENAI_BASE_URL` env var when no
`openai_base_url` config key is present, warning the user that the env
var is deprecated
- update CLI, SDK, and TUI code to prefer the new config path (with a
deprecated env-var fallback) and document the SDK behavior change
This commit is contained in:
Eric Traut
2026-03-13 20:12:25 -06:00
committed by GitHub
parent b859a98e0f
commit 4b9d5c8c1b
21 changed files with 233 additions and 70 deletions

View File

@@ -52,8 +52,7 @@ async fn responses_mode_stream_cli() {
.arg(&repo_root)
.arg("hello?");
cmd.env("CODEX_HOME", home.path())
.env("OPENAI_API_KEY", "dummy")
.env("OPENAI_BASE_URL", format!("{}/v1", server.uri()));
.env("OPENAI_API_KEY", "dummy");
let output = cmd.output().unwrap();
println!("Status: {}", output.status);
@@ -89,6 +88,75 @@ async fn responses_mode_stream_cli() {
// assert!(page.items[0].created_at.is_some(), "missing created_at");
}
/// Verifies that the deprecated `OPENAI_BASE_URL` environment variable is
/// still honored as a fallback when no `openai_base_url` config key is set:
/// the CLI must route its Responses request to the mock server's URI.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn responses_mode_stream_cli_supports_openai_base_url_env_fallback() {
    skip_if_no_network!();

    // Stand up a mock Responses endpoint that streams a minimal SSE exchange.
    let mock_server = MockServer::start().await;
    let stream_events = vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "hi"),
        responses::ev_completed("resp-1"),
    ];
    let mounted = responses::mount_sse_once(&mock_server, responses::sse(stream_events)).await;

    // Launch `codex exec` with only the legacy env var pointing at the mock.
    let codex_home = TempDir::new().unwrap();
    let workdir = repo_root();
    let codex_bin = codex_utils_cargo_bin::cargo_bin("codex").unwrap();
    let mut command = AssertCommand::new(codex_bin);
    command.timeout(Duration::from_secs(30));
    command
        .arg("exec")
        .arg("--skip-git-repo-check")
        .arg("-C")
        .arg(&workdir)
        .arg("hello?");
    command
        .env("CODEX_HOME", codex_home.path())
        .env("OPENAI_API_KEY", "dummy")
        .env("OPENAI_BASE_URL", format!("{}/v1", mock_server.uri()));

    let run = command.output().unwrap();
    assert!(run.status.success());

    // The request must have reached the mock via the env-var base URL.
    let captured = mounted.single_request();
    assert_eq!(captured.path(), "/v1/responses");
}
/// Verifies that the new `openai_base_url` top-level config key (passed via
/// `-c`) routes built-in openai provider requests to the configured base URL,
/// with no `OPENAI_BASE_URL` environment variable involved.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn responses_mode_stream_cli_supports_openai_base_url_config_override() {
    skip_if_no_network!();

    // Mock Responses endpoint streaming a minimal created/message/completed exchange.
    let mock_server = MockServer::start().await;
    let stream_events = vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "hi"),
        responses::ev_completed("resp-1"),
    ];
    let mounted = responses::mount_sse_once(&mock_server, responses::sse(stream_events)).await;

    // Launch `codex exec`, supplying the base URL purely through config.
    let codex_home = TempDir::new().unwrap();
    let workdir = repo_root();
    let codex_bin = codex_utils_cargo_bin::cargo_bin("codex").unwrap();
    let mut command = AssertCommand::new(codex_bin);
    command.timeout(Duration::from_secs(30));
    command
        .arg("exec")
        .arg("--skip-git-repo-check")
        .arg("-c")
        .arg(format!("openai_base_url=\"{}/v1\"", mock_server.uri()))
        .arg("-C")
        .arg(&workdir)
        .arg("hello?");
    command
        .env("CODEX_HOME", codex_home.path())
        .env("OPENAI_API_KEY", "dummy");

    let run = command.output().unwrap();
    assert!(run.status.success());

    // The request must have reached the mock via the config-key base URL.
    let captured = mounted.single_request();
    assert_eq!(captured.path(), "/v1/responses");
}
/// Verify that passing `-c model_instructions_file=...` to the CLI
/// overrides the built-in base instructions by inspecting the request body
/// received by a mock OpenAI Responses endpoint.
@@ -136,8 +204,7 @@ async fn exec_cli_applies_model_instructions_file() {
.arg(&repo_root)
.arg("hello?\n");
cmd.env("CODEX_HOME", home.path())
.env("OPENAI_API_KEY", "dummy")
.env("OPENAI_BASE_URL", format!("{}/v1", server.uri()));
.env("OPENAI_API_KEY", "dummy");
let output = cmd.output().unwrap();
println!("Status: {}", output.status);
@@ -247,13 +314,14 @@ async fn responses_api_stream_cli() {
let mut cmd = AssertCommand::new(bin);
cmd.arg("exec")
.arg("--skip-git-repo-check")
.arg("-c")
.arg("openai_base_url=\"http://unused.local\"")
.arg("-C")
.arg(&repo_root)
.arg("hello?");
cmd.env("CODEX_HOME", home.path())
.env("OPENAI_API_KEY", "dummy")
.env("CODEX_RS_SSE_FIXTURE", fixture)
.env("OPENAI_BASE_URL", "http://unused.local");
.env("CODEX_RS_SSE_FIXTURE", fixture);
let output = cmd.output().unwrap();
assert!(output.status.success());
@@ -283,14 +351,14 @@ async fn integration_creates_and_checks_session_file() -> anyhow::Result<()> {
let mut cmd = AssertCommand::new(bin);
cmd.arg("exec")
.arg("--skip-git-repo-check")
.arg("-c")
.arg("openai_base_url=\"http://unused.local\"")
.arg("-C")
.arg(&repo_root)
.arg(&prompt);
cmd.env("CODEX_HOME", home.path())
.env(CODEX_API_KEY_ENV_VAR, "dummy")
.env("CODEX_RS_SSE_FIXTURE", &fixture)
// Required for CLI arg parsing even though fixture short-circuits network usage.
.env("OPENAI_BASE_URL", "http://unused.local");
.env("CODEX_RS_SSE_FIXTURE", &fixture);
let output = cmd.output().unwrap();
assert!(
@@ -404,6 +472,8 @@ async fn integration_creates_and_checks_session_file() -> anyhow::Result<()> {
let mut cmd2 = AssertCommand::new(bin2);
cmd2.arg("exec")
.arg("--skip-git-repo-check")
.arg("-c")
.arg("openai_base_url=\"http://unused.local\"")
.arg("-C")
.arg(&repo_root)
.arg(&prompt2)
@@ -411,8 +481,7 @@ async fn integration_creates_and_checks_session_file() -> anyhow::Result<()> {
.arg("--last");
cmd2.env("CODEX_HOME", home.path())
.env("OPENAI_API_KEY", "dummy")
.env("CODEX_RS_SSE_FIXTURE", &fixture)
.env("OPENAI_BASE_URL", "http://unused.local");
.env("CODEX_RS_SSE_FIXTURE", &fixture);
let output2 = cmd2.output().unwrap();
assert!(output2.status.success(), "resume codex-cli run failed");