Compare commits

...

7 Commits

Author SHA1 Message Date
Eugene Brevdo
d131992dbc feat(core): write template config 2025-06-30 14:26:46 -07:00
Michael Bolin
4cb3c76798 fix: softprops/action-gh-release@v2 should use existing tag instead of creating a new tag (#1436)
https://github.com/Homebrew/homebrew-core/pull/228521 details the issues
I was having with the **Source code (tar.gz)** artifact for our GitHub
releases not being quite right. I landed these PRs as stabs in the dark
to fix this:

- https://github.com/openai/codex/pull/1423
- https://github.com/openai/codex/pull/1430

Based on the insights from
https://github.com/Homebrew/homebrew-core/pull/228521, I think those
were wrong and the real problem was this:


6dad5c3b17/.github/workflows/rust-release.yml (L162)

That is, I was manufacturing a new tag name on the fly instead of using
the existing one.

This PR reverts #1423 and #1430 and hopefully fixes how `tag_name` is
set for the `softprops/action-gh-release@v2` step so the **Source code
(tar.gz)** includes the correct files. Assuming this works, this should
make the Homebrew formula straightforward.
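
For reference, a minimal sketch of the corrected step, assuming the surrounding workflow shown in the diff below (other inputs such as `prerelease` are omitted):

```yaml
- name: Create GitHub Release
  uses: softprops/action-gh-release@v2
  with:
    # Reuse the tag that triggered the workflow instead of manufacturing a
    # new tag name on the fly, so the auto-generated Source code (tar.gz)
    # archive corresponds to the pushed tag.
    tag_name: ${{ github.ref_name }}
    files: dist/**
```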
2025-06-30 12:10:48 -07:00
Michael Bolin
6dad5c3b17 feat: add query_params option to ModelProviderInfo to support Azure (#1435)
As discovered in https://github.com/openai/codex/issues/1365, the Azure
provider needs to be able to specify `api-version` as a query param, so
this PR introduces a generic `query_params` option to the
`model_providers` config so that an Azure provider can be defined as
follows:

```toml
[model_providers.azure]
name = "Azure"
base_url = "https://YOUR_PROJECT_NAME.openai.azure.com/openai"
env_key = "AZURE_OPENAI_API_KEY"
query_params = { api-version = "2025-04-01-preview" }
```

This PR also updates the docs with this example.

While here, we also update `wire_api` to default to `"chat"`, as that is
likely the common case for someone defining an external provider.
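
For context, with the `query_params` above and the default `"chat"` wire API, requests should end up going to a URL along these lines (see `get_full_url()` in the diff below):

```
https://YOUR_PROJECT_NAME.openai.azure.com/openai/chat/completions?api-version=2025-04-01-preview
```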

Fixes https://github.com/openai/codex/issues/1365.
2025-06-30 11:39:54 -07:00
Michael Bolin
cd2d84d496 fix: need to check out the branch, not the tag (#1430)
This should have been done in https://github.com/openai/codex/pull/1423.
2025-06-29 10:18:50 -07:00
Michael Bolin
688100f7f4 chore: fix Rust release process so generated .tar.gz source works with Homebrew (#1423)
Looking at existing releases such as
https://github.com/openai/codex/releases/tag/codex-rs-b289c9207090b2e27494545d7b5404e063bd86f3-1-rust-v0.1.0-alpha.4,
the `.tar.gz` for the source code still seems to have `0.0.0` as the
`version` in `codex-rs/Cargo.toml` instead of the version the tag
indicates it should have:


b289c92070/codex-rs/Cargo.toml (L21)

ChatGPT claims:

> When GitHub generates the Source code (tar.gz) archive for a tag:
> - It uses the commit the tag points to.
> - But in some cases (e.g., shallow clones, GitHub CI, or local tools that only clone the default branch), that commit may not be included, and you might get an outdated view or nothing at all depending on how it’s fetched.
Trying this recommended fix.
2025-06-28 19:46:44 -07:00
Michael Bolin
f30bf4bbcf fix: support pre-release identifiers in tags (#1422)
Had to update the regex in the GitHub workflow to allow suffixes like
`-alpha.4`.
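
For reference, the updated pattern in the workflow (shown in the diff below); a tag like `rust-v0.1.0-alpha.4` or `rust-v0.2.0-beta` now matches, while other suffixes are still rejected:

```
^rust-v[0-9]+\.[0-9]+\.[0-9]+(-(alpha|beta)(\.[0-9]+)?)?$
```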

Successfully ran:

```
./scripts/create_github_release.sh 0.1.0-alpha.4
```

to create
https://github.com/openai/codex/releases/tag/codex-rs-b289c9207090b2e27494545d7b5404e063bd86f3-1-rust-v0.1.0-alpha.4

and verified that when I run `codex --version`, it prints `codex-cli
0.1.0-alpha.4`.
2025-06-28 16:05:53 -07:00
Michael Bolin
1b7c8d2569 fix: build with codegen-units = 1 for profile.release (#1421)
Great suggestion from @zamazan4ik on
https://github.com/openai/codex/issues/1411.
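
The resulting release profile in `codex-rs/Cargo.toml` looks roughly like this (see the diff below; only the relevant keys are shown):

```toml
[profile.release]
lto = "fat"
# Strip symbols to keep the bundled binaries small.
strip = "symbols"
# Compile the crate as a single codegen unit: slower builds, better-optimized binary.
# See https://github.com/openai/codex/issues/1411 for details.
codegen-units = 1
```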
2025-06-28 15:24:48 -07:00
11 changed files with 190 additions and 22 deletions

View File

@@ -15,9 +15,6 @@ concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true
env:
TAG_REGEX: '^rust-v[0-9]+\.[0-9]+\.[0-9]+$'
jobs:
tag-check:
runs-on: ubuntu-latest
@@ -33,8 +30,8 @@ jobs:
# 1. Must be a tag and match the regex
[[ "${GITHUB_REF_TYPE}" == "tag" ]] \
|| { echo "❌ Not a tag push"; exit 1; }
[[ "${GITHUB_REF_NAME}" =~ ${TAG_REGEX} ]] \
|| { echo "❌ Tag '${GITHUB_REF_NAME}' != ${TAG_REGEX}"; exit 1; }
[[ "${GITHUB_REF_NAME}" =~ ^rust-v[0-9]+\.[0-9]+\.[0-9]+(-(alpha|beta)(\.[0-9]+)?)?$ ]] \
|| { echo "❌ Tag '${GITHUB_REF_NAME}' doesn't match expected format"; exit 1; }
# 2. Extract versions
tag_ver="${GITHUB_REF_NAME#rust-v}"
@@ -160,9 +157,7 @@ jobs:
release:
needs: build
name: release
runs-on: ubuntu-24.04
env:
RELEASE_TAG: codex-rs-${{ github.sha }}-${{ github.run_attempt }}-${{ github.ref_name }}
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v4
@@ -172,9 +167,19 @@ jobs:
- name: List
run: ls -R dist/
- uses: softprops/action-gh-release@v2
- name: Define release name
id: release_name
run: |
# Extract the version from the tag name, which is in the format
# "rust-v0.1.0".
version="${GITHUB_REF_NAME#rust-v}"
echo "name=${version}" >> $GITHUB_OUTPUT
- name: Create GitHub Release
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ env.RELEASE_TAG }}
name: ${{ steps.release_name.outputs.name }}
tag_name: ${{ github.ref_name }}
files: dist/**
# For now, tag releases as "prerelease" because we are not claiming
# the Rust CLI is stable yet.
@@ -184,5 +189,5 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag: ${{ env.RELEASE_TAG }}
tag: ${{ github.ref_name }}
config: .github/dotslash-config.json

View File

@@ -37,3 +37,6 @@ lto = "fat"
# Because we bundle some of these executables with the TypeScript CLI, we
# remove everything to make the binary as small as possible.
strip = "symbols"
# See https://github.com/openai/codex/issues/1411 for details.
codegen-units = 1

View File

@@ -41,8 +41,11 @@ base_url = "https://api.openai.com/v1"
# using Codex with this provider. The value of the environment variable must be
# non-empty and will be used in the `Bearer TOKEN` HTTP header for the POST request.
env_key = "OPENAI_API_KEY"
# Valid values for wire_api are "chat" and "responses".
# Valid values for wire_api are "chat" and "responses". Defaults to "chat" if omitted.
wire_api = "chat"
# If necessary, extra query params that need to be added to the URL.
# See the Azure example below.
query_params = {}
```
Note this makes it possible to use Codex CLI with non-OpenAI models, so long as they use a wire API that is compatible with the OpenAI chat completions API. For example, you could define the following provider to use Codex CLI with Ollama running locally:
@@ -51,7 +54,6 @@ Note this makes it possible to use Codex CLI with non-OpenAI models, so long as
[model_providers.ollama]
name = "Ollama"
base_url = "http://localhost:11434/v1"
wire_api = "chat"
```
Or a third-party provider (using a distinct environment variable for the API key):
@@ -61,7 +63,17 @@ Or a third-party provider (using a distinct environment variable for the API key
name = "Mistral"
base_url = "https://api.mistral.ai/v1"
env_key = "MISTRAL_API_KEY"
wire_api = "chat"
```
Note that Azure requires `api-version` to be passed as a query parameter, so be sure to specify it as part of `query_params` when defining the Azure provider:
```toml
[model_providers.azure]
name = "Azure"
# Make sure you set the appropriate subdomain for this URL.
base_url = "https://YOUR_PROJECT_NAME.openai.azure.com/openai"
env_key = "AZURE_OPENAI_API_KEY" # Or "OPENAI_API_KEY", whichever you use.
query_params = { api-version = "2025-04-01-preview" }
```
## model_provider

View File

@@ -0,0 +1,45 @@
# Codex configuration template
# See https://github.com/openai/codex/blob/main/codex-rs/config.md for details.
# All values below represent defaults. Uncomment to override them.
# model = "codex-mini-latest"
# model_provider = "openai"
# approval_policy = "unless-allow-listed"
# disable_response_storage = false
# project_doc_max_bytes = 32768
# file_opener = "vscode"
# hide_agent_reasoning = false
# model_reasoning_effort = "medium"
# model_reasoning_summary = "auto"
[shell_environment_policy]
# inherit = "core"
# ignore_default_excludes = false
# exclude = []
# set = {}
# include_only = []
[sandbox]
# mode = "read-only"
# writable_roots = []
# network_access = false
[history]
# persistence = "save-all"
[tui]
# disable_mouse_capture = false
# Example provider override
#[model_providers.openai]
# name = "OpenAI"
# base_url = "https://api.openai.com/v1"
# env_key = "OPENAI_API_KEY"
# wire_api = "chat"
# Example profile
#[profiles.example]
# model = "o3"
# model_provider = "openai"
# approval_policy = "never"

View File

@@ -114,8 +114,7 @@ pub(crate) async fn stream_chat_completions(
"tools": tools_json,
});
let base_url = provider.base_url.trim_end_matches('/');
let url = format!("{}/chat/completions", base_url);
let url = provider.get_full_url();
debug!(
"POST to {url}: {}",

View File

@@ -123,9 +123,7 @@ impl ModelClient {
stream: true,
};
let base_url = self.provider.base_url.clone();
let base_url = base_url.trim_end_matches('/');
let url = format!("{}/responses", base_url);
let url = self.provider.get_full_url();
trace!("POST to {url}: {}", serde_json::to_string(&payload)?);
let mut attempt = 0;

View File

@@ -20,6 +20,8 @@ use std::path::Path;
use std::path::PathBuf;
use toml::Value as TomlValue;
const DEFAULT_CONFIG_TEMPLATE: &str = include_str!("../config_template.toml");
/// Maximum number of bytes of the documentation that will be embedded. Larger
/// files are *silently truncated* to this size so we do not take up too much of
/// the context window.
@@ -179,7 +181,8 @@ fn load_config_as_toml(codex_home: &Path) -> std::io::Result<TomlValue> {
}
},
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
tracing::info!("config.toml not found, using defaults");
tracing::info!("config.toml not found, writing template");
write_default_config_template(&config_path);
Ok(TomlValue::Table(Default::default()))
}
Err(e) => {
@@ -189,6 +192,19 @@ fn load_config_as_toml(codex_home: &Path) -> std::io::Result<TomlValue> {
}
}
fn write_default_config_template(config_path: &Path) {
if let Some(parent) = config_path.parent() {
if let Err(e) = std::fs::create_dir_all(parent) {
tracing::error!("Failed to create config dir: {e}");
return;
}
}
match std::fs::write(config_path, DEFAULT_CONFIG_TEMPLATE) {
Ok(_) => tracing::info!("wrote default config template at {}", config_path.display()),
Err(e) => tracing::error!("Failed to write default config template: {e}"),
}
}
/// Apply a single dotted-path override onto a TOML value.
fn apply_toml_override(root: &mut TomlValue, path: &str, value: TomlValue) {
use toml::value::Table;
@@ -658,6 +674,7 @@ disable_response_storage = true
env_key: Some("OPENAI_API_KEY".to_string()),
wire_api: crate::WireApi::Chat,
env_key_instructions: None,
query_params: None,
};
let model_provider_map = {
let mut model_provider_map = built_in_model_providers();

View File

@@ -23,9 +23,10 @@ use crate::openai_api_key::get_openai_api_key;
#[serde(rename_all = "lowercase")]
pub enum WireApi {
/// The experimental “Responses” API exposed by OpenAI at `/v1/responses`.
#[default]
Responses,
/// Regular Chat Completions compatible with `/v1/chat/completions`.
#[default]
Chat,
}
@@ -44,7 +45,32 @@ pub struct ModelProviderInfo {
pub env_key_instructions: Option<String>,
/// Which wire protocol this provider expects.
#[serde(default)]
pub wire_api: WireApi,
/// Optional query parameters to append to the base URL.
pub query_params: Option<HashMap<String, String>>,
}
impl ModelProviderInfo {
pub(crate) fn get_full_url(&self) -> String {
let query_string = self
.query_params
.as_ref()
.map_or_else(String::new, |params| {
let full_params = params
.iter()
.map(|(k, v)| format!("{k}={v}"))
.collect::<Vec<_>>()
.join("&");
format!("?{full_params}")
});
let base_url = &self.base_url;
match self.wire_api {
WireApi::Responses => format!("{base_url}/responses{query_string}"),
WireApi::Chat => format!("{base_url}/chat/completions{query_string}"),
}
}
}
impl ModelProviderInfo {
@@ -96,6 +122,7 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
env_key: Some("OPENAI_API_KEY".into()),
env_key_instructions: Some("Create an API key (https://platform.openai.com) and export it as an environment variable.".into()),
wire_api: WireApi::Responses,
query_params: None,
},
),
]
@@ -103,3 +130,51 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
.map(|(k, v)| (k.to_string(), v))
.collect()
}
#[cfg(test)]
mod tests {
#![allow(clippy::unwrap_used)]
use super::*;
#[test]
fn test_deserialize_ollama_model_provider_toml() {
let ollama_provider_toml = r#"
name = "Ollama"
base_url = "http://localhost:11434/v1"
"#;
let expected_provider = ModelProviderInfo {
name: "Ollama".into(),
base_url: "http://localhost:11434/v1".into(),
env_key: None,
env_key_instructions: None,
wire_api: WireApi::Chat,
query_params: None,
};
let provider: ModelProviderInfo = toml::from_str(ollama_provider_toml).unwrap();
assert_eq!(expected_provider, provider);
}
#[test]
fn test_deserialize_azure_model_provider_toml() {
let azure_provider_toml = r#"
name = "Azure"
base_url = "https://xxxxx.openai.azure.com/openai"
env_key = "AZURE_OPENAI_API_KEY"
query_params = { api-version = "2025-04-01-preview" }
"#;
let expected_provider = ModelProviderInfo {
name: "Azure".into(),
base_url: "https://xxxxx.openai.azure.com/openai".into(),
env_key: Some("AZURE_OPENAI_API_KEY".into()),
env_key_instructions: None,
wire_api: WireApi::Chat,
query_params: Some(maplit::hashmap! {
"api-version".to_string() => "2025-04-01-preview".to_string(),
}),
};
let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
assert_eq!(expected_provider, provider);
}
}

View File

@@ -107,6 +107,7 @@ async fn keeps_previous_response_id_between_tasks() {
env_key: Some("PATH".into()),
env_key_instructions: None,
wire_api: codex_core::WireApi::Responses,
query_params: None,
};
// Init session

View File

@@ -96,6 +96,7 @@ async fn retries_on_early_close() {
env_key: Some("PATH".into()),
env_key_instructions: None,
wire_api: codex_core::WireApi::Responses,
query_params: None,
};
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());

View File

@@ -2,6 +2,13 @@
set -euo pipefail
# By default, this script uses a version based on the current date and time.
# If you want to specify a version, pass it as the first argument. Example:
#
# ./scripts/create_github_release.sh 0.1.0-alpha.4
#
# The value will be used to update the `version` field in `Cargo.toml`.
# Change to the root of the Cargo workspace.
cd "$(dirname "${BASH_SOURCE[0]}")/.."
@@ -15,7 +22,11 @@ fi
CURRENT_BRANCH=$(git symbolic-ref --short -q HEAD)
# Create a new branch for the release and make a commit with the new version.
VERSION=$(printf '0.0.%d' "$(date +%y%m%d%H%M)")
if [ $# -ge 1 ]; then
VERSION="$1"
else
VERSION=$(printf '0.0.%d' "$(date +%y%m%d%H%M)")
fi
TAG="rust-v$VERSION"
git checkout -b "$TAG"
perl -i -pe "s/^version = \".*\"/version = \"$VERSION\"/" Cargo.toml
@@ -23,4 +34,5 @@ git add Cargo.toml
git commit -m "Release $VERSION"
git tag -a "$TAG" -m "Release $VERSION"
git push origin "refs/tags/$TAG"
git checkout "$CURRENT_BRANCH"