[5/8] Rename Python SDK package to openai-codex (#21905)

## Why

The SDK should be published under the reserved public distribution name
`openai-codex`, and its import module should match that name following
Python naming conventions. Since distribution names can contain hyphens
but import modules cannot, the public import path becomes `openai_codex`.

Keeping the rename separate from the public API surface change makes the
naming change easy to review and avoids mixing it with API curation.

## What

- Rename the SDK distribution from `openai-codex-app-server-sdk` to
`openai-codex`.
- Rename the import package from `codex_app_server` to `openai_codex`.
- Keep the runtime wheel as the separate `openai-codex-cli-bin`
dependency.
- Update docs, examples, notebooks, artifact scripts, lockfile metadata,
and tests for the new distribution/module names.

## Stack

1. #21891 `[1/8]` Pin Python SDK runtime dependency
2. #21893 `[2/8]` Generate Python SDK types from pinned runtime
3. #21895 `[3/8]` Run Python SDK tests in CI
4. #21896 `[4/8]` Define Python SDK public API surface
5. This PR `[5/8]` Rename Python SDK package to `openai-codex`
6. #21910 `[6/8]` Add high-level Python SDK approval mode
7. #22014 `[7/8]` Add Python SDK app-server integration harness
8. #22021 `[8/8]` Add Python SDK Ruff formatting

## Verification

- Updated package metadata and public API tests to assert the new
distribution name (`openai-codex`) and import name (`openai_codex`).

Co-authored-by: Codex <noreply@openai.com>
This commit is contained in:
Ahmed Ibrahim
2026-05-12 00:59:25 +03:00
committed by GitHub
parent b4bc02439f
commit f1b84fac63
63 changed files with 152 additions and 152 deletions

View File

@@ -12,5 +12,5 @@ if src_str in sys.path:
sys.path.insert(0, src_str)
for module_name in list(sys.modules):
if module_name == "codex_app_server" or module_name.startswith("codex_app_server."):
if module_name == "openai_codex" or module_name.startswith("openai_codex."):
sys.modules.pop(module_name)

View File

@@ -172,12 +172,12 @@ def test_examples_readme_points_to_runtime_version_source_of_truth() -> None:
def test_runtime_distribution_name_is_consistent() -> None:
script = _load_update_script_module()
runtime_setup = _load_runtime_setup_module()
from codex_app_server import client as client_module
from codex_app_server import _version
from openai_codex import client as client_module
from openai_codex import _version
assert script.SDK_DISTRIBUTION_NAME == "openai-codex-app-server-sdk"
assert runtime_setup.SDK_PACKAGE_NAME == "openai-codex-app-server-sdk"
assert _version.DISTRIBUTION_NAME == "openai-codex-app-server-sdk"
assert script.SDK_DISTRIBUTION_NAME == "openai-codex"
assert runtime_setup.SDK_PACKAGE_NAME == "openai-codex"
assert _version.DISTRIBUTION_NAME == "openai-codex"
assert script.RUNTIME_DISTRIBUTION_NAME == "openai-codex-cli-bin"
assert runtime_setup.PACKAGE_NAME == "openai-codex-cli-bin"
assert client_module.RUNTIME_PKG_NAME == "openai-codex-cli-bin"
@@ -457,18 +457,18 @@ def test_stage_sdk_release_injects_exact_runtime_pin(tmp_path: Path) -> None:
)
pyproject = (staged / "pyproject.toml").read_text()
assert 'name = "openai-codex-app-server-sdk"' in pyproject
assert 'name = "openai-codex"' in pyproject
assert 'version = "0.116.0a1"' in pyproject
assert '"openai-codex-cli-bin==0.116.0a1"' in pyproject
assert (
'__version__ = "0.116.0a1"'
not in (staged / "src" / "codex_app_server" / "__init__.py").read_text()
not in (staged / "src" / "openai_codex" / "__init__.py").read_text()
)
assert (
'client_version: str = "0.116.0a1"'
not in (staged / "src" / "codex_app_server" / "client.py").read_text()
not in (staged / "src" / "openai_codex" / "client.py").read_text()
)
assert not any((staged / "src" / "codex_app_server").glob("bin/**"))
assert not any((staged / "src" / "openai_codex").glob("bin/**"))
def test_stage_sdk_release_replaces_existing_staging_dir(tmp_path: Path) -> None:
@@ -637,7 +637,7 @@ def test_stage_runtime_stages_binary_without_type_generation(tmp_path: Path) ->
def test_default_runtime_is_resolved_from_installed_runtime_package(
tmp_path: Path,
) -> None:
from codex_app_server import client as client_module
from openai_codex import client as client_module
fake_binary = tmp_path / ("codex.exe" if client_module.os.name == "nt" else "codex")
fake_binary.write_text("")
@@ -652,7 +652,7 @@ def test_default_runtime_is_resolved_from_installed_runtime_package(
def test_explicit_codex_bin_override_takes_priority(tmp_path: Path) -> None:
from codex_app_server import client as client_module
from openai_codex import client as client_module
explicit_binary = tmp_path / (
"custom-codex.exe" if client_module.os.name == "nt" else "custom-codex"
@@ -670,7 +670,7 @@ def test_explicit_codex_bin_override_takes_priority(tmp_path: Path) -> None:
def test_missing_runtime_package_requires_explicit_codex_bin() -> None:
from codex_app_server import client as client_module
from openai_codex import client as client_module
ops = client_module.CodexBinResolverOps(
installed_codex_path=lambda: (_ for _ in ()).throw(
@@ -684,7 +684,7 @@ def test_missing_runtime_package_requires_explicit_codex_bin() -> None:
def test_broken_runtime_package_does_not_fall_back() -> None:
from codex_app_server import client as client_module
from openai_codex import client as client_module
ops = client_module.CodexBinResolverOps(
installed_codex_path=lambda: (_ for _ in ()).throw(

View File

@@ -4,12 +4,12 @@ import asyncio
import time
from types import SimpleNamespace
from codex_app_server.async_client import AsyncAppServerClient
from codex_app_server.generated.v2_all import (
from openai_codex.async_client import AsyncAppServerClient
from openai_codex.generated.v2_all import (
AgentMessageDeltaNotification,
TurnCompletedNotification,
)
from codex_app_server.models import Notification, UnknownNotification
from openai_codex.models import Notification, UnknownNotification
def test_async_client_allows_concurrent_transport_calls() -> None:

View File

@@ -3,9 +3,9 @@ from __future__ import annotations
from pathlib import Path
from typing import Any
from codex_app_server.client import AppServerClient, _params_dict
from codex_app_server.generated.notification_registry import notification_turn_id
from codex_app_server.generated.v2_all import (
from openai_codex.client import AppServerClient, _params_dict
from openai_codex.generated.notification_registry import notification_turn_id
from openai_codex.generated.v2_all import (
AgentMessageDeltaNotification,
ApprovalsReviewer,
ThreadListParams,
@@ -14,7 +14,7 @@ from codex_app_server.generated.v2_all import (
TurnCompletedNotification,
WarningNotification,
)
from codex_app_server.models import Notification, UnknownNotification
from openai_codex.models import Notification, UnknownNotification
ROOT = Path(__file__).resolve().parents[1]
@@ -45,7 +45,7 @@ def test_generated_params_models_are_snake_case_and_dump_by_alias() -> None:
def test_generated_v2_bundle_has_single_shared_plan_type_definition() -> None:
source = (ROOT / "src" / "codex_app_server" / "generated" / "v2_all.py").read_text()
source = (ROOT / "src" / "openai_codex" / "generated" / "v2_all.py").read_text()
assert source.count("class PlanType(") == 1

View File

@@ -8,9 +8,9 @@ from pathlib import Path
ROOT = Path(__file__).resolve().parents[1]
GENERATED_TARGETS = [
Path("src/codex_app_server/generated/notification_registry.py"),
Path("src/codex_app_server/generated/v2_all.py"),
Path("src/codex_app_server/api.py"),
Path("src/openai_codex/generated/notification_registry.py"),
Path("src/openai_codex/generated/v2_all.py"),
Path("src/openai_codex/api.py"),
]

View File

@@ -7,9 +7,9 @@ from types import SimpleNamespace
import pytest
import codex_app_server.api as public_api_module
from codex_app_server.client import AppServerClient
from codex_app_server.generated.v2_all import (
import openai_codex.api as public_api_module
from openai_codex.client import AppServerClient
from openai_codex.generated.v2_all import (
AgentMessageDeltaNotification,
ItemCompletedNotification,
MessagePhase,
@@ -17,8 +17,8 @@ from codex_app_server.generated.v2_all import (
TurnCompletedNotification,
TurnStatus,
)
from codex_app_server.models import InitializeResponse, Notification
from codex_app_server.api import (
from openai_codex.models import InitializeResponse, Notification
from openai_codex.api import (
AsyncCodex,
AsyncThread,
AsyncTurnHandle,

View File

@@ -6,9 +6,9 @@ import tomllib
from pathlib import Path
from typing import Any
import codex_app_server
import codex_app_server.types as public_types
from codex_app_server import (
import openai_codex
import openai_codex.types as public_types
from openai_codex import (
AppServerConfig,
AsyncCodex,
AsyncThread,
@@ -16,7 +16,7 @@ from codex_app_server import (
RunResult,
Thread,
)
from codex_app_server.types import InitializeResponse
from openai_codex.types import InitializeResponse
EXPECTED_ROOT_EXPORTS = [
"__version__",
@@ -122,32 +122,32 @@ def test_package_and_default_client_versions_follow_project_version() -> None:
pyproject_path = Path(__file__).resolve().parents[1] / "pyproject.toml"
pyproject = tomllib.loads(pyproject_path.read_text())
assert codex_app_server.__version__ == pyproject["project"]["version"]
assert AppServerConfig().client_version == codex_app_server.__version__
assert openai_codex.__version__ == pyproject["project"]["version"]
assert AppServerConfig().client_version == openai_codex.__version__
def test_package_includes_py_typed_marker() -> None:
"""The wheel should advertise that inline type information is available."""
marker = resources.files("codex_app_server").joinpath("py.typed")
marker = resources.files("openai_codex").joinpath("py.typed")
assert marker.is_file()
def test_package_root_exports_only_public_api() -> None:
"""The package root should expose the supported SDK surface, not internals."""
assert codex_app_server.__all__ == EXPECTED_ROOT_EXPORTS
assert openai_codex.__all__ == EXPECTED_ROOT_EXPORTS
assert {
name: hasattr(codex_app_server, name) for name in EXPECTED_ROOT_EXPORTS
name: hasattr(openai_codex, name) for name in EXPECTED_ROOT_EXPORTS
} == {name: True for name in EXPECTED_ROOT_EXPORTS}
assert {
"AppServerClient": hasattr(codex_app_server, "AppServerClient"),
"AsyncAppServerClient": hasattr(codex_app_server, "AsyncAppServerClient"),
"InitializeResponse": hasattr(codex_app_server, "InitializeResponse"),
"ThreadStartParams": hasattr(codex_app_server, "ThreadStartParams"),
"TurnStartParams": hasattr(codex_app_server, "TurnStartParams"),
"AppServerClient": hasattr(openai_codex, "AppServerClient"),
"AsyncAppServerClient": hasattr(openai_codex, "AsyncAppServerClient"),
"InitializeResponse": hasattr(openai_codex, "InitializeResponse"),
"ThreadStartParams": hasattr(openai_codex, "ThreadStartParams"),
"TurnStartParams": hasattr(openai_codex, "TurnStartParams"),
"TurnCompletedNotification": hasattr(
codex_app_server, "TurnCompletedNotification"
openai_codex, "TurnCompletedNotification"
),
"TurnStatus": hasattr(codex_app_server, "TurnStatus"),
"TurnStatus": hasattr(openai_codex, "TurnStatus"),
} == {
"AppServerClient": False,
"AsyncAppServerClient": False,
@@ -162,7 +162,7 @@ def test_package_root_exports_only_public_api() -> None:
def test_package_star_import_matches_public_api() -> None:
"""Star imports should follow the same explicit public API list."""
namespace: dict[str, object] = {}
exec("from codex_app_server import *", namespace)
exec("from openai_codex import *", namespace)
exported = set(namespace) - {"__builtins__"}
assert exported == set(EXPECTED_ROOT_EXPORTS)
@@ -179,7 +179,7 @@ def test_types_module_exports_curated_public_types() -> None:
def test_types_star_import_matches_public_types() -> None:
"""Star imports from the type module should match its explicit export list."""
namespace: dict[str, object] = {}
exec("from codex_app_server.types import *", namespace)
exec("from openai_codex.types import *", namespace)
exported = set(namespace) - {"__builtins__"}
assert exported == set(EXPECTED_TYPES_EXPORTS)
@@ -189,11 +189,11 @@ def test_examples_use_public_import_surfaces() -> None:
"""Examples should teach users the public root and type-module imports only."""
examples_root = Path(__file__).resolve().parents[1] / "examples"
private_import_markers = [
"codex_app_server.api",
"codex_app_server.client",
"codex_app_server.generated",
"codex_app_server.models",
"codex_app_server.retry",
"openai_codex.api",
"openai_codex.client",
"openai_codex.generated",
"openai_codex.models",
"openai_codex.retry",
]
offenders = {

View File

@@ -205,7 +205,7 @@ def test_real_initialize_and_model_list(runtime_env: PreparedRuntimeEnv) -> None
textwrap.dedent(
"""
import json
from codex_app_server import Codex
from openai_codex import Codex
with Codex() as codex:
models = codex.models(include_hidden=True)
@@ -234,7 +234,7 @@ def test_real_thread_and_turn_start_smoke(runtime_env: PreparedRuntimeEnv) -> No
textwrap.dedent(
"""
import json
from codex_app_server import Codex, TextInput
from openai_codex import Codex, TextInput
with Codex() as codex:
thread = codex.thread_start(
@@ -271,7 +271,7 @@ def test_real_thread_run_convenience_smoke(runtime_env: PreparedRuntimeEnv) -> N
textwrap.dedent(
"""
import json
from codex_app_server import Codex
from openai_codex import Codex
with Codex() as codex:
thread = codex.thread_start(
@@ -304,7 +304,7 @@ def test_real_async_thread_turn_usage_and_ids_smoke(
"""
import asyncio
import json
from codex_app_server import AsyncCodex, TextInput
from openai_codex import AsyncCodex, TextInput
async def main():
async with AsyncCodex() as codex:
@@ -347,7 +347,7 @@ def test_real_async_thread_run_convenience_smoke(
"""
import asyncio
import json
from codex_app_server import AsyncCodex
from openai_codex import AsyncCodex
async def main():
async with AsyncCodex() as codex:
@@ -436,7 +436,7 @@ def test_real_streaming_smoke_turn_completed(runtime_env: PreparedRuntimeEnv) ->
textwrap.dedent(
"""
import json
from codex_app_server import Codex, TextInput
from openai_codex import Codex, TextInput
with Codex() as codex:
thread = codex.thread_start(
@@ -469,7 +469,7 @@ def test_real_turn_interrupt_smoke(runtime_env: PreparedRuntimeEnv) -> None:
textwrap.dedent(
"""
import json
from codex_app_server import Codex, TextInput
from openai_codex import Codex, TextInput
with Codex() as codex:
thread = codex.thread_start(