Compare commits

...

1 Commits

Author SHA1 Message Date
Michael Bolin
6aa4aed202 sdk/python: use standalone codex-app-server runtime 2026-04-24 16:19:11 -07:00
14 changed files with 521 additions and 206 deletions

View File

@@ -1,9 +1,9 @@
# Codex CLI Runtime for Python SDK
# Codex App Server Runtime for Python SDK
Platform-specific runtime package consumed by the published `codex-app-server-sdk`.
This package is staged during release so the SDK can pin an exact Codex CLI
This package is staged during release so the SDK can pin an exact Codex app-server
version without checking platform binaries into the repo.
`openai-codex-cli-bin` is intentionally wheel-only. Do not build or publish an
`openai-codex-app-server-bin` is intentionally wheel-only. Do not build or publish an
sdist for this package.

View File

@@ -16,12 +16,14 @@ class RuntimeBuildHook(BuildHookInterface):
del version
if self.target_name == "sdist":
raise RuntimeError(
"openai-codex-cli-bin is wheel-only; build and publish platform wheels only."
"openai-codex-app-server-bin is wheel-only; build and publish platform wheels only."
)
platform_tag = self.config.get("platform-tag") or os.environ.get(
"CODEX_CLI_BIN_PLATFORM_TAG"
"CODEX_APP_SERVER_BIN_PLATFORM_TAG"
)
if not isinstance(platform_tag, str) or not platform_tag:
platform_tag = os.environ.get("CODEX_CLI_BIN_PLATFORM_TAG")
if not isinstance(platform_tag, str) or not platform_tag:
platform_tag = _platform_tag()

View File

@@ -3,9 +3,9 @@ requires = ["hatchling>=1.24.0"]
build-backend = "hatchling.build"
[project]
name = "openai-codex-cli-bin"
name = "openai-codex-app-server-bin"
version = "0.0.0-dev"
description = "Pinned Codex CLI runtime for the Python SDK"
description = "Pinned Codex app-server runtime for the Python SDK"
readme = "README.md"
requires-python = ">=3.10"
license = { text = "Apache-2.0" }
@@ -35,8 +35,8 @@ exclude = [
]
[tool.hatch.build.targets.wheel]
packages = ["src/codex_cli_bin"]
include = ["src/codex_cli_bin/bin/**"]
packages = ["src/codex_app_server_bin", "src/codex_cli_bin"]
include = ["src/codex_app_server_bin/bin/**"]
[tool.hatch.build.targets.wheel.hooks.custom]

View File

@@ -0,0 +1,31 @@
from __future__ import annotations
import os
from pathlib import Path
PACKAGE_NAME = "openai-codex-app-server-bin"
def bundled_app_server_path() -> Path:
package_root = Path(__file__).resolve().parent
for exe in _candidate_binary_names():
path = package_root / "bin" / exe
if path.is_file():
return path
candidate_list = ", ".join(
str(package_root / "bin" / exe) for exe in _candidate_binary_names()
)
raise FileNotFoundError(
f"{PACKAGE_NAME} is installed but missing its packaged app-server binary. "
f"Checked: {candidate_list}"
)
def _candidate_binary_names() -> tuple[str, str]:
if os.name == "nt":
return ("codex-app-server.exe", "codex.exe")
return ("codex-app-server", "codex")
__all__ = ["PACKAGE_NAME", "bundled_app_server_path"]

View File

@@ -1,19 +1,13 @@
from __future__ import annotations
import os
from pathlib import Path
PACKAGE_NAME = "openai-codex-cli-bin"
from codex_app_server_bin import PACKAGE_NAME
from codex_app_server_bin import bundled_app_server_path
def bundled_codex_path() -> Path:
exe = "codex.exe" if os.name == "nt" else "codex"
path = Path(__file__).resolve().parent / "bin" / exe
if not path.is_file():
raise FileNotFoundError(
f"{PACKAGE_NAME} is installed but missing its packaged codex binary at {path}"
)
return path
return bundled_app_server_path()
__all__ = ["PACKAGE_NAME", "bundled_codex_path"]
__all__ = ["PACKAGE_NAME", "bundled_app_server_path", "bundled_codex_path"]

View File

@@ -7,6 +7,6 @@ exclude-newer = "2026-04-16T16:29:01.518541933Z"
exclude-newer-span = "P7D"
[[package]]
name = "openai-codex-cli-bin"
name = "openai-codex-app-server-bin"
version = "0.0.0.dev0"
source = { editable = "." }

View File

@@ -12,8 +12,8 @@ uv sync
source .venv/bin/activate
```
Published SDK builds pin an exact `openai-codex-cli-bin` runtime dependency. For local
repo development, either pass `AppServerConfig(codex_bin=...)` to point at a
Published SDK builds pin an exact `openai-codex-app-server-bin` runtime dependency. For local
repo development, either pass `AppServerConfig(app_server_bin=...)` to point at a
local build explicitly, or use the repo examples/notebook bootstrap which
installs the pinned runtime package automatically.
@@ -54,13 +54,13 @@ python examples/01_quickstart_constructor/async.py
The repo no longer checks `codex` binaries into `sdk/python`.
Published SDK builds are pinned to an exact `openai-codex-cli-bin` package version,
Published SDK builds are pinned to an exact `openai-codex-app-server-bin` package version,
and that runtime package carries the platform-specific binary for the target
wheel.
For local repo development, the checked-in `sdk/python-runtime` package is only
a template for staged release artifacts. Editable installs should use an
explicit `codex_bin` override for manual SDK usage; the repo examples and
explicit `app_server_bin` override for manual SDK usage; the repo examples and
notebook bootstrap the pinned runtime package automatically.
## Maintainer workflow
@@ -74,26 +74,26 @@ python scripts/update_sdk_artifacts.py \
--runtime-version 1.2.3
python scripts/update_sdk_artifacts.py \
stage-runtime \
/tmp/codex-python-release/openai-codex-cli-bin \
/path/to/codex \
/tmp/codex-python-release/openai-codex-app-server-bin \
/path/to/codex-app-server \
--runtime-version 1.2.3
```
This supports the CI release flow:
- run `generate-types` before packaging
- stage `codex-app-server-sdk` once with an exact `openai-codex-cli-bin==...` dependency
- stage `openai-codex-cli-bin` on each supported platform runner with the same pinned runtime version
- build and publish `openai-codex-cli-bin` as platform wheels only; do not publish an sdist
- stage `codex-app-server-sdk` once with an exact `openai-codex-app-server-bin==...` dependency
- stage `openai-codex-app-server-bin` on each supported platform runner with the same pinned runtime version
- build and publish `openai-codex-app-server-bin` as platform wheels only; do not publish an sdist
## Compatibility and versioning
- Package: `codex-app-server-sdk`
- Runtime package: `openai-codex-cli-bin`
- Runtime package: `openai-codex-app-server-bin`
- Current SDK version in this repo: `0.2.0`
- Python: `>=3.10`
- Target protocol: Codex `app-server` JSON-RPC v2
- Recommendation: keep SDK and `codex` CLI reasonably up to date together
- Recommendation: keep SDK and `codex-app-server` runtime reasonably up to date together
## Notes

View File

@@ -15,7 +15,7 @@ import urllib.request
import zipfile
from pathlib import Path
PACKAGE_NAME = "openai-codex-cli-bin"
PACKAGE_NAME = "openai-codex-app-server-bin"
PINNED_RUNTIME_VERSION = "0.116.0-alpha.1"
REPO_SLUG = "openai/codex"
@@ -39,7 +39,10 @@ def ensure_runtime_package_installed(
installed_version = _installed_runtime_version(python_executable)
normalized_requested = _normalized_package_version(requested_version)
if installed_version is not None and _normalized_package_version(installed_version) == normalized_requested:
if (
installed_version is not None
and _normalized_package_version(installed_version) == normalized_requested
):
return requested_version
with tempfile.TemporaryDirectory(prefix="codex-python-runtime-") as temp_root_str:
@@ -61,7 +64,10 @@ def ensure_runtime_package_installed(
importlib.invalidate_caches()
installed_version = _installed_runtime_version(python_executable)
if installed_version is None or _normalized_package_version(installed_version) != normalized_requested:
if (
installed_version is None
or _normalized_package_version(installed_version) != normalized_requested
):
raise RuntimeSetupError(
f"Expected {PACKAGE_NAME} {requested_version} in {python_executable}, "
f"but found {installed_version!r} after installation."
@@ -70,24 +76,46 @@ def ensure_runtime_package_installed(
def platform_asset_name() -> str:
return platform_asset_names()[0]
def platform_asset_names() -> tuple[str, str]:
system = platform.system().lower()
machine = platform.machine().lower()
if system == "darwin":
if machine in {"arm64", "aarch64"}:
return "codex-aarch64-apple-darwin.tar.gz"
return (
"codex-app-server-aarch64-apple-darwin.tar.gz",
"codex-aarch64-apple-darwin.tar.gz",
)
if machine in {"x86_64", "amd64"}:
return "codex-x86_64-apple-darwin.tar.gz"
return (
"codex-app-server-x86_64-apple-darwin.tar.gz",
"codex-x86_64-apple-darwin.tar.gz",
)
elif system == "linux":
if machine in {"aarch64", "arm64"}:
return "codex-aarch64-unknown-linux-musl.tar.gz"
return (
"codex-app-server-aarch64-unknown-linux-musl.tar.gz",
"codex-aarch64-unknown-linux-musl.tar.gz",
)
if machine in {"x86_64", "amd64"}:
return "codex-x86_64-unknown-linux-musl.tar.gz"
return (
"codex-app-server-x86_64-unknown-linux-musl.tar.gz",
"codex-x86_64-unknown-linux-musl.tar.gz",
)
elif system == "windows":
if machine in {"aarch64", "arm64"}:
return "codex-aarch64-pc-windows-msvc.exe.zip"
return (
"codex-app-server-aarch64-pc-windows-msvc.exe.zip",
"codex-aarch64-pc-windows-msvc.exe.zip",
)
if machine in {"x86_64", "amd64"}:
return "codex-x86_64-pc-windows-msvc.exe.zip"
return (
"codex-app-server-x86_64-pc-windows-msvc.exe.zip",
"codex-x86_64-pc-windows-msvc.exe.zip",
)
raise RuntimeSetupError(
f"Unsupported runtime artifact platform: system={platform.system()!r}, "
@@ -96,6 +124,18 @@ def platform_asset_name() -> str:
def runtime_binary_name() -> str:
return (
"codex-app-server.exe"
if platform.system().lower() == "windows"
else "codex-app-server"
)
def runtime_binary_names() -> tuple[str, str]:
return (runtime_binary_name(), legacy_runtime_binary_name())
def legacy_runtime_binary_name() -> str:
return "codex.exe" if platform.system().lower() == "windows" else "codex"
@@ -103,8 +143,8 @@ def _installed_runtime_version(python_executable: str | Path) -> str | None:
snippet = (
"import importlib.metadata, json, sys\n"
"try:\n"
" from codex_cli_bin import bundled_codex_path\n"
" bundled_codex_path()\n"
" from codex_app_server_bin import bundled_app_server_path\n"
" bundled_app_server_path()\n"
f" print(json.dumps({{'version': importlib.metadata.version({PACKAGE_NAME!r})}}))\n"
"except Exception:\n"
" sys.exit(1)\n"
@@ -152,88 +192,115 @@ def _release_metadata(version: str) -> dict[str, object]:
def _download_release_archive(version: str, temp_root: Path) -> Path:
asset_name = platform_asset_name()
archive_path = temp_root / asset_name
browser_download_url = (
f"https://github.com/{REPO_SLUG}/releases/download/rust-v{version}/{asset_name}"
)
request = urllib.request.Request(
browser_download_url,
headers={"User-Agent": "codex-python-runtime-setup"},
)
try:
with urllib.request.urlopen(request) as response, archive_path.open("wb") as fh:
shutil.copyfileobj(response, fh)
return archive_path
except urllib.error.HTTPError:
pass
asset_names = platform_asset_names()
for asset_name in asset_names:
archive_path = temp_root / asset_name
browser_download_url = f"https://github.com/{REPO_SLUG}/releases/download/rust-v{version}/{asset_name}"
request = urllib.request.Request(
browser_download_url,
headers={"User-Agent": "codex-python-runtime-setup"},
)
try:
with (
urllib.request.urlopen(request) as response,
archive_path.open("wb") as fh,
):
shutil.copyfileobj(response, fh)
return archive_path
except urllib.error.HTTPError:
continue
metadata = _release_metadata(version)
assets = metadata.get("assets")
if not isinstance(assets, list):
raise RuntimeSetupError(f"Release rust-v{version} returned malformed assets metadata.")
asset = next(
(
item
for item in assets
if isinstance(item, dict) and item.get("name") == asset_name
),
None,
)
if asset is None:
raise RuntimeSetupError(
f"Release rust-v{version} does not contain asset {asset_name} for this platform."
f"Release rust-v{version} returned malformed assets metadata."
)
api_url = asset.get("url")
if not isinstance(api_url, str):
api_url = None
matched_assets = [
item
for item in assets
if isinstance(item, dict) and item.get("name") in asset_names
]
if not matched_assets:
supported_assets = ", ".join(asset_names)
raise RuntimeSetupError(
f"Release rust-v{version} does not contain a supported runtime asset for this platform. "
f"Tried: {supported_assets}."
)
if api_url is not None:
token = _github_token()
if token is not None:
request = urllib.request.Request(
api_url,
headers=_github_api_headers("application/octet-stream"),
)
try:
with urllib.request.urlopen(request) as response, archive_path.open("wb") as fh:
shutil.copyfileobj(response, fh)
return archive_path
except urllib.error.HTTPError:
pass
for asset_name in asset_names:
asset = next(
(
item
for item in matched_assets
if isinstance(item, dict) and item.get("name") == asset_name
),
None,
)
if asset is None:
continue
archive_path = temp_root / asset_name
api_url = asset.get("url")
if not isinstance(api_url, str):
api_url = None
if api_url is not None:
token = _github_token()
if token is not None:
request = urllib.request.Request(
api_url,
headers=_github_api_headers("application/octet-stream"),
)
try:
with (
urllib.request.urlopen(request) as response,
archive_path.open("wb") as fh,
):
shutil.copyfileobj(response, fh)
return archive_path
except urllib.error.HTTPError:
pass
if shutil.which("gh") is None:
supported_assets = ", ".join(asset_names)
raise RuntimeSetupError(
f"Unable to download {asset_name} for rust-v{version}. "
f"Unable to download a supported runtime asset ({supported_assets}) for rust-v{version}. "
"Provide GH_TOKEN/GITHUB_TOKEN or install/authenticate GitHub CLI."
)
try:
subprocess.run(
[
"gh",
"release",
"download",
f"rust-v{version}",
"--repo",
REPO_SLUG,
"--pattern",
asset_name,
"--dir",
str(temp_root),
],
check=True,
text=True,
capture_output=True,
)
except subprocess.CalledProcessError as exc:
raise RuntimeSetupError(
f"gh release download failed for rust-v{version} asset {asset_name}.\n"
f"STDOUT:\n{exc.stdout}\nSTDERR:\n{exc.stderr}"
) from exc
return archive_path
last_error: subprocess.CalledProcessError | None = None
for asset_name in asset_names:
archive_path = temp_root / asset_name
try:
subprocess.run(
[
"gh",
"release",
"download",
f"rust-v{version}",
"--repo",
REPO_SLUG,
"--pattern",
asset_name,
"--dir",
str(temp_root),
],
check=True,
text=True,
capture_output=True,
)
return archive_path
except subprocess.CalledProcessError as exc:
last_error = exc
assert last_error is not None
supported_assets = ", ".join(asset_names)
raise RuntimeSetupError(
f"gh release download failed for rust-v{version} runtime assets ({supported_assets}).\n"
f"STDOUT:\n{last_error.stdout}\nSTDERR:\n{last_error.stderr}"
) from last_error
def _extract_runtime_binary(archive_path: Path, temp_root: Path) -> Path:
@@ -249,24 +316,33 @@ def _extract_runtime_binary(archive_path: Path, temp_root: Path) -> Path:
with zipfile.ZipFile(archive_path) as zip_file:
zip_file.extractall(extract_dir)
else:
raise RuntimeSetupError(f"Unsupported release archive format: {archive_path.name}")
raise RuntimeSetupError(
f"Unsupported release archive format: {archive_path.name}"
)
binary_name = runtime_binary_name()
archive_stem = archive_path.name.removesuffix(".tar.gz").removesuffix(".zip")
candidates = [
path
for path in extract_dir.rglob("*")
if path.is_file()
and (
path.name == binary_name
path.name in runtime_binary_names()
or path.name == archive_stem
or path.name.startswith("codex-")
)
]
if not candidates:
supported_binaries = ", ".join(runtime_binary_names())
raise RuntimeSetupError(
f"Failed to find {binary_name} in extracted runtime archive {archive_path.name}."
f"Failed to find one of {supported_binaries} in extracted runtime archive "
f"{archive_path.name}."
)
for binary_name in runtime_binary_names():
for candidate in candidates:
if candidate.name == binary_name:
return candidate
return candidates[0]
@@ -356,4 +432,5 @@ __all__ = [
"ensure_runtime_package_installed",
"pinned_runtime_version",
"platform_asset_name",
"platform_asset_names",
]

View File

@@ -54,13 +54,13 @@ This avoids duplicate ways to do the same operation and keeps behavior explicit.
Common causes:
- published runtime package (`openai-codex-cli-bin`) is not installed
- local `codex_bin` override points to a missing file
- published runtime package (`openai-codex-app-server-bin`) is not installed
- local `app_server_bin` override points to a missing file
- local auth/session is missing
- incompatible/old app-server
Maintainers stage releases by building the SDK once and the runtime once per
platform with the same pinned runtime version. Publish `openai-codex-cli-bin` as
platform with the same pinned runtime version. Publish `openai-codex-app-server-bin` as
platform wheels only; do not publish an sdist:
```bash
@@ -72,8 +72,8 @@ python scripts/update_sdk_artifacts.py \
--runtime-version 1.2.3
python scripts/update_sdk_artifacts.py \
stage-runtime \
/tmp/codex-python-release/openai-codex-cli-bin \
/path/to/codex \
/tmp/codex-python-release/openai-codex-app-server-bin \
/path/to/codex-app-server \
--runtime-version 1.2.3
```

View File

@@ -18,7 +18,7 @@ Requirements:
- Python `>=3.10`
- uv
- installed `openai-codex-cli-bin` runtime package, or an explicit `codex_bin` override
- installed `openai-codex-app-server-bin` runtime package, or an explicit `app_server_bin` override
- local Codex auth/session configured
## 2) Run your first turn (sync)
@@ -40,7 +40,7 @@ with Codex() as codex:
What happened:
- `Codex()` started and initialized `codex app-server`.
- `Codex()` started and initialized `codex-app-server`.
- `thread_start(...)` created a thread.
- `thread.run("...")` started a turn, consumed events until completion, and returned the final assistant response plus collected items and usage.
- `result.final_response` is `None` when no final-answer or phase-less assistant message item completes for the turn.

View File

@@ -21,12 +21,12 @@ source .venv/bin/activate
When running examples from this repo checkout, the SDK source uses the local
tree and does not bundle a runtime binary. The helper in `examples/_bootstrap.py`
uses the installed `openai-codex-cli-bin` runtime package.
uses the installed `openai-codex-app-server-bin` runtime package.
If the pinned `openai-codex-cli-bin` runtime is not already installed, the bootstrap
will download the matching GitHub release artifact, stage a temporary local
`openai-codex-cli-bin` package, install it into your active interpreter, and clean up
the temporary files afterward.
If the pinned `openai-codex-app-server-bin` runtime is not already installed, the
bootstrap will download the matching GitHub release artifact, stage a temporary
local `openai-codex-app-server-bin` package, install it into your active
interpreter, and clean up the temporary files afterward.
Current pinned runtime version: `0.116.0-alpha.1`
@@ -41,8 +41,8 @@ python examples/<example-folder>/async.py
The examples bootstrap local imports from `sdk/python/src` automatically, so no
SDK wheel install is required. You only need the Python dependencies for your
active interpreter and an installed `openai-codex-cli-bin` runtime package (either
already present or automatically provisioned by the bootstrap).
active interpreter and an installed `openai-codex-app-server-bin` runtime
package (either already present or automatically provisioned by the bootstrap).
## Recommended first run

View File

@@ -17,7 +17,8 @@ from dataclasses import dataclass
from pathlib import Path
from typing import Any, Callable, Sequence, get_args, get_origin
RUNTIME_DISTRIBUTION_NAME = "openai-codex-cli-bin"
RUNTIME_DISTRIBUTION_NAME = "openai-codex-app-server-bin"
RUNTIME_PACKAGE_DIR = "codex_app_server_bin"
def repo_root() -> Path:
@@ -52,11 +53,21 @@ def _is_windows() -> bool:
def runtime_binary_name() -> str:
return "codex-app-server.exe" if _is_windows() else "codex-app-server"
def legacy_runtime_binary_name() -> str:
return "codex.exe" if _is_windows() else "codex"
def staged_runtime_bin_path(root: Path) -> Path:
return root / "src" / "codex_cli_bin" / "bin" / runtime_binary_name()
def staged_runtime_bin_path(root: Path, binary_name: str | None = None) -> Path:
return (
root
/ "src"
/ RUNTIME_PACKAGE_DIR
/ "bin"
/ (binary_name or runtime_binary_name())
)
def run(cmd: list[str], cwd: Path) -> None:
@@ -178,7 +189,11 @@ def _rewrite_sdk_runtime_dependency(pyproject_text: str, runtime_version: str) -
)
raw_items = [item.strip() for item in match.group(1).split(",") if item.strip()]
raw_items = [item for item in raw_items if "codex-cli-bin" not in item]
raw_items = [
item
for item in raw_items
if "codex-cli-bin" not in item and "codex-app-server-bin" not in item
]
raw_items.append(f'"{RUNTIME_DISTRIBUTION_NAME}=={runtime_version}"')
replacement = "dependencies = [\n " + ",\n ".join(raw_items) + ",\n]"
return pyproject_text[: match.start()] + replacement + pyproject_text[match.end() :]
@@ -217,7 +232,7 @@ def stage_python_runtime_package(
pyproject_text = _rewrite_runtime_platform_tag(pyproject_text, platform_tag)
pyproject_path.write_text(pyproject_text)
out_bin = staged_runtime_bin_path(staging_dir)
out_bin = staged_runtime_bin_path(staging_dir, binary_path.name)
out_bin.parent.mkdir(parents=True, exist_ok=True)
shutil.copy2(binary_path, out_bin)
if not _is_windows():
@@ -995,7 +1010,7 @@ def build_parser() -> argparse.ArgumentParser:
stage_sdk_parser.add_argument(
"--runtime-version",
required=True,
help="Pinned openai-codex-cli-bin version for the staged SDK package",
help="Pinned openai-codex-app-server-bin version for the staged SDK package",
)
stage_sdk_parser.add_argument(
"--sdk-version",
@@ -1014,7 +1029,7 @@ def build_parser() -> argparse.ArgumentParser:
stage_runtime_parser.add_argument(
"runtime_binary",
type=Path,
help="Path to the codex binary to package for this platform",
help="Path to the app-server binary to package for this platform",
)
stage_runtime_parser.add_argument(
"--codex-version",

View File

@@ -47,7 +47,7 @@ from .retry import retry_on_overload
ModelT = TypeVar("ModelT", bound=BaseModel)
ApprovalHandler = Callable[[str, JsonObject | None], JsonObject]
RUNTIME_PKG_NAME = "openai-codex-cli-bin"
RUNTIME_PKG_NAME = "openai-codex-app-server-bin"
def _params_dict(
@@ -74,54 +74,75 @@ def _params_dict(
return dumped
if isinstance(params, dict):
return params
raise TypeError(f"Expected generated params model or dict, got {type(params).__name__}")
raise TypeError(
f"Expected generated params model or dict, got {type(params).__name__}"
)
def _installed_codex_path() -> Path:
def _installed_app_server_path() -> Path:
try:
from codex_cli_bin import bundled_codex_path
from codex_app_server_bin import bundled_app_server_path
except ImportError as exc:
raise FileNotFoundError(
"Unable to locate the pinned Codex runtime. Install the published SDK build "
f"with its {RUNTIME_PKG_NAME} dependency, or set AppServerConfig.codex_bin "
f"with its {RUNTIME_PKG_NAME} dependency, or set AppServerConfig.app_server_bin "
"explicitly."
) from exc
return bundled_codex_path()
return bundled_app_server_path()
@dataclass(frozen=True)
class CodexBinResolverOps:
installed_codex_path: Callable[[], Path]
class AppServerBinResolverOps:
installed_app_server_path: Callable[[], Path]
path_exists: Callable[[Path], bool]
def _default_codex_bin_resolver_ops() -> CodexBinResolverOps:
return CodexBinResolverOps(
installed_codex_path=_installed_codex_path,
CodexBinResolverOps = AppServerBinResolverOps
def _default_app_server_bin_resolver_ops() -> AppServerBinResolverOps:
return AppServerBinResolverOps(
installed_app_server_path=_installed_app_server_path,
path_exists=lambda path: path.exists(),
)
def resolve_codex_bin(config: "AppServerConfig", ops: CodexBinResolverOps) -> Path:
if config.codex_bin is not None:
codex_bin = Path(config.codex_bin)
if not ops.path_exists(codex_bin):
def resolve_app_server_bin(
config: "AppServerConfig", ops: AppServerBinResolverOps
) -> Path:
configured_bin = _configured_app_server_bin(config)
if configured_bin is not None:
app_server_bin = Path(configured_bin)
if not ops.path_exists(app_server_bin):
raise FileNotFoundError(
f"Codex binary not found at {codex_bin}. Set AppServerConfig.codex_bin "
"to a valid binary path."
f"Codex app-server binary not found at {app_server_bin}. Set "
"AppServerConfig.app_server_bin to a valid binary path."
)
return codex_bin
return app_server_bin
return ops.installed_codex_path()
return ops.installed_app_server_path()
def _resolve_codex_bin(config: "AppServerConfig") -> Path:
return resolve_codex_bin(config, _default_codex_bin_resolver_ops())
def _configured_app_server_bin(config: "AppServerConfig") -> str | None:
if config.app_server_bin is not None and config.codex_bin is not None:
raise ValueError(
"Set only one of AppServerConfig.app_server_bin or AppServerConfig.codex_bin."
)
return config.app_server_bin or config.codex_bin
def _resolve_app_server_bin(config: "AppServerConfig") -> Path:
return resolve_app_server_bin(config, _default_app_server_bin_resolver_ops())
def resolve_codex_bin(config: "AppServerConfig", ops: AppServerBinResolverOps) -> Path:
return resolve_app_server_bin(config, ops)
@dataclass(slots=True)
class AppServerConfig:
app_server_bin: str | None = None
codex_bin: str | None = None
launch_args_override: tuple[str, ...] | None = None
config_overrides: tuple[str, ...] = ()
@@ -133,8 +154,31 @@ class AppServerConfig:
experimental_api: bool = True
def _default_launch_args(app_server_bin: Path, config: AppServerConfig) -> list[str]:
if _is_legacy_codex_cli_binary(app_server_bin):
args = [str(app_server_bin)]
for kv in config.config_overrides:
args.extend(["--config", kv])
args.extend(["app-server", "--listen", "stdio://"])
return args
if config.config_overrides:
raise ValueError(
"AppServerConfig.config_overrides is only supported when launching the legacy "
"`codex` CLI runtime. With standalone `codex-app-server`, use "
"`thread_start(..., config=...)`, `thread_resume(..., config=...)`, or "
"`launch_args_override`."
)
return [str(app_server_bin), "--listen", "stdio://"]
def _is_legacy_codex_cli_binary(app_server_bin: Path) -> bool:
return app_server_bin.name.lower() == "codex.exe" or app_server_bin.name == "codex"
class AppServerClient:
"""Synchronous typed JSON-RPC client for `codex app-server` over stdio."""
"""Synchronous typed JSON-RPC client for Codex app-server over stdio."""
def __init__(
self,
@@ -165,11 +209,8 @@ class AppServerClient:
if self.config.launch_args_override is not None:
args = list(self.config.launch_args_override)
else:
codex_bin = _resolve_codex_bin(self.config)
args = [str(codex_bin)]
for kv in self.config.config_overrides:
args.extend(["--config", kv])
args.extend(["app-server", "--listen", "stdio://"])
app_server_bin = _resolve_app_server_bin(self.config)
args = _default_launch_args(app_server_bin, self.config)
env = os.environ.copy()
if self.config.env:
@@ -239,7 +280,9 @@ class AppServerClient:
def _request_raw(self, method: str, params: JsonObject | None = None) -> JsonValue:
request_id = str(uuid.uuid4())
self._write_message({"id": request_id, "method": method, "params": params or {}})
self._write_message(
{"id": request_id, "method": method, "params": params or {}}
)
while True:
msg = self._read_message()
@@ -301,8 +344,12 @@ class AppServerClient:
if self._active_turn_consumer == turn_id:
self._active_turn_consumer = None
def thread_start(self, params: V2ThreadStartParams | JsonObject | None = None) -> ThreadStartResponse:
return self.request("thread/start", _params_dict(params), response_model=ThreadStartResponse)
def thread_start(
self, params: V2ThreadStartParams | JsonObject | None = None
) -> ThreadStartResponse:
return self.request(
"thread/start", _params_dict(params), response_model=ThreadStartResponse
)
def thread_resume(
self,
@@ -310,12 +357,20 @@ class AppServerClient:
params: V2ThreadResumeParams | JsonObject | None = None,
) -> ThreadResumeResponse:
payload = {"threadId": thread_id, **_params_dict(params)}
return self.request("thread/resume", payload, response_model=ThreadResumeResponse)
return self.request(
"thread/resume", payload, response_model=ThreadResumeResponse
)
def thread_list(self, params: V2ThreadListParams | JsonObject | None = None) -> ThreadListResponse:
return self.request("thread/list", _params_dict(params), response_model=ThreadListResponse)
def thread_list(
self, params: V2ThreadListParams | JsonObject | None = None
) -> ThreadListResponse:
return self.request(
"thread/list", _params_dict(params), response_model=ThreadListResponse
)
def thread_read(self, thread_id: str, include_turns: bool = False) -> ThreadReadResponse:
def thread_read(
self, thread_id: str, include_turns: bool = False
) -> ThreadReadResponse:
return self.request(
"thread/read",
{"threadId": thread_id, "includeTurns": include_turns},
@@ -331,10 +386,18 @@ class AppServerClient:
return self.request("thread/fork", payload, response_model=ThreadForkResponse)
def thread_archive(self, thread_id: str) -> ThreadArchiveResponse:
return self.request("thread/archive", {"threadId": thread_id}, response_model=ThreadArchiveResponse)
return self.request(
"thread/archive",
{"threadId": thread_id},
response_model=ThreadArchiveResponse,
)
def thread_unarchive(self, thread_id: str) -> ThreadUnarchiveResponse:
return self.request("thread/unarchive", {"threadId": thread_id}, response_model=ThreadUnarchiveResponse)
return self.request(
"thread/unarchive",
{"threadId": thread_id},
response_model=ThreadUnarchiveResponse,
)
def thread_set_name(self, thread_id: str, name: str) -> ThreadSetNameResponse:
return self.request(
@@ -458,12 +521,16 @@ class AppServerClient:
model = NOTIFICATION_MODELS.get(method)
if model is None:
return Notification(method=method, payload=UnknownNotification(params=params_dict))
return Notification(
method=method, payload=UnknownNotification(params=params_dict)
)
try:
payload = model.model_validate(params_dict)
except Exception: # noqa: BLE001
return Notification(method=method, payload=UnknownNotification(params=params_dict))
return Notification(
method=method, payload=UnknownNotification(params=params_dict)
)
return Notification(method=method, payload=payload)
def _normalize_input_items(
@@ -476,7 +543,9 @@ class AppServerClient:
return [input_items]
return input_items
def _default_approval_handler(self, method: str, params: JsonObject | None) -> JsonObject:
def _default_approval_handler(
self, method: str, params: JsonObject | None
) -> JsonObject:
if method == "item/commandExecution/requestApproval":
return {"decision": "accept"}
if method == "item/fileChange/requestApproval":

View File

@@ -29,7 +29,9 @@ def _load_runtime_setup_module():
runtime_setup_path = ROOT / "_runtime_setup.py"
spec = importlib.util.spec_from_file_location("_runtime_setup", runtime_setup_path)
if spec is None or spec.loader is None:
raise AssertionError(f"Failed to load runtime setup module: {runtime_setup_path}")
raise AssertionError(
f"Failed to load runtime setup module: {runtime_setup_path}"
)
module = importlib.util.module_from_spec(spec)
sys.modules[spec.name] = module
spec.loader.exec_module(module)
@@ -151,12 +153,12 @@ def test_generate_v2_all_uses_titles_for_generated_names() -> None:
def test_runtime_package_template_has_no_checked_in_binaries() -> None:
runtime_root = ROOT.parent / "python-runtime" / "src" / "codex_cli_bin"
runtime_root = ROOT.parent / "python-runtime" / "src"
assert sorted(
path.name
str(path.relative_to(runtime_root))
for path in runtime_root.rglob("*")
if path.is_file() and "__pycache__" not in path.parts
) == ["__init__.py"]
) == ["codex_app_server_bin/__init__.py", "codex_cli_bin/__init__.py"]
def test_examples_readme_matches_pinned_runtime_version() -> None:
@@ -173,15 +175,19 @@ def test_runtime_distribution_name_is_consistent() -> None:
runtime_setup = _load_runtime_setup_module()
from codex_app_server import client as client_module
assert script.RUNTIME_DISTRIBUTION_NAME == "openai-codex-cli-bin"
assert runtime_setup.PACKAGE_NAME == "openai-codex-cli-bin"
assert client_module.RUNTIME_PKG_NAME == "openai-codex-cli-bin"
assert "importlib.metadata.version('codex-cli-bin')" not in (
ROOT / "_runtime_setup.py"
).read_text()
assert script.RUNTIME_DISTRIBUTION_NAME == "openai-codex-app-server-bin"
assert runtime_setup.PACKAGE_NAME == "openai-codex-app-server-bin"
assert client_module.RUNTIME_PKG_NAME == "openai-codex-app-server-bin"
assert (
"importlib.metadata.version('codex-cli-bin')"
not in (ROOT / "_runtime_setup.py").read_text()
)
assert runtime_setup.platform_asset_name().startswith("codex-app-server-")
def test_release_metadata_retries_without_invalid_auth(monkeypatch: pytest.MonkeyPatch) -> None:
def test_release_metadata_retries_without_invalid_auth(
monkeypatch: pytest.MonkeyPatch,
) -> None:
runtime_setup = _load_runtime_setup_module()
authorizations: list[str | None] = []
@@ -252,10 +258,10 @@ def test_runtime_package_is_wheel_only_and_builds_platform_specific_wheels() ->
elif isinstance(node.value, ast.JoinedStr):
build_data_assignments[node.targets[0].slice.value] = "joined-string"
assert pyproject["project"]["name"] == "openai-codex-cli-bin"
assert pyproject["project"]["name"] == "openai-codex-app-server-bin"
assert pyproject["tool"]["hatch"]["build"]["targets"]["wheel"] == {
"packages": ["src/codex_cli_bin"],
"include": ["src/codex_cli_bin/bin/**"],
"packages": ["src/codex_app_server_bin", "src/codex_cli_bin"],
"include": ["src/codex_app_server_bin/bin/**"],
"hooks": {"custom": {}},
}
assert pyproject["tool"]["hatch"]["build"]["targets"]["sdist"] == {
@@ -281,11 +287,35 @@ def test_stage_runtime_release_copies_binary_and_sets_version(tmp_path: Path) ->
)
assert staged == tmp_path / "runtime-stage"
assert script.staged_runtime_bin_path(staged).read_text() == "fake codex\n"
assert 'name = "openai-codex-cli-bin"' in (staged / "pyproject.toml").read_text()
assert (
script.staged_runtime_bin_path(staged, fake_binary.name).read_text()
== "fake codex\n"
)
assert (
'name = "openai-codex-app-server-bin"'
in (staged / "pyproject.toml").read_text()
)
assert 'version = "1.2.3"' in (staged / "pyproject.toml").read_text()
def test_stage_runtime_release_preserves_legacy_binary_name(tmp_path: Path) -> None:
    """Staging a legacy-named binary keeps that name in the staged package."""
    script = _load_update_script_module()

    source_binary = tmp_path / script.legacy_runtime_binary_name()
    source_binary.write_text("fake legacy codex\n")

    staging_dir = tmp_path / "runtime-stage"
    staged = script.stage_python_runtime_package(staging_dir, "1.2.3", source_binary)

    assert staged == staging_dir
    staged_binary = script.staged_runtime_bin_path(staged, source_binary.name)
    assert staged_binary.read_text() == "fake legacy codex\n"
    # The default (standalone app-server) binary name must not be staged as well.
    assert not script.staged_runtime_bin_path(staged).exists()
def test_normalize_codex_version_accepts_release_tags_and_pep440_versions() -> None:
script = _load_update_script_module()
@@ -313,7 +343,10 @@ def test_stage_runtime_release_replaces_existing_staging_dir(tmp_path: Path) ->
assert staged == staging_dir
assert not old_file.exists()
assert script.staged_runtime_bin_path(staged).read_text() == "fake codex\n"
assert (
script.staged_runtime_bin_path(staged, fake_binary.name).read_text()
== "fake codex\n"
)
def test_stage_runtime_release_can_pin_wheel_platform_tag(tmp_path: Path) -> None:
@@ -338,7 +371,7 @@ def test_stage_sdk_release_injects_exact_runtime_pin(tmp_path: Path) -> None:
pyproject = (staged / "pyproject.toml").read_text()
assert 'version = "0.2.1"' in pyproject
assert '"openai-codex-cli-bin==1.2.3"' in pyproject
assert '"openai-codex-app-server-bin==1.2.3"' in pyproject
assert '"codex-cli-bin==1.2.3"' not in pyproject
assert not any((staged / "src" / "codex_app_server").glob("bin/**"))
def test_default_runtime_is_resolved_from_installed_runtime_package(
    tmp_path: Path,
) -> None:
    """With no overrides configured, the resolver returns the binary shipped
    by the installed runtime package.
    """
    from codex_app_server import client as client_module

    fake_binary = tmp_path / (
        "codex-app-server.exe" if client_module.os.name == "nt" else "codex-app-server"
    )
    fake_binary.write_text("")
    ops = client_module.AppServerBinResolverOps(
        installed_app_server_path=lambda: fake_binary,
        path_exists=lambda path: path == fake_binary,
    )
    config = client_module.AppServerConfig()
    assert config.app_server_bin is None
    assert config.codex_bin is None
    # Both the new resolver and the legacy-named alias find the packaged binary.
    assert client_module.resolve_app_server_bin(config, ops) == fake_binary
    assert client_module.resolve_codex_bin(config, ops) == fake_binary
def test_explicit_app_server_bin_override_takes_priority(tmp_path: Path) -> None:
    """An explicit `app_server_bin` override wins over the packaged runtime."""
    from codex_app_server import client as client_module

    explicit_binary = tmp_path / (
        "custom-app-server.exe"
        if client_module.os.name == "nt"
        else "custom-app-server"
    )
    explicit_binary.write_text("")
    ops = client_module.AppServerBinResolverOps(
        # The packaged runtime must never be consulted when an override is set;
        # the throwing lambda makes any lookup fail loudly.
        installed_app_server_path=lambda: (_ for _ in ()).throw(
            AssertionError("packaged runtime should not be used")
        ),
        path_exists=lambda path: path == explicit_binary,
    )
    config = client_module.AppServerConfig(app_server_bin=str(explicit_binary))
    assert client_module.resolve_app_server_bin(config, ops) == explicit_binary
    assert client_module.resolve_codex_bin(config, ops) == explicit_binary
def test_legacy_codex_bin_override_remains_supported(tmp_path: Path) -> None:
    """The legacy `codex_bin` config field still resolves to the override."""
    from codex_app_server import client as client_module

    explicit_binary = tmp_path / (
        "custom-codex.exe" if client_module.os.name == "nt" else "custom-codex"
    )
    explicit_binary.write_text("")
    ops = client_module.AppServerBinResolverOps(
        # Overrides must bypass the packaged runtime entirely.
        installed_app_server_path=lambda: (_ for _ in ()).throw(
            AssertionError("packaged runtime should not be used")
        ),
        path_exists=lambda path: path == explicit_binary,
    )
    config = client_module.AppServerConfig(codex_bin=str(explicit_binary))
    assert client_module.resolve_codex_bin(config, ops) == explicit_binary
    assert client_module.resolve_app_server_bin(config, ops) == explicit_binary
def test_conflicting_runtime_bin_overrides_fail(tmp_path: Path) -> None:
    """Setting both `app_server_bin` and `codex_bin` is ambiguous and rejected."""
    from codex_app_server import client as client_module

    explicit_binary = tmp_path / "codex-app-server"
    explicit_binary.write_text("")
    ops = client_module.AppServerBinResolverOps(
        installed_app_server_path=lambda: explicit_binary,
        path_exists=lambda path: path == explicit_binary,
    )
    config = client_module.AppServerConfig(
        app_server_bin=str(explicit_binary),
        codex_bin=str(explicit_binary),
    )
    with pytest.raises(ValueError, match="Set only one"):
        client_module.resolve_app_server_bin(config, ops)
def test_missing_runtime_package_requires_explicit_app_server_bin() -> None:
    """Without an installed runtime package or an override, resolution fails."""
    from codex_app_server import client as client_module

    ops = client_module.AppServerBinResolverOps(
        # Simulate the runtime distribution not being installed at all.
        installed_app_server_path=lambda: (_ for _ in ()).throw(
            FileNotFoundError("missing packaged runtime")
        ),
        path_exists=lambda _path: False,
    )
    with pytest.raises(FileNotFoundError, match="missing packaged runtime"):
        client_module.resolve_app_server_bin(client_module.AppServerConfig(), ops)
def test_broken_runtime_package_does_not_fall_back() -> None:
    """A broken runtime package surfaces its own error rather than silently
    falling back to some other binary.
    """
    from codex_app_server import client as client_module

    ops = client_module.AppServerBinResolverOps(
        # The package is installed but its binary is missing.
        installed_app_server_path=lambda: (_ for _ in ()).throw(
            FileNotFoundError("missing packaged binary")
        ),
        path_exists=lambda _path: False,
    )
    with pytest.raises(FileNotFoundError) as exc_info:
        client_module.resolve_app_server_bin(client_module.AppServerConfig(), ops)
    # The original error message is propagated verbatim.
    assert str(exc_info.value) == "missing packaged binary"
def test_legacy_codex_binary_launch_args_keep_config_overrides() -> None:
    """A legacy `codex` binary is launched via its `app-server` subcommand,
    with each config override passed as a `--config` flag.
    """
    from codex_app_server import client as client_module

    binary_name = "codex.exe" if client_module.os.name == "nt" else "codex"
    legacy_binary = Path(binary_name)
    config = client_module.AppServerConfig(
        codex_bin=str(legacy_binary),
        config_overrides=("model=gpt-5.4", "sandbox=workspace-write"),
    )

    expected_args = [
        str(legacy_binary),
        "--config",
        "model=gpt-5.4",
        "--config",
        "sandbox=workspace-write",
        "app-server",
        "--listen",
        "stdio://",
    ]
    assert client_module._default_launch_args(legacy_binary, config) == expected_args
def test_standalone_app_server_launch_args_use_direct_binary() -> None:
    """A standalone `codex-app-server` binary is invoked directly — no
    `app-server` subcommand, just the `--listen stdio://` transport flag.
    """
    from codex_app_server import client as client_module

    binary_name = (
        "codex-app-server.exe" if client_module.os.name == "nt" else "codex-app-server"
    )
    standalone_binary = Path(binary_name)
    config = client_module.AppServerConfig(app_server_bin=str(standalone_binary))

    launch_args = client_module._default_launch_args(standalone_binary, config)
    assert launch_args == [str(standalone_binary), "--listen", "stdio://"]
def test_standalone_app_server_rejects_legacy_config_overrides() -> None:
    """A standalone app-server binary raises when legacy `config_overrides`
    are configured alongside it.
    """
    from codex_app_server import client as client_module

    binary_name = (
        "codex-app-server.exe" if client_module.os.name == "nt" else "codex-app-server"
    )
    standalone_binary = Path(binary_name)
    config = client_module.AppServerConfig(
        app_server_bin=str(standalone_binary),
        config_overrides=("model=gpt-5.4",),
    )

    with pytest.raises(ValueError, match="config_overrides"):
        client_module._default_launch_args(standalone_binary, config)