mirror of
https://github.com/openai/codex.git
synced 2026-05-14 16:22:51 +00:00
## Why

The Python SDK needs the same tight formatter/lint loop as the rest of the repo: a safe Ruff autofix pass, Ruff formatting, editor save behavior, and CI checks that catch drift. Without that loop, SDK changes can land with formatting or import ordering that differs from what reviewers and CI expect.

## What

- Add Ruff configuration to `sdk/python/pyproject.toml`, excluding generated protocol code and notebooks from the normal lint/format pass.
- Update `just fmt` so it still formats Rust and also runs Python SDK Ruff autofix and formatting.
- Add Python SDK CI steps for `ruff check` and `ruff format --check` before pytest.
- Recommend the Ruff VS Code extension and enable Python format/fix/organize-on-save so Cmd+S uses the same tooling.
- Apply the resulting Ruff formatting to SDK Python files, examples, and the checked-in generated `v2_all.py` output emitted by the pinned generator.
- Add a guard test for the `just fmt` recipe so it keeps working from both Rust and Python SDK working directories.

## Stack

1. #21891 `[1/8]` Pin Python SDK runtime dependency
2. #21893 `[2/8]` Generate Python SDK types from pinned runtime
3. #21895 `[3/8]` Run Python SDK tests in CI
4. #21896 `[4/8]` Define Python SDK public API surface
5. #21905 `[5/8]` Rename Python SDK package to `openai-codex`
6. #21910 `[6/8]` Add high-level Python SDK approval mode
7. #22014 `[7/8]` Add Python SDK app-server integration harness
8. This PR `[8/8]` Add Python SDK Ruff formatting

## Verification

- Added `test_root_fmt_recipe_formats_rust_and_python_sdk` for the shared format recipe.
- Ran `just fmt` after the recipe update.

---------

Co-authored-by: Codex <noreply@openai.com>
702 lines
24 KiB
Python
702 lines
24 KiB
Python
from __future__ import annotations
|
|
|
|
import ast
|
|
import importlib.util
|
|
import io
|
|
import json
|
|
import sys
|
|
import urllib.error
|
|
from pathlib import Path
|
|
from typing import Sequence
|
|
|
|
import pytest
|
|
import tomllib
|
|
|
|
# Root of the SDK tree: the parent of this file's directory, resolved to an
# absolute path. All repo-relative fixture paths below are anchored here.
ROOT = Path(__file__).resolve().parents[1]
|
|
|
|
|
|
def _load_update_script_module():
    """Import ``scripts/update_sdk_artifacts.py`` so tests exercise the real helpers."""
    script_path = ROOT / "scripts" / "update_sdk_artifacts.py"
    spec = importlib.util.spec_from_file_location("update_sdk_artifacts", script_path)
    if spec is None or spec.loader is None:
        raise AssertionError(f"Failed to load script module: {script_path}")
    loaded = importlib.util.module_from_spec(spec)
    # Register before exec so the module can resolve itself during import.
    sys.modules[spec.name] = loaded
    spec.loader.exec_module(loaded)
    return loaded
|
|
|
|
|
|
def _load_runtime_setup_module():
    """Import ``_runtime_setup.py`` directly, bypassing the SDK package under test."""
    setup_path = ROOT / "_runtime_setup.py"
    spec = importlib.util.spec_from_file_location("_runtime_setup", setup_path)
    if spec is None or spec.loader is None:
        raise AssertionError(f"Failed to load runtime setup module: {setup_path}")
    loaded = importlib.util.module_from_spec(spec)
    # Register before exec so the module can resolve itself during import.
    sys.modules[spec.name] = loaded
    spec.loader.exec_module(loaded)
    return loaded
|
|
|
|
|
|
def test_generation_has_single_maintenance_entrypoint_script() -> None:
    """All artifact workflows must route through one script — no side entrypoints."""
    script_names = [entry.name for entry in (ROOT / "scripts").glob("*.py")]
    assert sorted(script_names) == ["update_sdk_artifacts.py"]
|
|
|
|
|
|
def test_root_fmt_recipe_formats_rust_and_python_sdk() -> None:
    """`just fmt` must cover both the Rust tree and the Python SDK."""
    justfile_lines = (ROOT.parents[1] / "justfile").read_text().splitlines()
    fmt_index = justfile_lines.index("fmt:")
    # A recipe ends at the next non-blank line that is neither indented nor a comment.
    following_recipes = (
        position
        for position in range(fmt_index + 1, len(justfile_lines))
        if justfile_lines[position]
        and not justfile_lines[position].startswith((" ", "\t", "#"))
    )
    recipe_lines = justfile_lines[fmt_index : next(following_recipes)]

    actual = {
        "working_directory": justfile_lines[0],
        "previous_attribute": justfile_lines[fmt_index - 1],
        "commands": [entry.strip() for entry in recipe_lines[1:] if entry.strip()],
    }
    expected = {
        "working_directory": 'set working-directory := "codex-rs"',
        "previous_attribute": "# Format Rust and Python SDK code.",
        "commands": [
            "cargo fmt -- --config imports_granularity=Item 2>/dev/null",
            "uv run --project ../sdk/python --extra dev ruff check --fix --fix-only ../sdk/python",
            "uv run --project ../sdk/python --extra dev ruff format ../sdk/python",
        ],
    }

    assert actual == expected, (
        "The root `just fmt` recipe must run Rust fmt and Python SDK Ruff. "
        "Fix the `fmt` recipe in `justfile`, then run `just fmt`.\n"
        f"Expected: {json.dumps(expected, indent=2)}\n"
        f"Actual: {json.dumps(actual, indent=2)}"
    )
|
|
|
|
|
|
def test_generate_types_wires_all_generation_steps() -> None:
    """The type generation entry point must refresh every schema-derived artifact."""
    tree = ast.parse((ROOT / "scripts" / "update_sdk_artifacts.py").read_text())

    target_fn = None
    for node in tree.body:
        if isinstance(node, ast.FunctionDef) and node.name == "generate_types_from_schema_dir":
            target_fn = node
            break
    assert target_fn is not None

    # Collect direct, bare-name calls in the order they appear in the body.
    invoked = [
        statement.value.func.id
        for statement in target_fn.body
        if isinstance(statement, ast.Expr)
        and isinstance(statement.value, ast.Call)
        and isinstance(statement.value.func, ast.Name)
    ]

    assert invoked == [
        "generate_v2_all",
        "generate_notification_registry",
        "generate_public_api_flat_methods",
    ]
|
|
|
|
|
|
def _load_runtime_schema_bundle(tmp_path: Path) -> dict:
    """Generate and parse a real schema bundle from the pinned runtime package."""
    script = _load_update_script_module()
    schema_dir = script.generate_schema_from_pinned_runtime(tmp_path / "schema")
    bundle_path = script.schema_bundle_path(schema_dir)
    return json.loads(bundle_path.read_text())
|
|
|
|
|
|
def test_schema_normalization_only_flattens_string_literal_oneofs(
    tmp_path: Path,
) -> None:
    """Normalization must flatten only the enum-shaped oneOf definitions."""
    script = _load_update_script_module()
    definitions = _load_runtime_schema_bundle(tmp_path)["definitions"]

    # Pass copies so probing the helper cannot mutate the bundle we iterate.
    flattened_names = []
    for name, definition in definitions.items():
        if isinstance(definition, dict) and script._flatten_string_enum_one_of(
            definition.copy()
        ):
            flattened_names.append(name)

    assert flattened_names == [
        "MessagePhase",
        "TurnItemsView",
        "PluginAvailability",
        "AuthMode",
        "InputModality",
        "ExperimentalFeatureStage",
        "CommandExecOutputStream",
        "ProcessOutputStream",
    ]
|
|
|
|
|
|
def test_python_codegen_schema_annotation_adds_stable_variant_titles(
    tmp_path: Path,
) -> None:
    """Annotation must give generated protocol classes stable, readable names."""
    script = _load_update_script_module()
    schema = _load_runtime_schema_bundle(tmp_path)
    script._annotate_schema(schema)
    definitions = schema["definitions"]

    notification_titles = set()
    for variant in definitions["ServerNotification"]["oneOf"]:
        if isinstance(variant, dict):
            notification_titles.add(variant.get("title"))
    assert "ErrorServerNotification" in notification_titles
    assert "ThreadStartedServerNotification" in notification_titles
    # The raw wire names must not leak through as class names.
    assert "ErrorNotification" not in notification_titles
    assert "Thread/startedNotification" not in notification_titles

    assert [
        variant.get("title") for variant in definitions["AskForApproval"]["oneOf"]
    ] == ["AskForApprovalValue", "GranularAskForApproval"]

    assert [
        variant.get("title") for variant in definitions["ReasoningSummary"]["oneOf"]
    ] == ["ReasoningSummaryValue", "NoneReasoningSummary"]
|
|
|
|
|
|
def test_generate_v2_all_uses_titles_for_generated_names() -> None:
    """The v2 generator invocation must carry the flags that keep names stable."""
    source = (ROOT / "scripts" / "update_sdk_artifacts.py").read_text()
    for required_flag in (
        "--use-title-as-name",
        "--use-annotated",
        "--formatters",
        "ruff-format",
    ):
        assert required_flag in source
|
|
|
|
|
|
def test_runtime_package_template_has_no_checked_in_binaries() -> None:
    """Only the package marker is checked in; binaries arrive at build time."""
    runtime_root = ROOT.parent / "python-runtime" / "src" / "codex_cli_bin"
    tracked_files = [
        entry.name
        for entry in runtime_root.rglob("*")
        if entry.is_file() and "__pycache__" not in entry.parts
    ]
    assert sorted(tracked_files) == ["__init__.py"]
|
|
|
|
|
|
def test_examples_readme_points_to_runtime_version_source_of_truth() -> None:
    """Examples docs must point at the dependency pin, not release lore."""
    readme_text = (ROOT / "examples" / "README.md").read_text()
    expected_sentence = "The pinned runtime version comes from the SDK package dependency."
    assert expected_sentence in readme_text
|
|
|
|
|
|
def test_runtime_distribution_name_is_consistent() -> None:
    """Every layer must agree on the SDK and runtime distribution names."""
    script = _load_update_script_module()
    runtime_setup = _load_runtime_setup_module()
    from openai_codex import _version, client as client_module

    sdk_names = [
        script.SDK_DISTRIBUTION_NAME,
        runtime_setup.SDK_PACKAGE_NAME,
        _version.DISTRIBUTION_NAME,
    ]
    assert sdk_names == ["openai-codex"] * 3

    runtime_names = [
        script.RUNTIME_DISTRIBUTION_NAME,
        runtime_setup.PACKAGE_NAME,
        client_module.RUNTIME_PKG_NAME,
    ]
    assert runtime_names == ["openai-codex-cli-bin"] * 3

    # The legacy, unprefixed distribution name must not be looked up anywhere.
    legacy_lookup = "importlib.metadata.version('codex-cli-bin')"
    assert legacy_lookup not in (ROOT / "_runtime_setup.py").read_text()
|
|
|
|
|
|
def test_source_sdk_package_pins_published_runtime() -> None:
    """Package metadata must pin the runtime wheel that ships the schemas."""
    script = _load_update_script_module()
    project = tomllib.loads((ROOT / "pyproject.toml").read_text())["project"]

    actual = {
        "sdk_version": project["version"],
        "runtime_pin": script.pinned_runtime_version(),
        "dependencies": project["dependencies"],
    }
    assert actual == {
        "sdk_version": "0.131.0a4",
        "runtime_pin": "0.131.0a4",
        "dependencies": [
            "pydantic>=2.12",
            "openai-codex-cli-bin==0.131.0a4",
        ],
    }
|
|
|
|
|
|
def test_release_metadata_retries_without_invalid_auth(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """A rejected token triggers exactly one anonymous retry of the metadata fetch."""
    runtime_setup = _load_runtime_setup_module()
    seen_auth_headers: list[str | None] = []

    def fake_urlopen(request):
        header = request.headers.get("Authorization")
        seen_auth_headers.append(header)
        if header is None:
            # Anonymous request: serve the metadata successfully.
            return io.StringIO('{"assets": []}')
        raise urllib.error.HTTPError(
            request.full_url,
            401,
            "Unauthorized",
            hdrs=None,
            fp=None,
        )

    monkeypatch.setenv("GH_TOKEN", "invalid-token")
    monkeypatch.setattr(runtime_setup.urllib.request, "urlopen", fake_urlopen)

    assert runtime_setup._release_metadata("1.2.3") == {"assets": []}
    assert seen_auth_headers == ["Bearer invalid-token", None]
|
|
|
|
|
|
def test_runtime_setup_uses_pep440_package_version_and_codex_release_tags() -> None:
    """PEP 440 pins everywhere; release-tag form only when fetching releases."""
    runtime_setup = _load_runtime_setup_module()
    project = tomllib.loads((ROOT / "pyproject.toml").read_text())["project"]
    pinned_version = project["version"]

    assert runtime_setup.PACKAGE_NAME == "openai-codex-cli-bin"
    assert runtime_setup.pinned_runtime_version() == pinned_version
    assert (
        f"{runtime_setup.PACKAGE_NAME}=={pinned_version}" in project["dependencies"]
    )
    # Round-trip between GitHub release tags and PEP 440 versions.
    assert runtime_setup._normalized_package_version("rust-v0.116.0-alpha.1") == "0.116.0a1"
    assert runtime_setup._release_tag("0.116.0a1") == "rust-v0.116.0-alpha.1"
|
|
|
|
|
|
def test_runtime_package_is_wheel_only_and_builds_platform_specific_wheels() -> None:
    """The runtime build must ship platform wheels and guard the sdist target.

    Inspects the ``hatch_build.py`` hook via AST rather than importing it,
    so the check works without executing build-backend code.
    """
    pyproject = tomllib.loads((ROOT.parent / "python-runtime" / "pyproject.toml").read_text())
    hook_source = (ROOT.parent / "python-runtime" / "hatch_build.py").read_text()
    hook_tree = ast.parse(hook_source)
    # Locate the hook's `initialize` method anywhere in the module.
    initialize_fn = next(
        node
        for node in ast.walk(hook_tree)
        if isinstance(node, ast.FunctionDef) and node.name == "initialize"
    )

    # Find a top-level `if self.target_name == "sdist":` guard in `initialize`.
    # Each isinstance check narrows before the attribute it protects is read.
    sdist_guard = next(
        (
            node
            for node in initialize_fn.body
            if isinstance(node, ast.If)
            and isinstance(node.test, ast.Compare)
            and isinstance(node.test.left, ast.Attribute)
            and isinstance(node.test.left.value, ast.Name)
            and node.test.left.value.id == "self"
            and node.test.left.attr == "target_name"
            and len(node.test.ops) == 1
            and isinstance(node.test.ops[0], ast.Eq)
            and len(node.test.comparators) == 1
            and isinstance(node.test.comparators[0], ast.Constant)
            and node.test.comparators[0].value == "sdist"
        ),
        None,
    )
    # Collect `build_data["<key>"] = <value>` assignments from `initialize`.
    # Constants keep their literal value; f-strings are recorded as the
    # sentinel "joined-string" since their runtime value is not statically known.
    build_data_assignments = {}
    for node in initialize_fn.body:
        if (
            not isinstance(node, ast.Assign)
            or len(node.targets) != 1
            or not isinstance(node.targets[0], ast.Subscript)
            or not isinstance(node.targets[0].value, ast.Name)
            or node.targets[0].value.id != "build_data"
            or not isinstance(node.targets[0].slice, ast.Constant)
            or not isinstance(node.targets[0].slice.value, str)
        ):
            continue
        if isinstance(node.value, ast.Constant):
            build_data_assignments[node.targets[0].slice.value] = node.value.value
        elif isinstance(node.value, ast.JoinedStr):
            build_data_assignments[node.targets[0].slice.value] = "joined-string"

    assert pyproject["project"]["name"] == "openai-codex-cli-bin"
    # Wheel target must bundle the bin payload; sdist only runs the hook.
    assert pyproject["tool"]["hatch"]["build"]["targets"]["wheel"] == {
        "packages": ["src/codex_cli_bin"],
        "include": ["src/codex_cli_bin/bin/**"],
        "hooks": {"custom": {}},
    }
    assert pyproject["tool"]["hatch"]["build"]["targets"]["sdist"] == {
        "hooks": {"custom": {}},
    }
    assert sdist_guard is not None
    # Non-pure wheel with an explicitly computed (f-string) platform tag.
    assert build_data_assignments == {
        "pure_python": False,
        "infer_tag": False,
        "tag": "joined-string",
    }
|
|
|
|
|
|
def test_stage_runtime_release_copies_binary_and_sets_version(tmp_path: Path) -> None:
    """Staging copies the codex binary and stamps the requested version."""
    script = _load_update_script_module()
    source_binary = tmp_path / script.runtime_binary_name()
    source_binary.write_text("fake codex\n")

    staged = script.stage_python_runtime_package(
        tmp_path / "runtime-stage",
        "1.2.3",
        source_binary,
    )

    staged_pyproject = (staged / "pyproject.toml").read_text()
    assert staged == tmp_path / "runtime-stage"
    assert script.staged_runtime_bin_path(staged).read_text() == "fake codex\n"
    assert 'name = "openai-codex-cli-bin"' in staged_pyproject
    assert 'version = "1.2.3"' in staged_pyproject
|
|
|
|
|
|
def test_normalize_codex_version_accepts_release_tags_and_pep440_versions() -> None:
    """Release tags and already-normalized versions both map to PEP 440 form."""
    script = _load_update_script_module()

    expected_by_input = {
        "rust-v0.116.0-alpha.1": "0.116.0a1",
        "v0.116.0-beta.2": "0.116.0b2",
        "0.116.0rc3": "0.116.0rc3",
        "0.116.0": "0.116.0",
    }
    for raw_version, normalized in expected_by_input.items():
        assert script.normalize_codex_version(raw_version) == normalized
|
|
|
|
|
|
def test_stage_runtime_release_replaces_existing_staging_dir(tmp_path: Path) -> None:
    """Restaging must wipe whatever a previous staging run left behind."""
    script = _load_update_script_module()
    staging_dir = tmp_path / "runtime-stage"
    stale_file = staging_dir / "stale.txt"
    stale_file.parent.mkdir(parents=True)
    stale_file.write_text("stale")

    source_binary = tmp_path / script.runtime_binary_name()
    source_binary.write_text("fake codex\n")

    staged = script.stage_python_runtime_package(
        staging_dir,
        "1.2.3",
        source_binary,
    )

    assert staged == staging_dir
    assert not stale_file.exists()
    assert script.staged_runtime_bin_path(staged).read_text() == "fake codex\n"
|
|
|
|
|
|
def test_stage_runtime_release_can_pin_wheel_platform_tag(tmp_path: Path) -> None:
    """An explicit platform tag must land in the staged pyproject."""
    script = _load_update_script_module()
    source_binary = tmp_path / script.runtime_binary_name()
    source_binary.write_text("fake codex\n")

    staged = script.stage_python_runtime_package(
        tmp_path / "runtime-stage",
        "0.116.0a1",
        source_binary,
        platform_tag="musllinux_1_1_x86_64",
    )

    staged_pyproject = (staged / "pyproject.toml").read_text()
    assert 'platform-tag = "musllinux_1_1_x86_64"' in staged_pyproject
|
|
|
|
|
|
def test_stage_runtime_release_copies_resource_binaries(tmp_path: Path) -> None:
    """Every helper binary must be copied into the wheel bin directory."""
    script = _load_update_script_module()
    expected_bin_contents = {
        script.runtime_binary_name(): "fake codex\n",
        "helper": "fake helper\n",
        "fallback-helper": "fake fallback\n",
    }
    for file_name, text in expected_bin_contents.items():
        (tmp_path / file_name).write_text(text)

    staged = script.stage_python_runtime_package(
        tmp_path / "runtime-stage",
        "1.2.3",
        tmp_path / script.runtime_binary_name(),
        resource_binaries=(tmp_path / "helper", tmp_path / "fallback-helper"),
    )

    bin_dir = staged / "src" / "codex_cli_bin" / "bin"
    staged_bin_contents = {
        entry.relative_to(bin_dir).as_posix(): entry.read_text()
        for entry in bin_dir.iterdir()
    }
    assert staged_bin_contents == expected_bin_contents
|
|
|
|
|
|
def test_runtime_resource_binaries_are_included_by_wheel_config(
    tmp_path: Path,
) -> None:
    """The runtime wheel config must include helper binaries beside Codex."""
    script = _load_update_script_module()
    source_binary = tmp_path / script.runtime_binary_name()
    helper_binary = tmp_path / "helper"
    source_binary.write_text("fake codex\n")
    helper_binary.write_text("fake helper\n")

    staged = script.stage_python_runtime_package(
        tmp_path / "runtime-stage",
        "1.2.3",
        source_binary,
        resource_binaries=(helper_binary,),
    )

    staged_pyproject = tomllib.loads((staged / "pyproject.toml").read_text())
    wheel_target = staged_pyproject["tool"]["hatch"]["build"]["targets"]["wheel"]
    staged_helper = staged / "src" / "codex_cli_bin" / "bin" / "helper"
    # The glob include must cover the copied helper's location.
    assert wheel_target["include"] == ["src/codex_cli_bin/bin/**"]
    assert staged_helper.read_text() == "fake helper\n"
|
|
|
|
|
|
def test_stage_sdk_release_injects_exact_runtime_pin(tmp_path: Path) -> None:
    """Staging the SDK rewrites metadata to the exact runtime pin only."""
    script = _load_update_script_module()
    staged = script.stage_python_sdk_package(
        tmp_path / "sdk-stage",
        "rust-v0.116.0-alpha.1",
    )

    staged_pyproject = (staged / "pyproject.toml").read_text()
    package_src = staged / "src" / "openai_codex"
    assert 'name = "openai-codex"' in staged_pyproject
    assert 'version = "0.116.0a1"' in staged_pyproject
    assert '"openai-codex-cli-bin==0.116.0a1"' in staged_pyproject
    # The version must not be hard-coded into source files...
    assert '__version__ = "0.116.0a1"' not in (package_src / "__init__.py").read_text()
    assert (
        'client_version: str = "0.116.0a1"'
        not in (package_src / "client.py").read_text()
    )
    # ...and no binaries may sneak into the SDK wheel.
    assert not any(package_src.glob("bin/**"))
|
|
|
|
|
|
def test_stage_sdk_release_replaces_existing_staging_dir(tmp_path: Path) -> None:
    """SDK restaging must drop files left behind by an earlier run."""
    script = _load_update_script_module()
    staging_dir = tmp_path / "sdk-stage"
    stale_file = staging_dir / "stale.txt"
    stale_file.parent.mkdir(parents=True)
    stale_file.write_text("stale")

    staged = script.stage_python_sdk_package(staging_dir, "0.116.0a1")

    assert staged == staging_dir
    assert not stale_file.exists()
|
|
|
|
|
|
def test_staged_sdk_and_runtime_versions_match(tmp_path: Path) -> None:
    """SDK and runtime staged from one release tag must agree on the version."""
    script = _load_update_script_module()
    source_binary = tmp_path / script.runtime_binary_name()
    source_binary.write_text("fake codex\n")

    sdk_stage = script.stage_python_sdk_package(
        tmp_path / "sdk-stage",
        "rust-v0.116.0-alpha.1",
    )
    runtime_stage = script.stage_python_runtime_package(
        tmp_path / "runtime-stage",
        "rust-v0.116.0-alpha.1",
        source_binary,
    )

    sdk_project = tomllib.loads((sdk_stage / "pyproject.toml").read_text())["project"]
    runtime_project = tomllib.loads((runtime_stage / "pyproject.toml").read_text())["project"]

    assert sdk_project["version"] == runtime_project["version"]
    assert sdk_project["dependencies"] == [
        "pydantic>=2.12",
        "openai-codex-cli-bin==0.116.0a1",
    ]
|
|
|
|
|
|
def test_stage_sdk_runs_type_generation_before_staging(tmp_path: Path) -> None:
    """stage-sdk must regenerate types first, then stage with a PEP 440 version."""
    script = _load_update_script_module()
    recorded: list[str] = []
    args = script.parse_args(
        [
            "stage-sdk",
            str(tmp_path / "sdk-stage"),
            "--codex-version",
            "rust-v0.116.0-alpha.1",
        ]
    )

    def record_generate_types() -> None:
        recorded.append("generate_types")

    def record_stage_sdk(_staging_dir: Path, codex_version: str) -> Path:
        recorded.append(f"stage_sdk:{codex_version}")
        return tmp_path / "sdk-stage"

    def forbid_stage_runtime(
        _staging_dir: Path,
        _runtime_version: str,
        _runtime_binary: Path,
        _platform_tag: str | None,
        _resource_binaries: Sequence[Path],
    ) -> Path:
        raise AssertionError("runtime staging should not run for stage-sdk")

    def fixed_sdk_version() -> str:
        return "0.116.0a1"

    ops = script.CliOps(
        generate_types=record_generate_types,
        stage_python_sdk_package=record_stage_sdk,
        stage_python_runtime_package=forbid_stage_runtime,
        current_sdk_version=fixed_sdk_version,
    )

    script.run_command(args, ops)

    # Types regenerate first; staging then receives the normalized version.
    assert recorded == ["generate_types", "stage_sdk:0.116.0a1"]
|
|
|
|
|
|
def test_stage_sdk_rejects_mismatched_legacy_versions(tmp_path: Path) -> None:
    """Explicit version flags must agree or the command refuses to stage."""
    script = _load_update_script_module()
    cli_args = [
        "stage-sdk",
        str(tmp_path / "sdk-stage"),
        "--codex-version",
        "0.116.0a1",
        "--runtime-version",
        "0.116.0a1",
        "--sdk-version",
        "0.115.0",  # deliberately out of step with the other two
    ]
    args = script.parse_args(cli_args)

    with pytest.raises(RuntimeError, match="versions must match"):
        script.run_command(args, script.default_cli_ops())
|
|
|
|
|
|
def test_stage_runtime_stages_binary_without_type_generation(tmp_path: Path) -> None:
    """stage-runtime should stage the binary and never regenerate types.

    Exercises the real arg parsing, then substitutes fakes for the staging
    operations so the test can observe exactly which ones `run_command` calls
    and with which forwarded arguments.
    """
    script = _load_update_script_module()
    fake_binary = tmp_path / script.runtime_binary_name()
    helper = tmp_path / "helper"
    fallback = tmp_path / "fallback-helper"
    fake_binary.write_text("fake codex\n")
    helper.write_text("fake helper\n")
    fallback.write_text("fake fallback\n")
    calls: list[str] = []
    args = script.parse_args(
        [
            "stage-runtime",
            str(tmp_path / "runtime-stage"),
            str(fake_binary),
            "--codex-version",
            "rust-v0.116.0-alpha.1",
            "--platform-tag",
            "musllinux_1_1_x86_64",
            "--resource-binary",
            str(helper),
            "--resource-binary",
            str(fallback),
        ]
    )

    def fake_generate_types() -> None:
        # Recorded so the final assertion can prove this never ran.
        calls.append("generate_types")

    def fake_stage_sdk_package(_staging_dir: Path, _codex_version: str) -> Path:
        raise AssertionError("sdk staging should not run for stage-runtime")

    def fake_stage_runtime_package(
        _staging_dir: Path,
        codex_version: str,
        _runtime_binary: Path,
        platform_tag: str | None,
        resource_binaries: Sequence[Path],
    ) -> Path:
        # Fold the forwarded arguments into one comparable string.
        calls.append(
            f"stage_runtime:{codex_version}:{platform_tag}:"
            f"{','.join(path.name for path in resource_binaries)}"
        )
        return tmp_path / "runtime-stage"

    def fake_current_sdk_version() -> str:
        return "0.116.0a1"

    ops = script.CliOps(
        generate_types=fake_generate_types,
        stage_python_sdk_package=fake_stage_sdk_package,
        stage_python_runtime_package=fake_stage_runtime_package,
        current_sdk_version=fake_current_sdk_version,
    )

    script.run_command(args, ops)

    # Only runtime staging ran; the release tag was normalized to PEP 440 and
    # both resource binaries were forwarded in order.
    assert calls == ["stage_runtime:0.116.0a1:musllinux_1_1_x86_64:helper,fallback-helper"]
|
|
|
|
|
|
def test_default_runtime_is_resolved_from_installed_runtime_package(
    tmp_path: Path,
) -> None:
    """Without an override, the packaged runtime binary is resolved."""
    from openai_codex import client as client_module

    binary_name = "codex.exe" if client_module.os.name == "nt" else "codex"
    packaged_binary = tmp_path / binary_name
    packaged_binary.write_text("")
    ops = client_module.CodexBinResolverOps(
        installed_codex_path=lambda: packaged_binary,
        path_exists=lambda candidate: candidate == packaged_binary,
    )

    config = client_module.AppServerConfig()
    assert config.codex_bin is None
    assert client_module.resolve_codex_bin(config, ops) == packaged_binary
|
|
|
|
|
|
def test_explicit_codex_bin_override_takes_priority(tmp_path: Path) -> None:
    """A caller-supplied codex binary wins over the packaged runtime."""
    from openai_codex import client as client_module

    binary_name = (
        "custom-codex.exe" if client_module.os.name == "nt" else "custom-codex"
    )
    override_binary = tmp_path / binary_name
    override_binary.write_text("")

    def unexpected_installed_path():
        raise AssertionError("packaged runtime should not be used")

    ops = client_module.CodexBinResolverOps(
        installed_codex_path=unexpected_installed_path,
        path_exists=lambda candidate: candidate == override_binary,
    )

    config = client_module.AppServerConfig(codex_bin=str(override_binary))
    assert client_module.resolve_codex_bin(config, ops) == override_binary
|
|
|
|
|
|
def test_missing_runtime_package_requires_explicit_codex_bin() -> None:
    """If the runtime wheel is absent, resolution surfaces the lookup failure."""
    from openai_codex import client as client_module

    def missing_installed_path():
        raise FileNotFoundError("missing packaged runtime")

    ops = client_module.CodexBinResolverOps(
        installed_codex_path=missing_installed_path,
        path_exists=lambda _candidate: False,
    )

    with pytest.raises(FileNotFoundError, match="missing packaged runtime"):
        client_module.resolve_codex_bin(client_module.AppServerConfig(), ops)
|
|
|
|
|
|
def test_broken_runtime_package_does_not_fall_back() -> None:
    """A broken runtime package propagates its error rather than falling back."""
    from openai_codex import client as client_module

    def broken_installed_path():
        raise FileNotFoundError("missing packaged binary")

    ops = client_module.CodexBinResolverOps(
        installed_codex_path=broken_installed_path,
        path_exists=lambda _candidate: False,
    )

    with pytest.raises(FileNotFoundError) as exc_info:
        client_module.resolve_codex_bin(client_module.AppServerConfig(), ops)

    # The original error message reaches the caller unmodified.
    assert str(exc_info.value) == "missing packaged binary"
|