Mirror of https://github.com/openai/codex.git, synced 2026-04-26 07:35:29 +00:00.
## TL;DR

(Work in progress, especially the examples.) Thin the Python SDK public surface so the wrapper layer returns canonical app-server generated models directly.

- Keeps `Codex` / `AsyncCodex` / `Thread` / `Turn` and input helpers, but removes alias-only type layers and custom result models.
- `metadata` now returns `InitializeResponse`, and `run()` returns the generated app-server `Turn`.
- Updates docs, examples, notebook, and tests to use canonical generated types, and regenerates `v2_all.py` against the current schema.
- Keeps the pinned runtime-package integration flow and real integration coverage.

## Validation

- `PYTHONPATH=sdk/python/src python3 -m pytest sdk/python/tests`
- `GH_TOKEN="$(gh auth token)" RUN_REAL_CODEX_TESTS=1 PYTHONPATH=sdk/python/src python3 -m pytest sdk/python/tests -rs`

---------

Co-authored-by: Codex <noreply@openai.com>
19 lines · 563 B · Python
import sys

from pathlib import Path

# Make the examples/ directory importable so the shared helper module
# (_bootstrap) can be found no matter where this script is launched from.
_EXAMPLES_ROOT = Path(__file__).resolve().parent.parent

_examples_root = str(_EXAMPLES_ROOT)
if _examples_root not in sys.path:
    sys.path.insert(0, _examples_root)
|
|
|
|
# Example-local helpers; importable thanks to the sys.path setup above.
from _bootstrap import ensure_local_sdk_src, runtime_config, server_label

# NOTE: ordering matters here — this call runs BEFORE the codex_app_server
# import below.  Presumably it puts the in-repo SDK's source tree on
# sys.path so the local package is imported instead of an installed one
# (TODO confirm against _bootstrap's implementation).
ensure_local_sdk_src()

from codex_app_server import Codex
|
|
|
|
# Connect to a Codex app server, report which server answered, then show a
# short preview of the available model ids.
with Codex(config=runtime_config()) as codex:
    print("server:", server_label(codex.metadata))

    listing = codex.models()
    entries = listing.data
    print("models.count:", len(entries))

    preview = [entry.id for entry in entries[:5]]
    print("models:", ", ".join(preview) or "[none]")